diff --git a/.venv/bin/fio b/.venv/bin/fio new file mode 100755 index 00000000..e1ee3692 --- /dev/null +++ b/.venv/bin/fio @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/dadams/CSU Fullerton Dropbox/David Adams/Research Projects/California Equity/california_equity_git/.venv/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from fiona.fio.main import main_group +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main_group()) diff --git a/.venv/include/site/python3.12/greenlet/greenlet.h b/.venv/include/site/python3.12/greenlet/greenlet.h new file mode 100644 index 00000000..d02a16e4 --- /dev/null +++ b/.venv/include/site/python3.12/greenlet/greenlet.h @@ -0,0 +1,164 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ + +/* Greenlet object interface */ + +#ifndef Py_GREENLETOBJECT_H +#define Py_GREENLETOBJECT_H + + +#include <Python.h> + +#ifdef __cplusplus +extern "C" { +#endif + +/* This is deprecated and undocumented. It does not change. */ +#define GREENLET_VERSION "1.0.0" + +#ifndef GREENLET_MODULE +#define implementation_ptr_t void* +#endif + +typedef struct _greenlet { + PyObject_HEAD + PyObject* weakreflist; + PyObject* dict; + implementation_ptr_t pimpl; +} PyGreenlet; + +#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type)) + + +/* C API functions */ + +/* Total number of symbols that are exported */ +#define PyGreenlet_API_pointers 12 + +#define PyGreenlet_Type_NUM 0 +#define PyExc_GreenletError_NUM 1 +#define PyExc_GreenletExit_NUM 2 + +#define PyGreenlet_New_NUM 3 +#define PyGreenlet_GetCurrent_NUM 4 +#define PyGreenlet_Throw_NUM 5 +#define PyGreenlet_Switch_NUM 6 +#define PyGreenlet_SetParent_NUM 7 + +#define PyGreenlet_MAIN_NUM 8 +#define PyGreenlet_STARTED_NUM 9 +#define PyGreenlet_ACTIVE_NUM 10 +#define PyGreenlet_GET_PARENT_NUM 11 + +#ifndef GREENLET_MODULE +/* This section is used by modules that use the greenlet C API */ +static void** _PyGreenlet_API = NULL; + +# define PyGreenlet_Type \ + (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) + +# define PyExc_GreenletError \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) + +# define PyExc_GreenletExit \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) + +/* + * PyGreenlet_New(PyObject *args) + * + * greenlet.greenlet(run, parent=None) + */ +# define PyGreenlet_New \ + (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ + _PyGreenlet_API[PyGreenlet_New_NUM]) + +/* + * PyGreenlet_GetCurrent(void) + * + * greenlet.getcurrent() + */ +# define PyGreenlet_GetCurrent \ + (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) + +/* + * PyGreenlet_Throw( + * PyGreenlet *greenlet, + * PyObject *typ, + * PyObject *val, + * PyObject *tb) + * + * g.throw(...)
+ */ +# define PyGreenlet_Throw \ + (*(PyObject * (*)(PyGreenlet * self, \ + PyObject * typ, \ + PyObject * val, \ + PyObject * tb)) \ + _PyGreenlet_API[PyGreenlet_Throw_NUM]) + +/* + * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) + * + * g.switch(*args, **kwargs) + */ +# define PyGreenlet_Switch \ + (*(PyObject * \ + (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ + _PyGreenlet_API[PyGreenlet_Switch_NUM]) + +/* + * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) + * + * g.parent = new_parent + */ +# define PyGreenlet_SetParent \ + (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ + _PyGreenlet_API[PyGreenlet_SetParent_NUM]) + +/* + * PyGreenlet_GetParent(PyObject* greenlet) + * + * return greenlet.parent; + * + * This could return NULL even if there is no exception active. + * If it does not return NULL, you are responsible for decrementing the + * reference count. + */ +# define PyGreenlet_GetParent \ + (*(PyGreenlet* (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM]) + +/* + * deprecated, undocumented alias. + */ +# define PyGreenlet_GET_PARENT PyGreenlet_GetParent + +# define PyGreenlet_MAIN \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_MAIN_NUM]) + +# define PyGreenlet_STARTED \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_STARTED_NUM]) + +# define PyGreenlet_ACTIVE \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_ACTIVE_NUM]) + + + + +/* Macro that imports greenlet and initializes C API */ +/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we + keep the older definition to be sure older code that might have a copy of + the header still works. */ +# define PyGreenlet_Import() \ + { \ + _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ + } + +#endif /* GREENLET_MODULE */ + +#ifdef __cplusplus +} +#endif +#endif /* !Py_GREENLETOBJECT_H */ diff --git a/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/LICENSE b/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/LICENSE new file mode 100644 index 00000000..967cdc5d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright 2005-2024 SQLAlchemy authors and contributors <see AUTHORS file>. + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/METADATA b/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/METADATA new file mode 100644 index 00000000..20b2b3ba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/METADATA @@ -0,0 +1,242 @@ +Metadata-Version: 2.1 +Name: SQLAlchemy +Version: 2.0.35 +Summary: Database Abstraction Library +Home-page: https://www.sqlalchemy.org +Author: Mike Bayer +Author-email: mike_mp@zzzcomputing.com +License: MIT +Project-URL: Documentation, https://docs.sqlalchemy.org +Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/ +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: typing-extensions >=4.6.0 +Requires-Dist: greenlet !=0.4.17 ; python_version < "3.13" and (platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32")))))) +Requires-Dist: importlib-metadata ; python_version < "3.8" +Provides-Extra: aiomysql +Requires-Dist: greenlet !=0.4.17 ; extra == 'aiomysql' +Requires-Dist: aiomysql >=0.2.0 ; extra == 'aiomysql' +Provides-Extra: aioodbc +Requires-Dist: greenlet !=0.4.17 ; extra == 'aioodbc' +Requires-Dist: aioodbc ; extra == 'aioodbc' +Provides-Extra: aiosqlite +Requires-Dist: greenlet !=0.4.17 ; extra == 'aiosqlite' +Requires-Dist: aiosqlite ; extra == 'aiosqlite' +Requires-Dist: typing-extensions !=3.10.0.1 ; extra == 'aiosqlite' +Provides-Extra: asyncio +Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncio' +Provides-Extra: asyncmy +Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncmy' +Requires-Dist: asyncmy !=0.2.4,!=0.2.6,>=0.2.3 ; extra == 'asyncmy' +Provides-Extra: mariadb_connector +Requires-Dist: mariadb !=1.1.2,!=1.1.5,>=1.0.1 ; extra == 'mariadb_connector' +Provides-Extra: mssql +Requires-Dist: pyodbc ; extra == 'mssql' +Provides-Extra: mssql_pymssql +Requires-Dist: pymssql ; extra == 'mssql_pymssql' +Provides-Extra: mssql_pyodbc +Requires-Dist: pyodbc ; extra == 'mssql_pyodbc' +Provides-Extra: mypy +Requires-Dist: mypy >=0.910 ; extra == 'mypy' +Provides-Extra: mysql +Requires-Dist: mysqlclient >=1.4.0 ; extra == 'mysql' +Provides-Extra: mysql_connector +Requires-Dist: mysql-connector-python ; extra == 'mysql_connector' +Provides-Extra: oracle +Requires-Dist: cx-oracle >=8 ; extra == 'oracle' 
+Provides-Extra: oracle_oracledb +Requires-Dist: oracledb >=1.0.1 ; extra == 'oracle_oracledb' +Provides-Extra: postgresql +Requires-Dist: psycopg2 >=2.7 ; extra == 'postgresql' +Provides-Extra: postgresql_asyncpg +Requires-Dist: greenlet !=0.4.17 ; extra == 'postgresql_asyncpg' +Requires-Dist: asyncpg ; extra == 'postgresql_asyncpg' +Provides-Extra: postgresql_pg8000 +Requires-Dist: pg8000 >=1.29.1 ; extra == 'postgresql_pg8000' +Provides-Extra: postgresql_psycopg +Requires-Dist: psycopg >=3.0.7 ; extra == 'postgresql_psycopg' +Provides-Extra: postgresql_psycopg2binary +Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary' +Provides-Extra: postgresql_psycopg2cffi +Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi' +Provides-Extra: postgresql_psycopgbinary +Requires-Dist: psycopg[binary] >=3.0.7 ; extra == 'postgresql_psycopgbinary' +Provides-Extra: pymysql +Requires-Dist: pymysql ; extra == 'pymysql' +Provides-Extra: sqlcipher +Requires-Dist: sqlcipher3-binary ; extra == 'sqlcipher' + +SQLAlchemy +========== + +|PyPI| |Python| |Downloads| + +.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy + :target: https://pypi.org/project/sqlalchemy + :alt: PyPI + +.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy + :target: https://pypi.org/project/sqlalchemy + :alt: PyPI - Python Version + +.. |Downloads| image:: https://static.pepy.tech/badge/sqlalchemy/month + :target: https://pepy.tech/project/sqlalchemy + :alt: PyPI - Downloads + + +The Python SQL Toolkit and Object Relational Mapper + +Introduction +------------- + +SQLAlchemy is the Python SQL toolkit and Object Relational Mapper +that gives application developers the full power and +flexibility of SQL. SQLAlchemy provides a full suite +of well known enterprise-level persistence patterns, +designed for efficient and high-performing database +access, adapted into a simple and Pythonic domain +language. + +Major SQLAlchemy features include: + +* An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. +* A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. + Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. +* A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. +* A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. +* All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. 
+* Database introspection and generation. Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. + +SQLAlchemy's philosophy: + +* SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. + SQLAlchemy aims to accommodate both of these + principles. +* An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. +* The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. +* With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation is used, what + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer-initiated + decision. +* Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. +* Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. +* Never render a literal value in a SQL statement. + Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. + +Documentation +------------- + +Latest documentation is at: + +https://www.sqlalchemy.org/docs/ + +Installation / Requirements +--------------------------- + +Full documentation for installation is at +`Installation `_. + +Getting Help / Development / Bug reporting +------------------------------------------ + +Please refer to the `SQLAlchemy Community Guide `_. + +Code of Conduct +--------------- + +Above all, SQLAlchemy places great emphasis on polite, thoughtful, and +constructive communication between users and developers. +Please see our current Code of Conduct at +`Code of Conduct `_. + +License +------- + +SQLAlchemy is distributed under the `MIT license +`_. 
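The README above describes the Core expression language and its bound-parameter discipline; a minimal sketch against the public SQLAlchemy 2.0 API makes the point concrete. The in-memory SQLite URL and the users table are illustrative assumptions, not code from this repository.

# Hedged sketch: SQLAlchemy Core only, no ORM; engine URL and table
# definition are made up for illustration.
from sqlalchemy import (
    Column, Integer, MetaData, String, Table, create_engine, insert, select,
)

engine = create_engine("sqlite+pysqlite:///:memory:")
metadata = MetaData()
users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)
metadata.create_all(engine)

with engine.begin() as conn:
    # The value travels as a bound parameter (the rendered SQL contains a
    # placeholder such as ?), never as a literal in the SQL string.
    conn.execute(insert(users).values(name="spongebob"))
    row = conn.execute(select(users).where(users.c.name == "spongebob")).one()
    print(row.id, row.name)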
+ diff --git a/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/RECORD b/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/RECORD new file mode 100644 index 00000000..8840a000 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/RECORD @@ -0,0 +1,530 @@ +SQLAlchemy-2.0.35.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +SQLAlchemy-2.0.35.dist-info/LICENSE,sha256=PA9Zq4h9BB3mpOUv_j6e212VIt6Qn66abNettue-MpM,1100 +SQLAlchemy-2.0.35.dist-info/METADATA,sha256=nz-ukujebAhwlZl0txMOxlkTwDyPwTn3tWuKTcw8OMU,9632 +SQLAlchemy-2.0.35.dist-info/RECORD,, +SQLAlchemy-2.0.35.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +SQLAlchemy-2.0.35.dist-info/WHEEL,sha256=7B4nnId14TToQHuAKpxbDLCJbNciqBsV-mvXE2hVLJc,151 +SQLAlchemy-2.0.35.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11 +sqlalchemy/__init__.py,sha256=xHEMNoAWZaZfX2Zdm2gnVT8EWjaBfg6HpzG4ncSmPrk,13033 +sqlalchemy/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/__pycache__/events.cpython-312.pyc,, +sqlalchemy/__pycache__/exc.cpython-312.pyc,, +sqlalchemy/__pycache__/inspection.cpython-312.pyc,, +sqlalchemy/__pycache__/log.cpython-312.pyc,, +sqlalchemy/__pycache__/schema.cpython-312.pyc,, +sqlalchemy/__pycache__/types.cpython-312.pyc,, +sqlalchemy/connectors/__init__.py,sha256=PzXPqZqi3BzEnrs1eW0DcsR4lyknAzhhN9rWcQ97hb4,476 +sqlalchemy/connectors/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/connectors/__pycache__/aioodbc.cpython-312.pyc,, +sqlalchemy/connectors/__pycache__/asyncio.cpython-312.pyc,, +sqlalchemy/connectors/__pycache__/pyodbc.cpython-312.pyc,, +sqlalchemy/connectors/aioodbc.py,sha256=GSTiNMO9h0qjPxgqaxDwWZ8HvhWMFNVR6MJQnN1oc40,5288 +sqlalchemy/connectors/asyncio.py,sha256=Hq2bkXmG6-KO_RfCrwMqx4oGH-uH1Z1WWKqPWNjz8p4,6138 +sqlalchemy/connectors/pyodbc.py,sha256=t7AjyxIOnaWg3CrlUEpBs4Y5l0HFdNt3P_cSSKhbi0Y,8501 +sqlalchemy/cyextension/__init__.py,sha256=GzhhN8cjMnDTE0qerlUlpbrNmFPHQWCZ4Gk74OAxl04,244 +sqlalchemy/cyextension/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/cyextension/collections.cpython-312-x86_64-linux-gnu.so,sha256=wiVAQHnu9pZiSNMZmfxQQs6cS19-erq0DXux92G4VSo,1932256 +sqlalchemy/cyextension/collections.pyx,sha256=L7DZ3DGKpgw2MT2ZZRRxCnrcyE5pU1NAFowWgAzQPEc,12571 +sqlalchemy/cyextension/immutabledict.cpython-312-x86_64-linux-gnu.so,sha256=rl6xddEXaRQGh9OlPP2LDlAmSP_8OrLrVv4gJbcPiCc,805632 +sqlalchemy/cyextension/immutabledict.pxd,sha256=3x3-rXG5eRQ7bBnktZ-OJ9-6ft8zToPmTDOd92iXpB0,291 +sqlalchemy/cyextension/immutabledict.pyx,sha256=KfDTYbTfebstE8xuqAtuXsHNAK0_b5q_ymUiinUe_xs,3535 +sqlalchemy/cyextension/processors.cpython-312-x86_64-linux-gnu.so,sha256=7JFk5KtZ2DQzB4TKTz7-Zb7EWTUHOVTw7-sfKHQm4MY,530680 +sqlalchemy/cyextension/processors.pyx,sha256=R1rHsGLEaGeBq5VeCydjClzYlivERIJ9B-XLOJlf2MQ,1792 +sqlalchemy/cyextension/resultproxy.cpython-312-x86_64-linux-gnu.so,sha256=05hidqye-dCbUfWu9RYtdtftx0Fo9tTsBaO6Tp5sSQg,621328 +sqlalchemy/cyextension/resultproxy.pyx,sha256=eWLdyBXiBy_CLQrF5ScfWJm7X0NeelscSXedtj1zv9Q,2725 +sqlalchemy/cyextension/util.cpython-312-x86_64-linux-gnu.so,sha256=dfsFk5a4zF5Z9Afrxu6JB8GBoRcfVSp9KuSwFIgtmlU,950928 +sqlalchemy/cyextension/util.pyx,sha256=B85orxa9LddLuQEaDoVSq1XmAXIbLKxrxpvuB8ogV_o,2530 +sqlalchemy/dialects/__init__.py,sha256=Kos9Gf5JZg1Vg6GWaCqEbD6e0r1jCwCmcnJIfcxDdcY,1770 +sqlalchemy/dialects/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/dialects/__pycache__/_typing.cpython-312.pyc,, 
+sqlalchemy/dialects/_typing.py,sha256=hyv0nKucX2gI8ispB1IsvaUgrEPn9zEcq9hS7kfstEw,888 +sqlalchemy/dialects/mssql/__init__.py,sha256=r5t8wFRNtBQoiUWh0WfIEWzXZW6f3D0uDt6NZTW_7Cc,1880 +sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/aioodbc.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/base.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/json.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/provision.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-312.pyc,, +sqlalchemy/dialects/mssql/aioodbc.py,sha256=UQd9ecSMIML713TDnLAviuBVJle7P7i1FtqGZZePk2Y,2022 +sqlalchemy/dialects/mssql/base.py,sha256=TQekFOsor8Rc-cnDPmbq_JLQekKrHPtUdBdNsCYFw7w,132447 +sqlalchemy/dialects/mssql/information_schema.py,sha256=HswjDc6y0mPXCf_x6VyylHlBdBa4PSY6Evxmmlch700,8084 +sqlalchemy/dialects/mssql/json.py,sha256=evUACW2O62TAPq8B7QIPagz7jfc664ql9ms68JqiYzg,4816 +sqlalchemy/dialects/mssql/provision.py,sha256=ZAtt6Div9NLIngMs8kyloxfphw0KDNMsnRCAVd7-esE,5593 +sqlalchemy/dialects/mssql/pymssql.py,sha256=LAv43q4vBCB85OsAwHQItaQUYTYIO0QJ-jvzaBrswmY,4097 +sqlalchemy/dialects/mssql/pyodbc.py,sha256=vwM-vBlmRwrqxOc73P0sFOrBSwn24wzc5IkEOpalbXQ,27056 +sqlalchemy/dialects/mysql/__init__.py,sha256=bxbi4hkysUK2OOVvr1F49akUj1cky27kKb07tgFzI9U,2153 +sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/base.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/dml.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/expression.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/json.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/provision.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/types.cpython-312.pyc,, +sqlalchemy/dialects/mysql/aiomysql.py,sha256=-oMZnCqNsSki8mlQRTWIwiQPT1OVdZIuANkb90q8LAs,9999 +sqlalchemy/dialects/mysql/asyncmy.py,sha256=YpuuOh8VknEeqHqUXQGfQ3jhfO3Xb-vZv78Jq5cscJ0,10067 +sqlalchemy/dialects/mysql/base.py,sha256=8shrZRSnVwDXtl6ybaO6cvul_RZ34zEWqvpYW5sNfOQ,120986 +sqlalchemy/dialects/mysql/cymysql.py,sha256=eXT1ry0w_qRxjiO24M980c-8PZ9qSsbhqBHntjEiKB0,2300 +sqlalchemy/dialects/mysql/dml.py,sha256=HXJMAvimJsqvhj3UZO4vW_6LkF5RqaKbHvklAjor7yU,7645 +sqlalchemy/dialects/mysql/enumerated.py,sha256=ipEPPQqoXfFwcywNdcLlZCEzHBtnitHRah1Gn6nItcg,8448 +sqlalchemy/dialects/mysql/expression.py,sha256=lsmQCHKwfPezUnt27d2kR6ohk4IRFCA64KBS16kx5dc,4097 +sqlalchemy/dialects/mysql/json.py,sha256=l6MEZ0qp8FgiRrIQvOMhyEJq0q6OqiEnvDTx5Cbt9uQ,2269 
+sqlalchemy/dialects/mysql/mariadb.py,sha256=kTfBLioLKk4JFFst4TY_iWqPtnvvQXFHknLfm89H2N8,853 +sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=_S1aV93kyP52Nvj7HR9weThML4oUvSLsLqiVFdoLR2o,8623 +sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=oq3mtsNOMldUjs32JbJG2u3Hy3DObyVzUUMYfOkwkHg,5729 +sqlalchemy/dialects/mysql/mysqldb.py,sha256=qUBbA6STeYGozutyTxHCo5p1W3p59QFFS2FwCgPrjBA,9503 +sqlalchemy/dialects/mysql/provision.py,sha256=Jnk8UO9_Apd2odR2IQFLrscCfAmYxuBKcB8giS3bBog,3575 +sqlalchemy/dialects/mysql/pymysql.py,sha256=GUnSHd2M2uKjmN46Hheymtm26g7phEgwYOXrX0zLY8M,4083 +sqlalchemy/dialects/mysql/pyodbc.py,sha256=072crI4qVyPhajYvHnsfFeSrNjLFVPIjBQKo5uyz5yk,4297 +sqlalchemy/dialects/mysql/reflection.py,sha256=3u34YwT1JJh3uThGZJZ3FKdnUcT7v08QB-tAl1r7VRk,22834 +sqlalchemy/dialects/mysql/reserved_words.py,sha256=ucKX2p2c3UnMq2ayZuOHuf73eXhu7SKsOsTlIN1Q83I,9258 +sqlalchemy/dialects/mysql/types.py,sha256=L5cTCsMT1pTedszNEM3jSxFNZEMcHQLprYCZ0vmfsnA,24343 +sqlalchemy/dialects/oracle/__init__.py,sha256=p4-2gw7TT0bX_MoJXTGD4i8WHctYsK9kCRbkpzykBrc,1493 +sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/dialects/oracle/__pycache__/base.cpython-312.pyc,, +sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-312.pyc,, +sqlalchemy/dialects/oracle/__pycache__/dictionary.cpython-312.pyc,, +sqlalchemy/dialects/oracle/__pycache__/oracledb.cpython-312.pyc,, +sqlalchemy/dialects/oracle/__pycache__/provision.cpython-312.pyc,, +sqlalchemy/dialects/oracle/__pycache__/types.cpython-312.pyc,, +sqlalchemy/dialects/oracle/base.py,sha256=HykzMCkN1LSRteoEttWb_ln9Q0YmOJ5tLbJan7TuWZ4,119699 +sqlalchemy/dialects/oracle/cx_oracle.py,sha256=q8Nyj15UZCE2TWOmxuWp5ZsxiCiGMzqfd_9UkmjIja0,55235 +sqlalchemy/dialects/oracle/dictionary.py,sha256=7WMrbPkqo8ZdGjaEZyQr-5f2pajSOF1OTGb8P97z8-g,19519 +sqlalchemy/dialects/oracle/oracledb.py,sha256=fZRKGqNIwW9LG4i8yDOXABrucbfzn_yC86Od-BJ3PcM,13619 +sqlalchemy/dialects/oracle/provision.py,sha256=O9ZpF4OG6Cx4mMzLRfZwhs8dZjrJETWR402n9c7726A,8304 +sqlalchemy/dialects/oracle/types.py,sha256=QK3hJvWzKnnCe3oD3rItwEEIwcoBze8qGg7VFOvVlIk,8231 +sqlalchemy/dialects/postgresql/__init__.py,sha256=wwnNAq4wDQzrlPRzDNB06ayuq3L2HNO99nzeEvq-YcU,3892 +sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/_psycopg_common.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/array.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/base.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/types.cpython-312.pyc,, 
+sqlalchemy/dialects/postgresql/_psycopg_common.py,sha256=7TudtgsPiSB8O5kX8W8KxcNYR8t5h_UHb86b_ChL0P8,5696 +sqlalchemy/dialects/postgresql/array.py,sha256=bWcame7ntmI_Kx6gmBX0-chwADFdLHeCvaDQ4iX8id8,13734 +sqlalchemy/dialects/postgresql/asyncpg.py,sha256=se1HXkikKQFcR2D-15jS6iKIaQs-M8Y8ke99ih0Y4NM,41132 +sqlalchemy/dialects/postgresql/base.py,sha256=ed49Ode09deJF3jXr8V5sSHTwVoFVq0dlvNdP3mJaBQ,178989 +sqlalchemy/dialects/postgresql/dml.py,sha256=Pc69Le6qzmUHHb1FT5zeUSD31dWm6SBgdCAGW89cs3s,11212 +sqlalchemy/dialects/postgresql/ext.py,sha256=1bZ--iNh2O9ym7l2gXZX48yP3yMO4dqb9RpYro2Mj2Q,16262 +sqlalchemy/dialects/postgresql/hstore.py,sha256=otAx-RTDfpi_tcXkMuQV0JOIXtYgevgnsikLKKOkI6U,11541 +sqlalchemy/dialects/postgresql/json.py,sha256=73nmtG-7TN01DytjIJnG8CHa8Q7KwlRdN0bhx4fEAT0,11590 +sqlalchemy/dialects/postgresql/named_types.py,sha256=3IV1ufo7zJjKmX4VtGDEnoXE6xEqLJAtGG82IiqHXwY,17594 +sqlalchemy/dialects/postgresql/operators.py,sha256=NsAaWun_tL3d_be0fs9YL6T4LPKK6crnmFxxIJHgyeY,2808 +sqlalchemy/dialects/postgresql/pg8000.py,sha256=3yoekiWSF-xnaWMqG76XrYPMqerg-42TdmfsW_ivK9E,18640 +sqlalchemy/dialects/postgresql/pg_catalog.py,sha256=hY3NXEUHxTWD4umhd2aowNu3laC-61Q_qQ_pReyXTUM,9254 +sqlalchemy/dialects/postgresql/provision.py,sha256=t6TZj0XaWG9zrpCjNr0oJRjAC_WQzaNdp3kaKJIbS8I,5770 +sqlalchemy/dialects/postgresql/psycopg.py,sha256=ACkfuT87vUJEW4kJyUqDYTwlnl5u0FZFlDcvLzxNzFQ,23226 +sqlalchemy/dialects/postgresql/psycopg2.py,sha256=kwEnflz5bAqJcuO_20eYiCtha_a4m_tg5_lppdDnaeU,31998 +sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=M7wAYSL6Pvt-4nbfacAHGyyw4XMKJ_bQZ1tc1pBtIdg,1756 +sqlalchemy/dialects/postgresql/ranges.py,sha256=6CgV7qkxEMJ9AQsiibo_XBLJYzGh-2ZxpG83sRaesVY,32949 +sqlalchemy/dialects/postgresql/types.py,sha256=Jfxqw9JaKNOq29JRWBublywgb3lLMyzx8YZI7CXpS2s,7300 +sqlalchemy/dialects/sqlite/__init__.py,sha256=lp9DIggNn349M-7IYhUA8et8--e8FRExWD2V_r1LJk4,1182 +sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/base.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/json.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=g3qGV6jmiXabWyb3282g_Nmxtj1jThxGSe9C9yalb-U,12345 +sqlalchemy/dialects/sqlite/base.py,sha256=P15AcHoVWS20mMNQqnCfj94owUsTEX2ULtRn290AoF0,97837 +sqlalchemy/dialects/sqlite/dml.py,sha256=9GE55WvwoktKy2fHeT-Wbc9xPHgsbh5oBfd_fckMH5Q,8443 +sqlalchemy/dialects/sqlite/json.py,sha256=Eoplbb_4dYlfrtmQaI8Xddd2suAIHA-IdbDQYM-LIhs,2777 +sqlalchemy/dialects/sqlite/provision.py,sha256=UCpmwxf4IWlrpb2eLHGbPTpCFVbdI_KAh2mKtjiLYao,5632 +sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=OL2S_05DK9kllZj6DOz7QtEl7jI7syxjW6woS725ii4,5356 +sqlalchemy/dialects/sqlite/pysqlite.py,sha256=aDp47n0J509kl2hDchoaBKXEQVZtkux54DwfKytUAe4,28068 +sqlalchemy/dialects/type_migration_guidelines.txt,sha256=-uHNdmYFGB7bzUNT6i8M5nb4j6j9YUKAtW4lcBZqsMg,8239 +sqlalchemy/engine/__init__.py,sha256=Stb2oV6l8w65JvqEo6J4qtKoApcmOpXy3AAxQud4C1o,2818 +sqlalchemy/engine/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/_py_processors.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/_py_row.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/_py_util.cpython-312.pyc,, 
+sqlalchemy/engine/__pycache__/base.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/characteristics.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/create.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/cursor.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/default.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/events.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/interfaces.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/mock.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/processors.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/reflection.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/result.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/row.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/strategies.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/url.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/util.cpython-312.pyc,, +sqlalchemy/engine/_py_processors.py,sha256=j9i_lcYYQOYJMcsDerPxI0sVFBIlX5sqoYMdMJlgWPI,3744 +sqlalchemy/engine/_py_row.py,sha256=wSqoUFzLOJ1f89kgDb6sJm9LUrF5LMFpXPcK1vUsKcs,3787 +sqlalchemy/engine/_py_util.py,sha256=f2DI3AN1kv6EplelowesCVpwS8hSXNufRkZoQmJtSH8,2484 +sqlalchemy/engine/base.py,sha256=frWSMmt3dlentYH4QNN3cijdGzp8NbunColUZwWsWgI,122958 +sqlalchemy/engine/characteristics.py,sha256=N3kbvw_ApMh86wb5yAGnxtPYD4YRhYMWion1H_aVZBI,4765 +sqlalchemy/engine/create.py,sha256=mYJtOG2ZKM8sgyfjpGpamW15RDU7JXi5s6iibbJHMIs,33206 +sqlalchemy/engine/cursor.py,sha256=cFq61yrw76k-QR_xNUBWuL-Zeyb14ltG-6jo2Q2iuuw,76392 +sqlalchemy/engine/default.py,sha256=2wwKKdsagb3QTajRSEw8Hl-EnQ-LmRxy822xOGyenHc,84648 +sqlalchemy/engine/events.py,sha256=c0unNFFiHzTAvkUtXoJaxzMFMDwurBkHiiUhuN8qluc,37381 +sqlalchemy/engine/interfaces.py,sha256=fcVHOmnMo7JZLHzgSKoK3QsdVHH7kJ_AmrDvwW9Ka3k,112936 +sqlalchemy/engine/mock.py,sha256=yvpxgFmRw5G4QsHeF-ZwQGHKES-HqQOucTxFtN1uzdk,4179 +sqlalchemy/engine/processors.py,sha256=XyfINKbo-2fjN-mW55YybvFyQMOil50_kVqsunahkNs,2379 +sqlalchemy/engine/reflection.py,sha256=gwGs8y7x6py5z-ZWx3hQqQrwpHepMCTJyQcFwWJjPlw,75364 +sqlalchemy/engine/result.py,sha256=j6BI4Wj2bziQNQG5OlG_Cm4KcNWY9AoYvTXVlJUU-D8,77603 +sqlalchemy/engine/row.py,sha256=9AAQo9zYDL88GcZ3bjcQTwMT-YIcuGTSMAyTfmBJ_yM,12032 +sqlalchemy/engine/strategies.py,sha256=DqFSWaXJPL-29Omot9O0aOcuGL8KmCGyOvnPGDkAJoE,442 +sqlalchemy/engine/url.py,sha256=8eWkUaIUyDExOcJ2D4xJXRcn4OY1GQJ3Q2duSX6UGAg,30784 +sqlalchemy/engine/util.py,sha256=bNirO8k1S8yOW61uNH-a9QrWtAJ9VGFgbiR0lk1lUQU,5682 +sqlalchemy/event/__init__.py,sha256=KBrp622xojnC3FFquxa2JsMamwAbfkvzfv6Op0NKiYc,997 +sqlalchemy/event/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/event/__pycache__/api.cpython-312.pyc,, +sqlalchemy/event/__pycache__/attr.cpython-312.pyc,, +sqlalchemy/event/__pycache__/base.cpython-312.pyc,, +sqlalchemy/event/__pycache__/legacy.cpython-312.pyc,, +sqlalchemy/event/__pycache__/registry.cpython-312.pyc,, +sqlalchemy/event/api.py,sha256=DtDVgjKSorOfp9MGJ7fgMWrj4seC_hkwF4D8CW1RFZU,8226 +sqlalchemy/event/attr.py,sha256=X8QeHGK4ioSYht1vkhc11f606_mq_t91jMNIT314ubs,20751 +sqlalchemy/event/base.py,sha256=270OShTD17-bSFUFnPtKdVnB0NFJZ2AouYPo1wT0aJw,15127 +sqlalchemy/event/legacy.py,sha256=teMPs00fO-4g8a_z2omcVKkYce5wj_1uvJO2n2MIeuo,8227 +sqlalchemy/event/registry.py,sha256=nfTSSyhjZZXc5wseWB4sXn-YibSc0LKX8mg17XlWmAo,10835 +sqlalchemy/events.py,sha256=k-ZD38aSPD29LYhED7CBqttp5MDVVx_YSaWC2-cu9ec,525 +sqlalchemy/exc.py,sha256=M_8-O1hd8i6gbyx-TapV400p_Lxq2QqTGMXUAO-YgCc,23976 +sqlalchemy/ext/__init__.py,sha256=S1fGKAbycnQDV01gs-JWGaFQ9GCD4QHwKcU2wnugg_o,322 +sqlalchemy/ext/__pycache__/__init__.cpython-312.pyc,, 
+sqlalchemy/ext/__pycache__/associationproxy.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/automap.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/baked.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/compiler.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/horizontal_shard.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/hybrid.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/indexable.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/instrumentation.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/mutable.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/orderinglist.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/serializer.cpython-312.pyc,, +sqlalchemy/ext/associationproxy.py,sha256=5O5ANHARO8jytvqBQmOu-QjNVE4Hh3tfYquqKAj5ajs,65771 +sqlalchemy/ext/asyncio/__init__.py,sha256=1OqSxEyIUn7RWLGyO12F-jAUIvk1I6DXlVy80-Gvkds,1317 +sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/ext/asyncio/__pycache__/base.cpython-312.pyc,, +sqlalchemy/ext/asyncio/__pycache__/engine.cpython-312.pyc,, +sqlalchemy/ext/asyncio/__pycache__/exc.cpython-312.pyc,, +sqlalchemy/ext/asyncio/__pycache__/result.cpython-312.pyc,, +sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-312.pyc,, +sqlalchemy/ext/asyncio/__pycache__/session.cpython-312.pyc,, +sqlalchemy/ext/asyncio/base.py,sha256=fl7wxZD9KjgFiCtG3WXrYjHEvanamcsodCqq9pH9lOk,8905 +sqlalchemy/ext/asyncio/engine.py,sha256=S_IRWX4QAjj2veLSu4Y3gKBIXkKQt7_2StJAK2_KUDY,48190 +sqlalchemy/ext/asyncio/exc.py,sha256=8sII7VMXzs2TrhizhFQMzSfcroRtiesq8o3UwLfXSgQ,639 +sqlalchemy/ext/asyncio/result.py,sha256=ID2eh-NHW-lnNFTxbKhje8fr-tnsucUsiw_jcpGcSPc,30409 +sqlalchemy/ext/asyncio/scoping.py,sha256=UxHAFxtWKqA7TEozyN2h7MJyzSspTCrS-1SlgQLTExo,52608 +sqlalchemy/ext/asyncio/session.py,sha256=mkFFC1C2mPuopz3BwkfSVTlp3vuIDc8hxiQx5ky5rvc,63103 +sqlalchemy/ext/automap.py,sha256=r0mUSyogNyqdBL4m9AA1NXbLiTLQmtvyQymsssNEipo,61581 +sqlalchemy/ext/baked.py,sha256=H6T1il7GY84BhzPFj49UECSpZh_eBuiHomA-QIsYOYQ,17807 +sqlalchemy/ext/compiler.py,sha256=ONPoxoKD2yUS9R2-oOhmPsA7efm-Bs0BXo7HE1dGlsU,20391 +sqlalchemy/ext/declarative/__init__.py,sha256=20psLdFQbbOWfpdXHZ0CTY6I1k4UqXvKemNVu1LvPOI,1818 +sqlalchemy/ext/declarative/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/ext/declarative/__pycache__/extensions.cpython-312.pyc,, +sqlalchemy/ext/declarative/extensions.py,sha256=uCjN1GisQt54AjqYnKYzJdUjnGd2pZBW47WWdPlS7FE,19547 +sqlalchemy/ext/horizontal_shard.py,sha256=wuwAPnHymln0unSBnyx-cpX0AfESKSsypaSQTYCvzDk,16750 +sqlalchemy/ext/hybrid.py,sha256=IYkCaPZ29gm2cPKPg0cWMkLCEqMykD8-JJTvgacGbmc,52458 +sqlalchemy/ext/indexable.py,sha256=UkTelbydKCdKelzbv3HWFFavoET9WocKaGRPGEOVfN8,11032 +sqlalchemy/ext/instrumentation.py,sha256=sg8ghDjdHSODFXh_jAmpgemnNX1rxCeeXEG3-PMdrNk,15707 +sqlalchemy/ext/mutable.py,sha256=L5ZkHBGYhMaqO75Xtyrk2DBR44RDk0g6Rz2HzHH0F8Q,37355 +sqlalchemy/ext/mypy/__init__.py,sha256=0WebDIZmqBD0OTq5JLtd_PmfF9JGxe4d4Qv3Ml3PKUg,241 +sqlalchemy/ext/mypy/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/ext/mypy/__pycache__/apply.cpython-312.pyc,, +sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-312.pyc,, +sqlalchemy/ext/mypy/__pycache__/infer.cpython-312.pyc,, +sqlalchemy/ext/mypy/__pycache__/names.cpython-312.pyc,, +sqlalchemy/ext/mypy/__pycache__/plugin.cpython-312.pyc,, +sqlalchemy/ext/mypy/__pycache__/util.cpython-312.pyc,, +sqlalchemy/ext/mypy/apply.py,sha256=Aek_-XA1eXihT4attxhfE43yBKtCgsxBSb--qgZKUqc,10550 +sqlalchemy/ext/mypy/decl_class.py,sha256=1vVJRII2apnLTUbc5HkJS6Z2GueaUv_eKvhbqh7Wik4,17384 
+sqlalchemy/ext/mypy/infer.py,sha256=KVnmLFEVS33Al8pUKI7MJbJQu3KeveBUMl78EluBORw,19369 +sqlalchemy/ext/mypy/names.py,sha256=Q3ef8XQBgVm9WUwlItqlYCXDNi_kbV5DdLEgbtEMEI8,10479 +sqlalchemy/ext/mypy/plugin.py,sha256=74ML8LI9xar0V86oCxnPFv5FQGEEfUzK64vOay4BKFs,9750 +sqlalchemy/ext/mypy/util.py,sha256=DKRaurkXHI2lAMAAcEO5GLXbX_m2Xqy7l_juh8Byf5U,9960 +sqlalchemy/ext/orderinglist.py,sha256=TGYbsGH72wEZcFNQDYDsZg9OSPuzf__P8YX8_2HtYUo,14384 +sqlalchemy/ext/serializer.py,sha256=D0g4jMZkRk0Gjr0L-FZe81SR63h0Zs-9JzuWtT_SD7k,6140 +sqlalchemy/future/__init__.py,sha256=q2mw-gxk_xoxJLEvRoyMha3vO1xSRHrslcExOHZwmPA,512 +sqlalchemy/future/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/future/__pycache__/engine.cpython-312.pyc,, +sqlalchemy/future/engine.py,sha256=AgIw6vMsef8W6tynOTkxsjd6o_OQDwGjLdbpoMD8ue8,495 +sqlalchemy/inspection.py,sha256=MF-LE358wZDUEl1IH8-Uwt2HI65EsQpQW5o5udHkZwA,5063 +sqlalchemy/log.py,sha256=8x9UR3nj0uFm6or6bQF-JWb4fYv2zOeQjG_w-0wOJFA,8607 +sqlalchemy/orm/__init__.py,sha256=ZYys5nL3RFUDCMOLFDBrRI52F6er3S1U1OY9TeORuKs,8463 +sqlalchemy/orm/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/_orm_constructors.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/_typing.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/attributes.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/base.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/bulk_persistence.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/clsregistry.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/collections.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/context.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/decl_api.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/decl_base.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/dependency.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/descriptor_props.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/dynamic.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/evaluator.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/events.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/exc.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/identity.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/instrumentation.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/interfaces.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/loading.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/mapped_collection.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/mapper.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/path_registry.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/persistence.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/properties.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/query.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/relationships.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/scoping.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/session.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/state.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/state_changes.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/strategies.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/strategy_options.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/sync.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/unitofwork.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/util.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/writeonly.cpython-312.pyc,, +sqlalchemy/orm/_orm_constructors.py,sha256=c4RMiYKIXzOTjuPIBbyRIzpizYjUBm2sd7k0n1vqb2o,100717 +sqlalchemy/orm/_typing.py,sha256=DVBfpHmDVK4x1zxaGJPY2GoTrAsyR6uexv20Lzf1afc,4973 +sqlalchemy/orm/attributes.py,sha256=lorOHBJvJJYndOuafWJhHBbQ1pR6FAyimhqz-mErBRQ,92534 +sqlalchemy/orm/base.py,sha256=re6A8ooMxLyfCAeQfhBwwxeJQkiH7EtzzOZIVIiTPlw,27466 
+sqlalchemy/orm/bulk_persistence.py,sha256=gNp2gFsGvV-5NhOPBL51TqKgR3ZRrS43jSwqUF-SwgY,71430 +sqlalchemy/orm/clsregistry.py,sha256=IjoDZwWpjG42ji59L4M1EZvjBEoXPZykzENDtKWxU8A,17974 +sqlalchemy/orm/collections.py,sha256=WEKuUCRgLhDhJEIBhZ21UrE0pBOyRm2zxD20GvbgA9g,52243 +sqlalchemy/orm/context.py,sha256=FMPyw07OA9OXWQ32RQx52AEa2xTLSkqdYgx9R_yN1x0,112955 +sqlalchemy/orm/decl_api.py,sha256=_WPKQ_vSE5k2TLtNmkaxxYmvbhZvkRMrrvCeDxdqDQE,63998 +sqlalchemy/orm/decl_base.py,sha256=FTHf3bCVlg52KR6JWVEwuG3XjOaGQ5KJMYFYXastGhw,82832 +sqlalchemy/orm/dependency.py,sha256=hgjksUWhgbmgHK5GdJdiDCBgDAIGQXIrY-Tj79tbL2k,47631 +sqlalchemy/orm/descriptor_props.py,sha256=dR_h4Gvdtpcdp4sj_ZOR4P5Nng2J2vhsvFHouRLlntc,37244 +sqlalchemy/orm/dynamic.py,sha256=rWAZ-nfAkREuNjt8e_FRdqYrvHDdbODn1CcfyP8Y18k,9816 +sqlalchemy/orm/evaluator.py,sha256=tRETz4dNZ71VsEA8nG0hpefByB-W0zBt02IxcSR5H2g,12353 +sqlalchemy/orm/events.py,sha256=1PiGT7JMUWTDAb3X1T79P02BMVDmcWEpatz1FwpLqoA,127777 +sqlalchemy/orm/exc.py,sha256=IP40P-wOeXhkYk0YizuTC3wqm6W9cPTaQU08f5MMaQ0,7413 +sqlalchemy/orm/identity.py,sha256=jHdCxCpCyda_8mFOfGmN_Pr0XZdKiU-2hFZshlNxbHs,9249 +sqlalchemy/orm/instrumentation.py,sha256=M-kZmkUvHUxtf-0mCA8RIM5QmMH1hWlYR_pKMwaidjA,24321 +sqlalchemy/orm/interfaces.py,sha256=Hmf1BjbfOarZRgMlruqghR7cgH2xyugA9v5t0x-a-wU,48502 +sqlalchemy/orm/loading.py,sha256=9RacpzFOWbuKgPRWHFmyIvD4fYCLAnkpwBFASyQ2CoI,58277 +sqlalchemy/orm/mapped_collection.py,sha256=zK3d3iozORzDruBUrAmkVC0RR3Orj5szk-TSQ24xzIU,19682 +sqlalchemy/orm/mapper.py,sha256=bfoRzNKKnjF-CDvr2Df7HZC9TepvtuQ49LRz_fW7DGQ,171088 +sqlalchemy/orm/path_registry.py,sha256=sJZMv_WPqUpHfQtKWaX3WYFeKBcNJ8C3wOM2mkBGkTE,25920 +sqlalchemy/orm/persistence.py,sha256=dzyB2JOXNwQgaCbN8kh0sEz00WFePr48qf8NWVCUZH8,61701 +sqlalchemy/orm/properties.py,sha256=eDPFzxYUgdM3uWjHywnb1XW-i0tVKKyx7A2MCD31GQU,29306 +sqlalchemy/orm/query.py,sha256=Cf0e94-u1XyoXJoOAmr4iFvtCwNY98kxUYyMPenaWTE,117708 +sqlalchemy/orm/relationships.py,sha256=dS5SY0v1MiD7iCNnAQlHaI6prUQhL5EkXT7ijc8FR8E,128644 +sqlalchemy/orm/scoping.py,sha256=rJVc7_Lic4V00HZ-UvYFWkVpXqdrMayRmIs4fIwH1UA,78688 +sqlalchemy/orm/session.py,sha256=CZJTQ-wPwIy0c3AMFxgJnBgaft6eEf4JzcCLcaaCSjg,195979 +sqlalchemy/orm/state.py,sha256=327-F4TG29s6mLC8oWRiO2PuvYIUZzY1MqUPjtUy7M4,37670 +sqlalchemy/orm/state_changes.py,sha256=qKYg7NxwrDkuUY3EPygAztym6oAVUFcP2wXn7QD3Mz4,6815 +sqlalchemy/orm/strategies.py,sha256=U5EL1FBXOmkIV5HsryIlTvfQfoajYGCnvTbuKBnH0pQ,116224 +sqlalchemy/orm/strategy_options.py,sha256=oeDl_rMDNAC_90N7ytsni-psXWAeQMhABQFyKBSmai0,85353 +sqlalchemy/orm/sync.py,sha256=g7iZfSge1HgxMk9SKRgUgtHEbpbZ1kP_CBqOIdTOXqc,5779 +sqlalchemy/orm/unitofwork.py,sha256=fiVaqcymbDDHRa1NjS90N9Z466nd5pkJOEi1dHO6QLY,27033 +sqlalchemy/orm/util.py,sha256=Q0JT2JydyGiNiYT8AVe9B6jOdBRVHSOVDIcWOsXXIUQ,80929 +sqlalchemy/orm/writeonly.py,sha256=SYu2sAaHZONk2pW4PmtE871LG-O0P_bjidvKzY1H_zI,22305 +sqlalchemy/pool/__init__.py,sha256=qiDdq4r4FFAoDrK6ncugF_i6usi_X1LeJt-CuBHey0s,1804 +sqlalchemy/pool/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/pool/__pycache__/base.cpython-312.pyc,, +sqlalchemy/pool/__pycache__/events.cpython-312.pyc,, +sqlalchemy/pool/__pycache__/impl.cpython-312.pyc,, +sqlalchemy/pool/base.py,sha256=WF4az4ZKuzQGuKeSJeyexaYjmWZUvYdC6KIi8zTGodw,52236 +sqlalchemy/pool/events.py,sha256=xGjkIUZl490ZDtCHqnQF9ZCwe2Jv93eGXmnQxftB11E,13147 +sqlalchemy/pool/impl.py,sha256=JwpALSkH-pCoO_6oENbkHYY00Jx9nlttyoI61LivRNc,18944 +sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sqlalchemy/schema.py,sha256=dKiWmgHYjcKQ4TiiD6vD0UMmIsD8u0Fsor1M9AAeGUs,3194 
+sqlalchemy/sql/__init__.py,sha256=UNa9EUiYWoPayf-FzNcwVgQvpsBdInPZfpJesAStN9o,5820 +sqlalchemy/sql/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/_dml_constructors.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/_elements_constructors.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/_orm_types.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/_py_util.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/_typing.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/annotation.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/base.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/cache_key.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/coercions.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/compiler.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/crud.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/ddl.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/default_comparator.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/dml.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/elements.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/events.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/expression.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/functions.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/lambdas.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/naming.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/operators.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/roles.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/schema.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/selectable.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/sqltypes.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/traversals.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/type_api.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/util.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/visitors.cpython-312.pyc,, +sqlalchemy/sql/_dml_constructors.py,sha256=YdBJex0MCVACv4q2nl_ii3uhxzwU6aDB8zAsratX5UQ,3867 +sqlalchemy/sql/_elements_constructors.py,sha256=1SX6o1ezeB8C9DAa2m0WxmfhM3ji3FeCprXFQkNerNY,63048 +sqlalchemy/sql/_orm_types.py,sha256=T-vjcry4C1y0GToFKVxQCnmly_-Zsq4IO4SHN6bvUF4,625 +sqlalchemy/sql/_py_util.py,sha256=hiM9ePbRSGs60bAMxPFuJCIC_p9SQ1VzqXGiPchiYwE,2173 +sqlalchemy/sql/_selectable_constructors.py,sha256=wjE6HrLm9cR7bxvZXT8sFLUqT6t_J9G1XyQCnYmBDl0,18780 +sqlalchemy/sql/_typing.py,sha256=oqwrYHVMtK-AuKGH9c4SgfiOEJUt5vjkzSEzzscMHkM,12771 +sqlalchemy/sql/annotation.py,sha256=aqbbVz9kfbCT3_66CZ9GEirVN197Cukoqt8rq48FgkQ,18245 +sqlalchemy/sql/base.py,sha256=M1b-Tg49ikUW2mnZv0aI38oASG6dgeo4jBNWDgJgAg8,73925 +sqlalchemy/sql/cache_key.py,sha256=0Db8mR8IrpBgdzXs4TGTt98LOpL3c7KABd72MAPKUQQ,33668 +sqlalchemy/sql/coercions.py,sha256=KjrVc2ks6cx_o6-_Zpu6uPuNGjJy2xvFzXy8z5oshQg,40628 +sqlalchemy/sql/compiler.py,sha256=hrTptbOKIgVIHapywj4Lk5OMwpXvHS-KGg3odFwlo-I,274687 +sqlalchemy/sql/crud.py,sha256=HBX4QPtW_PYYJmIKfNr-wE8IdEr963N24WXzFBUZOo0,56514 +sqlalchemy/sql/ddl.py,sha256=CIqMilCKfuQnF0lrZsQdTxgrbXqcTauKr0Ojzj77PFQ,45602 +sqlalchemy/sql/default_comparator.py,sha256=utXWsZVGEjflhFfCT4ywa6RnhORc1Rryo87Hga71Rps,16707 +sqlalchemy/sql/dml.py,sha256=pn0Lm1ofC5qVZzwGWFW73lPCiNba8OsTeemurJgwRyg,65614 +sqlalchemy/sql/elements.py,sha256=YfccXzQc9DlgF8q15kDf-zKBUY_vpIe0FGaVDBPoic4,176544 +sqlalchemy/sql/events.py,sha256=iC_Q1Htm1Aobt5tOYxWfHHqNpoytrULORmUKcusH_-E,18290 +sqlalchemy/sql/expression.py,sha256=VMX-dLpsZYnVRJpYNDozDUgaj7iQ0HuewUKVefD57PE,7586 +sqlalchemy/sql/functions.py,sha256=kMMYplvuIHFAPwxBI03SizwaLcYEHzysecWk-R1V-JM,63762 +sqlalchemy/sql/lambdas.py,sha256=DP0Qz7Ypo8QhzMwygGHYgRhwJMx-rNezO1euouH3iYU,49292 
+sqlalchemy/sql/naming.py,sha256=ZHs1qSV3ou8TYmZ92uvU3sfdklUQlIz4uhe330n05SU,6858 +sqlalchemy/sql/operators.py,sha256=himArRqBzrljob3Zfhi_ZS-Jleg1u6YFp0g3d7Co6IM,76106 +sqlalchemy/sql/roles.py,sha256=pOsVn_OZD7mF2gJByHf24Rjopt0_Hu3dUCEOK5t4KS8,7662 +sqlalchemy/sql/schema.py,sha256=iFleWHkxi-3mKGiK_N1TzUqxnNwOpypB4bWDuAVQe8c,229717 +sqlalchemy/sql/selectable.py,sha256=cgyV0AsPy4CXAFdhMiTCkbgaHiFilW9sclzxlHJKH3o,236460 +sqlalchemy/sql/sqltypes.py,sha256=fajBVE_CCJykrQpOOnb8_HFOKCSoUF48AxljJymvGk4,127330 +sqlalchemy/sql/traversals.py,sha256=3ScTC1fh1-y8Y478h_2Azmd2xdQdWPWkDve4YgrwMf8,33664 +sqlalchemy/sql/type_api.py,sha256=q_FieFRALHiRkhy5Bj-cVgFHa7DXF1dP23FaRRYKeho,83717 +sqlalchemy/sql/util.py,sha256=qGHQF-tPCj-m1FBerzT7weCanGcXU7dK5m-W7NHio-4,48077 +sqlalchemy/sql/visitors.py,sha256=71wdVvhhZL4nJvVwFAs6ssaW-qZgNRSmKjpAcOzF_TA,36317 +sqlalchemy/testing/__init__.py,sha256=zgitAYzsCWT_U48ZiifXHHLJFo8nZBYmI-5TueA4_lE,3160 +sqlalchemy/testing/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/assertions.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/assertsql.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/asyncio.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/config.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/engines.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/entities.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/exclusions.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/pickleable.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/profiling.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/provision.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/requirements.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/schema.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/util.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/warnings.cpython-312.pyc,, +sqlalchemy/testing/assertions.py,sha256=gL0rA7CCZJbcVgvWOPV91tTZTRwQc1_Ta0-ykBn83Ew,31439 +sqlalchemy/testing/assertsql.py,sha256=IgQG7l94WaiRP8nTbilJh1ZHZl125g7GPq-S5kmQZN0,16817 +sqlalchemy/testing/asyncio.py,sha256=kM8uuOqDBagZF0r9xvGmsiirUVLUQ_KBzjUFU67W-b8,3830 +sqlalchemy/testing/config.py,sha256=AqyH1qub_gDqX0BvlL-JBQe7N-t2wo8655FtwblUNOY,12090 +sqlalchemy/testing/engines.py,sha256=HFJceEBD3Q_TTFQMTtIV5wGWO_a7oUgoKtUF_z636SM,13481 +sqlalchemy/testing/entities.py,sha256=IphFegPKbff3Un47jY6bi7_MQXy6qkx_50jX2tHZJR4,3354 +sqlalchemy/testing/exclusions.py,sha256=T8B01hmm8WVs-EKcUOQRzabahPqblWJfOidi6bHJ6GA,12460 +sqlalchemy/testing/fixtures/__init__.py,sha256=dMClrIoxqlYIFpk2ia4RZpkbfxsS_3EBigr9QsPJ66g,1198 +sqlalchemy/testing/fixtures/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/testing/fixtures/__pycache__/base.cpython-312.pyc,, +sqlalchemy/testing/fixtures/__pycache__/mypy.cpython-312.pyc,, +sqlalchemy/testing/fixtures/__pycache__/orm.cpython-312.pyc,, +sqlalchemy/testing/fixtures/__pycache__/sql.cpython-312.pyc,, +sqlalchemy/testing/fixtures/base.py,sha256=9r_J2ksiTzClpUxW0TczICHrWR7Ny8PV8IsBz6TsGFI,12256 +sqlalchemy/testing/fixtures/mypy.py,sha256=gdxiwNFIzDlNGSOdvM3gbwDceVCC9t8oM5kKbwyhGBk,11973 +sqlalchemy/testing/fixtures/orm.py,sha256=8EFbnaBbXX_Bf4FcCzBUaAHgyVpsLGBHX16SGLqE3Fg,6095 +sqlalchemy/testing/fixtures/sql.py,sha256=KZMjco9_3dsuspmkew5Ejp88Wlr9PsSBB1qeJGFxQAk,15900 +sqlalchemy/testing/pickleable.py,sha256=U9mIqk-zaxq9Xfy7HErP7UrKgTov-A3QFnhZh-NiOjI,2833 +sqlalchemy/testing/plugin/__init__.py,sha256=79F--BIY_NTBzVRIlJGgAY5LNJJ3cD19XvrAo4X0W9A,247 +sqlalchemy/testing/plugin/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,, 
+sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-312.pyc,, +sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-312.pyc,, +sqlalchemy/testing/plugin/bootstrap.py,sha256=oYScMbEW4pCnWlPEAq1insFruCXFQeEVBwo__i4McpU,1685 +sqlalchemy/testing/plugin/plugin_base.py,sha256=BgNzWNEmgpK4CwhyblQQKnH-7FDKVi_Uul5vw8fFjBU,21578 +sqlalchemy/testing/plugin/pytestplugin.py,sha256=6jkQHH2VQMD75k2As9CuWXmEy9jrscoFRhCNg6-PaTw,27656 +sqlalchemy/testing/profiling.py,sha256=PbuPhRFbauFilUONeY3tV_Y_5lBkD7iCa8VVyH2Sk9Y,10148 +sqlalchemy/testing/provision.py,sha256=3qFor_sN1FFlS7odUGkKqLUxGmQZC9XM67I9vQ_zeXo,14626 +sqlalchemy/testing/requirements.py,sha256=Z__o-1Rj9B7dI8E_l3qsKTvsg0rK198vB0A1p7A5dcM,52832 +sqlalchemy/testing/schema.py,sha256=lr4GkGrGwagaHMuSGzWdzkMaj3HnS7dgfLLWfxt__-U,6513 +sqlalchemy/testing/suite/__init__.py,sha256=Y5DRNG0Yl1u3ypt9zVF0Z9suPZeuO_UQGLl-wRgvTjU,722 +sqlalchemy/testing/suite/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_cte.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_insert.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_results.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_select.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_types.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-312.pyc,, +sqlalchemy/testing/suite/test_cte.py,sha256=6zBC3W2OwX1Xs-HedzchcKN2S7EaLNkgkvV_JSZ_Pq0,6451 +sqlalchemy/testing/suite/test_ddl.py,sha256=1Npkf0C_4UNxphthAGjG078n0vPEgnSIHpDu5MfokxQ,12031 +sqlalchemy/testing/suite/test_deprecations.py,sha256=BcJxZTcjYqeOAENVElCg3hVvU6fkGEW3KGBMfnW8bng,5337 +sqlalchemy/testing/suite/test_dialect.py,sha256=EH4ZQWbnGdtjmx5amZtTyhYmrkXJCvW1SQoLahoE7uk,22923 +sqlalchemy/testing/suite/test_insert.py,sha256=9azifj6-OCD7s8h_tAO1uPw100ibQv8YoKc_VA3hn3c,18824 +sqlalchemy/testing/suite/test_reflection.py,sha256=7sML8-owubSQeEM7Ve6LbnB8uIVlNV00WWepKwII2a8,109648 +sqlalchemy/testing/suite/test_results.py,sha256=X720GafdA4p75SOGS93j-dXkt6QDEnnJbU2bh18VCcg,16914 +sqlalchemy/testing/suite/test_rowcount.py,sha256=3KDTlRgjpQ1OVfp__1cv8Hvq4CsDKzmrhJQ_WIJWoJg,7900 +sqlalchemy/testing/suite/test_select.py,sha256=ulRZQJlzkwwcewEyisuBEXVWFR0Wshz9MEDxYYiYLwQ,61732 +sqlalchemy/testing/suite/test_sequence.py,sha256=66bCoy4xo99GBSaX6Hxb88foANAykLGRz1YEKbvpfuA,9923 +sqlalchemy/testing/suite/test_types.py,sha256=K4MGHvnTtgqeksoQOBCZRVQYC7HoYO6Z6rVt5vj2t9o,67805 +sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=c3_eIxLyORuSOhNDP0jWKxPyUf3SwMFpdalxtquwqlM,6141 +sqlalchemy/testing/suite/test_update_delete.py,sha256=yTiM2unnfOK9rK8ZkqeTTU_MkT-RsKFLmdYliniZfAY,3994 +sqlalchemy/testing/util.py,sha256=qldXKw8gRJ4I2x3uXsBssYMqwatmcMFMTOveRQCmfDU,14469 +sqlalchemy/testing/warnings.py,sha256=fJ-QJUY2zY2PPxZJKv9medW-BKKbCNbA4Ns_V3YwFXM,1546 +sqlalchemy/types.py,sha256=cQFM-hFRmaf1GErun1qqgEs6QxufvzMuwKqj9tuMPpE,3168 +sqlalchemy/util/__init__.py,sha256=B3bedg-LSQEscwqgmYYU-VENUX8_zAE3q9vb7tkfJNY,8277 +sqlalchemy/util/__pycache__/__init__.cpython-312.pyc,, 
+sqlalchemy/util/__pycache__/_collections.cpython-312.pyc,, +sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-312.pyc,, +sqlalchemy/util/__pycache__/_has_cy.cpython-312.pyc,, +sqlalchemy/util/__pycache__/_py_collections.cpython-312.pyc,, +sqlalchemy/util/__pycache__/compat.cpython-312.pyc,, +sqlalchemy/util/__pycache__/concurrency.cpython-312.pyc,, +sqlalchemy/util/__pycache__/deprecations.cpython-312.pyc,, +sqlalchemy/util/__pycache__/langhelpers.cpython-312.pyc,, +sqlalchemy/util/__pycache__/preloaded.cpython-312.pyc,, +sqlalchemy/util/__pycache__/queue.cpython-312.pyc,, +sqlalchemy/util/__pycache__/tool_support.cpython-312.pyc,, +sqlalchemy/util/__pycache__/topological.cpython-312.pyc,, +sqlalchemy/util/__pycache__/typing.cpython-312.pyc,, +sqlalchemy/util/_collections.py,sha256=aZoSAVOXnHBoYEsxDOi0O9odg9wqLbGb7PGjaWQKiyY,20078 +sqlalchemy/util/_concurrency_py3k.py,sha256=zb0Bow2Y_QjTdaACEviBEEaFvqDuVvpJfmwCjaw8xNE,9170 +sqlalchemy/util/_has_cy.py,sha256=wCQmeSjT3jaH_oxfCEtGk-1g0gbSpt5MCK5UcWdMWqk,1247 +sqlalchemy/util/_py_collections.py,sha256=U6L5AoyLdgSv7cdqB4xxQbw1rpeJjyOZVXffgxgga8I,16714 +sqlalchemy/util/compat.py,sha256=fJWqZVkW7qPY2l0DxK-7fHKjyp7b-wa-5wOlmuZlick,8724 +sqlalchemy/util/concurrency.py,sha256=9lT_cMoO1fZNdY8QTUZ22oeSf-L5I-79Ke7chcBNPA0,3304 +sqlalchemy/util/deprecations.py,sha256=YBwvvYhSB8LhasIZRKvg_-WNoVhPUcaYI1ZrnjDn868,11971 +sqlalchemy/util/langhelpers.py,sha256=uIK3szZuq9aMnO-vEpSlNekNWv4I-E391e56bkTnUm0,65090 +sqlalchemy/util/preloaded.py,sha256=az7NmLJLsqs0mtM9uBkIu10-841RYDq8wOyqJ7xXvqE,5904 +sqlalchemy/util/queue.py,sha256=CaeSEaYZ57YwtmLdNdOIjT5PK_LCuwMFiO0mpp39ybM,10185 +sqlalchemy/util/tool_support.py,sha256=9braZyidaiNrZVsWtGmkSmus50-byhuYrlAqvhjcmnA,6135 +sqlalchemy/util/topological.py,sha256=N3M3Le7KzGHCmqPGg0ZBqixTDGwmFLhOZvBtc4rHL_g,3458 +sqlalchemy/util/typing.py,sha256=lFcGo1dJbZIZ9drAnvef-PzP0cX4LMxMSwgk3lJBb0g,18182 diff --git a/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/REQUESTED b/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/WHEEL new file mode 100644 index 00000000..3e811828 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.1.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_17_x86_64 +Tag: cp312-cp312-manylinux2014_x86_64 + diff --git a/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/top_level.txt new file mode 100644 index 00000000..39fb2bef --- /dev/null +++ b/.venv/lib/python3.12/site-packages/SQLAlchemy-2.0.35.dist-info/top_level.txt @@ -0,0 +1 @@ +sqlalchemy diff --git a/.venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc b/.venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc new file mode 100644 index 00000000..0d5388f2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/__init__.py b/.venv/lib/python3.12/site-packages/attr/__init__.py new file mode 100644 index 00000000..51b1c255 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/__init__.py @@ -0,0 +1,103 @@ +# SPDX-License-Identifier: MIT + +""" +Classes 
Without Boilerplate +""" + +from functools import partial +from typing import Callable + +from . import converters, exceptions, filters, setters, validators +from ._cmp import cmp_using +from ._compat import Protocol +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types +from ._make import ( + NOTHING, + Attribute, + Converter, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, +) +from ._next_gen import define, field, frozen, mutable +from ._version_info import VersionInfo + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + + +class AttrsInstance(Protocol): + pass + + +__all__ = [ + "Attribute", + "AttrsInstance", + "Converter", + "Factory", + "NOTHING", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "field", + "fields", + "fields_dict", + "filters", + "frozen", + "get_run_validators", + "has", + "ib", + "make_class", + "mutable", + "resolve_types", + "s", + "set_run_validators", + "setters", + "validate", + "validators", +] + + +def _make_getattr(mod_name: str) -> Callable: + """ + Create a metadata proxy for packaging information that uses *mod_name* in + its warnings and errors. + """ + + def __getattr__(name: str) -> str: + if name not in ("__version__", "__version_info__"): + msg = f"module {mod_name} has no attribute {name}" + raise AttributeError(msg) + + try: + from importlib.metadata import metadata + except ImportError: + from importlib_metadata import metadata + + meta = metadata("attrs") + + if name == "__version_info__": + return VersionInfo._from_version_string(meta["version"]) + + return meta["version"] + + return __getattr__ + + +__getattr__ = _make_getattr(__name__) diff --git a/.venv/lib/python3.12/site-packages/attr/__init__.pyi b/.venv/lib/python3.12/site-packages/attr/__init__.pyi new file mode 100644 index 00000000..6ae0a83d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/__init__.pyi @@ -0,0 +1,388 @@ +import enum +import sys + +from typing import ( + Any, + Callable, + Generic, + Mapping, + Protocol, + Sequence, + TypeVar, + overload, +) + +# `import X as X` is required to make these public +from . import converters as converters +from . import exceptions as exceptions +from . import filters as filters +from . import setters as setters +from . import validators as validators +from ._cmp import cmp_using as cmp_using +from ._typing_compat import AttrsInstance_ +from ._version_info import VersionInfo +from attrs import ( + define as define, + field as field, + mutable as mutable, + frozen as frozen, + _EqOrderType, + _ValidatorType, + _ConverterType, + _ReprArgType, + _OnSetAttrType, + _OnSetAttrArgType, + _FieldTransformer, + _ValidatorArgType, +) + +if sys.version_info >= (3, 10): + from typing import TypeGuard +else: + from typing_extensions import TypeGuard + +if sys.version_info >= (3, 11): + from typing import dataclass_transform +else: + from typing_extensions import dataclass_transform + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_FilterType = Callable[["Attribute[_T]", _T], bool] + +# We subclass this here to keep the protocol's qualified name clean. 
+class AttrsInstance(AttrsInstance_, Protocol): + pass + +_A = TypeVar("_A", bound=type[AttrsInstance]) + +class _Nothing(enum.Enum): + NOTHING = enum.auto() + +NOTHING = _Nothing.NOTHING + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. +if sys.version_info >= (3, 8): + from typing import Literal + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Callable[[Any], _T], + takes_self: Literal[True], + ) -> _T: ... + @overload + def Factory( + factory: Callable[[], _T], + takes_self: Literal[False], + ) -> _T: ... + +else: + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., + ) -> _T: ... + +In = TypeVar("In") +Out = TypeVar("Out") + +class Converter(Generic[In, Out]): + @overload + def __init__(self, converter: Callable[[In], Out]) -> None: ... + @overload + def __init__( + self, + converter: Callable[[In, AttrsInstance, Attribute], Out], + *, + takes_self: Literal[True], + takes_field: Literal[True], + ) -> None: ... + @overload + def __init__( + self, + converter: Callable[[In, Attribute], Out], + *, + takes_field: Literal[True], + ) -> None: ... + @overload + def __init__( + self, + converter: Callable[[In, AttrsInstance], Out], + *, + takes_self: Literal[True], + ) -> None: ... + +class Attribute(Generic[_T]): + name: str + default: _T | None + validator: _ValidatorType[_T] | None + repr: _ReprArgType + cmp: _EqOrderType + eq: _EqOrderType + order: _EqOrderType + hash: bool | None + init: bool + converter: _ConverterType | Converter[Any, _T] | None + metadata: dict[Any, Any] + type: type[_T] | None + kw_only: bool + on_setattr: _OnSetAttrType + alias: str | None + + def evolve(self, **changes: Any) -> "Attribute[Any]": ... + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments +# returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., +) -> Any: ... 
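The Factory overloads above implement the "white lie" called out in the NOTE: Factory claims to return the annotated field type, so annotated defaults type-check. A minimal sketch of what that enables at a use site (illustrative only; the Basket class is hypothetical, not part of the vendored stub):

import attr

@attr.s(auto_attribs=True)
class Basket:
    # Typed as returning the annotated type, although at runtime attrs
    # stores a Factory sentinel and calls list() once per instance.
    items: list = attr.Factory(list)
    # takes_self=True hands the partially initialized instance to the factory.
    label: str = attr.Factory(lambda self: f"basket-{len(self.items)}", takes_self=True)

b = Basket()
assert b.items == [] and b.label == "basket-0"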
+ +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def attrib( + default: None = ..., + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + type: type[_T] | None = ..., + converter: _ConverterType | Converter[Any, _T] | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def attrib( + default: _T, + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + type: type[_T] | None = ..., + converter: _ConverterType | Converter[Any, _T] | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: _T | None = ..., + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + type: object = ..., + converter: _ConverterType | Converter[Any, _T] | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., +) -> Any: ... +@overload +@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) +def attrs( + maybe_cls: _C, + these: dict[str, Any] | None = ..., + repr_ns: str | None = ..., + repr: bool = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., + unsafe_hash: bool | None = ..., +) -> _C: ... 
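This attrs overload (taking maybe_cls) and the one that follows (taking None) cover the two ways the decorator is spelled. A short sketch under that reading (hypothetical classes, not from the stub):

import attr

@attr.s  # bare form: maybe_cls is the class itself, returns _C
class A:
    x = attr.ib(default=1)  # explicit-default overload infers int

@attr.s(frozen=True)  # called form: maybe_cls=None, returns a decorator
class B:
    y = attr.ib(factory=list)  # factory=f is sugar for default=Factory(f)

assert A().x == 1 and B().y == []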
+@overload +@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) +def attrs( + maybe_cls: None = ..., + these: dict[str, Any] | None = ..., + repr_ns: str | None = ..., + repr: bool = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., + unsafe_hash: bool | None = ..., +) -> Callable[[_C], _C]: ... +def fields(cls: type[AttrsInstance]) -> Any: ... +def fields_dict(cls: type[AttrsInstance]) -> dict[str, Attribute[Any]]: ... +def validate(inst: AttrsInstance) -> None: ... +def resolve_types( + cls: _A, + globalns: dict[str, Any] | None = ..., + localns: dict[str, Any] | None = ..., + attribs: list[Attribute[Any]] | None = ..., + include_extras: bool = ..., +) -> _A: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: list[str] | tuple[str, ...] | dict[str, Any], + bases: tuple[type, ...] = ..., + class_body: dict[str, Any] | None = ..., + repr_ns: str | None = ..., + repr: bool = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + collect_by_mro: bool = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +# XXX: remember to fix attrs.asdict/astuple too! +def asdict( + inst: AttrsInstance, + recurse: bool = ..., + filter: _FilterType[Any] | None = ..., + dict_factory: type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Callable[[type, Attribute[Any], Any], Any] | None = ..., + tuple_keys: bool | None = ..., +) -> dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: AttrsInstance, + recurse: bool = ..., + filter: _FilterType[Any] | None = ..., + tuple_factory: type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> tuple[Any, ...]: ... +def has(cls: type) -> TypeGuard[type[AttrsInstance]]: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... 
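Taken together, the helper signatures above (has, fields, asdict, evolve, and friends) support round-trips like the following quick check (illustrative only, not part of the stub):

import attr

@attr.s(auto_attribs=True)
class User:
    name: str
    age: int = 0

u = User("ada")
assert attr.has(User)
assert [f.name for f in attr.fields(User)] == ["name", "age"]
assert attr.asdict(u) == {"name": "ada", "age": 0}
assert attr.evolve(u, age=1) == User("ada", 1)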
+ +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/.venv/lib/python3.12/site-packages/attr/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attr/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..c8baa30b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attr/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/__pycache__/_cmp.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attr/__pycache__/_cmp.cpython-312.pyc new file mode 100644 index 00000000..d1b453a4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attr/__pycache__/_cmp.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/__pycache__/_compat.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attr/__pycache__/_compat.cpython-312.pyc new file mode 100644 index 00000000..d044e0cd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attr/__pycache__/_compat.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/__pycache__/_config.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attr/__pycache__/_config.cpython-312.pyc new file mode 100644 index 00000000..df76e551 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attr/__pycache__/_config.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/__pycache__/_funcs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attr/__pycache__/_funcs.cpython-312.pyc new file mode 100644 index 00000000..0f218254 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attr/__pycache__/_funcs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/__pycache__/_make.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attr/__pycache__/_make.cpython-312.pyc new file mode 100644 index 00000000..cf1d53f9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attr/__pycache__/_make.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/__pycache__/_next_gen.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attr/__pycache__/_next_gen.cpython-312.pyc new file mode 100644 index 00000000..236368c4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attr/__pycache__/_next_gen.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/__pycache__/_version_info.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attr/__pycache__/_version_info.cpython-312.pyc new file mode 100644 index 00000000..3eb9fe15 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attr/__pycache__/_version_info.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/__pycache__/converters.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attr/__pycache__/converters.cpython-312.pyc new file mode 100644 index 00000000..cc67e471 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attr/__pycache__/converters.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/__pycache__/exceptions.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attr/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 00000000..2d1f7a6e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attr/__pycache__/exceptions.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/__pycache__/filters.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/attr/__pycache__/filters.cpython-312.pyc new file mode 100644 index 00000000..b150afbb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attr/__pycache__/filters.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/__pycache__/setters.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attr/__pycache__/setters.cpython-312.pyc new file mode 100644 index 00000000..6f46e9ed Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attr/__pycache__/setters.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/__pycache__/validators.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attr/__pycache__/validators.cpython-312.pyc new file mode 100644 index 00000000..a1b9c300 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attr/__pycache__/validators.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attr/_cmp.py b/.venv/lib/python3.12/site-packages/attr/_cmp.py new file mode 100644 index 00000000..f367bb3a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/_cmp.py @@ -0,0 +1,160 @@ +# SPDX-License-Identifier: MIT + + +import functools +import types + +from ._make import _make_ne + + +_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} + + +def cmp_using( + eq=None, + lt=None, + le=None, + gt=None, + ge=None, + require_same_type=True, + class_name="Comparable", +): + """ + Create a class that can be passed into `attrs.field`'s ``eq``, ``order``, + and ``cmp`` arguments to customize field comparison. + + The resulting class will have a full set of ordering methods if at least + one of ``{lt, le, gt, ge}`` and ``eq`` are provided. + + Args: + eq (typing.Callable | None): + Callable used to evaluate equality of two objects. + + lt (typing.Callable | None): + Callable used to evaluate whether one object is less than another + object. + + le (typing.Callable | None): + Callable used to evaluate whether one object is less than or equal + to another object. + + gt (typing.Callable | None): + Callable used to evaluate whether one object is greater than + another object. + + ge (typing.Callable | None): + Callable used to evaluate whether one object is greater than or + equal to another object. + + require_same_type (bool): + When `True`, equality and ordering methods will return + `NotImplemented` if objects are not of the same type. + + class_name (str | None): Name of class. Defaults to "Comparable". + + See `comparison` for more details. + + .. versionadded:: 21.1.0 + """ + + body = { + "__slots__": ["value"], + "__init__": _make_init(), + "_requirements": [], + "_is_comparable_to": _is_comparable_to, + } + + # Add operations. + num_order_functions = 0 + has_eq_function = False + + if eq is not None: + has_eq_function = True + body["__eq__"] = _make_operator("eq", eq) + body["__ne__"] = _make_ne() + + if lt is not None: + num_order_functions += 1 + body["__lt__"] = _make_operator("lt", lt) + + if le is not None: + num_order_functions += 1 + body["__le__"] = _make_operator("le", le) + + if gt is not None: + num_order_functions += 1 + body["__gt__"] = _make_operator("gt", gt) + + if ge is not None: + num_order_functions += 1 + body["__ge__"] = _make_operator("ge", ge) + + type_ = types.new_class( + class_name, (object,), {}, lambda ns: ns.update(body) + ) + + # Add same type requirement. + if require_same_type: + type_._requirements.append(_check_same_type) + + # Add total ordering if at least one operation was defined. 
+    if 0 < num_order_functions < 4:
+        if not has_eq_function:
+            # functools.total_ordering requires __eq__ to be defined,
+            # so raise an early error here to keep a nice stack.
+            msg = "eq must be defined in order to complete ordering from lt, le, gt, ge."
+            raise ValueError(msg)
+        type_ = functools.total_ordering(type_)
+
+    return type_
+
+
+def _make_init():
+    """
+    Create __init__ method.
+    """
+
+    def __init__(self, value):
+        """
+        Initialize object with *value*.
+        """
+        self.value = value
+
+    return __init__
+
+
+def _make_operator(name, func):
+    """
+    Create operator method.
+    """
+
+    def method(self, other):
+        if not self._is_comparable_to(other):
+            return NotImplemented
+
+        result = func(self.value, other.value)
+        if result is NotImplemented:
+            return NotImplemented
+
+        return result
+
+    method.__name__ = f"__{name}__"
+    method.__doc__ = (
+        f"Return a {_operation_names[name]} b. Computed by attrs."
+    )
+
+    return method
+
+
+def _is_comparable_to(self, other):
+    """
+    Check whether `other` is comparable to `self`.
+    """
+    return all(func(self, other) for func in self._requirements)
+
+
+def _check_same_type(self, other):
+    """
+    Return True if *self* and *other* are of the same type, False otherwise.
+    """
+    return other.value.__class__ is self.value.__class__
diff --git a/.venv/lib/python3.12/site-packages/attr/_cmp.pyi b/.venv/lib/python3.12/site-packages/attr/_cmp.pyi
new file mode 100644
index 00000000..cc7893b0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/_cmp.pyi
@@ -0,0 +1,13 @@
+from typing import Any, Callable
+
+_CompareWithType = Callable[[Any, Any], bool]
+
+def cmp_using(
+    eq: _CompareWithType | None = ...,
+    lt: _CompareWithType | None = ...,
+    le: _CompareWithType | None = ...,
+    gt: _CompareWithType | None = ...,
+    ge: _CompareWithType | None = ...,
+    require_same_type: bool = ...,
+    class_name: str = ...,
+) -> type: ...
diff --git a/.venv/lib/python3.12/site-packages/attr/_compat.py b/.venv/lib/python3.12/site-packages/attr/_compat.py
new file mode 100644
index 00000000..104eeb07
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/_compat.py
@@ -0,0 +1,103 @@
+# SPDX-License-Identifier: MIT
+
+import inspect
+import platform
+import sys
+import threading
+
+from collections.abc import Mapping, Sequence  # noqa: F401
+from typing import _GenericAlias
+
+
+PYPY = platform.python_implementation() == "PyPy"
+PY_3_8_PLUS = sys.version_info[:2] >= (3, 8)
+PY_3_9_PLUS = sys.version_info[:2] >= (3, 9)
+PY_3_10_PLUS = sys.version_info[:2] >= (3, 10)
+PY_3_11_PLUS = sys.version_info[:2] >= (3, 11)
+PY_3_12_PLUS = sys.version_info[:2] >= (3, 12)
+PY_3_13_PLUS = sys.version_info[:2] >= (3, 13)
+PY_3_14_PLUS = sys.version_info[:2] >= (3, 14)
+
+
+if sys.version_info < (3, 8):
+    try:
+        from typing_extensions import Protocol
+    except ImportError:  # pragma: no cover
+        Protocol = object
+else:
+    from typing import Protocol  # noqa: F401
+
+if PY_3_14_PLUS:  # pragma: no cover
+    import annotationlib
+
+    _get_annotations = annotationlib.get_annotations
+
+else:
+
+    def _get_annotations(cls):
+        """
+        Get annotations for *cls*.
+        """
+        return cls.__dict__.get("__annotations__", {})
+
+
+class _AnnotationExtractor:
+    """
+    Extract type annotations from a callable, returning None whenever there
+    is none.
+ """ + + __slots__ = ["sig"] + + def __init__(self, callable): + try: + self.sig = inspect.signature(callable) + except (ValueError, TypeError): # inspect failed + self.sig = None + + def get_first_param_type(self): + """ + Return the type annotation of the first argument if it's not empty. + """ + if not self.sig: + return None + + params = list(self.sig.parameters.values()) + if params and params[0].annotation is not inspect.Parameter.empty: + return params[0].annotation + + return None + + def get_return_type(self): + """ + Return the return type if it's not empty. + """ + if ( + self.sig + and self.sig.return_annotation is not inspect.Signature.empty + ): + return self.sig.return_annotation + + return None + + +# Thread-local global to track attrs instances which are already being repr'd. +# This is needed because there is no other (thread-safe) way to pass info +# about the instances that are already being repr'd through the call stack +# in order to ensure we don't perform infinite recursion. +# +# For instance, if an instance contains a dict which contains that instance, +# we need to know that we're already repr'ing the outside instance from within +# the dict's repr() call. +# +# This lives here rather than in _make.py so that the functions in _make.py +# don't have a direct reference to the thread-local in their globals dict. +# If they have such a reference, it breaks cloudpickle. +repr_context = threading.local() + + +def get_generic_base(cl): + """If this is a generic class (A[str]), return the generic base for it.""" + if cl.__class__ is _GenericAlias: + return cl.__origin__ + return None diff --git a/.venv/lib/python3.12/site-packages/attr/_config.py b/.venv/lib/python3.12/site-packages/attr/_config.py new file mode 100644 index 00000000..9c245b14 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/_config.py @@ -0,0 +1,31 @@ +# SPDX-License-Identifier: MIT + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. + """ + if not isinstance(run, bool): + msg = "'run' must be bool." + raise TypeError(msg) + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. + """ + return _run_validators diff --git a/.venv/lib/python3.12/site-packages/attr/_funcs.py b/.venv/lib/python3.12/site-packages/attr/_funcs.py new file mode 100644 index 00000000..355cef44 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/_funcs.py @@ -0,0 +1,522 @@ +# SPDX-License-Identifier: MIT + + +import copy + +from ._compat import PY_3_9_PLUS, get_generic_base +from ._make import _OBJ_SETATTR, NOTHING, fields +from .exceptions import AttrsAttributeNotFoundError + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, + value_serializer=None, +): + """ + Return the *attrs* attribute values of *inst* as a dict. + + Optionally recurse into other *attrs*-decorated classes. + + Args: + inst: Instance of an *attrs*-decorated class. + + recurse (bool): Recurse into classes that are also *attrs*-decorated. 
+
+        filter (~typing.Callable):
+            A callable whose return code determines whether an attribute or
+            element is included (`True`) or dropped (`False`). Is called with
+            the `attrs.Attribute` as the first argument and the value as the
+            second argument.
+
+        dict_factory (~typing.Callable):
+            A callable to produce dictionaries from. For example, to produce
+            ordered dictionaries instead of normal Python dictionaries, pass in
+            ``collections.OrderedDict``.
+
+        retain_collection_types (bool):
+            Do not convert to `list` when encountering an attribute whose type
+            is `tuple` or `set`. Only meaningful if *recurse* is `True`.
+
+        value_serializer (typing.Callable | None):
+            A hook that is called for every attribute or dict key/value. It
+            receives the current instance, field and value and must return the
+            (updated) value. The hook is run *after* the optional *filter* has
+            been applied.
+
+    Returns:
+        Return type of *dict_factory*.
+
+    Raises:
+        attrs.exceptions.NotAnAttrsClassError:
+            If *cls* is not an *attrs* class.
+
+    .. versionadded:: 16.0.0 *dict_factory*
+    .. versionadded:: 16.1.0 *retain_collection_types*
+    .. versionadded:: 20.3.0 *value_serializer*
+    .. versionadded:: 21.3.0
+        If a dict has a collection for a key, it is serialized as a tuple.
+    """
+    attrs = fields(inst.__class__)
+    rv = dict_factory()
+    for a in attrs:
+        v = getattr(inst, a.name)
+        if filter is not None and not filter(a, v):
+            continue
+
+        if value_serializer is not None:
+            v = value_serializer(inst, a, v)
+
+        if recurse is True:
+            if has(v.__class__):
+                rv[a.name] = asdict(
+                    v,
+                    recurse=True,
+                    filter=filter,
+                    dict_factory=dict_factory,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                )
+            elif isinstance(v, (tuple, list, set, frozenset)):
+                cf = v.__class__ if retain_collection_types is True else list
+                items = [
+                    _asdict_anything(
+                        i,
+                        is_key=False,
+                        filter=filter,
+                        dict_factory=dict_factory,
+                        retain_collection_types=retain_collection_types,
+                        value_serializer=value_serializer,
+                    )
+                    for i in v
+                ]
+                try:
+                    rv[a.name] = cf(items)
+                except TypeError:
+                    if not issubclass(cf, tuple):
+                        raise
+                    # Workaround for TypeError: cf.__new__() missing 1 required
+                    # positional argument (which appears, for a namedtuple)
+                    rv[a.name] = cf(*items)
+            elif isinstance(v, dict):
+                df = dict_factory
+                rv[a.name] = df(
+                    (
+                        _asdict_anything(
+                            kk,
+                            is_key=True,
+                            filter=filter,
+                            dict_factory=df,
+                            retain_collection_types=retain_collection_types,
+                            value_serializer=value_serializer,
+                        ),
+                        _asdict_anything(
+                            vv,
+                            is_key=False,
+                            filter=filter,
+                            dict_factory=df,
+                            retain_collection_types=retain_collection_types,
+                            value_serializer=value_serializer,
+                        ),
+                    )
+                    for kk, vv in v.items()
+                )
+            else:
+                rv[a.name] = v
+        else:
+            rv[a.name] = v
+    return rv
+
+
+def _asdict_anything(
+    val,
+    is_key,
+    filter,
+    dict_factory,
+    retain_collection_types,
+    value_serializer,
+):
+    """
+    ``asdict`` only works on attrs instances, this works on anything.
+    """
+    if getattr(val.__class__, "__attrs_attrs__", None) is not None:
+        # Attrs class.
+ rv = asdict( + val, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(val, (tuple, list, set, frozenset)): + if retain_collection_types is True: + cf = val.__class__ + elif is_key: + cf = tuple + else: + cf = list + + rv = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in val.items() + ) + else: + rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the *attrs* attribute values of *inst* as a tuple. + + Optionally recurse into other *attrs*-decorated classes. + + Args: + inst: Instance of an *attrs*-decorated class. + + recurse (bool): + Recurse into classes that are also *attrs*-decorated. + + filter (~typing.Callable): + A callable whose return code determines whether an attribute or + element is included (`True`) or dropped (`False`). Is called with + the `attrs.Attribute` as the first argument and the value as the + second argument. + + tuple_factory (~typing.Callable): + A callable to produce tuples from. For example, to produce lists + instead of tuples. + + retain_collection_types (bool): + Do not convert to `list` or `dict` when encountering an attribute + which type is `tuple`, `dict` or `set`. Only meaningful if + *recurse* is `True`. + + Returns: + Return type of *tuple_factory* + + Raises: + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. + + .. versionadded:: 16.2.0 + """ + attrs = fields(inst.__class__) + rv = [] + retain = retain_collection_types # Very long. 
:/
+    for a in attrs:
+        v = getattr(inst, a.name)
+        if filter is not None and not filter(a, v):
+            continue
+        if recurse is True:
+            if has(v.__class__):
+                rv.append(
+                    astuple(
+                        v,
+                        recurse=True,
+                        filter=filter,
+                        tuple_factory=tuple_factory,
+                        retain_collection_types=retain,
+                    )
+                )
+            elif isinstance(v, (tuple, list, set, frozenset)):
+                cf = v.__class__ if retain is True else list
+                items = [
+                    (
+                        astuple(
+                            j,
+                            recurse=True,
+                            filter=filter,
+                            tuple_factory=tuple_factory,
+                            retain_collection_types=retain,
+                        )
+                        if has(j.__class__)
+                        else j
+                    )
+                    for j in v
+                ]
+                try:
+                    rv.append(cf(items))
+                except TypeError:
+                    if not issubclass(cf, tuple):
+                        raise
+                    # Workaround for TypeError: cf.__new__() missing 1 required
+                    # positional argument (which appears, for a namedtuple)
+                    rv.append(cf(*items))
+            elif isinstance(v, dict):
+                df = v.__class__ if retain is True else dict
+                rv.append(
+                    df(
+                        (
+                            (
+                                astuple(
+                                    kk,
+                                    tuple_factory=tuple_factory,
+                                    retain_collection_types=retain,
+                                )
+                                if has(kk.__class__)
+                                else kk
+                            ),
+                            (
+                                astuple(
+                                    vv,
+                                    tuple_factory=tuple_factory,
+                                    retain_collection_types=retain,
+                                )
+                                if has(vv.__class__)
+                                else vv
+                            ),
+                        )
+                        for kk, vv in v.items()
+                    )
+                )
+            else:
+                rv.append(v)
+        else:
+            rv.append(v)
+
+    return rv if tuple_factory is list else tuple_factory(rv)
+
+
+def has(cls):
+    """
+    Check whether *cls* is a class with *attrs* attributes.
+
+    Args:
+        cls (type): Class to introspect.
+
+    Raises:
+        TypeError: If *cls* is not a class.
+
+    Returns:
+        bool:
+    """
+    attrs = getattr(cls, "__attrs_attrs__", None)
+    if attrs is not None:
+        return True
+
+    # No attrs, maybe it's a specialized generic (A[str])?
+    generic_base = get_generic_base(cls)
+    if generic_base is not None:
+        generic_attrs = getattr(generic_base, "__attrs_attrs__", None)
+        if generic_attrs is not None:
+            # Stick it on here for speed next time.
+            cls.__attrs_attrs__ = generic_attrs
+        return generic_attrs is not None
+    return False
+
+
+def assoc(inst, **changes):
+    """
+    Copy *inst* and apply *changes*.
+
+    This is different from `evolve` that applies the changes to the arguments
+    that create the new instance.
+
+    `evolve`'s behavior is preferable, but there are `edge cases`_ where it
+    doesn't work. Therefore `assoc` is deprecated, but will not be removed.
+
+    .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251
+
+    Args:
+        inst: Instance of a class with *attrs* attributes.
+
+        changes: Keyword changes in the new copy.
+
+    Returns:
+        A copy of inst with *changes* incorporated.
+
+    Raises:
+        attrs.exceptions.AttrsAttributeNotFoundError:
+            If *attr_name* couldn't be found on *cls*.
+
+        attrs.exceptions.NotAnAttrsClassError:
+            If *cls* is not an *attrs* class.
+
+    .. deprecated:: 17.1.0
+        Use `attrs.evolve` instead if you can. This function will not be
+        removed due to the slightly different approach compared to
+        `attrs.evolve`, though.
+    """
+    new = copy.copy(inst)
+    attrs = fields(inst.__class__)
+    for k, v in changes.items():
+        a = getattr(attrs, k, NOTHING)
+        if a is NOTHING:
+            msg = f"{k} is not an attrs attribute on {new.__class__}."
+            raise AttrsAttributeNotFoundError(msg)
+        _OBJ_SETATTR(new, k, v)
+    return new
+
+
+def evolve(*args, **changes):
+    """
+    Create a new instance, based on the first positional argument with
+    *changes* applied.
+
+    Args:
+
+        inst:
+            Instance of a class with *attrs* attributes. *inst* must be passed
+            as a positional argument.
+
+        changes:
+            Keyword changes in the new copy.
+
+    Returns:
+        A copy of inst with *changes* incorporated.
+
+    Raises:
+        TypeError:
+            If *attr_name* couldn't be found in the class ``__init__``.
+
+        attrs.exceptions.NotAnAttrsClassError:
+            If *cls* is not an *attrs* class.
+
+    .. versionadded:: 17.1.0
+    .. deprecated:: 23.1.0
+        It is now deprecated to pass the instance using the keyword argument
+        *inst*. It will raise a warning until at least April 2024, after which
+        it will become an error. Always pass the instance as a positional
+        argument.
+    .. versionchanged:: 24.1.0
+        *inst* can't be passed as a keyword argument anymore.
+    """
+    try:
+        (inst,) = args
+    except ValueError:
+        msg = (
+            f"evolve() takes 1 positional argument, but {len(args)} were given"
+        )
+        raise TypeError(msg) from None
+
+    cls = inst.__class__
+    attrs = fields(cls)
+    for a in attrs:
+        if not a.init:
+            continue
+        attr_name = a.name  # To deal with private attributes.
+        init_name = a.alias
+        if init_name not in changes:
+            changes[init_name] = getattr(inst, attr_name)
+
+    return cls(**changes)
+
+
+def resolve_types(
+    cls, globalns=None, localns=None, attribs=None, include_extras=True
+):
+    """
+    Resolve any strings and forward annotations in type annotations.
+
+    This is only required if you need concrete types in :class:`Attribute`'s
+    *type* field. In other words, you don't need to resolve your types if you
+    only use them for static type checking.
+
+    With no arguments, names will be looked up in the module in which the class
+    was created. If this is not what you want, for example, if the name only
+    exists inside a method, you may pass *globalns* or *localns* to specify
+    other dictionaries in which to look up these names. See the docs of
+    `typing.get_type_hints` for more details.
+
+    Args:
+        cls (type): Class to resolve.
+
+        globalns (dict | None): Dictionary containing global variables.
+
+        localns (dict | None): Dictionary containing local variables.
+
+        attribs (list | None):
+            List of attribs for the given class. This is necessary when calling
+            from inside a ``field_transformer`` since *cls* is not an *attrs*
+            class yet.
+
+        include_extras (bool):
+            Resolve more accurately, if possible. Pass ``include_extras`` to
+            ``typing.get_type_hints``, if supported by the typing module. On
+            supported Python versions (3.9+), this resolves the types more
+            accurately.
+
+    Raises:
+        TypeError: If *cls* is not a class.
+
+        attrs.exceptions.NotAnAttrsClassError:
+            If *cls* is not an *attrs* class and you didn't pass any attribs.
+
+        NameError: If types cannot be resolved because of missing variables.
+
+    Returns:
+        *cls* so you can use this function also as a class decorator. Please
+        note that you have to apply it **after** `attrs.define`. That means the
+        decorator has to come in the line **before** `attrs.define`.
+
+    .. versionadded:: 20.1.0
+    .. versionadded:: 21.1.0 *attribs*
+    .. versionadded:: 23.1.0 *include_extras*
+    """
+    # Since calling get_type_hints is expensive we cache whether we've
+    # done it already.
+    if getattr(cls, "__attrs_types_resolved__", None) != cls:
+        import typing
+
+        kwargs = {"globalns": globalns, "localns": localns}
+
+        if PY_3_9_PLUS:
+            kwargs["include_extras"] = include_extras
+
+        hints = typing.get_type_hints(cls, **kwargs)
+        for field in fields(cls) if attribs is None else attribs:
+            if field.name in hints:
+                # Since fields have been frozen we must work around it.
+                _OBJ_SETATTR(field, "type", hints[field.name])
+        # We store the class we resolved so that subclasses know they haven't
+        # been resolved.
+ cls.__attrs_types_resolved__ = cls + + # Return the class so you can use it as a decorator too. + return cls diff --git a/.venv/lib/python3.12/site-packages/attr/_make.py b/.venv/lib/python3.12/site-packages/attr/_make.py new file mode 100644 index 00000000..bf00c5f8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/_make.py @@ -0,0 +1,2960 @@ +# SPDX-License-Identifier: MIT + +from __future__ import annotations + +import abc +import contextlib +import copy +import enum +import functools +import inspect +import itertools +import linecache +import sys +import types +import typing + +from operator import itemgetter + +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters +from ._compat import ( + PY_3_8_PLUS, + PY_3_10_PLUS, + PY_3_11_PLUS, + _AnnotationExtractor, + _get_annotations, + get_generic_base, +) +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + UnannotatedAttributeError, +) + + +# This is used at least twice, so cache it here. +_OBJ_SETATTR = object.__setattr__ +_INIT_FACTORY_PAT = "__attr_factory_%s" +_CLASSVAR_PREFIXES = ( + "typing.ClassVar", + "t.ClassVar", + "ClassVar", + "typing_extensions.ClassVar", +) +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_HASH_CACHE_FIELD = "_attrs_cached_hash" + +_EMPTY_METADATA_SINGLETON = types.MappingProxyType({}) + +# Unique object for unequivocal getattr() defaults. +_SENTINEL = object() + +_DEFAULT_ON_SETATTR = setters.pipe(setters.convert, setters.validate) + + +class _Nothing(enum.Enum): + """ + Sentinel to indicate the lack of a value when `None` is ambiguous. + + If extending attrs, you can use ``typing.Literal[NOTHING]`` to show + that a value may be ``NOTHING``. + + .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. + .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant. + """ + + NOTHING = enum.auto() + + def __repr__(self): + return "NOTHING" + + def __bool__(self): + return False + + +NOTHING = _Nothing.NOTHING +""" +Sentinel to indicate the lack of a value when `None` is ambiguous. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since `None` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008 + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Create a new field / attribute on a class. + + Identical to `attrs.field`, except it's not keyword-only. + + Consider using `attrs.field` in new code (``attr.ib`` will *never* go away, + though). + + .. warning:: + + Does **nothing** unless the class is also decorated with + `attr.s` (or similar)! + + + .. versionadded:: 15.2.0 *convert* + .. versionadded:: 16.3.0 *metadata* + .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. + .. 
versionchanged:: 17.1.0 + *hash* is `None` and therefore mirrors *eq* by default. + .. versionadded:: 17.3.0 *type* + .. deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 + *converter* as a replacement for the deprecated *convert* to achieve + consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed. + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 + .. versionchanged:: 21.1.0 + *eq*, *order*, and *cmp* also accept a custom callable + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 22.2.0 *alias* + """ + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq, order, True + ) + + if hash is not None and hash is not True and hash is not False: + msg = "Invalid value for hash. Must be True, False, or None." + raise TypeError(msg) + + if factory is not None: + if default is not NOTHING: + msg = ( + "The `default` and `factory` arguments are mutually exclusive." + ) + raise ValueError(msg) + if not callable(factory): + msg = "The `factory` argument must be a callable." + raise ValueError(msg) + default = Factory(factory) + + if metadata is None: + metadata = {} + + # Apply syntactic sugar by auto-wrapping. + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + eq_key=eq_key, + order=order, + order_key=order_key, + on_setattr=on_setattr, + alias=alias, + ) + + +def _compile_and_eval(script, globs, locs=None, filename=""): + """ + Evaluate the script with the given global (globs) and local (locs) + variables. + """ + bytecode = compile(script, filename, "exec") + eval(bytecode, globs, locs) + + +def _make_method(name, script, filename, globs, locals=None): + """ + Create the method with the script given and return the method object. + """ + locs = {} if locals is None else locals + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + + filename = f"{base_filename[:-1]}-{count}>" + count += 1 + + _compile_and_eval(script, globs, locs, filename) + + return locs[name] + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. 
+ + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = f"{cls_name}Attributes" + attr_class_template = [ + f"class {attr_class_name}(tuple):", + " __slots__ = ()", + ] + if attr_names: + for i, attr_name in enumerate(attr_names): + attr_class_template.append( + f" {attr_name} = _attrs_property(_attrs_itemgetter({i}))" + ) + else: + attr_class_template.append(" pass") + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} + _compile_and_eval("\n".join(attr_class_template), globs) + return globs[attr_class_name] + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + annot = str(annot) + + # Annotation can be quoted. + if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): + annot = annot[1:-1] + + return annot.startswith(_CLASSVAR_PREFIXES) + + +def _has_own_attribute(cls, attrib_name): + """ + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + """ + return attrib_name in cls.__dict__ + + +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) # noqa: PLW2901 + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) # noqa: PLW2901 + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. 
+ + If *collect_by_mro* is True, collect them in the correct MRO order, + otherwise use the old -- incorrect -- order. See #428. + + Return an `_Attributes`. + """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = list(these.items()) + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + + if not isinstance(a, _CountingAttr): + a = attrib() if a is NOTHING else attrib(default=a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if len(unannotated) > 0: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." + ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) + + own_attrs = [ + Attribute.from_counting_attr( + name=attr_name, ca=ca, type=anns.get(attr_name) + ) + for attr_name, ca in ca_list + ] + + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) + + if kw_only: + own_attrs = [a.evolve(kw_only=True) for a in own_attrs] + base_attrs = [a.evolve(kw_only=True) for a in base_attrs] + + attrs = base_attrs + own_attrs + + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}" + raise ValueError(msg) + + if had_default is False and a.default is not NOTHING: + had_default = True + + if field_transformer is not None: + attrs = field_transformer(cls, attrs) + + # Resolve default field alias after executing field_transformer. + # This allows field_transformer to differentiate between explicit vs + # default aliases and supply their own defaults. + attrs = [ + a.evolve(alias=_default_init_alias_for(a.name)) if not a.alias else a + for a in attrs + ] + + # Create AttrsClass *after* applying the field_transformer since it may + # add or remove attributes! + attr_names = [a.name for a in attrs] + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) + + +def _make_cached_property_getattr(cached_properties, original_getattr, cls): + lines = [ + # Wrapped to get `__class__` into closure cell for super() + # (It will be replaced with the newly constructed class after construction). 
+ "def wrapper(_cls):", + " __class__ = _cls", + " def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):", + " func = cached_properties.get(item)", + " if func is not None:", + " result = func(self)", + " _setter = _cached_setattr_get(self)", + " _setter(item, result)", + " return result", + ] + if original_getattr is not None: + lines.append( + " return original_getattr(self, item)", + ) + else: + lines.extend( + [ + " try:", + " return super().__getattribute__(item)", + " except AttributeError:", + " if not hasattr(super(), '__getattr__'):", + " raise", + " return super().__getattr__(item)", + " original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"", + " raise AttributeError(original_error)", + ] + ) + + lines.extend( + [ + " return __getattr__", + "__getattr__ = wrapper(_cls)", + ] + ) + + unique_filename = _generate_unique_filename(cls, "getattr") + + glob = { + "cached_properties": cached_properties, + "_cached_setattr_get": _OBJ_SETATTR.__get__, + "original_getattr": original_getattr, + } + + return _make_method( + "__getattr__", + "\n".join(lines), + unique_filename, + glob, + locals={ + "_cls": cls, + }, + ) + + +def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + if isinstance(self, BaseException) and name in ( + "__cause__", + "__context__", + "__traceback__", + ): + BaseException.__setattr__(self, name, value) + return + + raise FrozenInstanceError() + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + raise FrozenInstanceError() + + +class _ClassBuilder: + """ + Iteratively build *one* class. + """ + + __slots__ = ( + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_pre_init", + "_pre_init_has_args", + "_has_post_init", + "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_wrote_own_setattr", + "_has_custom_setattr", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + getstate_setstate, + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + field_transformer, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = {a.name for a in base_attrs} + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._pre_init_has_args = False + if self._has_pre_init: + # Check if the pre init method has more arguments than just `self` + # We want to pass arguments if pre init expects arguments + pre_init_func = cls.__attrs_pre_init__ + pre_init_signature = inspect.signature(pre_init_func) + self._pre_init_has_args = len(pre_init_signature.parameters) > 1 + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._wrote_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + 
self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + self._wrote_own_setattr = True + elif on_setattr in ( + _DEFAULT_ON_SETATTR, + setters.validate, + setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + on_setattr == _DEFAULT_ON_SETATTR + and not (has_validator or has_converter) + ) + or (on_setattr == setters.validate and not has_validator) + or (on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + def __repr__(self): + return f"<_ClassBuilder(cls={self._cls.__name__})>" + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + cls = self._create_slots_class() + else: + cls = self._patch_original_class() + if PY_3_10_PLUS: + cls = abc.update_abstractmethods(cls) + + # The method gets only called if it's not inherited from a base class. + # _has_own_attribute does NOT work properly for classmethods. + if ( + getattr(cls, "__attrs_init_subclass__", None) + and "__attrs_init_subclass__" not in cls.__dict__ + ): + cls.__attrs_init_subclass__() + + return cls + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). + if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _SENTINEL) is not _SENTINEL + ): + # An AttributeError can happen if a base class defines a + # class variable and we want to set an attribute with the + # same name by using only a type annotation. + with contextlib.suppress(AttributeError): + delattr(cls, name) + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. + if not self._wrote_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = _OBJ_SETATTR + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + cd = { + k: v + for k, v in self._cls_dict.items() + if k not in (*tuple(self._attr_names), "__dict__", "__weakref__") + } + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. 
+ if not self._wrote_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = _OBJ_SETATTR + break + + # Traverse the MRO to collect existing slots + # and check for an existing __weakref__. + existing_slots = {} + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + existing_slots.update( + { + name: getattr(base_cls, name) + for name in getattr(base_cls, "__slots__", []) + } + ) + + base_names = set(self._base_names) + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + if PY_3_8_PLUS: + cached_properties = { + name: cached_property.func + for name, cached_property in cd.items() + if isinstance(cached_property, functools.cached_property) + } + else: + # `functools.cached_property` was introduced in 3.8. + # So can't be used before this. + cached_properties = {} + + # Collect methods with a `__class__` reference that are shadowed in the new class. + # To know to update them. + additional_closure_functions_to_update = [] + if cached_properties: + class_annotations = _get_annotations(self._cls) + for name, func in cached_properties.items(): + # Add cached properties to names for slotting. + names += (name,) + # Clear out function from class to avoid clashing. + del cd[name] + additional_closure_functions_to_update.append(func) + annotation = inspect.signature(func).return_annotation + if annotation is not inspect.Parameter.empty: + class_annotations[name] = annotation + + original_getattr = cd.get("__getattr__") + if original_getattr is not None: + additional_closure_functions_to_update.append(original_getattr) + + cd["__getattr__"] = _make_cached_property_getattr( + cached_properties, original_getattr, self._cls + ) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. + slot_names = [name for name in names if name not in base_names] + + # There are slots for attributes from current class + # that are defined in parent classes. + # As their descriptors may be overridden by a child class, + # we collect them here and update the class dict + reused_slots = { + slot: slot_descriptor + for slot, slot_descriptor in existing_slots.items() + if slot in slot_names + } + slot_names = [name for name in slot_names if name not in reused_slots] + cd.update(reused_slots) + if self._cache_hash: + slot_names.append(_HASH_CACHE_FIELD) + + cd["__slots__"] = tuple(slot_names) + + cd["__qualname__"] = self._cls.__qualname__ + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # . + # If a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in itertools.chain( + cls.__dict__.values(), additional_closure_functions_to_update + ): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. + # These might need to be rewritten as well. 
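+ # (``__func__`` reaches through the class-/staticmethod wrapper to the underlying function object, which is where any closure cells live.)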
+ closure_cells = getattr(item.__func__, "__closure__", None) + elif isinstance(item, property): + # Workaround for property `super()` shortcut (PY3-only). + # There is no universal way for other descriptors. + closure_cells = getattr(item.fget, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. + continue + for cell in closure_cells: + try: + match = cell.cell_contents is self._cls + except ValueError: # noqa: PERF203 + # ValueError: Cell is empty + pass + else: + if match: + cell.cell_contents = cls + return cls + + def add_repr(self, ns): + self._cls_dict["__repr__"] = self._add_method_dunders( + _make_repr(self._attrs, ns, self._cls) + ) + return self + + def add_str(self): + repr = self._cls_dict.get("__repr__") + if repr is None: + msg = "__str__ can only be generated if a __repr__ exists." + raise ValueError(msg) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return {name: getattr(self, name) for name in state_attr_names} + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _OBJ_SETATTR.__get__(self) + if isinstance(state, tuple): + # Backward compatibility with attrs instances pickled with + # attrs versions before v22.2.0 which stored tuples. + for name, value in zip(state_attr_names, state): + __bound_setattr(name, value) + else: + for name in state_attr_names: + if name in state: + __bound_setattr(name, state[name]) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. 
+ if hash_caching_enabled: + __bound_setattr(_HASH_CACHE_FIELD, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._pre_init_has_args, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=False, + ) + ) + + return self + + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + + def add_attrs_init(self): + self._cls_dict["__attrs_init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._pre_init_has_args, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=True, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) + ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! + msg = "Can't combine custom __setattr__ with on_setattr hooks." + raise ValueError(msg) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _OBJ_SETATTR(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._wrote_own_setattr = True + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + with contextlib.suppress(AttributeError): + method.__module__ = self._cls.__module__ + + with contextlib.suppress(AttributeError): + method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}" + + with contextlib.suppress(AttributeError): + method.__doc__ = ( + "Method generated by attrs for class " + f"{self._cls.__qualname__}." + ) + + return method + + +def _determine_attrs_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + msg = "Don't mix `cmp` with `eq' and `order`." + raise ValueError(msg) + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. 
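+ # (For example, ``cmp=None, eq=None, order=None, default_eq=True`` resolves to ``eq=True, order=True``.)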
+ if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + msg = "`order` can only be True if `eq` is True too." + raise ValueError(msg) + + return eq, order + + +def _determine_attrib_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + msg = "Don't mix `cmp` with `eq' and `order`." + raise ValueError(msg) + + def decide_callable_or_boolean(value): + """ + Decide whether a key function is used. + """ + if callable(value): + value, key = True, value + else: + key = None + return value, key + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + cmp, cmp_key = decide_callable_or_boolean(cmp) + return cmp, cmp_key, cmp, cmp_key + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq, eq_key = default_eq, None + else: + eq, eq_key = decide_callable_or_boolean(eq) + + if order is None: + order, order_key = eq, eq_key + else: + order, order_key = decide_callable_or_boolean(order) + + if eq is False and order is True: + msg = "`order` can only be True if `eq` is True too." + raise ValueError(msg) + + return eq, eq_key, order, order_key + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. + for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, + unsafe_hash=None, +): + r""" + A class decorator that adds :term:`dunder methods` according to the + specified attributes using `attr.ib` or the *these* argument. + + Consider using `attrs.define` / `attrs.frozen` in new code (``attr.s`` will + *never* go away, though). + + Args: + repr_ns (str): + When using nested classes, there was no way in Python 2 to + automatically detect that. This argument allows to set a custom + name for a more meaningful ``repr`` output. This argument is + pointless in Python 3 and is therefore deprecated. + + .. caution:: + Refer to `attrs.define` for the rest of the parameters, but note that they + can have different defaults. + + Notably, leaving *on_setattr* as `None` will **not** add any hooks. + + .. versionadded:: 16.0.0 *slots* + .. versionadded:: 16.1.0 *frozen* + .. versionadded:: 16.3.0 *str* + .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. + .. versionchanged:: 17.1.0 + *hash* supports `None` as value which is also the default now. + .. 
versionadded:: 17.3.0 *auto_attribs* + .. versionchanged:: 18.1.0 + If *these* is passed, no attributes are deleted from the class body. + .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + `DeprecationWarning` if the classes compared are subclasses of + each other. ``__eq__`` and ``__ne__`` never tried to compare subclasses + to each other. + .. versionchanged:: 19.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider + subclasses comparable anymore. + .. versionadded:: 18.2.0 *kw_only* + .. versionadded:: 18.2.0 *cache_hash* + .. versionadded:: 19.1.0 *auto_exc* + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + .. versionadded:: 20.3.0 *field_transformer* + .. versionchanged:: 21.1.0 + ``init=False`` injects ``__attrs_init__`` + .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 21.3.0 *match_args* + .. versionadded:: 22.2.0 + *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). + .. deprecated:: 24.1.0 *repr_ns* + .. versionchanged:: 24.1.0 + Instances are not compared as tuples of attributes anymore, but using a + big ``and`` condition. This is faster and has more correct behavior for + uncomparable values like `math.nan`. + .. versionadded:: 24.1.0 + If a class has an *inherited* classmethod called + ``__attrs_init_subclass__``, it is executed after the class is created. + .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*. + """ + if repr_ns is not None: + import warnings + + warnings.warn( + DeprecationWarning( + "The `repr_ns` argument is deprecated and will be removed in or after August 2025." + ), + stacklevel=2, + ) + + eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) + + # unsafe_hash takes precedence due to PEP 681. + if unsafe_hash is not None: + hash = unsafe_hash + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + msg = "Can't freeze a class with a custom __setattr__."
+ raise ValueError(msg) + + builder = _ClassBuilder( + cls, + these, + slots, + is_frozen, + weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + field_transformer, + ) + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: + builder.add_eq() + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") + ): + builder.add_order() + + builder.add_setattr() + + nonlocal hash + if ( + hash is None + and auto_detect is True + and _has_own_attribute(cls, "__hash__") + ): + hash = False + + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + msg = "Invalid value for hash. Must be True, False, or None." + raise TypeError(msg) + + if hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. + if cache_hash: + msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." + raise TypeError(msg) + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." + raise TypeError(msg) + builder.make_unhashable() + + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): + builder.add_init() + else: + builder.add_attrs_init() + if cache_hash: + msg = "Invalid value for cache_hash. To use hash caching, init must be True." + raise TypeError(msg) + + if ( + PY_3_10_PLUS + and match_args + and not _has_own_attribute(cls, "__match_args__") + ): + builder.add_match_args() + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but `None` if used as `@attrs()`. + if maybe_cls is None: + return wrap + + return wrap(maybe_cls) + + +_attrs = attrs +""" +Internal alias so we can use it in functions that take an argument called +*attrs*. +""" + + +def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return cls.__setattr__ is _frozen_setattrs + + +def _generate_unique_filename(cls, func_name): + """ + Create a "filename" suitable for a function being generated. 
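+ + The result is passed as the filename when the generated method is compiled, so it is what shows up in tracebacks that involve the method.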
+ """ + return ( + f"" + ) + + +def _make_hash(cls, attrs, frozen, cache_hash): + attrs = tuple( + a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) + ) + + tab = " " + + unique_filename = _generate_unique_filename(cls, "hash") + type_hash = hash(unique_filename) + # If eq is custom generated, we need to include the functions in globs + globs = {} + + hash_def = "def __hash__(self" + hash_func = "hash((" + closing_braces = "))" + if not cache_hash: + hash_def += "):" + else: + hash_def += ", *" + + hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):" + hash_func = "_cache_wrapper(" + hash_func + closing_braces += ")" + + method_lines = [hash_def] + + def append_hash_computation_lines(prefix, indent): + """ + Generate the code for actually computing the hash code. + Below this will either be returned directly or used to compute + a value which is then cached, depending on the value of cache_hash + """ + + method_lines.extend( + [ + indent + prefix + hash_func, + indent + f" {type_hash},", + ] + ) + + for a in attrs: + if a.eq_key: + cmp_name = f"_{a.name}_key" + globs[cmp_name] = a.eq_key + method_lines.append( + indent + f" {cmp_name}(self.{a.name})," + ) + else: + method_lines.append(indent + f" self.{a.name},") + + method_lines.append(indent + " " + closing_braces) + + if cache_hash: + method_lines.append(tab + f"if self.{_HASH_CACHE_FIELD} is None:") + if frozen: + append_hash_computation_lines( + f"object.__setattr__(self, '{_HASH_CACHE_FIELD}', ", tab * 2 + ) + method_lines.append(tab * 2 + ")") # close __setattr__ + else: + append_hash_computation_lines( + f"self.{_HASH_CACHE_FIELD} = ", tab * 2 + ) + method_lines.append(tab + f"return self.{_HASH_CACHE_FIELD}") + else: + append_hash_computation_lines("return ", tab) + + script = "\n".join(method_lines) + return _make_method("__hash__", script, unique_filename, globs) + + +def _add_hash(cls, attrs): + """ + Add a hash method to *cls*. + """ + cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) + return cls + + +def _make_ne(): + """ + Create __ne__ method. + """ + + def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or + return the result negated. + """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + return __ne__ + + +def _make_eq(cls, attrs): + """ + Create __eq__ method for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.eq] + + unique_filename = _generate_unique_filename(cls, "eq") + lines = [ + "def __eq__(self, other):", + " if other.__class__ is not self.__class__:", + " return NotImplemented", + ] + + # We can't just do a big self.x = other.x and... clause due to + # irregularities like nan == nan is false but (nan,) == (nan,) is true. + globs = {} + if attrs: + lines.append(" return (") + for a in attrs: + if a.eq_key: + cmp_name = f"_{a.name}_key" + # Add the key function to the global namespace + # of the evaluated function. + globs[cmp_name] = a.eq_key + lines.append( + f" {cmp_name}(self.{a.name}) == {cmp_name}(other.{a.name})" + ) + else: + lines.append(f" self.{a.name} == other.{a.name}") + if a is not attrs[-1]: + lines[-1] = f"{lines[-1]} and" + lines.append(" )") + else: + lines.append(" return True") + + script = "\n".join(lines) + + return _make_method("__eq__", script, unique_filename, globs) + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. 
+ """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return tuple( + key(value) if key else value + for value, key in ( + (getattr(obj, a.name), a.order_key) for a in attrs + ) + ) + + def __lt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() + + return cls + + +def _make_repr(attrs, ns, cls): + unique_filename = _generate_unique_filename(cls, "repr") + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." + name if i else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) + + if ns is None: + cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + else: + cls_name_fragment = ns + ".{self.__class__.__name__}" + + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + f" return f'{cls_name_fragment}({repr_fragment})'", + " finally:", + " already_repring.remove(id(self))", + ] + + return _make_method( + "__repr__", "\n".join(lines), unique_filename, globs=globs + ) + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__repr__ = _make_repr(attrs, ns, cls) + return cls + + +def fields(cls): + """ + Return the tuple of *attrs* attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + Args: + cls (type): Class to introspect. + + Raises: + TypeError: If *cls* is not a class. + + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. 
+ + Returns: + tuple (with name accessors) of `attrs.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. + .. versionchanged:: 23.1.0 Add support for generic classes. + """ + generic_base = get_generic_base(cls) + + if generic_base is None and not isinstance(cls, type): + msg = "Passed object must be a class." + raise TypeError(msg) + + attrs = getattr(cls, "__attrs_attrs__", None) + + if attrs is None: + if generic_base is not None: + attrs = getattr(generic_base, "__attrs_attrs__", None) + if attrs is not None: + # Even though this is global state, stick it on here to speed + # it up. We rely on `cls` being cached for this to be + # efficient. + cls.__attrs_attrs__ = attrs + return attrs + msg = f"{cls!r} is not an attrs-decorated class." + raise NotAnAttrsClassError(msg) + + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of *attrs* attributes for a class, whose keys + are the attribute names. + + Args: + cls (type): Class to introspect. + + Raises: + TypeError: If *cls* is not a class. + + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. + + Returns: + dict[str, attrs.Attribute]: Dict of attribute name to definition + + .. versionadded:: 18.1.0 + """ + if not isinstance(cls, type): + msg = "Passed object must be a class." + raise TypeError(msg) + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + msg = f"{cls!r} is not an attrs-decorated class." + raise NotAnAttrsClassError(msg) + return {a.name: a for a in attrs} + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + Args: + inst: Instance of a class with *attrs* attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. + """ + cls = base_attr_map.get(a_name) + return cls and "__slots__" in cls.__dict__ + + +def _make_init( + cls, + attrs, + pre_init, + pre_init_has_args, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + cls_on_setattr, + attrs_init, +): + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: + msg = "Frozen classes can't use on_setattr." + raise ValueError(msg) + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + msg = "Frozen classes can't use on_setattr." + raise ValueError(msg) + + needs_cached_setattr = True + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + pre_init, + pre_init_has_args, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + "__attrs_init__" if attrs_init else "__init__", + ) + if cls.__module__ in sys.modules: + # This makes typing.get_type_hints(CLS.__init__) resolve string types. 
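+ # (Under ``from __future__ import annotations``, for instance, every annotation is a string and can only be resolved against the defining module's namespace.)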
+ globs.update(sys.modules[cls.__module__].__dict__) + + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. + globs["_cached_setattr_get"] = _OBJ_SETATTR.__get__ + + init = _make_method( + "__attrs_init__" if attrs_init else "__init__", + script, + unique_filename, + globs, + ) + init.__annotations__ = annotations + + return init + + +def _setattr(attr_name: str, value_var: str, has_on_setattr: bool) -> str: + """ + Use the cached object.setattr to set *attr_name* to *value_var*. + """ + return f"_setattr('{attr_name}', {value_var})" + + +def _setattr_with_converter( + attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter +) -> str: + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return f"_setattr('{attr_name}', {converter._fmt_converter_call(attr_name, value_var)})" + + +def _assign(attr_name: str, value: str, has_on_setattr: bool) -> str: + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return f"self.{attr_name} = {value}" + + +def _assign_with_converter( + attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter +) -> str: + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True, converter) + + return f"self.{attr_name} = {converter._fmt_converter_call(attr_name, value_var)}" + + +def _determine_setters( + frozen: bool, slots: bool, base_attr_map: dict[str, type] +): + """ + Determine the correct setter functions based on whether a class is frozen + and/or slotted. + """ + if frozen is True: + if slots is True: + return (), _setattr, _setattr_with_converter + + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. + # Note _inst_dict will be used again below if cache_hash is True + + def fmt_setter( + attr_name: str, value_var: str, has_on_setattr: bool + ) -> str: + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return f"_inst_dict['{attr_name}'] = {value_var}" + + def fmt_setter_with_converter( + attr_name: str, + value_var: str, + has_on_setattr: bool, + converter: Converter, + ) -> str: + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr, converter + ) + + return f"_inst_dict['{attr_name}'] = {converter._fmt_converter_call(attr_name, value_var)}" + + return ( + ("_inst_dict = self.__dict__",), + fmt_setter, + fmt_setter_with_converter, + ) + + # Not frozen -- we can just assign directly. + return (), _assign, _assign_with_converter + + +def _attrs_to_init_script( + attrs: list[Attribute], + is_frozen: bool, + is_slotted: bool, + call_pre_init: bool, + pre_init_has_args: bool, + call_post_init: bool, + does_cache_hash: bool, + base_attr_map: dict[str, type], + is_exc: bool, + needs_cached_setattr: bool, + has_cls_on_setattr: bool, + method_name: str, +) -> tuple[str, dict, dict]: + """ + Return a script of an initializer for *attrs*, a dict of globals, and + annotations for the initializer. + + The globals are required by the generated script. 
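+ + As an illustrative sketch (the exact script depends on the options above), a plain dict class with one required attribute ``x`` and no hooks comes out roughly as:: + + def __init__(self, x): + self.x = x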
+ """ + lines = ["self.__attrs_pre_init__()"] if call_pre_init else [] + + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. Note _setattr will be used again below if + # does_cache_hash is True. + "_setattr = _cached_setattr_get(self)" + ) + + extra_lines, fmt_setter, fmt_setter_with_converter = _determine_setters( + is_frozen, is_slotted, base_attr_map + ) + lines.extend(extra_lines) + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. + # Injecting this into __init__ globals lets us avoid lookups. + names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_cls_on_setattr + ) + # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not + # explicitly provided + arg_name = a.alias + + has_factory = isinstance(a.default, Factory) + maybe_self = "self" if has_factory and a.default.takes_self else "" + + if a.converter and not isinstance(a.converter, Converter): + converter = Converter(a.converter) + else: + converter = a.converter + + if a.init is False: + if has_factory: + init_factory_name = _INIT_FACTORY_PAT % (a.name,) + if converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + converter, + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + elif converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + converter, + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append( + fmt_setter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = f"{arg_name}=attr_dict['{attr_name}'].default" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + + if converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr, converter + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = f"{arg_name}=NOTHING" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append(f"if {arg_name} is not NOTHING:") + + init_factory_name = _INIT_FACTORY_PAT % (a.name,) + if converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr, converter + ) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + converter, + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = 
a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + + if converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr, converter + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + if a.init is True: + if a.type is not None and converter is None: + annotations[arg_name] = a.type + elif converter is not None and converter._first_param_type: + # Use the type from the converter if present. + annotations[arg_name] = converter._first_param_type + + if attrs_to_validate: # we can skip this if there are no validators. + names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name + lines.append(f" {val_name}(self, {attr_name}, self.{a.name})") + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + + if call_post_init: + lines.append("self.__attrs_post_init__()") + + # Because this is set only after __attrs_post_init__ is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to field + # values during post-init combined with post-init accessing the hash code + # would result in silent bugs. + if does_cache_hash: + if is_frozen: + if is_slotted: + init_hash_cache = f"_setattr('{_HASH_CACHE_FIELD}', None)" + else: + init_hash_cache = f"_inst_dict['{_HASH_CACHE_FIELD}'] = None" + else: + init_hash_cache = f"self.{_HASH_CACHE_FIELD} = None" + lines.append(init_hash_cache) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join(f"self.{a.name}" for a in attrs if a.init) + + lines.append(f"BaseException.__init__(self, {vals})") + + args = ", ".join(args) + pre_init_args = args + if kw_only_args: + # leading comma & kw_only args + args += f"{', ' if args else ''}*, {', '.join(kw_only_args)}" + pre_init_kw_only_args = ", ".join( + [ + f"{kw_arg_name}={kw_arg_name}" + # We need to remove the defaults from the kw_only_args. + for kw_arg_name in (kwa.split("=")[0] for kwa in kw_only_args) + ] + ) + pre_init_args += ", " if pre_init_args else "" + pre_init_args += pre_init_kw_only_args + + if call_pre_init and pre_init_has_args: + # If pre init method has arguments, pass same arguments as `__init__`. + lines[0] = f"self.__attrs_pre_init__({pre_init_args})" + + # Python 3.7 doesn't allow backslashes in f strings. + NL = "\n " + return ( + f"""def {method_name}(self, {args}): + {NL.join(lines) if lines else 'pass'} +""", + names_for_globals, + annotations, + ) + + + def _default_init_alias_for(name: str) -> str: + """ + The default __init__ parameter name for a field. + + This performs private-name adjustment via leading-underscore stripping, + and is the default value of Attribute.alias if not provided. + """ + + return name.lstrip("_") + + + class Attribute: + """ + *Read-only* representation of an attribute. + + .. warning:: + + You should never instantiate this class yourself. + + The class has *all* arguments of `attr.ib` (except for ``factory`` which is + only syntactic sugar for ``default=Factory(...)``) plus the following: + + - ``name`` (`str`): The name of the attribute.
+ - ``alias`` (`str`): The __init__ parameter name of the attribute, after + any explicit overrides and default private-attribute-name handling. + - ``inherited`` (`bool`): Whether or not that attribute has been inherited + from a base class. + - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The + callables that are used for comparing and ordering objects by this + attribute, respectively. These are set by passing a callable to + `attr.ib`'s ``eq``, ``order``, or ``cmp`` arguments. See also + :ref:`comparison customization `. + + Instances of this class are frequently used for introspection purposes + like: + + - `fields` returns a tuple of them. + - Validators get them passed as the first argument. + - The :ref:`field transformer ` hook receives a list of + them. + - The ``alias`` property exposes the __init__ parameter name of the field, + with any overrides and default private-attribute handling applied. + + + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + .. versionadded:: 21.1.0 *eq_key* and *order_key* + .. versionadded:: 22.2.0 *alias* + + For the full version history of the fields, see `attr.ib`. + """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + "alias", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + eq_key=None, + order=None, + order_key=None, + on_setattr=None, + alias=None, + ): + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq_key or eq, order_key or order, True + ) + + # Cache this descriptor here to speed things up later. + bound_setattr = _OBJ_SETATTR.__get__(self) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("eq_key", eq_key) + bound_setattr("order", order) + bound_setattr("order_key", order_key) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + types.MappingProxyType(dict(metadata)) # Shallow copy + if metadata + else _EMPTY_METADATA_SINGLETON + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + bound_setattr("alias", alias) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. 
Deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + msg = "Type annotation and type argument cannot both be present" + raise ValueError(msg) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "inherited", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + inherited=False, + **inst_dict, + ) + + # Don't use attrs.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): + """ + Copy *self* and apply *changes*. + + This works similarly to `attrs.evolve` but that function does not work + with {class}`Attribute`. + + It is mainly meant to be used for `transform-fields`. + + .. versionadded:: 20.3.0 + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _OBJ_SETATTR.__get__(self) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + ( + types.MappingProxyType(dict(value)) + if value + else _EMPTY_METADATA_SINGLETON + ), + ) + + + _a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + alias=_default_init_alias_for(name), + ) + for name in Attribute.__slots__ + ] + + Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], + ) + + + class _CountingAttr: + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into it is most + likely the result of a bug like a forgotten `@attr.s` decorator.
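+ + For example (an illustrative sketch), without the decorator the attribute definitions are never consumed and simply remain on the class:: + + import attr + + class C: # note: no @attr.s + x = attr.ib() + + # C.x is still a _CountingAttr instance; no __init__ etc. was generated.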
+ """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + "on_setattr", + "alias", + ) + __attrs_attrs__ = ( + *tuple( + Attribute( + name=name, + alias=_default_init_alias_for(name), + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + "alias", + ) + ), + Attribute( + name="metadata", + alias="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + eq_key, + order, + order_key, + on_setattr, + alias, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.eq_key = eq_key + self.order = order + self.order_key = order_key + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + self.alias = alias + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. + + Raises: + DefaultAlreadySetError: If default has been set before. + + .. versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError() + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +class Factory: + """ + Stores a factory callable. + + If passed as the default value to `attrs.field`, the factory is used to + generate a new value. + + Args: + factory (typing.Callable): + A callable that takes either none or exactly one mandatory + positional argument depending on *takes_self*. + + takes_self (bool): + Pass the partially initialized instance that is being initialized + as a positional argument. + + .. versionadded:: 17.1.0 *takes_self* + """ + + __slots__ = ("factory", "takes_self") + + def __init__(self, factory, takes_self=False): + self.factory = factory + self.takes_self = takes_self + + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple(getattr(self, name) for name in self.__slots__) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + for name, value in zip(self.__slots__, state): + setattr(self, name, value) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in Factory.__slots__ +] + +Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) + + +class Converter: + """ + Stores a converter callable. 
+ + Allows for the wrapped converter to take additional arguments. The + arguments are passed in the order they are documented. + + Args: + converter (Callable): A callable that converts the passed value. + + takes_self (bool): + Pass the partially initialized instance that is being initialized + as a positional argument. (default: `False`) + + takes_field (bool): + Pass the field definition (an :class:`Attribute`) into the + converter as a positional argument. (default: `False`) + + .. versionadded:: 24.1.0 + """ + + __slots__ = ( + "converter", + "takes_self", + "takes_field", + "_first_param_type", + "_global_name", + "__call__", + ) + + def __init__(self, converter, *, takes_self=False, takes_field=False): + self.converter = converter + self.takes_self = takes_self + self.takes_field = takes_field + + ex = _AnnotationExtractor(converter) + self._first_param_type = ex.get_first_param_type() + + if not (self.takes_self or self.takes_field): + self.__call__ = lambda value, _, __: self.converter(value) + elif self.takes_self and not self.takes_field: + self.__call__ = lambda value, instance, __: self.converter( + value, instance + ) + elif not self.takes_self and self.takes_field: + self.__call__ = lambda value, __, field: self.converter( + value, field + ) + else: + self.__call__ = lambda value, instance, field: self.converter( + value, instance, field + ) + + rt = ex.get_return_type() + if rt is not None: + self.__call__.__annotations__["return"] = rt + + @staticmethod + def _get_global_name(attr_name: str) -> str: + """ + Return the name that a converter for an attribute name *attr_name* + would have. + """ + return f"__attr_converter_{attr_name}" + + def _fmt_converter_call(self, attr_name: str, value_var: str) -> str: + """ + Return a string that calls the converter for an attribute name + *attr_name* and the value in variable named *value_var* according to + `self.takes_self` and `self.takes_field`. + """ + if not (self.takes_self or self.takes_field): + return f"{self._get_global_name(attr_name)}({value_var})" + + if self.takes_self and self.takes_field: + return f"{self._get_global_name(attr_name)}({value_var}, self, attr_dict['{attr_name}'])" + + if self.takes_self: + return f"{self._get_global_name(attr_name)}({value_var}, self)" + + return f"{self._get_global_name(attr_name)}({value_var}, attr_dict['{attr_name}'])" + + def __getstate__(self): + """ + Return a dict containing only converter and takes_self -- the rest gets + computed when loading. + """ + return { + "converter": self.converter, + "takes_self": self.takes_self, + "takes_field": self.takes_field, + } + + def __setstate__(self, state): + """ + Load instance from state. + """ + self.__init__(**state) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in ("converter", "takes_self", "takes_field") +] + +Converter = _add_hash( + _add_eq(_add_repr(Converter, attrs=_f), attrs=_f), attrs=_f +) + + +def make_class( + name, attrs, bases=(object,), class_body=None, **attributes_arguments +): + r""" + A quick way to create a new class called *name* with *attrs*. + + Args: + name (str): The name for the new class. + + attrs( list | dict): + A list of names or a dictionary of mappings of names to `attr.ib`\ + s / `attrs.field`\ s. + + The order is deduced from the order of the names or attributes + inside *attrs*. Otherwise the order of the definition of the + attributes is used. 
+ + bases (tuple[type, ...]): Classes that the new class will subclass. + + class_body (dict): + An optional dictionary of class attributes for the new class. + + attributes_arguments: Passed unmodified to `attr.s`. + + Returns: + type: A new class with *attrs*. + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + .. versionchanged:: 23.2.0 *class_body* + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = {a: attrib() for a in attrs} + else: + msg = "attrs argument must be a dict or a list." + raise TypeError(msg) + + pre_init = cls_dict.pop("__attrs_pre_init__", None) + post_init = cls_dict.pop("__attrs_post_init__", None) + user_init = cls_dict.pop("__init__", None) + + body = {} + if class_body is not None: + body.update(class_body) + if pre_init is not None: + body["__attrs_pre_init__"] = pre_init + if post_init is not None: + body["__attrs_post_init__"] = post_init + if user_init is not None: + body["__init__"] = user_init + + type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body)) + + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + with contextlib.suppress(AttributeError, ValueError): + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + + # We do it here for proper warnings with meaningful stacklevel. + cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_attrs_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + cls = _attrs(these=cls_dict, **attributes_arguments)(type_) + # Only add type annotations now or "_attrs()" will complain: + cls.__annotations__ = { + k: v.type for k, v in cls_dict.items() if v.type is not None + } + return cls + + + # These are required within this module, so we define them here and merely + # import them into .validators / .converters. + + + @attrs(slots=True, unsafe_hash=True) + class _AndValidator: + """ + Compose many validators into a single one. + """ + + _validators = attrib() + + def __call__(self, inst, attr, value): + for v in self._validators: + v(inst, attr, value) + + + def and_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators. + + Args: + validators (~collections.abc.Iterable[typing.Callable]): + Arbitrary number of validators. + + .. versionadded:: 17.1.0 + """ + vals = [] + for validator in validators: + vals.extend( + validator._validators + if isinstance(validator, _AndValidator) + else [validator] + ) + + return _AndValidator(tuple(vals)) + + + def pipe(*converters): + """ + A converter that composes multiple converters into one. + + When called on a value, it runs all wrapped converters, returning the + *last* value. + + Type annotations will be inferred from the wrapped converters, if they + have any. + + converters (~collections.abc.Iterable[typing.Callable]): + Arbitrary number of converters. + + ..
versionadded:: 20.1.0 + """ + + def pipe_converter(val, inst, field): + for c in converters: + val = c(val, inst, field) if isinstance(c, Converter) else c(val) + + return val + + if not converters: + # If the converter list is empty, pipe_converter is the identity. + A = typing.TypeVar("A") + pipe_converter.__annotations__.update({"val": A, "return": A}) + else: + # Get parameter type from first converter. + t = _AnnotationExtractor(converters[0]).get_first_param_type() + if t: + pipe_converter.__annotations__["val"] = t + + last = converters[-1] + if not PY_3_11_PLUS and isinstance(last, Converter): + last = last.__call__ + + # Get return type from last converter. + rt = _AnnotationExtractor(last).get_return_type() + if rt: + pipe_converter.__annotations__["return"] = rt + + return Converter(pipe_converter, takes_self=True, takes_field=True) diff --git a/.venv/lib/python3.12/site-packages/attr/_next_gen.py b/.venv/lib/python3.12/site-packages/attr/_next_gen.py new file mode 100644 index 00000000..dbb65cc9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/_next_gen.py @@ -0,0 +1,631 @@ +# SPDX-License-Identifier: MIT + +""" +These are keyword-only APIs that call `attr.s` and `attr.ib` with different +default values. +""" + + +from functools import partial + +from . import setters +from ._funcs import asdict as _asdict +from ._funcs import astuple as _astuple +from ._make import ( + _DEFAULT_ON_SETATTR, + NOTHING, + _frozen_setattrs, + attrib, + attrs, +) +from .exceptions import UnannotatedAttributeError + + +def define( + maybe_cls=None, + *, + these=None, + repr=None, + unsafe_hash=None, + hash=None, + init=None, + slots=True, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=None, + kw_only=False, + cache_hash=False, + auto_exc=True, + eq=None, + order=False, + auto_detect=True, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + A class decorator that adds :term:`dunder methods` according to + :term:`fields ` specified using :doc:`type annotations `, + `field()` calls, or the *these* argument. + + Since *attrs* patches or replaces an existing class, you cannot use + `object.__init_subclass__` with *attrs* classes, because it runs too early. + As a replacement, you can define ``__attrs_init_subclass__`` on your class. + It will be called by *attrs* classes that subclass it after they're + created. See also :ref:`init-subclass`. + + Args: + slots (bool): + Create a :term:`slotted class ` that's more + memory-efficient. Slotted classes are generally superior to the + default dict classes, but have some gotchas you should know about, + so we encourage you to read the :term:`glossary entry `. + + auto_detect (bool): + Instead of setting the *init*, *repr*, *eq*, and *hash* arguments + explicitly, assume they are set to True **unless any** of the + involved methods for one of the arguments is implemented in the + *current* class (meaning, it is *not* inherited from some base + class). + + So, for example by implementing ``__eq__`` on a class yourself, + *attrs* will deduce ``eq=False`` and will create *neither* + ``__eq__`` *nor* ``__ne__`` (but Python classes come with a + sensible ``__ne__`` by default, so it *should* be enough to only + implement ``__eq__`` in most cases). + + Passing True or False` to *init*, *repr*, *eq*, *cmp*, or *hash* + overrides whatever *auto_detect* would determine. 
+
+ auto_exc (bool):
+ If the class subclasses `BaseException` (which implicitly includes
+ any subclass of any exception), the following happens so that it
+ behaves like a well-behaved Python exception class:
+
+ - the values for *eq*, *order*, and *hash* are ignored and the
+ instances compare and hash by the instance's ids [#]_,
+ - all attributes that are either passed into ``__init__`` or have a
+ default value are additionally available as a tuple in the
+ ``args`` attribute,
+ - the value of *str* is ignored leaving ``__str__`` to base
+ classes.
+
+ .. [#]
+ Note that *attrs* will *not* remove existing implementations of
+ ``__hash__`` or the equality methods. It just won't add its own.
+
+ on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
+ A callable that is run whenever the user attempts to set an
+ attribute (either by assignment like ``i.x = 42`` or by using
+ `setattr` like ``setattr(i, "x", 42)``). It receives the same
+ arguments as validators: the instance, the attribute that is being
+ modified, and the new value.
+
+ If no exception is raised, the attribute is set to the return value
+ of the callable.
+
+ If a list of callables is passed, they're automatically wrapped in
+ an `attrs.setters.pipe`.
+
+ If left None, the default behavior is to run converters and
+ validators whenever an attribute is set.
+
+ init (bool):
+ Create a ``__init__`` method that initializes the *attrs*
+ attributes. Leading underscores are stripped for the argument name,
+ unless an alias is set on the attribute.
+
+ .. seealso::
+ `init` shows advanced ways to customize the generated
+ ``__init__`` method, including executing code before and after.
+
+ repr (bool):
+ Create a ``__repr__`` method with a human-readable representation
+ of *attrs* attributes.
+
+ str (bool):
+ Create a ``__str__`` method that is identical to ``__repr__``. This
+ is usually not necessary except for `Exception`\ s.
+
+ eq (bool | None):
+ If True or None (default), add ``__eq__`` and ``__ne__`` methods
+ that check two instances for equality.
+
+ .. seealso::
+ `comparison` describes how to customize the comparison behavior
+ going as far as comparing NumPy arrays.
+
+ order (bool | None):
+ If True, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``
+ methods that behave like *eq* above and allow instances to be
+ ordered.
+
+ They compare the instances as if they were tuples of their *attrs*
+ attributes if and only if the types of both classes are
+ *identical*.
+
+ If `None`, mirror the value of *eq*.
+
+ .. seealso:: `comparison`
+
+ cmp (bool | None):
+ Setting *cmp* is equivalent to setting *eq* and *order* to the same
+ value. Must not be mixed with *eq* or *order*.
+
+ unsafe_hash (bool | None):
+ If None (default), the ``__hash__`` method is generated according
+ to how *eq* and *frozen* are set.
+
+ 1. If *both* are True, *attrs* will generate a ``__hash__`` for
+ you.
+ 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set
+ to None, marking it unhashable (which it is).
+ 3. If *eq* is False, ``__hash__`` will be left untouched meaning
+ the ``__hash__`` method of the base class will be used. If the
+ base class is `object`, this means it will fall back to id-based
+ hashing.
+
+ Although not recommended, you can decide for yourself and force
+ *attrs* to create one (for example, if the class is immutable even
+ though you didn't freeze it programmatically) by passing True, or
+ keep it from being created by passing False.
Both of these cases are rather special and should be used
+ carefully.
+
+ .. seealso::
+
+ - Our documentation on `hashing`,
+ - Python's documentation on `object.__hash__`,
+ - and the `GitHub issue that led to the default behavior
+ <https://github.com/python-attrs/attrs/issues/136>`_ for more
+ details.
+
+ hash (bool | None):
+ Deprecated alias for *unsafe_hash*. *unsafe_hash* takes precedence.
+
+ cache_hash (bool):
+ Ensure that the object's hash code is computed only once and stored
+ on the object. If this is set to True, hashing must be either
+ explicitly or implicitly enabled for this class. If the hash code
+ is cached, avoid any reassignments of fields involved in hash code
+ computation or mutations of the objects those fields point to after
+ object creation. If such changes occur, the behavior of the
+ object's hash code is undefined.
+
+ frozen (bool):
+ Make instances immutable after initialization. If someone attempts
+ to modify a frozen instance, `attrs.exceptions.FrozenInstanceError`
+ is raised.
+
+ .. note::
+
+ 1. This is achieved by installing a custom ``__setattr__``
+ method on your class, so you can't implement your own.
+
+ 2. True immutability is impossible in Python.
+
+ 3. This *does* have a minor runtime performance impact when
+ initializing new instances. In other
+ words: ``__init__`` is slightly slower with ``frozen=True``.
+
+ 4. If a class is frozen, you cannot modify ``self`` in
+ ``__attrs_post_init__`` or a self-written ``__init__``. You
+ can circumvent that limitation by using
+ ``object.__setattr__(self, "attribute_name", value)``.
+
+ 5. Subclasses of a frozen class are frozen too.
+
+ kw_only (bool):
+ Make all attributes keyword-only in the generated ``__init__`` (if
+ *init* is False, this parameter is ignored).
+
+ weakref_slot (bool):
+ Make instances weak-referenceable. This has no effect unless
+ *slots* is True.
+
+ field_transformer (~typing.Callable | None):
+ A function that is called with the original class object and all
+ fields right before *attrs* finalizes the class. You can use this,
+ for example, to automatically add converters or validators to
+ fields based on their types.
+
+ .. seealso:: `transform-fields`
+
+ match_args (bool):
+ If True (default), set ``__match_args__`` on the class to support
+ :pep:`634` (*Structural Pattern Matching*). It is a tuple of all
+ non-keyword-only ``__init__`` parameter names on Python 3.10 and
+ later. Ignored on older Python versions.
+
+ collect_by_mro (bool):
+ If True, *attrs* collects attributes from base classes correctly
+ according to the `method resolution order
+ <https://www.python.org/download/releases/2.3/mro/>`_. If False, *attrs*
+ will mimic the (wrong) behavior of `dataclasses` and :pep:`681`.
+
+ See also `issue #428
+ <https://github.com/python-attrs/attrs/issues/428>`_.
+
+ getstate_setstate (bool | None):
+ .. note::
+
+ This is usually only interesting for slotted classes and you
+ should probably just set *auto_detect* to True.
+
+ If True, ``__getstate__`` and ``__setstate__`` are generated and
+ attached to the class. This is necessary for slotted classes to be
+ pickleable. If left None, it's True by default for slotted classes
+ and False for dict classes.
+
+ If *auto_detect* is True, and *getstate_setstate* is left None, and
+ **either** ``__getstate__`` or ``__setstate__`` is detected
+ directly on the class (meaning: not inherited), it is set to False
+ (this is usually what you want).
+
+ auto_attribs (bool | None):
+ If True, look at type annotations to determine which attributes to
+ use, like `dataclasses`.
If False, it will only look for explicit
+ :func:`field` class attributes, like classic *attrs*.
+
+ If left None, it will guess:
+
+ 1. If any attributes are annotated and no unannotated
+ `attrs.field`\ s are found, it assumes *auto_attribs=True*.
+ 2. Otherwise it assumes *auto_attribs=False* and tries to collect
+ `attrs.field`\ s.
+
+ If *attrs* decides to look at type annotations, **all** fields
+ **must** be annotated. If *attrs* encounters a field that is set to
+ a :func:`field` / `attr.ib` but lacks a type annotation, an
+ `attrs.exceptions.UnannotatedAttributeError` is raised. Use
+ ``field_name: typing.Any = field(...)`` if you don't want to set a
+ type.
+
+ .. warning::
+
+ For features that use the attribute name to create decorators
+ (for example, :ref:`validators`), you still *must*
+ assign :func:`field` / `attr.ib` to them. Otherwise Python will
+ either not find the name or try to use the default value to
+ call, for example, ``validator`` on it.
+
+ Attributes annotated as `typing.ClassVar`, and attributes that are
+ neither annotated nor set to a `field()` are **ignored**.
+
+ these (dict[str, object]):
+ A dictionary mapping names to the (private) return values of
+ `field()` calls. This is useful for avoiding the definition of your
+ attributes within the class body because you can't (for example, if
+ you want to add ``__repr__`` methods to Django models) or don't
+ want to.
+
+ If *these* is not `None`, *attrs* will *not* search the class body
+ for attributes and will *not* remove any attributes from it.
+
+ The order is deduced from the order of the attributes inside
+ *these*.
+
+ Arguably, this is a rather obscure feature.
+
+ .. versionadded:: 20.1.0
+ .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
+ .. versionadded:: 22.2.0
+ *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
+ .. versionchanged:: 24.1.0
+ Instances are not compared as tuples of attributes anymore, but using a
+ big ``and`` condition. This is faster and has more correct behavior for
+ uncomparable values like `math.nan`.
+ .. versionadded:: 24.1.0
+ If a class has an *inherited* classmethod called
+ ``__attrs_init_subclass__``, it is executed after the class is created.
+ .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
+
+ .. note::
+
+ The main differences from the classic `attr.s` are:
+
+ - Automatically detect whether or not *auto_attribs* should be `True`
+ (c.f. *auto_attribs* parameter).
+ - Converters and validators run when attributes are set by default --
+ if *frozen* is `False`.
+ - *slots=True*
+
+ Usually, this has only upsides and few visible effects in everyday
+ programming. But it *can* lead to some surprising behaviors, so
+ please make sure to read :term:`slotted classes`.
+
+ - *auto_exc=True*
+ - *auto_detect=True*
+ - *order=False*
+ - Some options that were only relevant on Python 2 or were kept around
+ for backwards-compatibility have been removed.
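+
+ A minimal usage sketch (an editor's illustration, not part of the
+ upstream docstring):
+
+ >>> from attrs import define
+ >>> @define
+ ... class Point:
+ ...     x: int
+ ...     y: int = 0
+ >>> Point(1)
+ Point(x=1, y=0)
+ >>> Point(1) == Point(1)
+ True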
+ + """ + + def do_it(cls, auto_attribs): + return attrs( + maybe_cls=cls, + these=these, + repr=repr, + hash=hash, + unsafe_hash=unsafe_hash, + init=init, + slots=slots, + frozen=frozen, + weakref_slot=weakref_slot, + str=str, + auto_attribs=auto_attribs, + kw_only=kw_only, + cache_hash=cache_hash, + auto_exc=auto_exc, + eq=eq, + order=order, + auto_detect=auto_detect, + collect_by_mro=True, + getstate_setstate=getstate_setstate, + on_setattr=on_setattr, + field_transformer=field_transformer, + match_args=match_args, + ) + + def wrap(cls): + """ + Making this a wrapper ensures this code runs during class creation. + + We also ensure that frozen-ness of classes is inherited. + """ + nonlocal frozen, on_setattr + + had_on_setattr = on_setattr not in (None, setters.NO_OP) + + # By default, mutable classes convert & validate on setattr. + if frozen is False and on_setattr is None: + on_setattr = _DEFAULT_ON_SETATTR + + # However, if we subclass a frozen class, we inherit the immutability + # and disable on_setattr. + for base_cls in cls.__bases__: + if base_cls.__setattr__ is _frozen_setattrs: + if had_on_setattr: + msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)." + raise ValueError(msg) + + on_setattr = setters.NO_OP + break + + if auto_attribs is not None: + return do_it(cls, auto_attribs) + + try: + return do_it(cls, True) + except UnannotatedAttributeError: + return do_it(cls, False) + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but `None` if used as `@attrs()`. + if maybe_cls is None: + return wrap + + return wrap(maybe_cls) + + +mutable = define +frozen = partial(define, frozen=True, on_setattr=None) + + +def field( + *, + default=NOTHING, + validator=None, + repr=True, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Create a new :term:`field` / :term:`attribute` on a class. + + .. warning:: + + Does **nothing** unless the class is also decorated with + `attrs.define` (or similar)! + + Args: + default: + A value that is used if an *attrs*-generated ``__init__`` is used + and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `attrs.Factory`, its callable will + be used to construct a new value (useful for mutable data types + like lists or dicts). + + If a default is not set (or set manually to `attrs.NOTHING`), a + value *must* be supplied when instantiating; otherwise a + `TypeError` will be raised. + + .. seealso:: `defaults` + + factory (~typing.Callable): + Syntactic sugar for ``default=attr.Factory(factory)``. + + validator (~typing.Callable | list[~typing.Callable]): + Callable that is called by *attrs*-generated ``__init__`` methods + after the instance has been initialized. They receive the + initialized instance, the :func:`~attrs.Attribute`, and the passed + value. + + The return value is *not* inspected so the validator has to throw + an exception itself. + + If a `list` is passed, its items are treated as validators and must + all pass. + + Validators can be globally disabled and re-enabled using + `attrs.validators.get_disabled` / `attrs.validators.set_disabled`. + + The validator can also be set using decorator notation as shown + below. + + .. seealso:: :ref:`validators` + + repr (bool | ~typing.Callable): + Include this attribute in the generated ``__repr__`` method. 
If
+ True, include the attribute; if False, omit it. By default, the
+ built-in ``repr()`` function is used. To override how the attribute
+ value is formatted, pass a ``callable`` that takes a single value
+ and returns a string. Note that the resulting string is used as-is,
+ which means it will be used directly *instead* of calling
+ ``repr()`` (the default).
+
+ eq (bool | ~typing.Callable):
+ If True (default), include this attribute in the generated
+ ``__eq__`` and ``__ne__`` methods that check two instances for
+ equality. To override how the attribute value is compared, pass a
+ callable that takes a single value and returns the value to be
+ compared.
+
+ .. seealso:: `comparison`
+
+ order (bool | ~typing.Callable):
+ If True (default), include this attribute in the generated
+ ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To
+ override how the attribute value is ordered, pass a callable that
+ takes a single value and returns the value to be ordered.
+
+ .. seealso:: `comparison`
+
+ cmp (bool | ~typing.Callable):
+ Setting *cmp* is equivalent to setting *eq* and *order* to the same
+ value. Must not be mixed with *eq* or *order*.
+
+ .. seealso:: `comparison`
+
+ hash (bool | None):
+ Include this attribute in the generated ``__hash__`` method. If
+ None (default), mirror *eq*'s value. This is the correct behavior
+ according to the Python spec. Setting this value to anything other
+ than None is *discouraged*.
+
+ .. seealso:: `hashing`
+
+ init (bool):
+ Include this attribute in the generated ``__init__`` method.
+
+ It is possible to set this to False and set a default value. In
+ that case this attribute is unconditionally initialized with the
+ specified default value or factory.
+
+ .. seealso:: `init`
+
+ converter (typing.Callable | Converter):
+ A callable that is called by *attrs*-generated ``__init__`` methods
+ to convert the attribute's value to the desired format.
+
+ If a vanilla callable is passed, it is given the passed-in value as
+ the only positional argument. It is possible to receive additional
+ arguments by wrapping the callable in a `Converter`.
+
+ Either way, the returned value will be used as the new value of the
+ attribute. The value is converted before being passed to the
+ validator, if any.
+
+ .. seealso:: :ref:`converters`
+
+ metadata (dict | None):
+ An arbitrary mapping, to be used by third-party code.
+
+ .. seealso:: `extending-metadata`.
+
+ type (type):
+ The type of the attribute. Nowadays, the preferred method to
+ specify the type is using a variable annotation (see :pep:`526`).
+ This argument is provided for backwards-compatibility and for usage
+ with `make_class`. Regardless of the approach used, the type will
+ be stored on ``Attribute.type``.
+
+ Please note that *attrs* doesn't do anything with this metadata by
+ itself. You can use it as part of your own code or for static type
+ checking.
+
+ kw_only (bool):
+ Make this attribute keyword-only in the generated ``__init__`` (if
+ ``init`` is False, this parameter is ignored).
+
+ on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
+ Allows overriding the *on_setattr* setting from `attr.s`. If left
+ None, the *on_setattr* value from `attr.s` is used. Set to
+ `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
+ attribute -- regardless of the setting in `define()`.
+
+ alias (str | None):
+ Override this attribute's parameter name in the generated
+ ``__init__`` method.
If left None, the default is ``name`` stripped
+ of leading underscores. See `private-attributes`.
+
+ .. versionadded:: 20.1.0
+ .. versionchanged:: 21.1.0
+ *eq*, *order*, and *cmp* also accept a custom callable
+ .. versionadded:: 22.2.0 *alias*
+ .. versionadded:: 23.1.0
+ The *type* parameter has been re-added; mostly for `attrs.make_class`.
+ Please note that type checkers ignore this metadata.
+
+ .. seealso::
+
+ `attr.ib`
+ """
+ return attrib(
+ default=default,
+ validator=validator,
+ repr=repr,
+ hash=hash,
+ init=init,
+ metadata=metadata,
+ type=type,
+ converter=converter,
+ factory=factory,
+ kw_only=kw_only,
+ eq=eq,
+ order=order,
+ on_setattr=on_setattr,
+ alias=alias,
+ )
+
+
+def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
+ """
+ Same as `attr.asdict`, except that collection types are always retained
+ and dict is always used as *dict_factory*.
+
+ .. versionadded:: 21.3.0
+ """
+ return _asdict(
+ inst=inst,
+ recurse=recurse,
+ filter=filter,
+ value_serializer=value_serializer,
+ retain_collection_types=True,
+ )
+
+
+def astuple(inst, *, recurse=True, filter=None):
+ """
+ Same as `attr.astuple`, except that collection types are always retained
+ and `tuple` is always used as the *tuple_factory*.
+
+ .. versionadded:: 21.3.0
+ """
+ return _astuple(
+ inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
+ ) diff --git a/.venv/lib/python3.12/site-packages/attr/_typing_compat.pyi b/.venv/lib/python3.12/site-packages/attr/_typing_compat.pyi new file mode 100644 index 00000000..ca7b71e9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/_typing_compat.pyi @@ -0,0 +1,15 @@
+from typing import Any, ClassVar, Protocol
+
+# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`.
+MYPY = False
+
+if MYPY:
+ # A protocol to be able to statically accept an attrs class.
+ class AttrsInstance_(Protocol):
+ __attrs_attrs__: ClassVar[Any]
+
+else:
+ # For type checkers without plug-in support use an empty protocol that
+ # will (hopefully) be combined into a union.
+ class AttrsInstance_(Protocol):
+ pass diff --git a/.venv/lib/python3.12/site-packages/attr/_version_info.py b/.venv/lib/python3.12/site-packages/attr/_version_info.py new file mode 100644 index 00000000..51a1312f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/_version_info.py @@ -0,0 +1,86 @@
+# SPDX-License-Identifier: MIT
+
+
+from functools import total_ordering
+
+from ._funcs import astuple
+from ._make import attrib, attrs
+
+
+@total_ordering
+@attrs(eq=False, order=False, slots=True, frozen=True)
+class VersionInfo:
+ """
+ A version object that can be compared to tuples of length 1--4:
+
+ >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
+ True
+ >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
+ True
+ >>> vi = attr.VersionInfo(19, 2, 0, "final")
+ >>> vi < (19, 1, 1)
+ False
+ >>> vi < (19,)
+ False
+ >>> vi == (19, 2,)
+ True
+ >>> vi == (19, 2, 1)
+ False
+
+ .. versionadded:: 19.2
+ """
+
+ year = attrib(type=int)
+ minor = attrib(type=int)
+ micro = attrib(type=int)
+ releaselevel = attrib(type=str)
+
+ @classmethod
+ def _from_version_string(cls, s):
+ """
+ Parse *s* and return a `VersionInfo`.
+ """
+ v = s.split(".")
+ if len(v) == 3:
+ v.append("final")
+
+ return cls(
+ year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
+ )
+
+ def _ensure_tuple(self, other):
+ """
+ Ensure *other* is a tuple of a valid length.
+
+ Returns a possibly transformed *other* and ourselves as a tuple of
+ the same length as *other*.
+ """
+
+ if self.__class__ is other.__class__:
+ other = astuple(other)
+
+ if not isinstance(other, tuple):
+ raise NotImplementedError
+
+ if not (1 <= len(other) <= 4):
+ raise NotImplementedError
+
+ return astuple(self)[: len(other)], other
+
+ def __eq__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ return us == them
+
+ def __lt__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
+ # have to do anything special with releaselevel for now.
+ return us < them diff --git a/.venv/lib/python3.12/site-packages/attr/_version_info.pyi b/.venv/lib/python3.12/site-packages/attr/_version_info.pyi new file mode 100644 index 00000000..45ced086 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/_version_info.pyi @@ -0,0 +1,9 @@
+class VersionInfo:
+ @property
+ def year(self) -> int: ...
+ @property
+ def minor(self) -> int: ...
+ @property
+ def micro(self) -> int: ...
+ @property
+ def releaselevel(self) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/attr/converters.py b/.venv/lib/python3.12/site-packages/attr/converters.py new file mode 100644 index 00000000..92383110 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/converters.py @@ -0,0 +1,151 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful converters.
+"""
+
+
+import typing
+
+from ._compat import _AnnotationExtractor
+from ._make import NOTHING, Factory, pipe
+
+
+__all__ = [
+ "default_if_none",
+ "optional",
+ "pipe",
+ "to_bool",
+]
+
+
+def optional(converter):
+ """
+ A converter that allows an attribute to be optional. An optional attribute
+ is one which can be set to `None`.
+
+ Type annotations will be inferred from the wrapped converter, if it has
+ any.
+
+ Args:
+ converter (typing.Callable):
+ The converter that is used for non-`None` values.
+
+ .. versionadded:: 17.1.0
+ """
+
+ def optional_converter(val):
+ if val is None:
+ return None
+ return converter(val)
+
+ xtr = _AnnotationExtractor(converter)
+
+ t = xtr.get_first_param_type()
+ if t:
+ optional_converter.__annotations__["val"] = typing.Optional[t]
+
+ rt = xtr.get_return_type()
+ if rt:
+ optional_converter.__annotations__["return"] = typing.Optional[rt]
+
+ return optional_converter
+
+
+def default_if_none(default=NOTHING, factory=None):
+ """
+ A converter that replaces `None` values with *default* or the result of
+ *factory*.
+
+ Args:
+ default:
+ Value to be used if `None` is passed. Passing an instance of
+ `attrs.Factory` is supported, however the ``takes_self`` option is
+ *not*.
+
+ factory (typing.Callable):
+ A callable that takes no parameters whose result is used if `None`
+ is passed.
+
+ Raises:
+ TypeError: If **neither** *default* **nor** *factory* is passed.
+
+ TypeError: If **both** *default* and *factory* are passed.
+
+ ValueError:
+ If an instance of `attrs.Factory` is passed with
+ ``takes_self=True``.
+
+ .. versionadded:: 18.2.0
+ """
+ if default is NOTHING and factory is None:
+ msg = "Must pass either `default` or `factory`."
+ raise TypeError(msg)
+
+ if default is not NOTHING and factory is not None:
+ msg = "Must pass either `default` or `factory` but not both."
+ raise TypeError(msg)
+
+ if factory is not None:
+ default = Factory(factory)
+
+ if isinstance(default, Factory):
+ if default.takes_self:
+ msg = "`takes_self` is not supported by default_if_none."
+ raise ValueError(msg)
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default.factory()
+
+ else:
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default
+
+ return default_if_none_converter
+
+
+def to_bool(val):
+ """
+ Convert "boolean" strings (for example, from environment variables) to real
+ booleans.
+
+ Values mapping to `True`:
+
+ - ``True``
+ - ``"true"`` / ``"t"``
+ - ``"yes"`` / ``"y"``
+ - ``"on"``
+ - ``"1"``
+ - ``1``
+
+ Values mapping to `False`:
+
+ - ``False``
+ - ``"false"`` / ``"f"``
+ - ``"no"`` / ``"n"``
+ - ``"off"``
+ - ``"0"``
+ - ``0``
+
+ Raises:
+ ValueError: For any other value.
+
+ .. versionadded:: 21.3.0
+ """
+ if isinstance(val, str):
+ val = val.lower()
+
+ if val in (True, "true", "t", "yes", "y", "on", "1", 1):
+ return True
+ if val in (False, "false", "f", "no", "n", "off", "0", 0):
+ return False
+
+ msg = f"Cannot convert value to bool: {val!r}"
+ raise ValueError(msg) diff --git a/.venv/lib/python3.12/site-packages/attr/converters.pyi b/.venv/lib/python3.12/site-packages/attr/converters.pyi new file mode 100644 index 00000000..9ef478f2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/converters.pyi @@ -0,0 +1,13 @@
+from typing import Callable, TypeVar, overload
+
+from attrs import _ConverterType
+
+_T = TypeVar("_T")
+
+def pipe(*validators: _ConverterType) -> _ConverterType: ...
+def optional(converter: _ConverterType) -> _ConverterType: ...
+@overload
+def default_if_none(default: _T) -> _ConverterType: ...
+@overload
+def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
+def to_bool(val: str) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/attr/exceptions.py b/.venv/lib/python3.12/site-packages/attr/exceptions.py new file mode 100644 index 00000000..3b7abb81 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/exceptions.py @@ -0,0 +1,95 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import annotations
+
+from typing import ClassVar
+
+
+class FrozenError(AttributeError):
+ """
+ An attempt has been made to modify a frozen/immutable instance or
+ attribute.
+
+ It mirrors the behavior of ``namedtuples`` by using the same error message
+ and subclassing `AttributeError`.
+
+ .. versionadded:: 20.1.0
+ """
+
+ msg = "can't set attribute"
+ args: ClassVar[tuple[str]] = [msg]
+
+
+class FrozenInstanceError(FrozenError):
+ """
+ An attempt has been made to modify a frozen instance.
+
+ .. versionadded:: 16.1.0
+ """
+
+
+class FrozenAttributeError(FrozenError):
+ """
+ An attempt has been made to modify a frozen attribute.
+
+ .. versionadded:: 20.1.0
+ """
+
+
+class AttrsAttributeNotFoundError(ValueError):
+ """
+ An *attrs* function couldn't find an attribute that the user asked for.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class NotAnAttrsClassError(ValueError):
+ """
+ A non-*attrs* class has been passed into an *attrs* function.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class DefaultAlreadySetError(RuntimeError):
+ """
+ A default has been set when defining the field, and an attempt is made to
+ reset it using the decorator.
+
+ .. versionadded:: 17.1.0
+ """
+
+
+class UnannotatedAttributeError(RuntimeError):
+ """
+ A class with ``auto_attribs=True`` has a field without a type annotation.
+
+ ..
versionadded:: 17.3.0
+ """
+
+
+class PythonTooOldError(RuntimeError):
+ """
+ An *attrs* feature was used that requires a newer Python version.
+
+ .. versionadded:: 18.2.0
+ """
+
+
+class NotCallableError(TypeError):
+ """
+ A field requiring a callable has been set with a value that is not
+ callable.
+
+ .. versionadded:: 19.2.0
+ """
+
+ def __init__(self, msg, value):
+ super(TypeError, self).__init__(msg, value)
+ self.msg = msg
+ self.value = value
+
+ def __str__(self):
+ return str(self.msg) diff --git a/.venv/lib/python3.12/site-packages/attr/exceptions.pyi b/.venv/lib/python3.12/site-packages/attr/exceptions.pyi new file mode 100644 index 00000000..f2680118 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/exceptions.pyi @@ -0,0 +1,17 @@
+from typing import Any
+
+class FrozenError(AttributeError):
+ msg: str = ...
+
+class FrozenInstanceError(FrozenError): ...
+class FrozenAttributeError(FrozenError): ...
+class AttrsAttributeNotFoundError(ValueError): ...
+class NotAnAttrsClassError(ValueError): ...
+class DefaultAlreadySetError(RuntimeError): ...
+class UnannotatedAttributeError(RuntimeError): ...
+class PythonTooOldError(RuntimeError): ...
+
+class NotCallableError(TypeError):
+ msg: str = ...
+ value: Any = ...
+ def __init__(self, msg: str, value: Any) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/attr/filters.py b/.venv/lib/python3.12/site-packages/attr/filters.py new file mode 100644 index 00000000..689b1705 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/filters.py @@ -0,0 +1,72 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful filters for `attrs.asdict` and `attrs.astuple`.
+"""
+
+from ._make import Attribute
+
+
+def _split_what(what):
+ """
+ Returns a tuple of `frozenset`s of classes, names, and attributes.
+ """
+ return (
+ frozenset(cls for cls in what if isinstance(cls, type)),
+ frozenset(cls for cls in what if isinstance(cls, str)),
+ frozenset(cls for cls in what if isinstance(cls, Attribute)),
+ )
+
+
+def include(*what):
+ """
+ Create a filter that only allows *what*.
+
+ Args:
+ what (list[type, str, attrs.Attribute]):
+ What to include. Can be a type, a name, or an attribute.
+
+ Returns:
+ Callable:
+ A callable that can be passed to `attrs.asdict`'s and
+ `attrs.astuple`'s *filter* argument.
+
+ .. versionchanged:: 23.1.0 Accept strings with field names.
+ """
+ cls, names, attrs = _split_what(what)
+
+ def include_(attribute, value):
+ return (
+ value.__class__ in cls
+ or attribute.name in names
+ or attribute in attrs
+ )
+
+ return include_
+
+
+def exclude(*what):
+ """
+ Create a filter that does **not** allow *what*.
+
+ Args:
+ what (list[type, str, attrs.Attribute]):
+ What to exclude. Can be a type, a name, or an attribute.
+
+ Returns:
+ Callable:
+ A callable that can be passed to `attrs.asdict`'s and
+ `attrs.astuple`'s *filter* argument.
+
+ .. versionchanged:: 23.3.0 Accept strings with field names.
+ """
+ cls, names, attrs = _split_what(what)
+
+ def exclude_(attribute, value):
+ return not (
+ value.__class__ in cls
+ or attribute.name in names
+ or attribute in attrs
+ )
+
+ return exclude_ diff --git a/.venv/lib/python3.12/site-packages/attr/filters.pyi b/.venv/lib/python3.12/site-packages/attr/filters.pyi new file mode 100644 index 00000000..974abdcd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/filters.pyi @@ -0,0 +1,6 @@
+from typing import Any
+
+from .
import Attribute, _FilterType
+
+def include(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
+def exclude(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ... diff --git a/.venv/lib/python3.12/site-packages/attr/py.typed b/.venv/lib/python3.12/site-packages/attr/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/.venv/lib/python3.12/site-packages/attr/setters.py b/.venv/lib/python3.12/site-packages/attr/setters.py new file mode 100644 index 00000000..a9ce0169 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/setters.py @@ -0,0 +1,79 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly used hooks for on_setattr.
+"""
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
+def pipe(*setters):
+ """
+ Run all *setters* and return the return value of the last one.
+
+ .. versionadded:: 20.1.0
+ """
+
+ def wrapped_pipe(instance, attrib, new_value):
+ rv = new_value
+
+ for setter in setters:
+ rv = setter(instance, attrib, rv)
+
+ return rv
+
+ return wrapped_pipe
+
+
+def frozen(_, __, ___):
+ """
+ Prevent an attribute from being modified.
+
+ .. versionadded:: 20.1.0
+ """
+ raise FrozenAttributeError()
+
+
+def validate(instance, attrib, new_value):
+ """
+ Run *attrib*'s validator on *new_value* if it has one.
+
+ .. versionadded:: 20.1.0
+ """
+ if _config._run_validators is False:
+ return new_value
+
+ v = attrib.validator
+ if not v:
+ return new_value
+
+ v(instance, attrib, new_value)
+
+ return new_value
+
+
+def convert(instance, attrib, new_value):
+ """
+ Run *attrib*'s converter -- if it has one -- on *new_value* and return the
+ result.
+
+ .. versionadded:: 20.1.0
+ """
+ c = attrib.converter
+ if c:
+ # This can be removed once we drop 3.8 and use attrs.Converter instead.
+ from ._make import Converter
+
+ if not isinstance(c, Converter):
+ return c(new_value)
+
+ return c(new_value, instance, attrib)
+
+ return new_value
+
+
+# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
+# Sphinx's autodata stopped working, so the docstring is inlined in the API
+# docs.
+NO_OP = object() diff --git a/.venv/lib/python3.12/site-packages/attr/setters.pyi b/.venv/lib/python3.12/site-packages/attr/setters.pyi new file mode 100644 index 00000000..73abf36e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/setters.pyi @@ -0,0 +1,20 @@
+from typing import Any, NewType, NoReturn, TypeVar
+
+from . import Attribute
+from attrs import _OnSetAttrType
+
+_T = TypeVar("_T")
+
+def frozen(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> NoReturn: ...
+def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
+def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
+
+# convert is allowed to return Any, because converters can be chained using
+# pipe.
+def convert(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> Any: ...
+
+_NoOpType = NewType("_NoOpType", object)
+NO_OP: _NoOpType diff --git a/.venv/lib/python3.12/site-packages/attr/validators.py b/.venv/lib/python3.12/site-packages/attr/validators.py new file mode 100644 index 00000000..8a56717d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/validators.py @@ -0,0 +1,711 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful validators.
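+
+A doctest-style sketch (an editor's illustration, not upstream text):
+
+>>> import attr
+>>> from attr import validators
+>>> @attr.s
+... class C:
+...     x = attr.ib(validator=validators.instance_of(int))
+>>> C(42)
+C(x=42)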
+""" + + +import operator +import re + +from contextlib import contextmanager +from re import Pattern + +from ._config import get_run_validators, set_run_validators +from ._make import _AndValidator, and_, attrib, attrs +from .converters import default_if_none +from .exceptions import NotCallableError + + +__all__ = [ + "and_", + "deep_iterable", + "deep_mapping", + "disabled", + "ge", + "get_disabled", + "gt", + "in_", + "instance_of", + "is_callable", + "le", + "lt", + "matches_re", + "max_len", + "min_len", + "not_", + "optional", + "or_", + "set_disabled", +] + + +def set_disabled(disabled): + """ + Globally disable or enable running validators. + + By default, they are run. + + Args: + disabled (bool): If `True`, disable running all validators. + + .. warning:: + + This function is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(not disabled) + + +def get_disabled(): + """ + Return a bool indicating whether validators are currently disabled or not. + + Returns: + bool:`True` if validators are currently disabled. + + .. versionadded:: 21.3.0 + """ + return not get_run_validators() + + +@contextmanager +def disabled(): + """ + Context manager that disables running validators within its context. + + .. warning:: + + This context manager is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(False) + try: + yield + finally: + set_run_validators(True) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _InstanceOfValidator: + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not isinstance(value, self.type): + msg = f"'{attr.name}' must be {self.type!r} (got {value!r} that is a {value.__class__!r})." + raise TypeError( + msg, + attr, + self.type, + value, + ) + + def __repr__(self): + return f"" + + +def instance_of(type): + """ + A validator that raises a `TypeError` if the initializer is called with a + wrong type for this particular attribute (checks are performed using + `isinstance` therefore it's also valid to pass a tuple of types). + + Args: + type (type | tuple[type]): The type to check for. + + Raises: + TypeError: + With a human readable error message, the attribute (of type + `attrs.Attribute`), the expected type, and the value it got. + """ + return _InstanceOfValidator(type) + + +@attrs(repr=False, frozen=True, slots=True) +class _MatchesReValidator: + pattern = attrib() + match_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.match_func(value): + msg = f"'{attr.name}' must match regex {self.pattern.pattern!r} ({value!r} doesn't)" + raise ValueError( + msg, + attr, + self.pattern, + value, + ) + + def __repr__(self): + return f"" + + +def matches_re(regex, flags=0, func=None): + r""" + A validator that raises `ValueError` if the initializer is called with a + string that doesn't match *regex*. + + Args: + regex (str, re.Pattern): + A regex string or precompiled pattern to match against + + flags (int): + Flags that will be passed to the underlying re function (default 0) + + func (typing.Callable): + Which underlying `re` function to call. Valid options are + `re.fullmatch`, `re.search`, and `re.match`; the default `None` + means `re.fullmatch`. For performance reasons, the pattern is + always precompiled using `re.compile`. + + .. versionadded:: 19.2.0 + .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern. 
+ """ + valid_funcs = (re.fullmatch, None, re.search, re.match) + if func not in valid_funcs: + msg = "'func' must be one of {}.".format( + ", ".join( + sorted(e and e.__name__ or "None" for e in set(valid_funcs)) + ) + ) + raise ValueError(msg) + + if isinstance(regex, Pattern): + if flags: + msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead" + raise TypeError(msg) + pattern = regex + else: + pattern = re.compile(regex, flags) + + if func is re.match: + match_func = pattern.match + elif func is re.search: + match_func = pattern.search + else: + match_func = pattern.fullmatch + + return _MatchesReValidator(pattern, match_func) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _OptionalValidator: + validator = attrib() + + def __call__(self, inst, attr, value): + if value is None: + return + + self.validator(inst, attr, value) + + def __repr__(self): + return f"" + + +def optional(validator): + """ + A validator that makes an attribute optional. An optional attribute is one + which can be set to `None` in addition to satisfying the requirements of + the sub-validator. + + Args: + validator + (typing.Callable | tuple[typing.Callable] | list[typing.Callable]): + A validator (or validators) that is used for non-`None` values. + + .. versionadded:: 15.1.0 + .. versionchanged:: 17.1.0 *validator* can be a list of validators. + .. versionchanged:: 23.1.0 *validator* can also be a tuple of validators. + """ + if isinstance(validator, (list, tuple)): + return _OptionalValidator(_AndValidator(validator)) + + return _OptionalValidator(validator) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _InValidator: + options = attrib() + _original_options = attrib(hash=False) + + def __call__(self, inst, attr, value): + try: + in_options = value in self.options + except TypeError: # e.g. `1 in "abc"` + in_options = False + + if not in_options: + msg = f"'{attr.name}' must be in {self._original_options!r} (got {value!r})" + raise ValueError( + msg, + attr, + self._original_options, + value, + ) + + def __repr__(self): + return f"" + + +def in_(options): + """ + A validator that raises a `ValueError` if the initializer is called with a + value that does not belong in the *options* provided. + + The check is performed using ``value in options``, so *options* has to + support that operation. + + To keep the validator hashable, dicts, lists, and sets are transparently + transformed into a `tuple`. + + Args: + options: Allowed options. + + Raises: + ValueError: + With a human readable error message, the attribute (of type + `attrs.Attribute`), the expected options, and the value it got. + + .. versionadded:: 17.1.0 + .. versionchanged:: 22.1.0 + The ValueError was incomplete until now and only contained the human + readable error message. Now it contains all the information that has + been promised since 17.1.0. + .. versionchanged:: 24.1.0 + *options* that are a list, dict, or a set are now transformed into a + tuple to keep the validator hashable. + """ + repr_options = options + if isinstance(options, (list, dict, set)): + options = tuple(options) + + return _InValidator(options, repr_options) + + +@attrs(repr=False, slots=False, unsafe_hash=True) +class _IsCallableValidator: + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not callable(value): + message = ( + "'{name}' must be callable " + "(got {value!r} that is a {actual!r})." 
+ )
+ raise NotCallableError(
+ msg=message.format(
+ name=attr.name, value=value, actual=value.__class__
+ ),
+ value=value,
+ )
+
+ def __repr__(self):
+ return "<is_callable validator>"
+
+
+def is_callable():
+ """
+ A validator that raises a `attrs.exceptions.NotCallableError` if the
+ initializer is called with a value for this particular attribute that is
+ not callable.
+
+ .. versionadded:: 19.1.0
+
+ Raises:
+ attrs.exceptions.NotCallableError:
+ With a human readable error message containing the attribute
+ (`attrs.Attribute`) name, and the value it got.
+ """
+ return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _DeepIterable:
+ member_validator = attrib(validator=is_callable())
+ iterable_validator = attrib(
+ default=None, validator=optional(is_callable())
+ )
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.iterable_validator is not None:
+ self.iterable_validator(inst, attr, value)
+
+ for member in value:
+ self.member_validator(inst, attr, member)
+
+ def __repr__(self):
+ iterable_identifier = (
+ ""
+ if self.iterable_validator is None
+ else f" {self.iterable_validator!r}"
+ )
+ return (
+ f"<deep_iterable validator for{iterable_identifier}"
+ f" iterables of {self.member_validator!r}>"
+ )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+ """
+ A validator that performs deep validation of an iterable.
+
+ Args:
+ member_validator: Validator to apply to iterable members.
+
+ iterable_validator:
+ Validator to apply to iterable itself (optional).
+
+ Raises:
+ TypeError: if any sub-validators fail
+
+ .. versionadded:: 19.1.0
+ """
+ if isinstance(member_validator, (list, tuple)):
+ member_validator = and_(*member_validator)
+ return _DeepIterable(member_validator, iterable_validator)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _DeepMapping:
+ key_validator = attrib(validator=is_callable())
+ value_validator = attrib(validator=is_callable())
+ mapping_validator = attrib(default=None, validator=optional(is_callable()))
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.mapping_validator is not None:
+ self.mapping_validator(inst, attr, value)
+
+ for key in value:
+ self.key_validator(inst, attr, key)
+ self.value_validator(inst, attr, value[key])
+
+ def __repr__(self):
+ return f"<deep_mapping validator for objects mapping {self.key_validator!r} to {self.value_validator!r}>"
+
+
+def deep_mapping(key_validator, value_validator, mapping_validator=None):
+ """
+ A validator that performs deep validation of a dictionary.
+
+ Args:
+ key_validator: Validator to apply to dictionary keys.
+
+ value_validator: Validator to apply to dictionary values.
+
+ mapping_validator:
+ Validator to apply to top-level mapping attribute (optional).
+
+ .. versionadded:: 19.1.0
+
+ Raises:
+ TypeError: if any sub-validators fail
+ """
+ return _DeepMapping(key_validator, value_validator, mapping_validator)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _NumberValidator:
+ bound = attrib()
+ compare_op = attrib()
+ compare_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.compare_func(value, self.bound):
+ msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}"
+ raise ValueError(msg)
+
+ def __repr__(self):
+ return f"<Validator for x {self.compare_op} {self.bound}>"
+
+
+def lt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called with a
+ number larger or equal to *val*.
+
+ The validator uses `operator.lt` to compare the values.
+
+ Args:
+ val: Exclusive upper bound for values.
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<", operator.lt)
+
+
+def le(val):
+ """
+ A validator that raises `ValueError` if the initializer is called with a
+ number greater than *val*.
+
+ The validator uses `operator.le` to compare the values.
+
+ Args:
+ val: Inclusive upper bound for values.
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<=", operator.le)
+
+
+def ge(val):
+ """
+ A validator that raises `ValueError` if the initializer is called with a
+ number smaller than *val*.
+
+ The validator uses `operator.ge` to compare the values.
+
+ Args:
+ val: Inclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">=", operator.ge)
+
+
+def gt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called with a
+ number smaller or equal to *val*.
+
+ The validator uses `operator.gt` to compare the values.
+
+ Args:
+ val: Exclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">", operator.gt)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MaxLengthValidator:
+ max_length = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if len(value) > self.max_length:
+ msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}"
+ raise ValueError(msg)
+
+ def __repr__(self):
+ return f"<max_len validator for {self.max_length}>"
+
+
+def max_len(length):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a string or iterable that is longer than *length*.
+
+ Args:
+ length (int): Maximum length of the string or iterable
+
+ .. versionadded:: 21.3.0
+ """
+ return _MaxLengthValidator(length)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MinLengthValidator:
+ min_length = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if len(value) < self.min_length:
+ msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}"
+ raise ValueError(msg)
+
+ def __repr__(self):
+ return f"<min_len validator for {self.min_length}>"
+
+
+def min_len(length):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a string or iterable that is shorter than *length*.
+
+ Args:
+ length (int): Minimum length of the string or iterable
+
+ .. versionadded:: 22.1.0
+ """
+ return _MinLengthValidator(length)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _SubclassOfValidator:
+ type = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not issubclass(value, self.type):
+ msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})."
+ raise TypeError(
+ msg,
+ attr,
+ self.type,
+ value,
+ )
+
+ def __repr__(self):
+ return f"<subclass_of validator for type {self.type!r}>"
+
+
+def _subclass_of(type):
+ """
+ A validator that raises a `TypeError` if the initializer is called with a
+ wrong type for this particular attribute (checks are performed using
+ `issubclass`, therefore it's also valid to pass a tuple of types).
+
+ Args:
+ type (type | tuple[type, ...]): The type(s) to check for.
+
+ Raises:
+ TypeError:
+ With a human readable error message, the attribute (of type
+ `attrs.Attribute`), the expected type, and the value it got.
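+
+ For illustration (an editor's sketch; this private helper is used below
+ by `_NotValidator` to validate *exc_types*):
+
+ >>> _subclass_of(Exception)
+ <subclass_of validator for type <class 'Exception'>>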
+ """ + return _SubclassOfValidator(type) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _NotValidator: + validator = attrib() + msg = attrib( + converter=default_if_none( + "not_ validator child '{validator!r}' " + "did not raise a captured error" + ) + ) + exc_types = attrib( + validator=deep_iterable( + member_validator=_subclass_of(Exception), + iterable_validator=instance_of(tuple), + ), + ) + + def __call__(self, inst, attr, value): + try: + self.validator(inst, attr, value) + except self.exc_types: + pass # suppress error to invert validity + else: + raise ValueError( + self.msg.format( + validator=self.validator, + exc_types=self.exc_types, + ), + attr, + self.validator, + value, + self.exc_types, + ) + + def __repr__(self): + return f"" + + +def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)): + """ + A validator that wraps and logically 'inverts' the validator passed to it. + It will raise a `ValueError` if the provided validator *doesn't* raise a + `ValueError` or `TypeError` (by default), and will suppress the exception + if the provided validator *does*. + + Intended to be used with existing validators to compose logic without + needing to create inverted variants, for example, ``not_(in_(...))``. + + Args: + validator: A validator to be logically inverted. + + msg (str): + Message to raise if validator fails. Formatted with keys + ``exc_types`` and ``validator``. + + exc_types (tuple[type, ...]): + Exception type(s) to capture. Other types raised by child + validators will not be intercepted and pass through. + + Raises: + ValueError: + With a human readable error message, the attribute (of type + `attrs.Attribute`), the validator that failed to raise an + exception, the value it got, and the expected exception types. + + .. versionadded:: 22.2.0 + """ + try: + exc_types = tuple(exc_types) + except TypeError: + exc_types = (exc_types,) + return _NotValidator(validator, msg, exc_types) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _OrValidator: + validators = attrib() + + def __call__(self, inst, attr, value): + for v in self.validators: + try: + v(inst, attr, value) + except Exception: # noqa: BLE001, PERF203, S112 + continue + else: + return + + msg = f"None of {self.validators!r} satisfied for value {value!r}" + raise ValueError(msg) + + def __repr__(self): + return f"" + + +def or_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators until one of them is + satisfied. + + Args: + validators (~collections.abc.Iterable[typing.Callable]): + Arbitrary number of validators. + + Raises: + ValueError: + If no validator is satisfied. Raised with a human-readable error + message listing all the wrapped validators and the value that + failed all of them. + + .. 
versionadded:: 24.1.0 + """ + vals = [] + for v in validators: + vals.extend(v.validators if isinstance(v, _OrValidator) else [v]) + + return _OrValidator(tuple(vals)) diff --git a/.venv/lib/python3.12/site-packages/attr/validators.pyi b/.venv/lib/python3.12/site-packages/attr/validators.pyi new file mode 100644 index 00000000..a314110e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attr/validators.pyi @@ -0,0 +1,83 @@ +from typing import ( + Any, + AnyStr, + Callable, + Container, + ContextManager, + Iterable, + Mapping, + Match, + Pattern, + TypeVar, + overload, +) + +from attrs import _ValidatorType +from attrs import _ValidatorArgType + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_I = TypeVar("_I", bound=Iterable) +_K = TypeVar("_K") +_V = TypeVar("_V") +_M = TypeVar("_M", bound=Mapping) + +def set_disabled(run: bool) -> None: ... +def get_disabled() -> bool: ... +def disabled() -> ContextManager[None]: ... + +# To be more precise on instance_of use some overloads. +# If there are more than 3 items in the tuple then we fall back to Any +@overload +def instance_of(type: type[_T]) -> _ValidatorType[_T]: ... +@overload +def instance_of(type: tuple[type[_T]]) -> _ValidatorType[_T]: ... +@overload +def instance_of( + type: tuple[type[_T1], type[_T2]] +) -> _ValidatorType[_T1 | _T2]: ... +@overload +def instance_of( + type: tuple[type[_T1], type[_T2], type[_T3]] +) -> _ValidatorType[_T1 | _T2 | _T3]: ... +@overload +def instance_of(type: tuple[type, ...]) -> _ValidatorType[Any]: ... +def optional( + validator: ( + _ValidatorType[_T] + | list[_ValidatorType[_T]] + | tuple[_ValidatorType[_T]] + ), +) -> _ValidatorType[_T | None]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... +def matches_re( + regex: Pattern[AnyStr] | AnyStr, + flags: int = ..., + func: Callable[[AnyStr, AnyStr, int], Match[AnyStr] | None] | None = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorArgType[_T], + iterable_validator: _ValidatorType[_I] | None = ..., +) -> _ValidatorType[_I]: ... +def deep_mapping( + key_validator: _ValidatorType[_K], + value_validator: _ValidatorType[_V], + mapping_validator: _ValidatorType[_M] | None = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... +def lt(val: _T) -> _ValidatorType[_T]: ... +def le(val: _T) -> _ValidatorType[_T]: ... +def ge(val: _T) -> _ValidatorType[_T]: ... +def gt(val: _T) -> _ValidatorType[_T]: ... +def max_len(length: int) -> _ValidatorType[_T]: ... +def min_len(length: int) -> _ValidatorType[_T]: ... +def not_( + validator: _ValidatorType[_T], + *, + msg: str | None = None, + exc_types: type[Exception] | Iterable[type[Exception]] = ..., +) -> _ValidatorType[_T]: ... +def or_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... 
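# ---------------------------------------------------------------------------
# Editor's note: an illustrative, self-contained sketch (not part of the
# vendored files above or below) showing how the validators defined in
# attr/validators.py compose in practice:

import attr
from attr import validators as v

@attr.s
class Server:
    # port must be an int in the range [1, 65535]
    port = attr.ib(validator=[v.instance_of(int), v.ge(1), v.le(65535)])
    # name must be a non-empty string that is not purely numeric
    name = attr.ib(
        validator=v.and_(
            v.instance_of(str),
            v.min_len(1),
            v.not_(v.matches_re(r"[0-9]+")),
        )
    )

Server(port=8080, name="web-1")   # passes every validator
# Server(port=0, name="web-1")    # would raise ValueError from ge(1)
# ---------------------------------------------------------------------------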
diff --git a/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/METADATA b/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/METADATA new file mode 100644 index 00000000..a85b3786 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/METADATA @@ -0,0 +1,242 @@ +Metadata-Version: 2.3 +Name: attrs +Version: 24.2.0 +Summary: Classes Without Boilerplate +Project-URL: Documentation, https://www.attrs.org/ +Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html +Project-URL: GitHub, https://github.com/python-attrs/attrs +Project-URL: Funding, https://github.com/sponsors/hynek +Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi +Author-email: Hynek Schlawack +License-Expression: MIT +License-File: LICENSE +Keywords: attribute,boilerplate,class +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Requires-Python: >=3.7 +Requires-Dist: importlib-metadata; python_version < '3.8' +Provides-Extra: benchmark +Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'benchmark' +Requires-Dist: hypothesis; extra == 'benchmark' +Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'benchmark' +Requires-Dist: pympler; extra == 'benchmark' +Requires-Dist: pytest-codspeed; extra == 'benchmark' +Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'benchmark' +Requires-Dist: pytest-xdist[psutil]; extra == 'benchmark' +Requires-Dist: pytest>=4.3.0; extra == 'benchmark' +Provides-Extra: cov +Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'cov' +Requires-Dist: coverage[toml]>=5.3; extra == 'cov' +Requires-Dist: hypothesis; extra == 'cov' +Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'cov' +Requires-Dist: pympler; extra == 'cov' +Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'cov' +Requires-Dist: pytest-xdist[psutil]; extra == 'cov' +Requires-Dist: pytest>=4.3.0; extra == 'cov' +Provides-Extra: dev +Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'dev' +Requires-Dist: hypothesis; extra == 'dev' +Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'dev' +Requires-Dist: pre-commit; extra == 'dev' +Requires-Dist: pympler; extra 
== 'dev' +Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'dev' +Requires-Dist: pytest-xdist[psutil]; extra == 'dev' +Requires-Dist: pytest>=4.3.0; extra == 'dev' +Provides-Extra: docs +Requires-Dist: cogapp; extra == 'docs' +Requires-Dist: furo; extra == 'docs' +Requires-Dist: myst-parser; extra == 'docs' +Requires-Dist: sphinx; extra == 'docs' +Requires-Dist: sphinx-notfound-page; extra == 'docs' +Requires-Dist: sphinxcontrib-towncrier; extra == 'docs' +Requires-Dist: towncrier<24.7; extra == 'docs' +Provides-Extra: tests +Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'tests' +Requires-Dist: hypothesis; extra == 'tests' +Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'tests' +Requires-Dist: pympler; extra == 'tests' +Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'tests' +Requires-Dist: pytest-xdist[psutil]; extra == 'tests' +Requires-Dist: pytest>=4.3.0; extra == 'tests' +Provides-Extra: tests-mypy +Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'tests-mypy' +Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'tests-mypy' +Description-Content-Type: text/markdown + +

+ +<!-- attrs logo --> + +

+ + +*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)). +[Trusted by NASA](https://docs.github.com/en/account-and-profile/setting-up-and-managing-your-github-profile/customizing-your-profile/personalizing-your-profile#list-of-qualifying-repositories-for-mars-2020-helicopter-contributor-achievement) for Mars missions since 2020! + +Its main goal is to help you to write **concise** and **correct** software without slowing down your code. + + +## Sponsors + +*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek). +Especially those generously supporting us at the *The Organization* tier and higher: + + + +

+ +<!-- sponsor logos --> +

+ Please consider joining them to help make attrs’s maintenance more sustainable! +

+ + + +## Example + +*attrs* gives you a class decorator and a way to declaratively define the attributes on that class: + + + +```pycon +>>> from attrs import asdict, define, make_class, Factory + +>>> @define +... class SomeClass: +... a_number: int = 42 +... list_of_numbers: list[int] = Factory(list) +... +... def hard_math(self, another_number): +... return self.a_number + sum(self.list_of_numbers) * another_number + + +>>> sc = SomeClass(1, [1, 2, 3]) +>>> sc +SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) + +>>> sc.hard_math(3) +19 +>>> sc == SomeClass(1, [1, 2, 3]) +True +>>> sc != SomeClass(2, [3, 2, 1]) +True + +>>> asdict(sc) +{'a_number': 1, 'list_of_numbers': [1, 2, 3]} + +>>> SomeClass() +SomeClass(a_number=42, list_of_numbers=[]) + +>>> C = make_class("C", ["a", "b"]) +>>> C("foo", "bar") +C(a='foo', b='bar') +``` + +After *declaring* your attributes, *attrs* gives you: + +- a concise and explicit overview of the class's attributes, +- a nice human-readable `__repr__`, +- equality-checking methods, +- an initializer, +- and much more, + +*without* writing dull boilerplate code again and again and *without* runtime performance penalties. + +--- + +This example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0. +The classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**. + +Check out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for an in-depth explanation! + + +### Hate Type Annotations!? + +No problem! +Types are entirely **optional** with *attrs*. +Simply assign `attrs.field()` to the attributes instead of annotating them with types: + +```python +from attrs import define, field + +@define +class SomeClass: + a_number = field(default=42) + list_of_numbers = field(factory=list) +``` + + +## Data Classes + +On the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*). +In practice it does a lot more and is more flexible. +For instance, it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), allows more ways to [plug into the initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization), has a replacement for `__init_subclass__`, and allows for stepping through the generated methods using a debugger. + +For more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes), but generally speaking, we are more likely to commit crimes against nature to make things work that one would expect to work, but that are quite complicated in practice. + + +## Project Information + +- [**Changelog**](https://www.attrs.org/en/stable/changelog.html) +- [**Documentation**](https://www.attrs.org/) +- [**PyPI**](https://pypi.org/project/attrs/) +- [**Source Code**](https://github.com/python-attrs/attrs) +- [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md) +- [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs) +- **Get Help**: use the `python-attrs` tag on [Stack Overflow](https://stackoverflow.com/questions/tagged/python-attrs) + + +### *attrs* for Enterprise + +Available as part of the Tidelift Subscription. 
+ +The maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. +[Learn more](https://tidelift.com/?utm_source=lifter&utm_medium=referral&utm_campaign=hynek). + +## Release Information + +### Deprecations + +- Given the amount of warnings raised in the broader ecosystem, we've decided to only soft-deprecate the *hash* argument to `@define` / `@attr.s`. + Please don't use it in new code, but we don't intend to remove it anymore. + [#1330](https://github.com/python-attrs/attrs/issues/1330) + + +### Changes + +- `attrs.converters.pipe()` (and its syntactic sugar of passing a list for `attrs.field()`'s / `attr.ib()`'s *converter* argument) works again when passing `attrs.setters.convert` to *on_setattr* (which is default for `attrs.define`). + [#1328](https://github.com/python-attrs/attrs/issues/1328) +- Restored support for PEP [649](https://peps.python.org/pep-0649/) / [749](https://peps.python.org/pep-0749/)-implementing Pythons -- currently 3.14-dev. + [#1329](https://github.com/python-attrs/attrs/issues/1329) + + + +--- + +[Full changelog →](https://www.attrs.org/en/stable/changelog.html) diff --git a/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/RECORD b/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/RECORD new file mode 100644 index 00000000..1b019796 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/RECORD @@ -0,0 +1,55 @@ +attr/__init__.py,sha256=l8Ewh5KZE7CCY0i1iDfSCnFiUTIkBVoqsXjX9EZnIVA,2087 +attr/__init__.pyi,sha256=aTVHBPX6krCGvbQvOl_UKqEzmi2HFsaIVm2WKmAiqVs,11434 +attr/__pycache__/__init__.cpython-312.pyc,, +attr/__pycache__/_cmp.cpython-312.pyc,, +attr/__pycache__/_compat.cpython-312.pyc,, +attr/__pycache__/_config.cpython-312.pyc,, +attr/__pycache__/_funcs.cpython-312.pyc,, +attr/__pycache__/_make.cpython-312.pyc,, +attr/__pycache__/_next_gen.cpython-312.pyc,, +attr/__pycache__/_version_info.cpython-312.pyc,, +attr/__pycache__/converters.cpython-312.pyc,, +attr/__pycache__/exceptions.cpython-312.pyc,, +attr/__pycache__/filters.cpython-312.pyc,, +attr/__pycache__/setters.cpython-312.pyc,, +attr/__pycache__/validators.cpython-312.pyc,, +attr/_cmp.py,sha256=3umHiBtgsEYtvNP_8XrQwTCdFoZIX4DEur76N-2a3X8,4123 +attr/_cmp.pyi,sha256=U-_RU_UZOyPUEQzXE6RMYQQcjkZRY25wTH99sN0s7MM,368 +attr/_compat.py,sha256=n2Uk3c-ywv0PkFfGlvqR7SzDXp4NOhWmNV_ZK6YfWoM,2958 +attr/_config.py,sha256=z81Vt-GeT_2taxs1XZfmHx9TWlSxjPb6eZH1LTGsS54,843 +attr/_funcs.py,sha256=SGDmNlED1TM3tgO9Ap2mfRfVI24XEAcrNQs7o2eBXHQ,17386 +attr/_make.py,sha256=BjENJz5eJoojJVbCoupWjXLLEZJ7VID89lisLbQUlmQ,91479 +attr/_next_gen.py,sha256=dhGb96VFg4kXBkS9Zdz1A2uxVJ99q_RT1hw3kLA9-uI,24630 +attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469 +attr/_version_info.py,sha256=exSqb3b5E-fMSsgZAlEw9XcLpEgobPORCZpcaEglAM4,2121 +attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 +attr/converters.py,sha256=vNa58pZi9V6uxBzl4t1QrHbQfkT4iRFAodyXe7lcgg0,3506 +attr/converters.pyi,sha256=mpDoVFO3Cpx8xYSSV0iZFl7IAHuoNBglxKfxHvLj_sY,410 +attr/exceptions.py,sha256=HRFq4iybmv7-DcZwyjl6M1euM2YeJVK_hFxuaBGAngI,1977 +attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 +attr/filters.py,sha256=ZBiKWLp3R0LfCZsq7X11pn9WX8NslS2wXM4jsnLOGc8,1795 
+attr/filters.pyi,sha256=3J5BG-dTxltBk1_-RuNRUHrv2qu1v8v4aDNAQ7_mifA,208 +attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attr/setters.py,sha256=faMQeiBo_nbXYnPaQ1pq8PXeA7Zr-uNsVsPMiKCmxhc,1619 +attr/setters.pyi,sha256=NnVkaFU1BB4JB8E4JuXyrzTUgvtMpj8p3wBdJY7uix4,584 +attr/validators.py,sha256=985eTP6RHyon61YEauMJgyNy1rEOhJWiSXMJgRxPtrQ,20045 +attr/validators.pyi,sha256=LjKf7AoXZfvGSfT3LRs61Qfln94konYyMUPoJJjOxK4,2502 +attrs-24.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +attrs-24.2.0.dist-info/METADATA,sha256=3Jgk4lr9Y1SAqAcwOLPN_mpW0wc6VOGm-yHt1LsPIHw,11524 +attrs-24.2.0.dist-info/RECORD,, +attrs-24.2.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87 +attrs-24.2.0.dist-info/licenses/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109 +attrs/__init__.py,sha256=5FHo-EMFOX-g4ialSK4fwOjuoHzLISJDZCwoOl02Ty8,1071 +attrs/__init__.pyi,sha256=o3l92VsD9kHz8sldEtb_tllBTs3TeL-vIBMTxo2Zc_4,7703 +attrs/__pycache__/__init__.cpython-312.pyc,, +attrs/__pycache__/converters.cpython-312.pyc,, +attrs/__pycache__/exceptions.cpython-312.pyc,, +attrs/__pycache__/filters.cpython-312.pyc,, +attrs/__pycache__/setters.cpython-312.pyc,, +attrs/__pycache__/validators.cpython-312.pyc,, +attrs/converters.py,sha256=8kQljrVwfSTRu8INwEk8SI0eGrzmWftsT7rM0EqyohM,76 +attrs/exceptions.py,sha256=ACCCmg19-vDFaDPY9vFl199SPXCQMN_bENs4DALjzms,76 +attrs/filters.py,sha256=VOUMZug9uEU6dUuA0dF1jInUK0PL3fLgP0VBS5d-CDE,73 +attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs/setters.py,sha256=eL1YidYQV3T2h9_SYIZSZR1FAcHGb1TuCTy0E0Lv2SU,73 +attrs/validators.py,sha256=xcy6wD5TtTkdCG1f4XWbocPSO0faBjk5IfVJfP6SUj0,76 diff --git a/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/WHEEL new file mode 100644 index 00000000..cdd68a49 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.25.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/licenses/LICENSE b/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/licenses/LICENSE new file mode 100644 index 00000000..2bd6453d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attrs-24.2.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack and the attrs contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
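A note on the shim below: `attrs/__init__.py` re-exports the modern API (`define`, `field`, `frozen`, plus the `validators` and `converters` submodules) from the legacy `attr` package, so both import names resolve to the same objects. As a minimal sketch of how those pieces compose, assuming only what the README above documents (the `Server` class and its fields are invented for illustration, not part of the package):

```python
from attrs import asdict, define, field, validators

@define
class Server:
    # Hypothetical example class. The converter runs before the
    # validator, so a string port is coerced to int, then range-checked.
    host: str = field(validator=validators.instance_of(str))
    port: int = field(
        default=8080,
        converter=int,
        validator=validators.in_(range(1, 65536)),
    )

srv = Server("localhost", port="8443")
assert srv.port == 8443
assert asdict(srv) == {"host": "localhost", "port": 8443}
```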
diff --git a/.venv/lib/python3.12/site-packages/attrs/__init__.py b/.venv/lib/python3.12/site-packages/attrs/__init__.py new file mode 100644 index 00000000..963b1972 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attrs/__init__.py @@ -0,0 +1,67 @@ +# SPDX-License-Identifier: MIT + +from attr import ( + NOTHING, + Attribute, + AttrsInstance, + Converter, + Factory, + _make_getattr, + assoc, + cmp_using, + define, + evolve, + field, + fields, + fields_dict, + frozen, + has, + make_class, + mutable, + resolve_types, + validate, +) +from attr._next_gen import asdict, astuple + +from . import converters, exceptions, filters, setters, validators + + +__all__ = [ + "__author__", + "__copyright__", + "__description__", + "__doc__", + "__email__", + "__license__", + "__title__", + "__url__", + "__version__", + "__version_info__", + "asdict", + "assoc", + "astuple", + "Attribute", + "AttrsInstance", + "cmp_using", + "Converter", + "converters", + "define", + "evolve", + "exceptions", + "Factory", + "field", + "fields_dict", + "fields", + "filters", + "frozen", + "has", + "make_class", + "mutable", + "NOTHING", + "resolve_types", + "setters", + "validate", + "validators", +] + +__getattr__ = _make_getattr(__name__) diff --git a/.venv/lib/python3.12/site-packages/attrs/__init__.pyi b/.venv/lib/python3.12/site-packages/attrs/__init__.pyi new file mode 100644 index 00000000..b2670de2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attrs/__init__.pyi @@ -0,0 +1,252 @@ +import sys + +from typing import ( + Any, + Callable, + Mapping, + Sequence, + overload, + TypeVar, +) + +# Because we need to type our own stuff, we have to make everything from +# attr explicitly public too. +from attr import __author__ as __author__ +from attr import __copyright__ as __copyright__ +from attr import __description__ as __description__ +from attr import __email__ as __email__ +from attr import __license__ as __license__ +from attr import __title__ as __title__ +from attr import __url__ as __url__ +from attr import __version__ as __version__ +from attr import __version_info__ as __version_info__ +from attr import assoc as assoc +from attr import Attribute as Attribute +from attr import AttrsInstance as AttrsInstance +from attr import cmp_using as cmp_using +from attr import converters as converters +from attr import Converter as Converter +from attr import evolve as evolve +from attr import exceptions as exceptions +from attr import Factory as Factory +from attr import fields as fields +from attr import fields_dict as fields_dict +from attr import filters as filters +from attr import has as has +from attr import make_class as make_class +from attr import NOTHING as NOTHING +from attr import resolve_types as resolve_types +from attr import setters as setters +from attr import validate as validate +from attr import validators as validators +from attr import attrib, asdict as asdict, astuple as astuple + +if sys.version_info >= (3, 11): + from typing import dataclass_transform +else: + from typing_extensions import dataclass_transform + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_EqOrderType = bool | Callable[[Any], Any] +_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any] +_ConverterType = Callable[[Any], Any] +_ReprType = Callable[[Any], str] +_ReprArgType = bool | _ReprType +_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any] +_OnSetAttrArgType = _OnSetAttrType | list[_OnSetAttrType] | setters._NoOpType +_FieldTransformer = Callable[ + [type, list["Attribute[Any]"]], 
list["Attribute[Any]"] +] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = _ValidatorType[_T] | Sequence[_ValidatorType[_T]] + +@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., + type: type | None = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def field( + *, + default: None = ..., + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + converter: _ConverterType | Converter[Any, _T] | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., + type: type | None = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def field( + *, + default: _T, + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + converter: _ConverterType | Converter[Any, _T] | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., + type: type | None = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def field( + *, + default: _T | None = ..., + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + converter: _ConverterType | Converter[Any, _T] | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., + type: type | None = ..., +) -> Any: ... +@overload +@dataclass_transform(field_specifiers=(attrib, field)) +def define( + maybe_cls: _C, + *, + these: dict[str, Any] | None = ..., + repr: bool = ..., + unsafe_hash: bool | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + auto_detect: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., +) -> _C: ... 
+@overload +@dataclass_transform(field_specifiers=(attrib, field)) +def define( + maybe_cls: None = ..., + *, + these: dict[str, Any] | None = ..., + repr: bool = ..., + unsafe_hash: bool | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + auto_detect: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +mutable = define + +@overload +@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) +def frozen( + maybe_cls: _C, + *, + these: dict[str, Any] | None = ..., + repr: bool = ..., + unsafe_hash: bool | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + auto_detect: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) +def frozen( + maybe_cls: None = ..., + *, + these: dict[str, Any] | None = ..., + repr: bool = ..., + unsafe_hash: bool | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + auto_detect: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... 
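The stub above types each decorator twice: one overload for the bare `@define` form (`maybe_cls` is the class, returning `_C`) and one for the called `@define(...)` form (`maybe_cls` is `None`, returning a decorator), with `@dataclass_transform` telling type checkers that an `__init__` is synthesized from the declared fields. A hypothetical sketch of the two call forms (not from the stub file; the classes are invented):

```python
from attrs import define, frozen
from attrs.exceptions import FrozenInstanceError

@define  # bare form: first overload, returns the class itself
class Point:
    x: int
    y: int

@frozen()  # called form: second overload, returns a decorator
class Origin:
    x: int = 0
    y: int = 0

p = Point(1, 2)
o = Origin()
try:
    o.x = 5  # frozen classes reject attribute assignment
except FrozenInstanceError:
    pass
```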
diff --git a/.venv/lib/python3.12/site-packages/attrs/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attrs/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..b1362039 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attrs/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attrs/__pycache__/converters.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attrs/__pycache__/converters.cpython-312.pyc new file mode 100644 index 00000000..48a30261 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attrs/__pycache__/converters.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attrs/__pycache__/exceptions.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attrs/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 00000000..d11d6e41 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attrs/__pycache__/exceptions.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attrs/__pycache__/filters.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attrs/__pycache__/filters.cpython-312.pyc new file mode 100644 index 00000000..59e4a569 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attrs/__pycache__/filters.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attrs/__pycache__/setters.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attrs/__pycache__/setters.cpython-312.pyc new file mode 100644 index 00000000..d897692a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attrs/__pycache__/setters.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attrs/__pycache__/validators.cpython-312.pyc b/.venv/lib/python3.12/site-packages/attrs/__pycache__/validators.cpython-312.pyc new file mode 100644 index 00000000..e7456a79 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/attrs/__pycache__/validators.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/attrs/converters.py b/.venv/lib/python3.12/site-packages/attrs/converters.py new file mode 100644 index 00000000..7821f6c0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attrs/converters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.converters import * # noqa: F403 diff --git a/.venv/lib/python3.12/site-packages/attrs/exceptions.py b/.venv/lib/python3.12/site-packages/attrs/exceptions.py new file mode 100644 index 00000000..3323f9d2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attrs/exceptions.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.exceptions import * # noqa: F403 diff --git a/.venv/lib/python3.12/site-packages/attrs/filters.py b/.venv/lib/python3.12/site-packages/attrs/filters.py new file mode 100644 index 00000000..3080f483 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attrs/filters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.filters import * # noqa: F403 diff --git a/.venv/lib/python3.12/site-packages/attrs/py.typed b/.venv/lib/python3.12/site-packages/attrs/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/.venv/lib/python3.12/site-packages/attrs/setters.py b/.venv/lib/python3.12/site-packages/attrs/setters.py new file mode 100644 index 00000000..f3d73bb7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attrs/setters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.setters import * # noqa: F403 diff --git 
a/.venv/lib/python3.12/site-packages/attrs/validators.py b/.venv/lib/python3.12/site-packages/attrs/validators.py new file mode 100644 index 00000000..037e124f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/attrs/validators.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.validators import * # noqa: F403 diff --git a/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/LICENSE.rst b/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/LICENSE.rst new file mode 100644 index 00000000..d12a8491 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2014 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
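The click metadata that follows summarizes the package in three bullets. The first, arbitrary nesting of commands, works because a `Group` is itself a `Command`, so groups attach to groups. A small hypothetical sketch (the `cli`/`db`/`migrate` names are invented for illustration):

```python
import click

@click.group()
def cli():
    """Top-level entry point."""

@cli.group()
def db():
    """Database commands."""

@db.command()
@click.option("--dry-run", is_flag=True, help="Print planned actions only.")
def migrate(dry_run):
    """Run pending migrations."""
    click.echo(f"migrate(dry_run={dry_run})")

if __name__ == "__main__":
    cli()  # invoked as e.g.: python app.py db migrate --dry-run
```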
diff --git a/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/METADATA b/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/METADATA new file mode 100644 index 00000000..7a6bbb24 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/METADATA @@ -0,0 +1,103 @@ +Metadata-Version: 2.1 +Name: click +Version: 8.1.7 +Summary: Composable command line interface toolkit +Home-page: https://palletsprojects.com/p/click/ +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://click.palletsprojects.com/ +Project-URL: Changes, https://click.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/click/ +Project-URL: Issue Tracker, https://github.com/pallets/click/issues/ +Project-URL: Chat, https://discord.gg/pallets +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: colorama ; platform_system == "Windows" +Requires-Dist: importlib-metadata ; python_version < "3.8" + +\$ click\_ +========== + +Click is a Python package for creating beautiful command line interfaces +in a composable way with as little code as necessary. It's the "Command +Line Interface Creation Kit". It's highly configurable but comes with +sensible defaults out of the box. + +It aims to make the process of writing command line tools quick and fun +while also preventing any frustration caused by the inability to +implement an intended CLI API. + +Click in three points: + +- Arbitrary nesting of commands +- Automatic help page generation +- Support for lazy loading of subcommands at runtime + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U click + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. code-block:: python + + import click + + @click.command() + @click.option("--count", default=1, help="Number of greetings.") + @click.option("--name", prompt="Your name", help="The person to greet.") + def hello(count, name): + """Simple program that greets NAME for a total of COUNT times.""" + for _ in range(count): + click.echo(f"Hello, {name}!") + + if __name__ == '__main__': + hello() + +.. code-block:: text + + $ python hello.py --count=3 + Your name: Click + Hello, Click! + Hello, Click! + Hello, Click! + + +Donate +------ + +The Pallets organization develops and supports Click and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +..
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://click.palletsprojects.com/ +- Changes: https://click.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/click/ +- Source Code: https://github.com/pallets/click +- Issue Tracker: https://github.com/pallets/click/issues +- Chat: https://discord.gg/pallets diff --git a/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/RECORD b/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/RECORD new file mode 100644 index 00000000..497ee45a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/RECORD @@ -0,0 +1,39 @@ +click-8.1.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +click-8.1.7.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 +click-8.1.7.dist-info/METADATA,sha256=qIMevCxGA9yEmJOM_4WHuUJCwWpsIEVbCPOhs45YPN4,3014 +click-8.1.7.dist-info/RECORD,, +click-8.1.7.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92 +click-8.1.7.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 +click/__init__.py,sha256=YDDbjm406dTOA0V8bTtdGnhN7zj5j-_dFRewZF_pLvw,3138 +click/__pycache__/__init__.cpython-312.pyc,, +click/__pycache__/_compat.cpython-312.pyc,, +click/__pycache__/_termui_impl.cpython-312.pyc,, +click/__pycache__/_textwrap.cpython-312.pyc,, +click/__pycache__/_winconsole.cpython-312.pyc,, +click/__pycache__/core.cpython-312.pyc,, +click/__pycache__/decorators.cpython-312.pyc,, +click/__pycache__/exceptions.cpython-312.pyc,, +click/__pycache__/formatting.cpython-312.pyc,, +click/__pycache__/globals.cpython-312.pyc,, +click/__pycache__/parser.cpython-312.pyc,, +click/__pycache__/shell_completion.cpython-312.pyc,, +click/__pycache__/termui.cpython-312.pyc,, +click/__pycache__/testing.cpython-312.pyc,, +click/__pycache__/types.cpython-312.pyc,, +click/__pycache__/utils.cpython-312.pyc,, +click/_compat.py,sha256=5318agQpbt4kroKsbqDOYpTSWzL_YCZVUQiTT04yXmc,18744 +click/_termui_impl.py,sha256=3dFYv4445Nw-rFvZOTBMBPYwB1bxnmNk9Du6Dm_oBSU,24069 +click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353 +click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860 +click/core.py,sha256=j6oEWtGgGna8JarD6WxhXmNnxLnfRjwXglbBc-8jr7U,114086 +click/decorators.py,sha256=-ZlbGYgV-oI8jr_oH4RpuL1PFS-5QmeuEAsLDAYgxtw,18719 +click/exceptions.py,sha256=fyROO-47HWFDjt2qupo7A3J32VlpM-ovJnfowu92K3s,9273 +click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706 +click/globals.py,sha256=TP-qM88STzc7f127h35TD_v920FgfOD2EwzqA0oE8XU,1961 +click/parser.py,sha256=LKyYQE9ZLj5KgIDXkrcTHQRXIggfoivX14_UVIn56YA,19067 +click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +click/shell_completion.py,sha256=Ty3VM_ts0sQhj6u7eFTiLwHPoTgcXTGEAUg2OpLqYKw,18460 +click/termui.py,sha256=H7Q8FpmPelhJ2ovOhfCRhjMtCpNyjFXryAMLZODqsdc,28324 +click/testing.py,sha256=1Qd4kS5bucn1hsNIRryd0WtTMuCpkA93grkWxT8POsU,16084 +click/types.py,sha256=TZvz3hKvBztf-Hpa2enOmP4eznSPLzijjig5b_0XMxE,36391 +click/utils.py,sha256=1476UduUNY6UePGU4m18uzVHLt1sKM2PP3yWsQhbItM,20298 diff --git a/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/WHEEL new file mode 100644 index 00000000..2c08da08 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff 
--git a/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/top_level.txt new file mode 100644 index 00000000..dca9a909 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click-8.1.7.dist-info/top_level.txt @@ -0,0 +1 @@ +click diff --git a/.venv/lib/python3.12/site-packages/click/__init__.py b/.venv/lib/python3.12/site-packages/click/__init__.py new file mode 100644 index 00000000..9a1dab04 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/__init__.py @@ -0,0 +1,73 @@ +""" +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. +""" +from .core import Argument as Argument +from .core import BaseCommand as BaseCommand +from .core import Command as Command +from .core import CommandCollection as CommandCollection +from .core import Context as Context +from .core import Group as Group +from .core import MultiCommand as MultiCommand +from .core import Option as Option +from .core import Parameter as Parameter +from .decorators import argument as argument +from .decorators import command as command +from .decorators import confirmation_option as confirmation_option +from .decorators import group as group +from .decorators import help_option as help_option +from .decorators import make_pass_decorator as make_pass_decorator +from .decorators import option as option +from .decorators import pass_context as pass_context +from .decorators import pass_obj as pass_obj +from .decorators import password_option as password_option +from .decorators import version_option as version_option +from .exceptions import Abort as Abort +from .exceptions import BadArgumentUsage as BadArgumentUsage +from .exceptions import BadOptionUsage as BadOptionUsage +from .exceptions import BadParameter as BadParameter +from .exceptions import ClickException as ClickException +from .exceptions import FileError as FileError +from .exceptions import MissingParameter as MissingParameter +from .exceptions import NoSuchOption as NoSuchOption +from .exceptions import UsageError as UsageError +from .formatting import HelpFormatter as HelpFormatter +from .formatting import wrap_text as wrap_text +from .globals import get_current_context as get_current_context +from .parser import OptionParser as OptionParser +from .termui import clear as clear +from .termui import confirm as confirm +from .termui import echo_via_pager as echo_via_pager +from .termui import edit as edit +from .termui import getchar as getchar +from .termui import launch as launch +from .termui import pause as pause +from .termui import progressbar as progressbar +from .termui import prompt as prompt +from .termui import secho as secho +from .termui import style as style +from .termui import unstyle as unstyle +from .types import BOOL as BOOL +from .types import Choice as Choice +from .types import DateTime as DateTime +from .types import File as File +from .types import FLOAT as FLOAT +from .types import FloatRange as FloatRange +from .types import INT as INT +from .types import IntRange as IntRange +from .types import ParamType as ParamType +from .types import Path as Path +from .types import STRING as STRING +from .types import Tuple as Tuple +from .types import UNPROCESSED as UNPROCESSED +from .types import UUID as UUID +from .utils import echo as echo +from .utils import format_filename as format_filename +from 
.utils import get_app_dir as get_app_dir +from .utils import get_binary_stream as get_binary_stream +from .utils import get_text_stream as get_text_stream +from .utils import open_file as open_file + +__version__ = "8.1.7" diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..4db2e01a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc new file mode 100644 index 00000000..603e7d29 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc new file mode 100644 index 00000000..d895ab57 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc new file mode 100644 index 00000000..b09151ae Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc new file mode 100644 index 00000000..d5ea0afe Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc new file mode 100644 index 00000000..1e7c5f7a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc new file mode 100644 index 00000000..2017d3fc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 00000000..de93cad5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc new file mode 100644 index 00000000..4965a1d5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc new file mode 100644 index 00000000..60875062 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc 
differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc new file mode 100644 index 00000000..3da87c1f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc new file mode 100644 index 00000000..7b0d297c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc new file mode 100644 index 00000000..7a82430d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc new file mode 100644 index 00000000..9ee254f4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc new file mode 100644 index 00000000..4ffbbc94 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc new file mode 100644 index 00000000..07cbdc6d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click/_compat.py b/.venv/lib/python3.12/site-packages/click/_compat.py new file mode 100644 index 00000000..23f88665 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/_compat.py @@ -0,0 +1,623 @@ +import codecs +import io +import os +import re +import sys +import typing as t +from weakref import WeakKeyDictionary + +CYGWIN = sys.platform.startswith("cygwin") +WIN = sys.platform.startswith("win") +auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None +_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") + + +def _make_text_stream( + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if encoding is None: + encoding = get_best_encoding(stream) + if errors is None: + errors = "replace" + return _NonClosingTextIOWrapper( + stream, + encoding, + errors, + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def is_ascii_encoding(encoding: str) -> bool: + """Checks if a given encoding is ascii.""" + try: + return codecs.lookup(encoding).name == "ascii" + except LookupError: + return False + + +def get_best_encoding(stream: t.IO[t.Any]) -> str: + """Returns the default stream encoding if not found.""" + rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() + if is_ascii_encoding(rv): + return "utf-8" + return rv + + +class _NonClosingTextIOWrapper(io.TextIOWrapper): + def __init__( + self, + stream: t.BinaryIO, + 
encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, + **extra: t.Any, + ) -> None: + self._stream = stream = t.cast( + t.BinaryIO, _FixupStream(stream, force_readable, force_writable) + ) + super().__init__(stream, encoding, errors, **extra) + + def __del__(self) -> None: + try: + self.detach() + except Exception: + pass + + def isatty(self) -> bool: + # https://bitbucket.org/pypy/pypy/issue/1803 + return self._stream.isatty() + + +class _FixupStream: + """The new io interface needs more from streams than streams + traditionally implement. As such, this fix-up code is necessary in + some circumstances. + + The forcing of readable and writable flags are there because some tools + put badly patched objects on sys (one such offender are certain version + of jupyter notebook). + """ + + def __init__( + self, + stream: t.BinaryIO, + force_readable: bool = False, + force_writable: bool = False, + ): + self._stream = stream + self._force_readable = force_readable + self._force_writable = force_writable + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._stream, name) + + def read1(self, size: int) -> bytes: + f = getattr(self._stream, "read1", None) + + if f is not None: + return t.cast(bytes, f(size)) + + return self._stream.read(size) + + def readable(self) -> bool: + if self._force_readable: + return True + x = getattr(self._stream, "readable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.read(0) + except Exception: + return False + return True + + def writable(self) -> bool: + if self._force_writable: + return True + x = getattr(self._stream, "writable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.write("") # type: ignore + except Exception: + try: + self._stream.write(b"") + except Exception: + return False + return True + + def seekable(self) -> bool: + x = getattr(self._stream, "seekable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.seek(self._stream.tell()) + except Exception: + return False + return True + + +def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool: + try: + return isinstance(stream.read(0), bytes) + except Exception: + return default + # This happens in some cases where the stream was already + # closed. In this case, we assume the default. + + +def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool: + try: + stream.write(b"") + except Exception: + try: + stream.write("") + return False + except Exception: + pass + return default + return True + + +def _find_binary_reader(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_reader(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_reader(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _find_binary_writer(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. 
Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_writer(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_writer(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _stream_is_misconfigured(stream: t.TextIO) -> bool: + """A stream is misconfigured if its encoding is ASCII.""" + # If the stream does not have an encoding set, we assume it's set + # to ASCII. This appears to happen in certain unittest + # environments. It's not quite clear what the correct behavior is + # but this at least will force Click to recover somehow. + return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") + + +def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool: + """A stream attribute is compatible if it is equal to the + desired value or the desired value is unset and the attribute + has a value. + """ + stream_value = getattr(stream, attr, None) + return stream_value == value or (value is None and stream_value is not None) + + +def _is_compatible_text_stream( + stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> bool: + """Check if a stream's encoding and errors attributes are + compatible with the desired values. + """ + return _is_compat_stream_attr( + stream, "encoding", encoding + ) and _is_compat_stream_attr(stream, "errors", errors) + + +def _force_correct_text_stream( + text_stream: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + is_binary: t.Callable[[t.IO[t.Any], bool], bool], + find_binary: t.Callable[[t.IO[t.Any]], t.Optional[t.BinaryIO]], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if is_binary(text_stream, False): + binary_reader = t.cast(t.BinaryIO, text_stream) + else: + text_stream = t.cast(t.TextIO, text_stream) + # If the stream looks compatible, and won't default to a + # misconfigured ascii encoding, return it as-is. + if _is_compatible_text_stream(text_stream, encoding, errors) and not ( + encoding is None and _stream_is_misconfigured(text_stream) + ): + return text_stream + + # Otherwise, get the underlying binary reader. + possible_binary_reader = find_binary(text_stream) + + # If that's not possible, silently use the original reader + # and get mojibake instead of exceptions. + if possible_binary_reader is None: + return text_stream + + binary_reader = possible_binary_reader + + # Default errors to replace instead of strict in order to get + # something that works. + if errors is None: + errors = "replace" + + # Wrap the binary stream in a text stream with the correct + # encoding parameters. 
+ return _make_text_stream( + binary_reader, + encoding, + errors, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def _force_correct_text_reader( + text_reader: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_reader, + encoding, + errors, + _is_binary_reader, + _find_binary_reader, + force_readable=force_readable, + ) + + +def _force_correct_text_writer( + text_writer: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + force_writable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_writer, + encoding, + errors, + _is_binary_writer, + _find_binary_writer, + force_writable=force_writable, + ) + + +def get_binary_stdin() -> t.BinaryIO: + reader = _find_binary_reader(sys.stdin) + if reader is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdin.") + return reader + + +def get_binary_stdout() -> t.BinaryIO: + writer = _find_binary_writer(sys.stdout) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdout.") + return writer + + +def get_binary_stderr() -> t.BinaryIO: + writer = _find_binary_writer(sys.stderr) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stderr.") + return writer + + +def get_text_stdin( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) + + +def get_text_stdout( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) + + +def get_text_stderr( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) + + +def _wrap_io_open( + file: t.Union[str, "os.PathLike[str]", int], + mode: str, + encoding: t.Optional[str], + errors: t.Optional[str], +) -> t.IO[t.Any]: + """Handles not passing ``encoding`` and ``errors`` in binary mode.""" + if "b" in mode: + return open(file, mode) + + return open(file, mode, encoding=encoding, errors=errors) + + +def open_stream( + filename: "t.Union[str, os.PathLike[str]]", + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, +) -> t.Tuple[t.IO[t.Any], bool]: + binary = "b" in mode + filename = os.fspath(filename) + + # Standard streams first. These are simple because they ignore the + # atomic flag. Use fsdecode to handle Path("-"). + if os.fsdecode(filename) == "-": + if any(m in mode for m in ["w", "a", "x"]): + if binary: + return get_binary_stdout(), False + return get_text_stdout(encoding=encoding, errors=errors), False + if binary: + return get_binary_stdin(), False + return get_text_stdin(encoding=encoding, errors=errors), False + + # Non-atomic writes directly go out through the regular open functions. 
+ if not atomic: + return _wrap_io_open(filename, mode, encoding, errors), True + + # Some usability stuff for atomic writes + if "a" in mode: + raise ValueError( + "Appending to an existing file is not supported, because that" + " would involve an expensive `copy`-operation to a temporary" + " file. Open the file in normal `w`-mode and copy explicitly" + " if that's what you're after." + ) + if "x" in mode: + raise ValueError("Use the `overwrite`-parameter instead.") + if "w" not in mode: + raise ValueError("Atomic writes only make sense with `w`-mode.") + + # Atomic writes are more complicated. They work by opening a file + # as a proxy in the same folder and then using the fdopen + # functionality to wrap it in a Python file. Then we wrap it in an + # atomic file that moves the file over on close. + import errno + import random + + try: + perm: t.Optional[int] = os.stat(filename).st_mode + except OSError: + perm = None + + flags = os.O_RDWR | os.O_CREAT | os.O_EXCL + + if binary: + flags |= getattr(os, "O_BINARY", 0) + + while True: + tmp_filename = os.path.join( + os.path.dirname(filename), + f".__atomic-write{random.randrange(1 << 32):08x}", + ) + try: + fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) + break + except OSError as e: + if e.errno == errno.EEXIST or ( + os.name == "nt" + and e.errno == errno.EACCES + and os.path.isdir(e.filename) + and os.access(e.filename, os.W_OK) + ): + continue + raise + + if perm is not None: + os.chmod(tmp_filename, perm) # in case perm includes bits in umask + + f = _wrap_io_open(fd, mode, encoding, errors) + af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) + return t.cast(t.IO[t.Any], af), True + + +class _AtomicFile: + def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None: + self._f = f + self._tmp_filename = tmp_filename + self._real_filename = real_filename + self.closed = False + + @property + def name(self) -> str: + return self._real_filename + + def close(self, delete: bool = False) -> None: + if self.closed: + return + self._f.close() + os.replace(self._tmp_filename, self._real_filename) + self.closed = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._f, name) + + def __enter__(self) -> "_AtomicFile": + return self + + def __exit__(self, exc_type: t.Optional[t.Type[BaseException]], *_: t.Any) -> None: + self.close(delete=exc_type is not None) + + def __repr__(self) -> str: + return repr(self._f) + + +def strip_ansi(value: str) -> str: + return _ansi_re.sub("", value) + + +def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool: + while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): + stream = stream._stream + + return stream.__class__.__module__.startswith("ipykernel.") + + +def should_strip_ansi( + stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None +) -> bool: + if color is None: + if stream is None: + stream = sys.stdin + return not isatty(stream) and not _is_jupyter_kernel_output(stream) + return not color + + +# On Windows, wrap the output streams with colorama to support ANSI +# color codes. 
+# NOTE: double check is needed so mypy does not analyze this on Linux +if sys.platform.startswith("win") and WIN: + from ._winconsole import _get_windows_console_stream + + def _get_argv_encoding() -> str: + import locale + + return locale.getpreferredencoding() + + _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def auto_wrap_for_ansi( # noqa: F811 + stream: t.TextIO, color: t.Optional[bool] = None + ) -> t.TextIO: + """Support ANSI color and style codes on Windows by wrapping a + stream with colorama. + """ + try: + cached = _ansi_stream_wrappers.get(stream) + except Exception: + cached = None + + if cached is not None: + return cached + + import colorama + + strip = should_strip_ansi(stream, color) + ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) + rv = t.cast(t.TextIO, ansi_wrapper.stream) + _write = rv.write + + def _safe_write(s): + try: + return _write(s) + except BaseException: + ansi_wrapper.reset_all() + raise + + rv.write = _safe_write + + try: + _ansi_stream_wrappers[stream] = rv + except Exception: + pass + + return rv + +else: + + def _get_argv_encoding() -> str: + return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding() + + def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] + ) -> t.Optional[t.TextIO]: + return None + + +def term_len(x: str) -> int: + return len(strip_ansi(x)) + + +def isatty(stream: t.IO[t.Any]) -> bool: + try: + return stream.isatty() + except Exception: + return False + + +def _make_cached_stream_func( + src_func: t.Callable[[], t.Optional[t.TextIO]], + wrapper_func: t.Callable[[], t.TextIO], +) -> t.Callable[[], t.Optional[t.TextIO]]: + cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def func() -> t.Optional[t.TextIO]: + stream = src_func() + + if stream is None: + return None + + try: + rv = cache.get(stream) + except Exception: + rv = None + if rv is not None: + return rv + rv = wrapper_func() + try: + cache[stream] = rv + except Exception: + pass + return rv + + return func + + +_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) +_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) +_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) + + +binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = { + "stdin": get_binary_stdin, + "stdout": get_binary_stdout, + "stderr": get_binary_stderr, +} + +text_streams: t.Mapping[ + str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO] +] = { + "stdin": get_text_stdin, + "stdout": get_text_stdout, + "stderr": get_text_stderr, +} diff --git a/.venv/lib/python3.12/site-packages/click/_termui_impl.py b/.venv/lib/python3.12/site-packages/click/_termui_impl.py new file mode 100644 index 00000000..f7446577 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/_termui_impl.py @@ -0,0 +1,739 @@ +""" +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. 
+""" +import contextlib +import math +import os +import sys +import time +import typing as t +from gettext import gettext as _ +from io import StringIO +from types import TracebackType + +from ._compat import _default_text_stdout +from ._compat import CYGWIN +from ._compat import get_best_encoding +from ._compat import isatty +from ._compat import open_stream +from ._compat import strip_ansi +from ._compat import term_len +from ._compat import WIN +from .exceptions import ClickException +from .utils import echo + +V = t.TypeVar("V") + +if os.name == "nt": + BEFORE_BAR = "\r" + AFTER_BAR = "\n" +else: + BEFORE_BAR = "\r\033[?25l" + AFTER_BAR = "\033[?25h\n" + + +class ProgressBar(t.Generic[V]): + def __init__( + self, + iterable: t.Optional[t.Iterable[V]], + length: t.Optional[int] = None, + fill_char: str = "#", + empty_char: str = " ", + bar_template: str = "%(bar)s", + info_sep: str = " ", + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + label: t.Optional[str] = None, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, + width: int = 30, + ) -> None: + self.fill_char = fill_char + self.empty_char = empty_char + self.bar_template = bar_template + self.info_sep = info_sep + self.show_eta = show_eta + self.show_percent = show_percent + self.show_pos = show_pos + self.item_show_func = item_show_func + self.label: str = label or "" + + if file is None: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + file = StringIO() + + self.file = file + self.color = color + self.update_min_steps = update_min_steps + self._completed_intervals = 0 + self.width: int = width + self.autowidth: bool = width == 0 + + if length is None: + from operator import length_hint + + length = length_hint(iterable, -1) + + if length == -1: + length = None + if iterable is None: + if length is None: + raise TypeError("iterable or length is required") + iterable = t.cast(t.Iterable[V], range(length)) + self.iter: t.Iterable[V] = iter(iterable) + self.length = length + self.pos = 0 + self.avg: t.List[float] = [] + self.last_eta: float + self.start: float + self.start = self.last_eta = time.time() + self.eta_known: bool = False + self.finished: bool = False + self.max_width: t.Optional[int] = None + self.entered: bool = False + self.current_item: t.Optional[V] = None + self.is_hidden: bool = not isatty(self.file) + self._last_line: t.Optional[str] = None + + def __enter__(self) -> "ProgressBar[V]": + self.entered = True + self.render_progress() + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.render_finish() + + def __iter__(self) -> t.Iterator[V]: + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + self.render_progress() + return self.generator() + + def __next__(self) -> V: + # Iteration is defined in terms of a generator function, + # returned by iter(self); use that to define next(). This works + # because `self.iter` is an iterable consumed by that generator, + # so it is re-entry safe. Calling `next(self.generator())` + # twice works and does "what you want". 
+ return next(iter(self)) + + def render_finish(self) -> None: + if self.is_hidden: + return + self.file.write(AFTER_BAR) + self.file.flush() + + @property + def pct(self) -> float: + if self.finished: + return 1.0 + return min(self.pos / (float(self.length or 1) or 1), 1.0) + + @property + def time_per_iteration(self) -> float: + if not self.avg: + return 0.0 + return sum(self.avg) / float(len(self.avg)) + + @property + def eta(self) -> float: + if self.length is not None and not self.finished: + return self.time_per_iteration * (self.length - self.pos) + return 0.0 + + def format_eta(self) -> str: + if self.eta_known: + t = int(self.eta) + seconds = t % 60 + t //= 60 + minutes = t % 60 + t //= 60 + hours = t % 24 + t //= 24 + if t > 0: + return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" + else: + return f"{hours:02}:{minutes:02}:{seconds:02}" + return "" + + def format_pos(self) -> str: + pos = str(self.pos) + if self.length is not None: + pos += f"/{self.length}" + return pos + + def format_pct(self) -> str: + return f"{int(self.pct * 100): 4}%"[1:] + + def format_bar(self) -> str: + if self.length is not None: + bar_length = int(self.pct * self.width) + bar = self.fill_char * bar_length + bar += self.empty_char * (self.width - bar_length) + elif self.finished: + bar = self.fill_char * self.width + else: + chars = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + chars[ + int( + (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) + * self.width + ) + ] = self.fill_char + bar = "".join(chars) + return bar + + def format_progress_line(self) -> str: + show_percent = self.show_percent + + info_bits = [] + if self.length is not None and show_percent is None: + show_percent = not self.show_pos + + if self.show_pos: + info_bits.append(self.format_pos()) + if show_percent: + info_bits.append(self.format_pct()) + if self.show_eta and self.eta_known and not self.finished: + info_bits.append(self.format_eta()) + if self.item_show_func is not None: + item_info = self.item_show_func(self.current_item) + if item_info is not None: + info_bits.append(item_info) + + return ( + self.bar_template + % { + "label": self.label, + "bar": self.format_bar(), + "info": self.info_sep.join(info_bits), + } + ).rstrip() + + def render_progress(self) -> None: + import shutil + + if self.is_hidden: + # Only output the label as it changes if the output is not a + # TTY. Use file=stderr if you expect to be piping stdout. + if self._last_line != self.label: + self._last_line = self.label + echo(self.label, file=self.file, color=self.color) + + return + + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, shutil.get_terminal_size().columns - clutter_length) + if new_width < old_width: + buf.append(BEFORE_BAR) + buf.append(" " * self.max_width) # type: ignore + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(" " * (clear_width - line_len)) + line = "".join(buf) + # Render the line only if it changed. 
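The estimate behind the eta and format_eta code above is a windowed mean: make_step, further below, keeps the last seven samples of seconds-per-item in self.avg, and the remaining time is that mean times the items left. The same estimator in isolation (names here are illustrative, not Click's):

import statistics

def eta_seconds(samples: list, remaining: int) -> float:
    # samples: recent seconds-per-item measurements, newest last.
    window = samples[-7:]
    if not window:
        return 0.0
    return statistics.fmean(window) * remaining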
+ + if line != self._last_line: + self._last_line = line + echo(line, file=self.file, color=self.color, nl=False) + self.file.flush() + + def make_step(self, n_steps: int) -> None: + self.pos += n_steps + if self.length is not None and self.pos >= self.length: + self.finished = True + + if (time.time() - self.last_eta) < 1.0: + return + + self.last_eta = time.time() + + # self.avg is a rolling list of length <= 7 of steps where steps are + # defined as time elapsed divided by the total progress through + # self.length. + if self.pos: + step = (time.time() - self.start) / self.pos + else: + step = time.time() - self.start + + self.avg = self.avg[-6:] + [step] + + self.eta_known = self.length is not None + + def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None: + """Update the progress bar by advancing a specified number of + steps, and optionally set the ``current_item`` for this new + position. + + :param n_steps: Number of steps to advance. + :param current_item: Optional item to set as ``current_item`` + for the updated position. + + .. versionchanged:: 8.0 + Added the ``current_item`` optional parameter. + + .. versionchanged:: 8.0 + Only render when the number of steps meets the + ``update_min_steps`` threshold. + """ + if current_item is not None: + self.current_item = current_item + + self._completed_intervals += n_steps + + if self._completed_intervals >= self.update_min_steps: + self.make_step(self._completed_intervals) + self.render_progress() + self._completed_intervals = 0 + + def finish(self) -> None: + self.eta_known = False + self.current_item = None + self.finished = True + + def generator(self) -> t.Iterator[V]: + """Return a generator which yields the items added to the bar + during construction, and updates the progress bar *after* the + yielded block returns. + """ + # WARNING: the iterator interface for `ProgressBar` relies on + # this and only works because this is a simple generator which + # doesn't create or manage additional state. If this function + # changes, the impact should be evaluated both against + # `iter(bar)` and `next(bar)`. `next()` in particular may call + # `self.generator()` repeatedly, and this must remain safe in + # order for that interface to work. + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + + if self.is_hidden: + yield from self.iter + else: + for rv in self.iter: + self.current_item = rv + + # This allows show_item_func to be updated before the + # item is processed. Only trigger at the beginning of + # the update interval. + if self._completed_intervals == 0: + self.render_progress() + + yield rv + self.update(1) + + self.finish() + self.render_progress() + + +def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None: + """Decide what method to use for paging through text.""" + stdout = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. 
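pager() and the helper strategies it dispatches to sit behind the public click.echo_via_pager() helper. A typical call passes a generator so the text is produced lazily while the pager consumes it:

import click

def show_report(rows) -> None:
    click.echo_via_pager(f"{row}\n" for row in rows)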
+    if stdout is None:
+        stdout = StringIO()
+
+    if not isatty(sys.stdin) or not isatty(stdout):
+        return _nullpager(stdout, generator, color)
+    pager_cmd = (os.environ.get("PAGER", None) or "").strip()
+    if pager_cmd:
+        if WIN:
+            return _tempfilepager(generator, pager_cmd, color)
+        return _pipepager(generator, pager_cmd, color)
+    if os.environ.get("TERM") in ("dumb", "emacs"):
+        return _nullpager(stdout, generator, color)
+    if WIN or sys.platform.startswith("os2"):
+        return _tempfilepager(generator, "more <", color)
+    if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0:
+        return _pipepager(generator, "less", color)
+
+    import tempfile
+
+    fd, filename = tempfile.mkstemp()
+    os.close(fd)
+    try:
+        if hasattr(os, "system") and os.system(f'more "{filename}"') == 0:
+            return _pipepager(generator, "more", color)
+        return _nullpager(stdout, generator, color)
+    finally:
+        os.unlink(filename)
+
+
+def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> None:
+    """Page through text by feeding it to another program. Invoking a
+    pager through this might support colors.
+    """
+    import subprocess
+
+    env = dict(os.environ)
+
+    # If we're piping to less we might support colors under the
+    # condition that the pager passes ANSI sequences through: either the
+    # user's LESS flags already request raw output (-r/-R), or no flags
+    # are set and we can inject -R ourselves.
+    cmd_detail = cmd.rsplit("/", 1)[-1].split()
+    if color is None and cmd_detail[0] == "less":
+        less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}"
+        if not less_flags:
+            env["LESS"] = "-R"
+            color = True
+        elif "r" in less_flags or "R" in less_flags:
+            color = True
+
+    c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env)
+    stdin = t.cast(t.BinaryIO, c.stdin)
+    encoding = get_best_encoding(stdin)
+    try:
+        for text in generator:
+            if not color:
+                text = strip_ansi(text)
+
+            stdin.write(text.encode(encoding, "replace"))
+    except (OSError, KeyboardInterrupt):
+        pass
+    else:
+        stdin.close()
+
+    # Less doesn't respect ^C, but catches it for its own UI purposes (aborting
+    # search or other commands inside less).
+    #
+    # That means when the user hits ^C, the parent process (click) terminates,
+    # but less is still alive, paging the output and messing up the terminal.
+    #
+    # If the user wants to make the pager exit on ^C, they should set
+    # `LESS='-K'`. It's not our decision to make.
+    while True:
+        try:
+            c.wait()
+        except KeyboardInterrupt:
+            pass
+        else:
+            break
+
+
+def _tempfilepager(
+    generator: t.Iterable[str], cmd: str, color: t.Optional[bool]
+) -> None:
+    """Page through text by invoking a program on a temporary file."""
+    import tempfile
+
+    fd, filename = tempfile.mkstemp()
+    # TODO: This never terminates if the passed generator never terminates.
+    text = "".join(generator)
+    if not color:
+        text = strip_ansi(text)
+    encoding = get_best_encoding(sys.stdout)
+    with open_stream(filename, "wb")[0] as f:
+        f.write(text.encode(encoding))
+    try:
+        os.system(f'{cmd} "{filename}"')
+    finally:
+        os.close(fd)
+        os.unlink(filename)
+
+
+def _nullpager(
+    stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool]
+) -> None:
+    """Simply print unformatted text. 
This is the ultimate fallback.""" + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) + + +class Editor: + def __init__( + self, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + ) -> None: + self.editor = editor + self.env = env + self.require_save = require_save + self.extension = extension + + def get_editor(self) -> str: + if self.editor is not None: + return self.editor + for key in "VISUAL", "EDITOR": + rv = os.environ.get(key) + if rv: + return rv + if WIN: + return "notepad" + for editor in "sensible-editor", "vim", "nano": + if os.system(f"which {editor} >/dev/null 2>&1") == 0: + return editor + return "vi" + + def edit_file(self, filename: str) -> None: + import subprocess + + editor = self.get_editor() + environ: t.Optional[t.Dict[str, str]] = None + + if self.env: + environ = os.environ.copy() + environ.update(self.env) + + try: + c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True) + exit_code = c.wait() + if exit_code != 0: + raise ClickException( + _("{editor}: Editing failed").format(editor=editor) + ) + except OSError as e: + raise ClickException( + _("{editor}: Editing failed: {e}").format(editor=editor, e=e) + ) from e + + def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]: + import tempfile + + if not text: + data = b"" + elif isinstance(text, (bytes, bytearray)): + data = text + else: + if text and not text.endswith("\n"): + text += "\n" + + if WIN: + data = text.replace("\n", "\r\n").encode("utf-8-sig") + else: + data = text.encode("utf-8") + + fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) + f: t.BinaryIO + + try: + with os.fdopen(fd, "wb") as f: + f.write(data) + + # If the filesystem resolution is 1 second, like Mac OS + # 10.12 Extended, or 2 seconds, like FAT32, and the editor + # closes very fast, require_save can fail. Set the modified + # time to be 2 seconds in the past to work around this. + os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) + # Depending on the resolution, the exact value might not be + # recorded, so get the new recorded value. 
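The Editor class backs the public click.edit() helper. A common pattern, close to what version-control tools do for commit messages (get_commit_message is illustrative):

import click

def get_commit_message() -> str:
    marker = "# Everything below this line is ignored\n"
    text = click.edit("\n\n" + marker)
    if text is None:  # editor closed without saving (require_save=True)
        raise click.Abort()
    return text.split(marker, 1)[0].rstrip("\n")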
+ timestamp = os.path.getmtime(name) + + self.edit_file(name) + + if self.require_save and os.path.getmtime(name) == timestamp: + return None + + with open(name, "rb") as f: + rv = f.read() + + if isinstance(text, (bytes, bytearray)): + return rv + + return rv.decode("utf-8-sig").replace("\r\n", "\n") # type: ignore + finally: + os.unlink(name) + + +def open_url(url: str, wait: bool = False, locate: bool = False) -> int: + import subprocess + + def _unquote_file(url: str) -> str: + from urllib.parse import unquote + + if url.startswith("file://"): + url = unquote(url[7:]) + + return url + + if sys.platform == "darwin": + args = ["open"] + if wait: + args.append("-W") + if locate: + args.append("-R") + args.append(_unquote_file(url)) + null = open("/dev/null", "w") + try: + return subprocess.Popen(args, stderr=null).wait() + finally: + null.close() + elif WIN: + if locate: + url = _unquote_file(url.replace('"', "")) + args = f'explorer /select,"{url}"' + else: + url = url.replace('"', "") + wait_str = "/WAIT" if wait else "" + args = f'start {wait_str} "" "{url}"' + return os.system(args) + elif CYGWIN: + if locate: + url = os.path.dirname(_unquote_file(url).replace('"', "")) + args = f'cygstart "{url}"' + else: + url = url.replace('"', "") + wait_str = "-w" if wait else "" + args = f'cygstart {wait_str} "{url}"' + return os.system(args) + + try: + if locate: + url = os.path.dirname(_unquote_file(url)) or "." + else: + url = _unquote_file(url) + c = subprocess.Popen(["xdg-open", url]) + if wait: + return c.wait() + return 0 + except OSError: + if url.startswith(("http://", "https://")) and not locate and not wait: + import webbrowser + + webbrowser.open(url) + return 0 + return 1 + + +def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]: + if ch == "\x03": + raise KeyboardInterrupt() + + if ch == "\x04" and not WIN: # Unix-like, Ctrl+D + raise EOFError() + + if ch == "\x1a" and WIN: # Windows, Ctrl+Z + raise EOFError() + + return None + + +if WIN: + import msvcrt + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + yield -1 + + def getchar(echo: bool) -> str: + # The function `getch` will return a bytes object corresponding to + # the pressed character. Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. + # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. 
Another option is using `getch`, but then we can't reliably
+    # read non-ASCII characters, because return values of `getch` are
+    # limited to the current 8-bit codepage.
+    #
+    # Anyway, Click doesn't claim to do this Right(tm), and using `getwch`
+    # is doing the right thing in more situations than with `getch`.
+    func: t.Callable[[], str]
+
+    if echo:
+        func = msvcrt.getwche  # type: ignore
+    else:
+        func = msvcrt.getwch  # type: ignore
+
+    rv = func()
+
+    if rv in ("\x00", "\xe0"):
+        # \x00 and \xe0 are control characters that indicate special key,
+        # see above.
+        rv += func()
+
+    _translate_ch_to_exc(rv)
+    return rv
+
+else:
+    import tty
+    import termios
+
+    @contextlib.contextmanager
+    def raw_terminal() -> t.Iterator[int]:
+        f: t.Optional[t.TextIO]
+        fd: int
+
+        if not isatty(sys.stdin):
+            f = open("/dev/tty")
+            fd = f.fileno()
+        else:
+            fd = sys.stdin.fileno()
+            f = None
+
+        try:
+            old_settings = termios.tcgetattr(fd)
+
+            try:
+                tty.setraw(fd)
+                yield fd
+            finally:
+                termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+                sys.stdout.flush()
+
+                if f is not None:
+                    f.close()
+        except termios.error:
+            pass
+
+    def getchar(echo: bool) -> str:
+        with raw_terminal() as fd:
+            ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace")
+
+            if echo and isatty(sys.stdout):
+                sys.stdout.write(ch)
+
+        _translate_ch_to_exc(ch)
+        return ch
diff --git a/.venv/lib/python3.12/site-packages/click/_textwrap.py b/.venv/lib/python3.12/site-packages/click/_textwrap.py
new file mode 100644
index 00000000..b47dcbd4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/click/_textwrap.py
@@ -0,0 +1,49 @@
+import textwrap
+import typing as t
+from contextlib import contextmanager
+
+
+class TextWrapper(textwrap.TextWrapper):
+    def _handle_long_word(
+        self,
+        reversed_chunks: t.List[str],
+        cur_line: t.List[str],
+        cur_len: int,
+        width: int,
+    ) -> None:
+        space_left = max(width - cur_len, 1)
+
+        if self.break_long_words:
+            last = reversed_chunks[-1]
+            cut = last[:space_left]
+            res = last[space_left:]
+            cur_line.append(cut)
+            reversed_chunks[-1] = res
+        elif not cur_line:
+            cur_line.append(reversed_chunks.pop())
+
+    @contextmanager
+    def extra_indent(self, indent: str) -> t.Iterator[None]:
+        old_initial_indent = self.initial_indent
+        old_subsequent_indent = self.subsequent_indent
+        self.initial_indent += indent
+        self.subsequent_indent += indent
+
+        try:
+            yield
+        finally:
+            self.initial_indent = old_initial_indent
+            self.subsequent_indent = old_subsequent_indent
+
+    def indent_only(self, text: str) -> str:
+        rv = []
+
+        for idx, line in enumerate(text.splitlines()):
+            indent = self.initial_indent
+
+            if idx > 0:
+                indent = self.subsequent_indent
+
+            rv.append(f"{indent}{line}")
+
+        return "\n".join(rv)
diff --git a/.venv/lib/python3.12/site-packages/click/_winconsole.py b/.venv/lib/python3.12/site-packages/click/_winconsole.py
new file mode 100644
index 00000000..6b20df31
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/click/_winconsole.py
@@ -0,0 +1,279 @@
+# This module is based on the excellent work by Adam Bartoš who
+# provided a lot of what went into the implementation here in
+# the discussion to issue1602 in the Python bug tracker.
+#
+# There are some general differences in regards to how this works
+# compared to the original patches as we do not need to patch
+# the entire interpreter but just work in our little world of
+# echo and prompt. 
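The getchar()/raw_terminal() pair defined just above is surfaced as the public click.getchar(), which reads a single keypress without waiting for Enter:

import click

def confirm_overwrite() -> bool:
    click.echo("Overwrite? [y/n] ", nl=False)
    ch = click.getchar()
    click.echo(ch)
    return ch.lower() == "y"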
+import io +import sys +import time +import typing as t +from ctypes import byref +from ctypes import c_char +from ctypes import c_char_p +from ctypes import c_int +from ctypes import c_ssize_t +from ctypes import c_ulong +from ctypes import c_void_p +from ctypes import POINTER +from ctypes import py_object +from ctypes import Structure +from ctypes.wintypes import DWORD +from ctypes.wintypes import HANDLE +from ctypes.wintypes import LPCWSTR +from ctypes.wintypes import LPWSTR + +from ._compat import _NonClosingTextIOWrapper + +assert sys.platform == "win32" +import msvcrt # noqa: E402 +from ctypes import windll # noqa: E402 +from ctypes import WINFUNCTYPE # noqa: E402 + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetConsoleMode = kernel32.GetConsoleMode +GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ("CommandLineToArgvW", windll.shell32) +) +LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) + +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b"\x1a" +MAX_BYTES_WRITTEN = 32767 + +try: + from ctypes import pythonapi +except ImportError: + # On PyPy we cannot get buffers so our ability to operate here is + # severely limited. + get_buffer = None +else: + + class Py_buffer(Structure): + _fields_ = [ + ("buf", c_void_p), + ("obj", py_object), + ("len", c_ssize_t), + ("itemsize", c_ssize_t), + ("readonly", c_int), + ("ndim", c_int), + ("format", c_char_p), + ("shape", c_ssize_p), + ("strides", c_ssize_p), + ("suboffsets", c_ssize_p), + ("internal", c_void_p), + ] + + PyObject_GetBuffer = pythonapi.PyObject_GetBuffer + PyBuffer_Release = pythonapi.PyBuffer_Release + + def get_buffer(obj, writable=False): + buf = Py_buffer() + flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE + PyObject_GetBuffer(py_object(obj), byref(buf), flags) + + try: + buffer_type = c_char * buf.len + return buffer_type.from_address(buf.buf) + finally: + PyBuffer_Release(byref(buf)) + + +class _WindowsConsoleRawIOBase(io.RawIOBase): + def __init__(self, handle): + self.handle = handle + + def isatty(self): + super().isatty() + return True + + +class _WindowsConsoleReader(_WindowsConsoleRawIOBase): + def readable(self): + return True + + def readinto(self, b): + bytes_to_be_read = len(b) + if not bytes_to_be_read: + return 0 + elif bytes_to_be_read % 2: + raise ValueError( + "cannot read odd number of bytes from UTF-16-LE encoded console" + ) + + buffer = get_buffer(b, writable=True) + code_units_to_be_read = bytes_to_be_read // 2 + code_units_read = c_ulong() + + rv = ReadConsoleW( + HANDLE(self.handle), + buffer, + code_units_to_be_read, + byref(code_units_read), + None, + ) + if GetLastError() == ERROR_OPERATION_ABORTED: + # wait for KeyboardInterrupt + time.sleep(0.1) + if not rv: + raise OSError(f"Windows error: {GetLastError()}") + + if buffer[0] == EOF: + return 0 + return 2 * code_units_read.value + + +class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): + def writable(self): + return True + + @staticmethod + def _get_error_message(errno): + if errno == ERROR_SUCCESS: + 
return "ERROR_SUCCESS" + elif errno == ERROR_NOT_ENOUGH_MEMORY: + return "ERROR_NOT_ENOUGH_MEMORY" + return f"Windows error {errno}" + + def write(self, b): + bytes_to_be_written = len(b) + buf = get_buffer(b) + code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 + code_units_written = c_ulong() + + WriteConsoleW( + HANDLE(self.handle), + buf, + code_units_to_be_written, + byref(code_units_written), + None, + ) + bytes_written = 2 * code_units_written.value + + if bytes_written == 0 and bytes_to_be_written > 0: + raise OSError(self._get_error_message(GetLastError())) + return bytes_written + + +class ConsoleStream: + def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: + self._text_stream = text_stream + self.buffer = byte_stream + + @property + def name(self) -> str: + return self.buffer.name + + def write(self, x: t.AnyStr) -> int: + if isinstance(x, str): + return self._text_stream.write(x) + try: + self.flush() + except Exception: + pass + return self.buffer.write(x) + + def writelines(self, lines: t.Iterable[t.AnyStr]) -> None: + for line in lines: + self.write(line) + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._text_stream, name) + + def isatty(self) -> bool: + return self.buffer.isatty() + + def __repr__(self): + return f"" + + +def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { + 0: _get_text_stdin, + 1: _get_text_stdout, + 2: _get_text_stderr, +} + + +def _is_console(f: t.TextIO) -> bool: + if not hasattr(f, "fileno"): + return False + + try: + fileno = f.fileno() + except (OSError, io.UnsupportedOperation): + return False + + handle = msvcrt.get_osfhandle(fileno) + return bool(GetConsoleMode(handle, byref(DWORD()))) + + +def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> t.Optional[t.TextIO]: + if ( + get_buffer is not None + and encoding in {"utf-16-le", None} + and errors in {"strict", None} + and _is_console(f) + ): + func = _stream_factories.get(f.fileno()) + if func is not None: + b = getattr(f, "buffer", None) + + if b is None: + return None + + return func(b) diff --git a/.venv/lib/python3.12/site-packages/click/core.py b/.venv/lib/python3.12/site-packages/click/core.py new file mode 100644 index 00000000..cc65e896 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/core.py @@ -0,0 +1,3042 @@ +import enum +import errno +import inspect +import os +import sys +import typing as t +from collections import abc +from contextlib import contextmanager +from contextlib import ExitStack +from functools import update_wrapper +from gettext import gettext as _ +from gettext import 
ngettext +from itertools import repeat +from types import TracebackType + +from . import types +from .exceptions import Abort +from .exceptions import BadParameter +from .exceptions import ClickException +from .exceptions import Exit +from .exceptions import MissingParameter +from .exceptions import UsageError +from .formatting import HelpFormatter +from .formatting import join_options +from .globals import pop_context +from .globals import push_context +from .parser import _flag_needs_value +from .parser import OptionParser +from .parser import split_opt +from .termui import confirm +from .termui import prompt +from .termui import style +from .utils import _detect_program_name +from .utils import _expand_args +from .utils import echo +from .utils import make_default_short_help +from .utils import make_str +from .utils import PacifyFlushWrapper + +if t.TYPE_CHECKING: + import typing_extensions as te + from .shell_completion import CompletionItem + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +V = t.TypeVar("V") + + +def _complete_visible_commands( + ctx: "Context", incomplete: str +) -> t.Iterator[t.Tuple[str, "Command"]]: + """List all the subcommands of a group that start with the + incomplete value and aren't hidden. + + :param ctx: Invocation context for the group. + :param incomplete: Value being completed. May be empty. + """ + multi = t.cast(MultiCommand, ctx.command) + + for name in multi.list_commands(ctx): + if name.startswith(incomplete): + command = multi.get_command(ctx, name) + + if command is not None and not command.hidden: + yield name, command + + +def _check_multicommand( + base_command: "MultiCommand", cmd_name: str, cmd: "Command", register: bool = False +) -> None: + if not base_command.chain or not isinstance(cmd, MultiCommand): + return + if register: + hint = ( + "It is not possible to add multi commands as children to" + " another multi command that is in chain mode." + ) + else: + hint = ( + "Found a multi command as subcommand to a multi command" + " that is in chain mode. This is not supported." + ) + raise RuntimeError( + f"{hint}. Command {base_command.name!r} is set to chain and" + f" {cmd_name!r} was added as a subcommand but it in itself is a" + f" multi command. ({cmd_name!r} is a {type(cmd).__name__}" + f" within a chained {type(base_command).__name__} named" + f" {base_command.name!r})." + ) + + +def batch(iterable: t.Iterable[V], batch_size: int) -> t.List[t.Tuple[V, ...]]: + return list(zip(*repeat(iter(iterable), batch_size))) + + +@contextmanager +def augment_usage_errors( + ctx: "Context", param: t.Optional["Parameter"] = None +) -> t.Iterator[None]: + """Context manager that attaches extra information to exceptions.""" + try: + yield + except BadParameter as e: + if e.ctx is None: + e.ctx = ctx + if param is not None and e.param is None: + e.param = param + raise + except UsageError as e: + if e.ctx is None: + e.ctx = ctx + raise + + +def iter_params_for_processing( + invocation_order: t.Sequence["Parameter"], + declaration_order: t.Sequence["Parameter"], +) -> t.List["Parameter"]: + """Given a sequence of parameters in the order as should be considered + for processing and an iterable of parameters that exist, this returns + a list in the correct order as they should be processed. 
+ """ + + def sort_key(item: "Parameter") -> t.Tuple[bool, float]: + try: + idx: float = invocation_order.index(item) + except ValueError: + idx = float("inf") + + return not item.is_eager, idx + + return sorted(declaration_order, key=sort_key) + + +class ParameterSource(enum.Enum): + """This is an :class:`~enum.Enum` that indicates the source of a + parameter's value. + + Use :meth:`click.Context.get_parameter_source` to get the + source for a parameter by name. + + .. versionchanged:: 8.0 + Use :class:`~enum.Enum` and drop the ``validate`` method. + + .. versionchanged:: 8.0 + Added the ``PROMPT`` value. + """ + + COMMANDLINE = enum.auto() + """The value was provided by the command line args.""" + ENVIRONMENT = enum.auto() + """The value was provided with an environment variable.""" + DEFAULT = enum.auto() + """Used the default specified by the parameter.""" + DEFAULT_MAP = enum.auto() + """Used a default provided by :attr:`Context.default_map`.""" + PROMPT = enum.auto() + """Used a prompt to confirm a default or provide a value.""" + + +class Context: + """The context is a special internal object that holds state relevant + for the script execution at every single level. It's normally invisible + to commands unless they opt-in to getting access to it. + + The context is useful as it can pass internal objects around and can + control special execution features such as reading data from + environment variables. + + A context can be used as context manager in which case it will call + :meth:`close` on teardown. + + :param command: the command class for this context. + :param parent: the parent context. + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it is usually + the name of the script, for commands below it it's + the name of the script. + :param obj: an arbitrary object of user data. + :param auto_envvar_prefix: the prefix to use for automatic environment + variables. If this is `None` then reading + from environment variables is disabled. This + does not affect manually set environment + variables which are always read. + :param default_map: a dictionary (like object) with default values + for parameters. + :param terminal_width: the width of the terminal. The default is + inherit from parent context. If no context + defines the terminal width then auto + detection will be applied. + :param max_content_width: the maximum width for content rendered by + Click (this currently only affects help + pages). This defaults to 80 characters if + not overridden. In other words: even if the + terminal is larger than that, Click will not + format things wider than 80 characters by + default. In addition to that, formatters might + add some safety mapping on the right. + :param resilient_parsing: if this flag is enabled then Click will + parse without any interactivity or callback + invocation. Default values will also be + ignored. This is useful for implementing + things such as completion support. + :param allow_extra_args: if this is set to `True` then extra arguments + at the end will not raise an error and will be + kept on the context. The default is to inherit + from the command. + :param allow_interspersed_args: if this is set to `False` then options + and arguments cannot be mixed. The + default is to inherit from the command. + :param ignore_unknown_options: instructs click to ignore options it does + not know and keeps them for later + processing. 
+ :param help_option_names: optionally a list of strings that define how + the default help parameter is named. The + default is ``['--help']``. + :param token_normalize_func: an optional function that is used to + normalize tokens (options, choices, + etc.). This for instance can be used to + implement case insensitive behavior. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are used in texts that Click prints which is by + default not the case. This for instance would affect + help output. + :param show_default: Show the default value for commands. If this + value is not set, it defaults to the value from the parent + context. ``Command.show_default`` overrides this default for the + specific command. + + .. versionchanged:: 8.1 + The ``show_default`` parameter is overridden by + ``Command.show_default``, instead of the other way around. + + .. versionchanged:: 8.0 + The ``show_default`` parameter defaults to the value from the + parent context. + + .. versionchanged:: 7.1 + Added the ``show_default`` parameter. + + .. versionchanged:: 4.0 + Added the ``color``, ``ignore_unknown_options``, and + ``max_content_width`` parameters. + + .. versionchanged:: 3.0 + Added the ``allow_extra_args`` and ``allow_interspersed_args`` + parameters. + + .. versionchanged:: 2.0 + Added the ``resilient_parsing``, ``help_option_names``, and + ``token_normalize_func`` parameters. + """ + + #: The formatter class to create with :meth:`make_formatter`. + #: + #: .. versionadded:: 8.0 + formatter_class: t.Type["HelpFormatter"] = HelpFormatter + + def __init__( + self, + command: "Command", + parent: t.Optional["Context"] = None, + info_name: t.Optional[str] = None, + obj: t.Optional[t.Any] = None, + auto_envvar_prefix: t.Optional[str] = None, + default_map: t.Optional[t.MutableMapping[str, t.Any]] = None, + terminal_width: t.Optional[int] = None, + max_content_width: t.Optional[int] = None, + resilient_parsing: bool = False, + allow_extra_args: t.Optional[bool] = None, + allow_interspersed_args: t.Optional[bool] = None, + ignore_unknown_options: t.Optional[bool] = None, + help_option_names: t.Optional[t.List[str]] = None, + token_normalize_func: t.Optional[t.Callable[[str], str]] = None, + color: t.Optional[bool] = None, + show_default: t.Optional[bool] = None, + ) -> None: + #: the parent context or `None` if none exists. + self.parent = parent + #: the :class:`Command` for this context. + self.command = command + #: the descriptive information name + self.info_name = info_name + #: Map of parameter names to their parsed values. Parameters + #: with ``expose_value=False`` are not stored. + self.params: t.Dict[str, t.Any] = {} + #: the leftover arguments. + self.args: t.List[str] = [] + #: protected arguments. These are arguments that are prepended + #: to `args` when certain parsing scenarios are encountered but + #: must be never propagated to another arguments. This is used + #: to implement nested parsing. + self.protected_args: t.List[str] = [] + #: the collected prefixes of the command's options. + self._opt_prefixes: t.Set[str] = set(parent._opt_prefixes) if parent else set() + + if obj is None and parent is not None: + obj = parent.obj + + #: the user object stored. + self.obj: t.Any = obj + self._meta: t.Dict[str, t.Any] = getattr(parent, "meta", {}) + + #: A dictionary (-like object) with defaults for parameters. 
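That default_map is resolved against the parent context just below. In applications it is usually seeded in a group callback, often from a configuration file; keys nest by subcommand name:

import click

@click.group()
@click.pass_context
def cli(ctx):
    ctx.default_map = {"serve": {"workers": 4}}

@cli.command()
@click.option("--workers", type=int, default=1)
def serve(workers):
    click.echo(workers)  # prints 4 unless --workers is given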
+        if (
+            default_map is None
+            and info_name is not None
+            and parent is not None
+            and parent.default_map is not None
+        ):
+            default_map = parent.default_map.get(info_name)
+
+        self.default_map: t.Optional[t.MutableMapping[str, t.Any]] = default_map
+
+        #: This flag indicates if a subcommand is going to be executed. A
+        #: group callback can use this information to figure out if it's
+        #: being executed directly or because the execution flow passes
+        #: onwards to a subcommand. By default it's None, but it can be
+        #: the name of the subcommand to execute.
+        #:
+        #: If chaining is enabled this will be set to ``'*'`` in case
+        #: any commands are executed. It is however not possible to
+        #: figure out which ones. If you require this knowledge you
+        #: should use a :func:`result_callback`.
+        self.invoked_subcommand: t.Optional[str] = None
+
+        if terminal_width is None and parent is not None:
+            terminal_width = parent.terminal_width
+
+        #: The width of the terminal (None is autodetection).
+        self.terminal_width: t.Optional[int] = terminal_width
+
+        if max_content_width is None and parent is not None:
+            max_content_width = parent.max_content_width
+
+        #: The maximum width of formatted content (None implies a sensible
+        #: default which is 80 for most things).
+        self.max_content_width: t.Optional[int] = max_content_width
+
+        if allow_extra_args is None:
+            allow_extra_args = command.allow_extra_args
+
+        #: Indicates if the context allows extra args or if it should
+        #: fail on parsing.
+        #:
+        #: .. versionadded:: 3.0
+        self.allow_extra_args = allow_extra_args
+
+        if allow_interspersed_args is None:
+            allow_interspersed_args = command.allow_interspersed_args
+
+        #: Indicates if the context allows mixing of arguments and
+        #: options or not.
+        #:
+        #: .. versionadded:: 3.0
+        self.allow_interspersed_args: bool = allow_interspersed_args
+
+        if ignore_unknown_options is None:
+            ignore_unknown_options = command.ignore_unknown_options
+
+        #: Instructs click to ignore options that a command does not
+        #: understand and store them on the context for later
+        #: processing. This is primarily useful for situations where you
+        #: want to call into external programs. Generally this pattern is
+        #: strongly discouraged because it's not possible to losslessly
+        #: forward all arguments.
+        #:
+        #: .. versionadded:: 4.0
+        self.ignore_unknown_options: bool = ignore_unknown_options
+
+        if help_option_names is None:
+            if parent is not None:
+                help_option_names = parent.help_option_names
+            else:
+                help_option_names = ["--help"]
+
+        #: The names for the help options.
+        self.help_option_names: t.List[str] = help_option_names
+
+        if token_normalize_func is None and parent is not None:
+            token_normalize_func = parent.token_normalize_func
+
+        #: An optional normalization function for tokens. This is
+        #: options, choices, commands etc.
+        self.token_normalize_func: t.Optional[
+            t.Callable[[str], str]
+        ] = token_normalize_func
+
+        #: Indicates if resilient parsing is enabled. In that case Click
+        #: will do its best to not cause any failures and default values
+        #: will be ignored. Useful for completion.
+        self.resilient_parsing: bool = resilient_parsing
+
+        # If there is no envvar prefix yet, but the parent has one and
+        # the command on this level has a name, we can expand the envvar
+        # prefix automatically. 
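The expansion performed just below is what makes nested commands pick up PREFIX_SUBCOMMAND_OPTION environment variables automatically. Illustrative usage (MYTOOL is an example prefix, not anything Click defines):

import click

@click.group()
def cli():
    pass

@cli.command()
@click.option("--workers", type=int, default=1)
def serve(workers):
    click.echo(workers)

if __name__ == "__main__":
    # MYTOOL_SERVE_WORKERS=8 mytool serve  ->  prints 8
    cli(auto_envvar_prefix="MYTOOL")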
+ if auto_envvar_prefix is None: + if ( + parent is not None + and parent.auto_envvar_prefix is not None + and self.info_name is not None + ): + auto_envvar_prefix = ( + f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" + ) + else: + auto_envvar_prefix = auto_envvar_prefix.upper() + + if auto_envvar_prefix is not None: + auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") + + self.auto_envvar_prefix: t.Optional[str] = auto_envvar_prefix + + if color is None and parent is not None: + color = parent.color + + #: Controls if styling output is wanted or not. + self.color: t.Optional[bool] = color + + if show_default is None and parent is not None: + show_default = parent.show_default + + #: Show option default values when formatting help text. + self.show_default: t.Optional[bool] = show_default + + self._close_callbacks: t.List[t.Callable[[], t.Any]] = [] + self._depth = 0 + self._parameter_source: t.Dict[str, ParameterSource] = {} + self._exit_stack = ExitStack() + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire CLI + structure. + + .. code-block:: python + + with Context(cli) as ctx: + info = ctx.to_info_dict() + + .. versionadded:: 8.0 + """ + return { + "command": self.command.to_info_dict(self), + "info_name": self.info_name, + "allow_extra_args": self.allow_extra_args, + "allow_interspersed_args": self.allow_interspersed_args, + "ignore_unknown_options": self.ignore_unknown_options, + "auto_envvar_prefix": self.auto_envvar_prefix, + } + + def __enter__(self) -> "Context": + self._depth += 1 + push_context(self) + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self._depth -= 1 + if self._depth == 0: + self.close() + pop_context() + + @contextmanager + def scope(self, cleanup: bool = True) -> t.Iterator["Context"]: + """This helper method can be used with the context object to promote + it to the current thread local (see :func:`get_current_context`). + The default behavior of this is to invoke the cleanup functions which + can be disabled by setting `cleanup` to `False`. The cleanup + functions are typically used for things such as closing file handles. + + If the cleanup is intended the context object can also be directly + used as a context manager. + + Example usage:: + + with ctx.scope(): + assert get_current_context() is ctx + + This is equivalent:: + + with ctx: + assert get_current_context() is ctx + + .. versionadded:: 5.0 + + :param cleanup: controls if the cleanup functions should be run or + not. The default is to run these functions. In + some situations the context only wants to be + temporarily pushed in which case this can be disabled. + Nested pushes automatically defer the cleanup. + """ + if not cleanup: + self._depth += 1 + try: + with self as rv: + yield rv + finally: + if not cleanup: + self._depth -= 1 + + @property + def meta(self) -> t.Dict[str, t.Any]: + """This is a dictionary which is shared with all the contexts + that are nested. It exists so that click utilities can store some + state here if they need to. It is however the responsibility of + that code to manage this dictionary well. + + The keys are supposed to be unique dotted strings. For instance + module paths are a good choice for it. What is stored in there is + irrelevant for the operation of click. 
However what is important is + that code that places data here adheres to the general semantics of + the system. + + Example usage:: + + LANG_KEY = f'{__name__}.lang' + + def set_language(value): + ctx = get_current_context() + ctx.meta[LANG_KEY] = value + + def get_language(): + return get_current_context().meta.get(LANG_KEY, 'en_US') + + .. versionadded:: 5.0 + """ + return self._meta + + def make_formatter(self) -> HelpFormatter: + """Creates the :class:`~click.HelpFormatter` for the help and + usage output. + + To quickly customize the formatter class used without overriding + this method, set the :attr:`formatter_class` attribute. + + .. versionchanged:: 8.0 + Added the :attr:`formatter_class` attribute. + """ + return self.formatter_class( + width=self.terminal_width, max_width=self.max_content_width + ) + + def with_resource(self, context_manager: t.ContextManager[V]) -> V: + """Register a resource as if it were used in a ``with`` + statement. The resource will be cleaned up when the context is + popped. + + Uses :meth:`contextlib.ExitStack.enter_context`. It calls the + resource's ``__enter__()`` method and returns the result. When + the context is popped, it closes the stack, which calls the + resource's ``__exit__()`` method. + + To register a cleanup function for something that isn't a + context manager, use :meth:`call_on_close`. Or use something + from :mod:`contextlib` to turn it into a context manager first. + + .. code-block:: python + + @click.group() + @click.option("--name") + @click.pass_context + def cli(ctx): + ctx.obj = ctx.with_resource(connect_db(name)) + + :param context_manager: The context manager to enter. + :return: Whatever ``context_manager.__enter__()`` returns. + + .. versionadded:: 8.0 + """ + return self._exit_stack.enter_context(context_manager) + + def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: + """Register a function to be called when the context tears down. + + This can be used to close resources opened during the script + execution. Resources that support Python's context manager + protocol which would be used in a ``with`` statement should be + registered with :meth:`with_resource` instead. + + :param f: The function to execute on teardown. + """ + return self._exit_stack.callback(f) + + def close(self) -> None: + """Invoke all close callbacks registered with + :meth:`call_on_close`, and exit all context managers entered + with :meth:`with_resource`. + """ + self._exit_stack.close() + # In case the context is reused, create a new exit stack. + self._exit_stack = ExitStack() + + @property + def command_path(self) -> str: + """The computed command path. This is used for the ``usage`` + information on the help page. It's automatically created by + combining the info names of the chain of contexts to the root. 
+ """ + rv = "" + if self.info_name is not None: + rv = self.info_name + if self.parent is not None: + parent_command_path = [self.parent.command_path] + + if isinstance(self.parent.command, Command): + for param in self.parent.command.get_params(self): + parent_command_path.extend(param.get_usage_pieces(self)) + + rv = f"{' '.join(parent_command_path)} {rv}" + return rv.lstrip() + + def find_root(self) -> "Context": + """Finds the outermost context.""" + node = self + while node.parent is not None: + node = node.parent + return node + + def find_object(self, object_type: t.Type[V]) -> t.Optional[V]: + """Finds the closest object of a given type.""" + node: t.Optional["Context"] = self + + while node is not None: + if isinstance(node.obj, object_type): + return node.obj + + node = node.parent + + return None + + def ensure_object(self, object_type: t.Type[V]) -> V: + """Like :meth:`find_object` but sets the innermost object to a + new instance of `object_type` if it does not exist. + """ + rv = self.find_object(object_type) + if rv is None: + self.obj = rv = object_type() + return rv + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[False]" = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def lookup_default(self, name: str, call: bool = True) -> t.Optional[t.Any]: + """Get the default for a parameter from :attr:`default_map`. + + :param name: Name of the parameter. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. + """ + if self.default_map is not None: + value = self.default_map.get(name) + + if call and callable(value): + return value() + + return value + + return None + + def fail(self, message: str) -> "te.NoReturn": + """Aborts the execution of the program with a specific error + message. + + :param message: the error message to fail with. + """ + raise UsageError(message, self) + + def abort(self) -> "te.NoReturn": + """Aborts the script.""" + raise Abort() + + def exit(self, code: int = 0) -> "te.NoReturn": + """Exits the application with a given exit code.""" + raise Exit(code) + + def get_usage(self) -> str: + """Helper method to get formatted usage string for the current + context and command. + """ + return self.command.get_usage(self) + + def get_help(self) -> str: + """Helper method to get formatted help page for the current + context and command. + """ + return self.command.get_help(self) + + def _make_sub_context(self, command: "Command") -> "Context": + """Create a new context of the same type as this context, but + for a new command. + + :meta private: + """ + return type(self)(command, info_name=command.name, parent=self) + + @t.overload + def invoke( + __self, # noqa: B902 + __callback: "t.Callable[..., V]", + *args: t.Any, + **kwargs: t.Any, + ) -> V: + ... + + @t.overload + def invoke( + __self, # noqa: B902 + __callback: "Command", + *args: t.Any, + **kwargs: t.Any, + ) -> t.Any: + ... + + def invoke( + __self, # noqa: B902 + __callback: t.Union["Command", "t.Callable[..., V]"], + *args: t.Any, + **kwargs: t.Any, + ) -> t.Union[t.Any, V]: + """Invokes a command callback in exactly the way it expects. There + are two ways to invoke this method: + + 1. the first argument can be a callback and all other arguments and + keyword arguments are forwarded directly to the function. + 2. 
the first argument is a click command object. In that case all + arguments are forwarded as well but proper click parameters + (options and click arguments) must be keyword arguments and Click + will fill in defaults. + + Note that before Click 3.2 keyword arguments were not properly filled + in against the intention of this code and no context was created. For + more information about this change and why it was done in a bugfix + release see :ref:`upgrade-to-3.2`. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if :meth:`forward` is called at multiple levels. + """ + if isinstance(__callback, Command): + other_cmd = __callback + + if other_cmd.callback is None: + raise TypeError( + "The given command does not have a callback that can be invoked." + ) + else: + __callback = t.cast("t.Callable[..., V]", other_cmd.callback) + + ctx = __self._make_sub_context(other_cmd) + + for param in other_cmd.params: + if param.name not in kwargs and param.expose_value: + kwargs[param.name] = param.type_cast_value( # type: ignore + ctx, param.get_default(ctx) + ) + + # Track all kwargs as params, so that forward() will pass + # them on in subsequent calls. + ctx.params.update(kwargs) + else: + ctx = __self + + with augment_usage_errors(__self): + with ctx: + return __callback(*args, **kwargs) + + def forward( + __self, __cmd: "Command", *args: t.Any, **kwargs: t.Any # noqa: B902 + ) -> t.Any: + """Similar to :meth:`invoke` but fills in default keyword + arguments from the current context if the other command expects + it. This cannot invoke callbacks directly, only other commands. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if ``forward`` is called at multiple levels. + """ + # Can only forward to other commands, not direct callbacks. + if not isinstance(__cmd, Command): + raise TypeError("Callback is not a command.") + + for param in __self.params: + if param not in kwargs: + kwargs[param] = __self.params[param] + + return __self.invoke(__cmd, *args, **kwargs) + + def set_parameter_source(self, name: str, source: ParameterSource) -> None: + """Set the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + :param name: The name of the parameter. + :param source: A member of :class:`~click.core.ParameterSource`. + """ + self._parameter_source[name] = source + + def get_parameter_source(self, name: str) -> t.Optional[ParameterSource]: + """Get the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + This can be useful for determining when a user specified a value + on the command line that is the same as the default value. It + will be :attr:`~click.core.ParameterSource.DEFAULT` only if the + value was actually taken from the default. + + :param name: The name of the parameter. + :rtype: ParameterSource + + .. versionchanged:: 8.0 + Returns ``None`` if the parameter was not provided from any + source. + """ + return self._parameter_source.get(name) + + +class BaseCommand: + """The base command implements the minimal API contract of commands. + Most code will never use this as it does not implement a lot of useful + functionality but it can act as the direct subclass of alternative + parsing methods that do not depend on the Click parser. + + For instance, this can be used to bridge Click and other systems like + argparse or docopt. 
+ + Because base commands do not implement a lot of the API that other + parts of Click take for granted, they are not supported for all + operations. For instance, they cannot be used with the decorators + usually and they have no built-in callback system. + + .. versionchanged:: 2.0 + Added the `context_settings` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + """ + + #: The context class to create with :meth:`make_context`. + #: + #: .. versionadded:: 8.0 + context_class: t.Type[Context] = Context + #: the default for the :attr:`Context.allow_extra_args` flag. + allow_extra_args = False + #: the default for the :attr:`Context.allow_interspersed_args` flag. + allow_interspersed_args = True + #: the default for the :attr:`Context.ignore_unknown_options` flag. + ignore_unknown_options = False + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> None: + #: the name the command thinks it has. Upon registering a command + #: on a :class:`Group` the group will default the command name + #: with this information. You should instead use the + #: :class:`Context`\'s :attr:`~Context.info_name` attribute. + self.name = name + + if context_settings is None: + context_settings = {} + + #: an optional dictionary with defaults passed to the context. + self.context_settings: t.MutableMapping[str, t.Any] = context_settings + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire structure + below this command. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + :param ctx: A :class:`Context` representing this command. + + .. versionadded:: 8.0 + """ + return {"name": self.name} + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def get_usage(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get usage") + + def get_help(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get help") + + def make_context( + self, + info_name: t.Optional[str], + args: t.List[str], + parent: t.Optional[Context] = None, + **extra: t.Any, + ) -> Context: + """This function when given an info name and arguments will kick + off the parsing and create a new :class:`Context`. It does not + invoke the actual command callback though. + + To quickly customize the context class used without overriding + this method, set the :attr:`context_class` attribute. + + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it's usually + the name of the script, for commands below it's + the name of the command. + :param args: the arguments to parse as list of strings. + :param parent: the parent context if available. + :param extra: extra keyword arguments forwarded to the context + constructor. + + .. versionchanged:: 8.0 + Added the :attr:`context_class` attribute. 
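As a sketch of the make_context() flow documented above, parsing can be driven without invoking the callback (hypothetical command name; not part of the vendored file):

import click

@click.command()
@click.option("--name", default="world")
def hello(name):
    click.echo(f"Hello {name}!")

ctx = hello.make_context("hello", ["--name", "Ada"])
print(ctx.params)  # {'name': 'Ada'}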
+ """ + for key, value in self.context_settings.items(): + if key not in extra: + extra[key] = value + + ctx = self.context_class( + self, info_name=info_name, parent=parent, **extra # type: ignore + ) + + with ctx.scope(cleanup=False): + self.parse_args(ctx, args) + return ctx + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + """Given a context and a list of arguments this creates the parser + and parses the arguments, then modifies the context as necessary. + This is automatically invoked by :meth:`make_context`. + """ + raise NotImplementedError("Base commands do not know how to parse arguments.") + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the command. The default + implementation is raising a not implemented error. + """ + raise NotImplementedError("Base commands are not invocable by default") + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of chained multi-commands. + + Any command could be part of a chained multi-command, so sibling + commands are valid at any point during command completion. Other + command classes will return more completions. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + while ctx.parent is not None: + ctx = ctx.parent + + if isinstance(ctx.command, MultiCommand) and ctx.command.chain: + results.extend( + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + if name not in ctx.protected_args + ) + + return results + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: "te.Literal[True]" = True, + **extra: t.Any, + ) -> "te.NoReturn": + ... + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = ..., + **extra: t.Any, + ) -> t.Any: + ... + + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = True, + windows_expand_args: bool = True, + **extra: t.Any, + ) -> t.Any: + """This is the way to invoke a script with all the bells and + whistles as a command line application. This will always terminate + the application after a call. If this is not wanted, ``SystemExit`` + needs to be caught. + + This method is also available by directly calling the instance of + a :class:`Command`. + + :param args: the arguments that should be used for parsing. If not + provided, ``sys.argv[1:]`` is used. + :param prog_name: the program name that should be used. By default + the program name is constructed by taking the file + name from ``sys.argv[0]``. + :param complete_var: the environment variable that controls the + bash completion support. The default is + ``"__COMPLETE"`` with prog_name in + uppercase. + :param standalone_mode: the default behavior is to invoke the script + in standalone mode. Click will then + handle exceptions and convert them into + error messages and the function will never + return but shut down the interpreter. 
If + this is set to `False` they will be + propagated to the caller and the return + value of this function is the return value + of :meth:`invoke`. + :param windows_expand_args: Expand glob patterns, user dir, and + env vars in command line args on Windows. + :param extra: extra keyword arguments are forwarded to the context + constructor. See :class:`Context` for more information. + + .. versionchanged:: 8.0.1 + Added the ``windows_expand_args`` parameter to allow + disabling command line arg expansion on Windows. + + .. versionchanged:: 8.0 + When taking arguments from ``sys.argv`` on Windows, glob + patterns, user dir, and env vars are expanded. + + .. versionchanged:: 3.0 + Added the ``standalone_mode`` parameter. + """ + if args is None: + args = sys.argv[1:] + + if os.name == "nt" and windows_expand_args: + args = _expand_args(args) + else: + args = list(args) + + if prog_name is None: + prog_name = _detect_program_name() + + # Process shell completion requests and exit early. + self._main_shell_completion(extra, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! + # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except (EOFError, KeyboardInterrupt) as e: + echo(file=sys.stderr) + raise Abort() from e + except ClickException as e: + if not standalone_mode: + raise + e.show() + sys.exit(e.exit_code) + except OSError as e: + if e.errno == errno.EPIPE: + sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) + sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) + sys.exit(1) + else: + raise + except Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except Abort: + if not standalone_mode: + raise + echo(_("Aborted!"), file=sys.stderr) + sys.exit(1) + + def _main_shell_completion( + self, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: t.Optional[str] = None, + ) -> None: + """Check if the shell is asking for tab completion, process + that, then exit early. Called from :meth:`main` before the + program is invoked. + + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. Defaults to + ``_{PROG_NAME}_COMPLETE``. + + .. versionchanged:: 8.2.0 + Dots (``.``) in ``prog_name`` are replaced with underscores (``_``). 
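A small sketch of the standalone_mode behavior implemented above: with standalone_mode=False, main() returns the result of invoke() instead of calling sys.exit() (hypothetical command name):

import click

@click.command()
def ok():
    return 42

result = ok.main([], standalone_mode=False)
print(result)  # 42, instead of the interpreter shutting down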
+ """ + if complete_var is None: + complete_name = prog_name.replace("-", "_").replace(".", "_") + complete_var = f"_{complete_name}_COMPLETE".upper() + + instruction = os.environ.get(complete_var) + + if not instruction: + return + + from .shell_completion import shell_complete + + rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) + sys.exit(rv) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Alias for :meth:`main`.""" + return self.main(*args, **kwargs) + + +class Command(BaseCommand): + """Commands are the basic building block of command line interfaces in + Click. A basic command handles command line parsing and might dispatch + more parsing to commands nested below it. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + :param callback: the callback to invoke. This is optional. + :param params: the parameters to register with this command. This can + be either :class:`Option` or :class:`Argument` objects. + :param help: the help string to use for this command. + :param epilog: like the help string but it's printed at the end of the + help page after everything else. + :param short_help: the short help to use for this command. This is + shown on the command listing of the parent command. + :param add_help_option: by default each command registers a ``--help`` + option. This can be disabled by this parameter. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is disabled by default. + If enabled this will add ``--help`` as argument + if no arguments are passed + :param hidden: hide this command from help outputs. + + :param deprecated: issues a message indicating that + the command is deprecated. + + .. versionchanged:: 8.1 + ``help``, ``epilog``, and ``short_help`` are stored unprocessed, + all formatting is done when outputting help text, not at init, + and is done even if not using the ``@command`` decorator. + + .. versionchanged:: 8.0 + Added a ``repr`` showing the command name. + + .. versionchanged:: 7.1 + Added the ``no_args_is_help`` parameter. + + .. versionchanged:: 2.0 + Added the ``context_settings`` parameter. + """ + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, + callback: t.Optional[t.Callable[..., t.Any]] = None, + params: t.Optional[t.List["Parameter"]] = None, + help: t.Optional[str] = None, + epilog: t.Optional[str] = None, + short_help: t.Optional[str] = None, + options_metavar: t.Optional[str] = "[OPTIONS]", + add_help_option: bool = True, + no_args_is_help: bool = False, + hidden: bool = False, + deprecated: bool = False, + ) -> None: + super().__init__(name, context_settings) + #: the callback to execute when the command fires. This might be + #: `None` in which case nothing happens. + self.callback = callback + #: the list of parameters for this command in the order they + #: should show up in the help page and execute. Eager parameters + #: will automatically be handled before non eager ones. 
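Commands can also be assembled programmatically with the constructor shown above, rather than through decorators; a minimal sketch with hypothetical names:

import click

def greet(name):
    click.echo(f"Hi {name}")

cmd = click.Command(
    "greet",
    params=[click.Option(["--name"], default="there")],
    callback=greet,
)

if __name__ == "__main__":
    cmd.main()  # e.g. `python greet.py --name Ada` prints "Hi Ada"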
+ self.params: t.List["Parameter"] = params or [] + self.help = help + self.epilog = epilog + self.options_metavar = options_metavar + self.short_help = short_help + self.add_help_option = add_help_option + self.no_args_is_help = no_args_is_help + self.hidden = hidden + self.deprecated = deprecated + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + info_dict.update( + params=[param.to_info_dict() for param in self.get_params(ctx)], + help=self.help, + epilog=self.epilog, + short_help=self.short_help, + hidden=self.hidden, + deprecated=self.deprecated, + ) + return info_dict + + def get_usage(self, ctx: Context) -> str: + """Formats the usage line into a string and returns it. + + Calls :meth:`format_usage` internally. + """ + formatter = ctx.make_formatter() + self.format_usage(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_params(self, ctx: Context) -> t.List["Parameter"]: + rv = self.params + help_option = self.get_help_option(ctx) + + if help_option is not None: + rv = [*rv, help_option] + + return rv + + def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the usage line into the formatter. + + This is a low-level method called by :meth:`get_usage`. + """ + pieces = self.collect_usage_pieces(ctx) + formatter.write_usage(ctx.command_path, " ".join(pieces)) + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + """Returns all the pieces that go into the usage line and returns + it as a list of strings. + """ + rv = [self.options_metavar] if self.options_metavar else [] + + for param in self.get_params(ctx): + rv.extend(param.get_usage_pieces(ctx)) + + return rv + + def get_help_option_names(self, ctx: Context) -> t.List[str]: + """Returns the names for the help option.""" + all_names = set(ctx.help_option_names) + for param in self.params: + all_names.difference_update(param.opts) + all_names.difference_update(param.secondary_opts) + return list(all_names) + + def get_help_option(self, ctx: Context) -> t.Optional["Option"]: + """Returns the help option object.""" + help_options = self.get_help_option_names(ctx) + + if not help_options or not self.add_help_option: + return None + + def show_help(ctx: Context, param: "Parameter", value: str) -> None: + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + return Option( + help_options, + is_flag=True, + is_eager=True, + expose_value=False, + callback=show_help, + help=_("Show this message and exit."), + ) + + def make_parser(self, ctx: Context) -> OptionParser: + """Creates the underlying option parser for this command.""" + parser = OptionParser(ctx) + for param in self.get_params(ctx): + param.add_to_parser(parser, ctx) + return parser + + def get_help(self, ctx: Context) -> str: + """Formats the help into a string and returns it. + + Calls :meth:`format_help` internally. + """ + formatter = ctx.make_formatter() + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_short_help_str(self, limit: int = 45) -> str: + """Gets short help for the command or makes it by shortening the + long help string. 
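Since get_help_option_names() reads ctx.help_option_names, the help flag spellings can be overridden through context_settings; a minimal sketch (hypothetical command name):

import click

@click.command(context_settings={"help_option_names": ["-h", "--help"]})
def tool():
    """Do the thing."""

# Both `tool -h` and `tool --help` now print the help page.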
+ """ + if self.short_help: + text = inspect.cleandoc(self.short_help) + elif self.help: + text = make_default_short_help(self.help, limit) + else: + text = "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + return text.strip() + + def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help into the formatter if it exists. + + This is a low-level method called by :meth:`get_help`. + + This calls the following methods: + + - :meth:`format_usage` + - :meth:`format_help_text` + - :meth:`format_options` + - :meth:`format_epilog` + """ + self.format_usage(ctx, formatter) + self.format_help_text(ctx, formatter) + self.format_options(ctx, formatter) + self.format_epilog(ctx, formatter) + + def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help text to the formatter if it exists.""" + if self.help is not None: + # truncate the help text to the first form feed + text = inspect.cleandoc(self.help).partition("\f")[0] + else: + text = "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + if text: + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(text) + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes all the options into the formatter if they exist.""" + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + opts.append(rv) + + if opts: + with formatter.section(_("Options")): + formatter.write_dl(opts) + + def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the epilog into the formatter if it exists.""" + if self.epilog: + epilog = inspect.cleandoc(self.epilog) + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(epilog) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + parser = self.make_parser(ctx) + opts, args, param_order = parser.parse_args(args=args) + + for param in iter_params_for_processing(param_order, self.get_params(ctx)): + value, args = param.handle_parse_result(ctx, opts, args) + + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: + ctx.fail( + ngettext( + "Got unexpected extra argument ({args})", + "Got unexpected extra arguments ({args})", + len(args), + ).format(args=" ".join(map(str, args))) + ) + + ctx.args = args + ctx._opt_prefixes.update(parser._opt_prefixes) + return args + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the attached callback (if it exists) + in the right way. + """ + if self.deprecated: + message = _( + "DeprecationWarning: The command {name!r} is deprecated." + ).format(name=self.name) + echo(style(message, fg="red"), err=True) + + if self.callback is not None: + return ctx.invoke(self.callback, **ctx.params) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options and chained multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + if incomplete and not incomplete[0].isalnum(): + for param in self.get_params(ctx): + if ( + not isinstance(param, Option) + or param.hidden + or ( + not param.multiple + and ctx.get_parameter_source(param.name) # type: ignore + is ParameterSource.COMMANDLINE + ) + ): + continue + + results.extend( + CompletionItem(name, help=param.help) + for name in [*param.opts, *param.secondary_opts] + if name.startswith(incomplete) + ) + + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class MultiCommand(Command): + """A multi command is the basic implementation of a command that + dispatches to subcommands. The most common version is the + :class:`Group`. + + :param invoke_without_command: this controls how the multi command itself + is invoked. By default it's only invoked + if a subcommand is provided. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is enabled by default if + `invoke_without_command` is disabled or disabled + if it's enabled. If enabled this will add + ``--help`` as argument if no arguments are + passed. + :param subcommand_metavar: the string that is used in the documentation + to indicate the subcommand place. + :param chain: if this is set to `True` chaining of multiple subcommands + is enabled. This restricts the form of commands in that + they cannot have optional arguments but it allows + multiple commands to be chained together. + :param result_callback: The result callback to attach to this multi + command. This can be set or changed later with the + :meth:`result_callback` decorator. + :param attrs: Other command arguments described in :class:`Command`. + """ + + allow_extra_args = True + allow_interspersed_args = False + + def __init__( + self, + name: t.Optional[str] = None, + invoke_without_command: bool = False, + no_args_is_help: t.Optional[bool] = None, + subcommand_metavar: t.Optional[str] = None, + chain: bool = False, + result_callback: t.Optional[t.Callable[..., t.Any]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if no_args_is_help is None: + no_args_is_help = not invoke_without_command + + self.no_args_is_help = no_args_is_help + self.invoke_without_command = invoke_without_command + + if subcommand_metavar is None: + if chain: + subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." + else: + subcommand_metavar = "COMMAND [ARGS]..." + + self.subcommand_metavar = subcommand_metavar + self.chain = chain + # The result callback that is stored. This can be set or + # overridden with the :func:`result_callback` decorator. + self._result_callback = result_callback + + if self.chain: + for param in self.params: + if isinstance(param, Argument) and not param.required: + raise RuntimeError( + "Multi commands in chain mode cannot have" + " optional arguments." 
+ ) + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + commands = {} + + for name in self.list_commands(ctx): + command = self.get_command(ctx, name) + + if command is None: + continue + + sub_ctx = ctx._make_sub_context(command) + + with sub_ctx.scope(cleanup=False): + commands[name] = command.to_info_dict(sub_ctx) + + info_dict.update(commands=commands, chain=self.chain) + return info_dict + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + rv = super().collect_usage_pieces(ctx) + rv.append(self.subcommand_metavar) + return rv + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + super().format_options(ctx, formatter) + self.format_commands(ctx, formatter) + + def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: + """Adds a result callback to the command. By default if a + result callback is already registered this will chain them but + this can be disabled with the `replace` parameter. The result + callback is invoked with the return value of the subcommand + (or the list of return values from all subcommands if chaining + is enabled) as well as the parameters as they would be passed + to the main callback. + + Example:: + + @click.group() + @click.option('-i', '--input', default=23) + def cli(input): + return 42 + + @cli.result_callback() + def process_result(result, input): + return result + input + + :param replace: if set to `True` an already existing result + callback will be removed. + + .. versionchanged:: 8.0 + Renamed from ``resultcallback``. + + .. versionadded:: 3.0 + """ + + def decorator(f: F) -> F: + old_callback = self._result_callback + + if old_callback is None or replace: + self._result_callback = f + return f + + def function(__value, *args, **kwargs): # type: ignore + inner = old_callback(__value, *args, **kwargs) + return f(inner, *args, **kwargs) + + self._result_callback = rv = update_wrapper(t.cast(F, function), f) + return rv + + return decorator + + def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: + """Extra format methods for multi methods that adds all the commands + after the options. + """ + commands = [] + for subcommand in self.list_commands(ctx): + cmd = self.get_command(ctx, subcommand) + # What is this, the tool lied about a command. 
Ignore it + if cmd is None: + continue + if cmd.hidden: + continue + + commands.append((subcommand, cmd)) + + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section(_("Commands")): + formatter.write_dl(rows) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + rest = super().parse_args(ctx, args) + + if self.chain: + ctx.protected_args = rest + ctx.args = [] + elif rest: + ctx.protected_args, ctx.args = rest[:1], rest[1:] + + return ctx.args + + def invoke(self, ctx: Context) -> t.Any: + def _process_result(value: t.Any) -> t.Any: + if self._result_callback is not None: + value = ctx.invoke(self._result_callback, value, **ctx.params) + return value + + if not ctx.protected_args: + if self.invoke_without_command: + # No subcommand was invoked, so the result callback is + # invoked with the group return value for regular + # groups, or an empty list for chained groups. + with ctx: + rv = super().invoke(ctx) + return _process_result([] if self.chain else rv) + ctx.fail(_("Missing command.")) + + # Fetch args back out + args = [*ctx.protected_args, *ctx.args] + ctx.args = [] + ctx.protected_args = [] + + # If we're not in chain mode, we only allow the invocation of a + # single command but we also inform the current context about the + # name of the command to invoke. + if not self.chain: + # Make sure the context is entered so we do not clean up + # resources until the result processor has worked. + with ctx: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + ctx.invoked_subcommand = cmd_name + super().invoke(ctx) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) + with sub_ctx: + return _process_result(sub_ctx.command.invoke(sub_ctx)) + + # In chain mode we create the contexts step by step, but after the + # base command has been invoked. Because at that point we do not + # know the subcommands yet, the invoked subcommand attribute is + # set to ``*`` to inform the command that subcommands are executed + # but nothing else. + with ctx: + ctx.invoked_subcommand = "*" if args else None + super().invoke(ctx) + + # Otherwise we make every single context and invoke them in a + # chain. In that case the return value to the result processor + # is the list of all invoked subcommand's results. + contexts = [] + while args: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + sub_ctx = cmd.make_context( + cmd_name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + ) + contexts.append(sub_ctx) + args, sub_ctx.args = sub_ctx.args, [] + + rv = [] + for sub_ctx in contexts: + with sub_ctx: + rv.append(sub_ctx.command.invoke(sub_ctx)) + return _process_result(rv) + + def resolve_command( + self, ctx: Context, args: t.List[str] + ) -> t.Tuple[t.Optional[str], t.Optional[Command], t.List[str]]: + cmd_name = make_str(args[0]) + original_cmd_name = cmd_name + + # Get the command + cmd = self.get_command(ctx, cmd_name) + + # If we can't find the command but there is a normalization + # function available, we try with that one. 
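A sketch of the chain-mode dispatch implemented in MultiCommand.invoke above: each subcommand gets its own sub-context, and the result callback receives the list of all subcommand return values (hypothetical command names):

import click

@click.group(chain=True)
def pipeline():
    pass

@pipeline.command()
def extract():
    return "extract"

@pipeline.command()
def load():
    return "load"

@pipeline.result_callback()
def report(results):
    click.echo(f"ran: {results}")

# `pipeline extract load` echoes: ran: ['extract', 'load']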
+ if cmd is None and ctx.token_normalize_func is not None: + cmd_name = ctx.token_normalize_func(cmd_name) + cmd = self.get_command(ctx, cmd_name) + + # If we don't find the command we want to show an error message + # to the user that it was not provided. However, there is + # something else we should do: if the first argument looks like + # an option we want to kick off parsing again for arguments to + # resolve things like --help which now should go to the main + # place. + if cmd is None and not ctx.resilient_parsing: + if split_opt(cmd_name)[0]: + self.parse_args(ctx, ctx.args) + ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) + return cmd_name if cmd else None, cmd, args[1:] + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + """Given a context and a command name, this returns a + :class:`Command` object if it exists or returns `None`. + """ + raise NotImplementedError + + def list_commands(self, ctx: Context) -> t.List[str]: + """Returns a list of subcommand names in the order they should + appear. + """ + return [] + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options, subcommands, and chained + multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results = [ + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + ] + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class Group(MultiCommand): + """A group allows a command to have subcommands attached. This is + the most common way to implement nesting in Click. + + :param name: The name of the group command. + :param commands: A dict mapping names to :class:`Command` objects. + Can also be a list of :class:`Command`, which will use + :attr:`Command.name` to create the dict. + :param attrs: Other command arguments described in + :class:`MultiCommand`, :class:`Command`, and + :class:`BaseCommand`. + + .. versionchanged:: 8.0 + The ``commands`` argument can be a list of command objects. + """ + + #: If set, this is used by the group's :meth:`command` decorator + #: as the default :class:`Command` class. This is useful to make all + #: subcommands use a custom command class. + #: + #: .. versionadded:: 8.0 + command_class: t.Optional[t.Type[Command]] = None + + #: If set, this is used by the group's :meth:`group` decorator + #: as the default :class:`Group` class. This is useful to make all + #: subgroups use a custom group class. + #: + #: If set to the special value :class:`type` (literally + #: ``group_class = type``), this group's class will be used as the + #: default class. This makes a custom group class continue to make + #: custom groups. + #: + #: .. 
versionadded:: 8.0 + group_class: t.Optional[t.Union[t.Type["Group"], t.Type[type]]] = None + # Literal[type] isn't valid, so use Type[type] + + def __init__( + self, + name: t.Optional[str] = None, + commands: t.Optional[ + t.Union[t.MutableMapping[str, Command], t.Sequence[Command]] + ] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if commands is None: + commands = {} + elif isinstance(commands, abc.Sequence): + commands = {c.name: c for c in commands if c.name is not None} + + #: The registered subcommands by their exported names. + self.commands: t.MutableMapping[str, Command] = commands + + def add_command(self, cmd: Command, name: t.Optional[str] = None) -> None: + """Registers another :class:`Command` with this group. If the name + is not provided, the name of the command is used. + """ + name = name or cmd.name + if name is None: + raise TypeError("Command has no name.") + _check_multicommand(self, name, cmd, register=True) + self.commands[name] = cmd + + @t.overload + def command(self, __func: t.Callable[..., t.Any]) -> Command: + ... + + @t.overload + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Command]: + ... + + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], Command], Command]: + """A shortcut decorator for declaring and attaching a command to + the group. This takes the same arguments as :func:`command` and + immediately registers the created command with this group by + calling :meth:`add_command`. + + To customize the command class used, set the + :attr:`command_class` attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`command_class` attribute. + """ + from .decorators import command + + func: t.Optional[t.Callable[..., t.Any]] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'command(**kwargs)(callable)' to provide arguments." + (func,) = args + args = () + + if self.command_class and kwargs.get("cls") is None: + kwargs["cls"] = self.command_class + + def decorator(f: t.Callable[..., t.Any]) -> Command: + cmd: Command = command(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + @t.overload + def group(self, __func: t.Callable[..., t.Any]) -> "Group": + ... + + @t.overload + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], "Group"]: + ... + + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], "Group"], "Group"]: + """A shortcut decorator for declaring and attaching a group to + the group. This takes the same arguments as :func:`group` and + immediately registers the created group with this group by + calling :meth:`add_command`. + + To customize the group class used, set the :attr:`group_class` + attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`group_class` attribute. + """ + from .decorators import group + + func: t.Optional[t.Callable[..., t.Any]] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'group(**kwargs)(callable)' to provide arguments." 
+ (func,) = args + args = () + + if self.group_class is not None and kwargs.get("cls") is None: + if self.group_class is type: + kwargs["cls"] = type(self) + else: + kwargs["cls"] = self.group_class + + def decorator(f: t.Callable[..., t.Any]) -> "Group": + cmd: Group = group(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + return self.commands.get(cmd_name) + + def list_commands(self, ctx: Context) -> t.List[str]: + return sorted(self.commands) + + +class CommandCollection(MultiCommand): + """A command collection is a multi command that merges multiple multi + commands together into one. This is a straightforward implementation + that accepts a list of different multi commands as sources and + provides all the commands for each of them. + + See :class:`MultiCommand` and :class:`Command` for the description of + ``name`` and ``attrs``. + """ + + def __init__( + self, + name: t.Optional[str] = None, + sources: t.Optional[t.List[MultiCommand]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + #: The list of registered multi commands. + self.sources: t.List[MultiCommand] = sources or [] + + def add_source(self, multi_cmd: MultiCommand) -> None: + """Adds a new multi command to the chain dispatcher.""" + self.sources.append(multi_cmd) + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + for source in self.sources: + rv = source.get_command(ctx, cmd_name) + + if rv is not None: + if self.chain: + _check_multicommand(self, cmd_name, rv) + + return rv + + return None + + def list_commands(self, ctx: Context) -> t.List[str]: + rv: t.Set[str] = set() + + for source in self.sources: + rv.update(source.list_commands(ctx)) + + return sorted(rv) + + +def _check_iter(value: t.Any) -> t.Iterator[t.Any]: + """Check if the value is iterable but not a string. Raises a type + error, or return an iterator over the value. + """ + if isinstance(value, str): + raise TypeError + + return iter(value) + + +class Parameter: + r"""A parameter to a command comes in two versions: they are either + :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently + not supported by design as some of the internals for parsing are + intentionally not finalized. + + Some settings are supported by both options and arguments. + + :param param_decls: the parameter declarations for this option or + argument. This is a list of flags or argument + names. + :param type: the type that should be used. Either a :class:`ParamType` + or a Python type. The latter is converted into the former + automatically if supported. + :param required: controls if this is optional or not. + :param default: the default value if omitted. This can also be a callable, + in which case it's invoked when the default is needed + without any arguments. + :param callback: A function to further process or validate the value + after type conversion. It is called as ``f(ctx, param, value)`` + and must return the value. It is called for all sources, + including prompts. + :param nargs: the number of arguments to match. If not ``1`` the return + value is a tuple instead of single value. The default for + nargs is ``1`` (except if the type is a tuple, then it's + the arity of the tuple). If ``nargs=-1``, all remaining + parameters are collected. + :param metavar: how the value is represented in the help page. 
+ :param expose_value: if this is `True` then the value is passed onwards + to the command callback and stored on the context, + otherwise it's skipped. + :param is_eager: eager values are processed before non eager ones. This + should not be set for arguments or it will inverse the + order of processing. + :param envvar: a string or list of strings that are environment variables + that should be checked. + :param shell_complete: A function that returns custom shell + completions. Used instead of the param's type completion if + given. Takes ``ctx, param, incomplete`` and must return a list + of :class:`~click.shell_completion.CompletionItem` or a list of + strings. + + .. versionchanged:: 8.0 + ``process_value`` validates required parameters and bounded + ``nargs``, and invokes the parameter callback before returning + the value. This allows the callback to validate prompts. + ``full_process_value`` is removed. + + .. versionchanged:: 8.0 + ``autocompletion`` is renamed to ``shell_complete`` and has new + semantics described above. The old name is deprecated and will + be removed in 8.1, until then it will be wrapped to match the + new requirements. + + .. versionchanged:: 8.0 + For ``multiple=True, nargs>1``, the default must be a list of + tuples. + + .. versionchanged:: 8.0 + Setting a default is no longer required for ``nargs>1``, it will + default to ``None``. ``multiple=True`` or ``nargs=-1`` will + default to ``()``. + + .. versionchanged:: 7.1 + Empty environment variables are ignored rather than taking the + empty string value. This makes it possible for scripts to clear + variables if they can't unset them. + + .. versionchanged:: 2.0 + Changed signature for parameter callback to also be passed the + parameter. The old callback format will still work, but it will + raise a warning to give you a chance to migrate the code easier. + """ + + param_type_name = "parameter" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + required: bool = False, + default: t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]] = None, + callback: t.Optional[t.Callable[[Context, "Parameter", t.Any], t.Any]] = None, + nargs: t.Optional[int] = None, + multiple: bool = False, + metavar: t.Optional[str] = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: t.Optional[t.Union[str, t.Sequence[str]]] = None, + shell_complete: t.Optional[ + t.Callable[ + [Context, "Parameter", str], + t.Union[t.List["CompletionItem"], t.List[str]], + ] + ] = None, + ) -> None: + self.name: t.Optional[str] + self.opts: t.List[str] + self.secondary_opts: t.List[str] + self.name, self.opts, self.secondary_opts = self._parse_decls( + param_decls or (), expose_value + ) + self.type: types.ParamType = types.convert_type(type, default) + + # Default nargs to what the type tells us if we have that + # information available. + if nargs is None: + if self.type.is_composite: + nargs = self.type.arity + else: + nargs = 1 + + self.required = required + self.callback = callback + self.nargs = nargs + self.multiple = multiple + self.expose_value = expose_value + self.default = default + self.is_eager = is_eager + self.metavar = metavar + self.envvar = envvar + self._custom_shell_complete = shell_complete + + if __debug__: + if self.type.is_composite and nargs != self.type.arity: + raise ValueError( + f"'nargs' must be {self.type.arity} (or None) for" + f" type {self.type!r}, but it was {nargs}." 
+ ) + + # Skip no default or callable default. + check_default = default if not callable(default) else None + + if check_default is not None: + if multiple: + try: + # Only check the first value against nargs. + check_default = next(_check_iter(check_default), None) + except TypeError: + raise ValueError( + "'default' must be a list when 'multiple' is true." + ) from None + + # Can be None for multiple with empty default. + if nargs != 1 and check_default is not None: + try: + _check_iter(check_default) + except TypeError: + if multiple: + message = ( + "'default' must be a list of lists when 'multiple' is" + " true and 'nargs' != 1." + ) + else: + message = "'default' must be a list when 'nargs' != 1." + + raise ValueError(message) from None + + if nargs > 1 and len(check_default) != nargs: + subject = "item length" if multiple else "length" + raise ValueError( + f"'default' {subject} must match nargs={nargs}." + ) + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + return { + "name": self.name, + "param_type_name": self.param_type_name, + "opts": self.opts, + "secondary_opts": self.secondary_opts, + "type": self.type.to_info_dict(), + "required": self.required, + "nargs": self.nargs, + "multiple": self.multiple, + "default": self.default, + "envvar": self.envvar, + } + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + raise NotImplementedError() + + @property + def human_readable_name(self) -> str: + """Returns the human readable name of this parameter. This is the + same as the name for options, but the metavar for arguments. + """ + return self.name # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + + metavar = self.type.get_metavar(self) + + if metavar is None: + metavar = self.type.name.upper() + + if self.nargs != 1: + metavar += "..." + + return metavar + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + """Get the default for the parameter. Tries + :meth:`Context.lookup_default` first, then the local default. + + :param ctx: Current context. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0.2 + Type casting is no longer performed when getting a default. + + .. versionchanged:: 8.0.1 + Type casting can fail in resilient parsing mode. Invalid + defaults will not prevent showing help text. + + .. versionchanged:: 8.0 + Looks at ``ctx.default_map`` first. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. 
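A sketch of the default resolution described above: callable defaults are invoked lazily, and lookup_default() lets a default_map passed to the context win over the local default (hypothetical option name):

import click

@click.command()
@click.option("--port", default=lambda: 8080, show_default="(dynamic)")
def serve(port):
    click.echo(f"port={port}")

serve.main([], standalone_mode=False)                               # port=8080
serve.main([], default_map={"port": 9090}, standalone_mode=False)   # port=9090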
+ """ + value = ctx.lookup_default(self.name, call=False) # type: ignore + + if value is None: + value = self.default + + if call and callable(value): + value = value() + + return value + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + raise NotImplementedError() + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, t.Any] + ) -> t.Tuple[t.Any, ParameterSource]: + value = opts.get(self.name) # type: ignore + source = ParameterSource.COMMANDLINE + + if value is None: + value = self.value_from_envvar(ctx) + source = ParameterSource.ENVIRONMENT + + if value is None: + value = ctx.lookup_default(self.name) # type: ignore + source = ParameterSource.DEFAULT_MAP + + if value is None: + value = self.get_default(ctx) + source = ParameterSource.DEFAULT + + return value, source + + def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: + """Convert and validate a value against the option's + :attr:`type`, :attr:`multiple`, and :attr:`nargs`. + """ + if value is None: + return () if self.multiple or self.nargs == -1 else None + + def check_iter(value: t.Any) -> t.Iterator[t.Any]: + try: + return _check_iter(value) + except TypeError: + # This should only happen when passing in args manually, + # the parser should construct an iterable when parsing + # the command line. + raise BadParameter( + _("Value must be an iterable."), ctx=ctx, param=self + ) from None + + if self.nargs == 1 or self.type.is_composite: + + def convert(value: t.Any) -> t.Any: + return self.type(value, param=self, ctx=ctx) + + elif self.nargs == -1: + + def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] + return tuple(self.type(x, self, ctx) for x in check_iter(value)) + + else: # nargs > 1 + + def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] 
+ value = tuple(check_iter(value)) + + if len(value) != self.nargs: + raise BadParameter( + ngettext( + "Takes {nargs} values but 1 was given.", + "Takes {nargs} values but {len} were given.", + len(value), + ).format(nargs=self.nargs, len=len(value)), + ctx=ctx, + param=self, + ) + + return tuple(self.type(x, self, ctx) for x in value) + + if self.multiple: + return tuple(convert(x) for x in check_iter(value)) + + return convert(value) + + def value_is_missing(self, value: t.Any) -> bool: + if value is None: + return True + + if (self.nargs != 1 or self.multiple) and value == (): + return True + + return False + + def process_value(self, ctx: Context, value: t.Any) -> t.Any: + value = self.type_cast_value(ctx, value) + + if self.required and self.value_is_missing(value): + raise MissingParameter(ctx=ctx, param=self) + + if self.callback is not None: + value = self.callback(ctx, self, value) + + return value + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + if self.envvar is None: + return None + + if isinstance(self.envvar, str): + rv = os.environ.get(self.envvar) + + if rv: + return rv + else: + for envvar in self.envvar: + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is not None and self.nargs != 1: + rv = self.type.split_envvar_value(rv) + + return rv + + def handle_parse_result( + self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str] + ) -> t.Tuple[t.Any, t.List[str]]: + with augment_usage_errors(ctx, param=self): + value, source = self.consume_value(ctx, opts) + ctx.set_parameter_source(self.name, source) # type: ignore + + try: + value = self.process_value(ctx, value) + except Exception: + if not ctx.resilient_parsing: + raise + + value = None + + if self.expose_value: + ctx.params[self.name] = value # type: ignore + + return value, args + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + pass + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [] + + def get_error_hint(self, ctx: Context) -> str: + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return " / ".join(f"'{x}'" for x in hint_list) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. If a + ``shell_complete`` function was given during init, it is used. + Otherwise, the :attr:`type` + :meth:`~click.types.ParamType.shell_complete` function is used. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + if self._custom_shell_complete is not None: + results = self._custom_shell_complete(ctx, self, incomplete) + + if results and isinstance(results[0], str): + from click.shell_completion import CompletionItem + + results = [CompletionItem(c) for c in results] + + return t.cast(t.List["CompletionItem"], results) + + return self.type.shell_complete(ctx, self, incomplete) + + +class Option(Parameter): + """Options are usually optional values on the command line and + have some extra features that arguments don't have. + + All other parameters are passed onwards to the parameter constructor. + + :param show_default: Show the default value for this option in its + help text. 
Values are not shown by default, unless + :attr:`Context.show_default` is ``True``. If this value is a + string, it shows that string in parentheses instead of the + actual value. This is particularly useful for dynamic options. + For single option boolean flags, the default remains hidden if + its value is ``False``. + :param show_envvar: Controls if an environment variable should be + shown on the help page. Normally, environment variables are not + shown. + :param prompt: If set to ``True`` or a non empty string then the + user will be prompted for input. If set to ``True`` the prompt + will be the option name capitalized. + :param confirmation_prompt: Prompt a second time to confirm the + value if it was prompted for. Can be set to a string instead of + ``True`` to customize the message. + :param prompt_required: If set to ``False``, the user will be + prompted for input only when the option was specified as a flag + without a value. + :param hide_input: If this is ``True`` then the input on the prompt + will be hidden from the user. This is useful for password input. + :param is_flag: forces this option to act as a flag. The default is + auto detection. + :param flag_value: which value should be used for this flag if it's + enabled. This is set to a boolean automatically if + the option string contains a slash to mark two options. + :param multiple: if this is set to `True` then the argument is accepted + multiple times and recorded. This is similar to ``nargs`` + in how it works but supports arbitrary number of + arguments. + :param count: this flag makes an option increment an integer. + :param allow_from_autoenv: if this is enabled then the value of this + parameter will be pulled from an environment + variable in case a prefix is defined on the + context. + :param help: the help string. + :param hidden: hide this option from help outputs. + :param attrs: Other command arguments described in :class:`Parameter`. + + .. versionchanged:: 8.1.0 + Help text indentation is cleaned here instead of only in the + ``@option`` decorator. + + .. versionchanged:: 8.1.0 + The ``show_default`` parameter overrides + ``Context.show_default``. + + .. versionchanged:: 8.1.0 + The default of a single option boolean flag is not shown if the + default value is ``False``. + + .. versionchanged:: 8.0.1 + ``type`` is detected from ``flag_value`` if given. 
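The Option modes described in this docstring, paired boolean flags, prompting, and counting, combine as in this minimal sketch (hypothetical names):

import click

@click.command()
@click.option("--shout/--no-shout", default=False)   # paired boolean flag
@click.option("--name", prompt=True)                 # prompts if not given
@click.option("-v", "--verbose", count=True)         # -vvv -> verbose=3
def hello(shout, name, verbose):
    msg = f"hello {name} (verbosity={verbose})"
    click.echo(msg.upper() if shout else msg)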
+ """ + + param_type_name = "option" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + show_default: t.Union[bool, str, None] = None, + prompt: t.Union[bool, str] = False, + confirmation_prompt: t.Union[bool, str] = False, + prompt_required: bool = True, + hide_input: bool = False, + is_flag: t.Optional[bool] = None, + flag_value: t.Optional[t.Any] = None, + multiple: bool = False, + count: bool = False, + allow_from_autoenv: bool = True, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + help: t.Optional[str] = None, + hidden: bool = False, + show_choices: bool = True, + show_envvar: bool = False, + **attrs: t.Any, + ) -> None: + if help: + help = inspect.cleandoc(help) + + default_is_missing = "default" not in attrs + super().__init__(param_decls, type=type, multiple=multiple, **attrs) + + if prompt is True: + if self.name is None: + raise TypeError("'name' is required with 'prompt=True'.") + + prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize() + elif prompt is False: + prompt_text = None + else: + prompt_text = prompt + + self.prompt = prompt_text + self.confirmation_prompt = confirmation_prompt + self.prompt_required = prompt_required + self.hide_input = hide_input + self.hidden = hidden + + # If prompt is enabled but not required, then the option can be + # used as a flag to indicate using prompt or flag_value. + self._flag_needs_value = self.prompt is not None and not self.prompt_required + + if is_flag is None: + if flag_value is not None: + # Implicitly a flag because flag_value was set. + is_flag = True + elif self._flag_needs_value: + # Not a flag, but when used as a flag it shows a prompt. + is_flag = False + else: + # Implicitly a flag because flag options were given. + is_flag = bool(self.secondary_opts) + elif is_flag is False and not self._flag_needs_value: + # Not a flag, and prompt is not enabled, can be used as a + # flag if flag_value is set. + self._flag_needs_value = flag_value is not None + + self.default: t.Union[t.Any, t.Callable[[], t.Any]] + + if is_flag and default_is_missing and not self.required: + if multiple: + self.default = () + else: + self.default = False + + if flag_value is None: + flag_value = not self.default + + self.type: types.ParamType + if is_flag and type is None: + # Re-guess the type from the flag value instead of the + # default. + self.type = types.convert_type(None, flag_value) + + self.is_flag: bool = is_flag + self.is_bool_flag: bool = is_flag and isinstance(self.type, types.BoolParamType) + self.flag_value: t.Any = flag_value + + # Counting + self.count = count + if count: + if type is None: + self.type = types.IntRange(min=0) + if default_is_missing: + self.default = 0 + + self.allow_from_autoenv = allow_from_autoenv + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + + if __debug__: + if self.nargs == -1: + raise TypeError("nargs=-1 is not supported for options.") + + if self.prompt and self.is_flag and not self.is_bool_flag: + raise TypeError("'prompt' is not valid for non-boolean flag.") + + if not self.is_bool_flag and self.secondary_opts: + raise TypeError("Secondary flag is not valid for non-boolean flag.") + + if self.is_bool_flag and self.hide_input and self.prompt is not None: + raise TypeError( + "'prompt' with 'hide_input' is not valid for boolean flag." 
+ ) + + if self.count: + if self.multiple: + raise TypeError("'count' is not valid with 'multiple'.") + + if self.is_flag: + raise TypeError("'count' is not valid with 'is_flag'.") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + help=self.help, + prompt=self.prompt, + is_flag=self.is_flag, + flag_value=self.flag_value, + count=self.count, + hidden=self.hidden, + ) + return info_dict + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + opts = [] + secondary_opts = [] + name = None + possible_names = [] + + for decl in decls: + if decl.isidentifier(): + if name is not None: + raise TypeError(f"Name '{name}' defined twice") + name = decl + else: + split_char = ";" if decl[:1] == "/" else "/" + if split_char in decl: + first, second = decl.split(split_char, 1) + first = first.rstrip() + if first: + possible_names.append(split_opt(first)) + opts.append(first) + second = second.lstrip() + if second: + secondary_opts.append(second.lstrip()) + if first == second: + raise ValueError( + f"Boolean option {decl!r} cannot use the" + " same flag for true/false." + ) + else: + possible_names.append(split_opt(decl)) + opts.append(decl) + + if name is None and possible_names: + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace("-", "_").lower() + if not name.isidentifier(): + name = None + + if name is None: + if not expose_value: + return None, opts, secondary_opts + raise TypeError("Could not determine name for option") + + if not opts and not secondary_opts: + raise TypeError( + f"No options defined but a name was passed ({name})." + " Did you mean to declare an argument instead? Did" + f" you mean to pass '--{name}'?" 
+ ) + + return name, opts, secondary_opts + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + if self.multiple: + action = "append" + elif self.count: + action = "count" + else: + action = "store" + + if self.is_flag: + action = f"{action}_const" + + if self.is_bool_flag and self.secondary_opts: + parser.add_option( + obj=self, opts=self.opts, dest=self.name, action=action, const=True + ) + parser.add_option( + obj=self, + opts=self.secondary_opts, + dest=self.name, + action=action, + const=False, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + const=self.flag_value, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + nargs=self.nargs, + ) + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + if self.hidden: + return None + + any_prefix_is_slash = False + + def _write_opts(opts: t.Sequence[str]) -> str: + nonlocal any_prefix_is_slash + + rv, any_slashes = join_options(opts) + + if any_slashes: + any_prefix_is_slash = True + + if not self.is_flag and not self.count: + rv += f" {self.make_metavar()}" + + return rv + + rv = [_write_opts(self.opts)] + + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or "" + extra = [] + + if self.show_envvar: + envvar = self.envvar + + if envvar is None: + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + + if envvar is not None: + var_str = ( + envvar + if isinstance(envvar, str) + else ", ".join(str(d) for d in envvar) + ) + extra.append(_("env var: {var}").format(var=var_str)) + + # Temporarily enable resilient parsing to avoid type casting + # failing for the default. Might be possible to extend this to + # help formatting in general. + resilient = ctx.resilient_parsing + ctx.resilient_parsing = True + + try: + default_value = self.get_default(ctx, call=False) + finally: + ctx.resilient_parsing = resilient + + show_default = False + show_default_is_str = False + + if self.show_default is not None: + if isinstance(self.show_default, str): + show_default_is_str = show_default = True + else: + show_default = self.show_default + elif ctx.show_default is not None: + show_default = ctx.show_default + + if show_default_is_str or (show_default and (default_value is not None)): + if show_default_is_str: + default_string = f"({self.show_default})" + elif isinstance(default_value, (list, tuple)): + default_string = ", ".join(str(d) for d in default_value) + elif inspect.isfunction(default_value): + default_string = _("(dynamic)") + elif self.is_bool_flag and self.secondary_opts: + # For boolean flags that have distinct True/False opts, + # use the opt without prefix instead of the value. 
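+                # E.g. a ``--shout/--no-shout`` flag with ``default=False``
+                # renders as ``[default: no-shout]`` rather than
+                # ``[default: False]``.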
+ default_string = split_opt( + (self.opts if self.default else self.secondary_opts)[0] + )[1] + elif self.is_bool_flag and not self.secondary_opts and not default_value: + default_string = "" + else: + default_string = str(default_value) + + if default_string: + extra.append(_("default: {default}").format(default=default_string)) + + if ( + isinstance(self.type, types._NumberRangeBase) + # skip count with default range type + and not (self.count and self.type.min == 0 and self.type.max is None) + ): + range_str = self.type._describe_range() + + if range_str: + extra.append(range_str) + + if self.required: + extra.append(_("required")) + + if extra: + extra_str = "; ".join(extra) + help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" + + return ("; " if any_prefix_is_slash else " / ").join(rv), help + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + # If we're a non boolean flag our default is more complex because + # we need to look at all flags in the same group to figure out + # if we're the default one in which case we return the flag + # value as default. + if self.is_flag and not self.is_bool_flag: + for param in ctx.command.params: + if param.name == self.name and param.default: + return t.cast(Option, param).flag_value + + return None + + return super().get_default(ctx, call=call) + + def prompt_for_value(self, ctx: Context) -> t.Any: + """This is an alternative flow that can be activated in the full + value processing if a value does not exist. It will prompt the + user until a valid value exists and then returns the processed + value as result. + """ + assert self.prompt is not None + + # Calculate the default before prompting anything to be stable. + default = self.get_default(ctx) + + # If this is a prompt for a flag we need to handle this + # differently. + if self.is_bool_flag: + return confirm(self.prompt, default) + + return prompt( + self.prompt, + default=default, + type=self.type, + hide_input=self.hide_input, + show_choices=self.show_choices, + confirmation_prompt=self.confirmation_prompt, + value_proc=lambda x: self.process_value(ctx, x), + ) + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + rv = super().resolve_envvar_value(ctx) + + if rv is not None: + return rv + + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is None: + return None + + value_depth = (self.nargs != 1) + bool(self.multiple) + + if value_depth > 0: + rv = self.type.split_envvar_value(rv) + + if self.multiple and self.nargs != 1: + rv = batch(rv, self.nargs) + + return rv + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, "Parameter"] + ) -> t.Tuple[t.Any, ParameterSource]: + value, source = super().consume_value(ctx, opts) + + # The parser will emit a sentinel value if the option can be + # given as a flag without a value. This is different from None + # to distinguish from the flag not being given at all. 
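+        # E.g. ``@click.option("--name", prompt=True, prompt_required=False)``
+        # makes a bare ``--name`` emit the sentinel; the branch below then
+        # prompts for a value instead of erroring out.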
+ if value is _flag_needs_value: + if self.prompt is not None and not ctx.resilient_parsing: + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + else: + value = self.flag_value + source = ParameterSource.COMMANDLINE + + elif ( + self.multiple + and value is not None + and any(v is _flag_needs_value for v in value) + ): + value = [self.flag_value if v is _flag_needs_value else v for v in value] + source = ParameterSource.COMMANDLINE + + # The value wasn't set, or used the param's default, prompt if + # prompting is enabled. + elif ( + source in {None, ParameterSource.DEFAULT} + and self.prompt is not None + and (self.required or self.prompt_required) + and not ctx.resilient_parsing + ): + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + + return value, source + + +class Argument(Parameter): + """Arguments are positional parameters to a command. They generally + provide fewer features than options but can have infinite ``nargs`` + and are required by default. + + All parameters are passed onwards to the constructor of :class:`Parameter`. + """ + + param_type_name = "argument" + + def __init__( + self, + param_decls: t.Sequence[str], + required: t.Optional[bool] = None, + **attrs: t.Any, + ) -> None: + if required is None: + if attrs.get("default") is not None: + required = False + else: + required = attrs.get("nargs", 1) > 0 + + if "multiple" in attrs: + raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") + + super().__init__(param_decls, required=required, **attrs) + + if __debug__: + if self.default is not None and self.nargs == -1: + raise TypeError("'default' is not supported for nargs=-1.") + + @property + def human_readable_name(self) -> str: + if self.metavar is not None: + return self.metavar + return self.name.upper() # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + var = self.type.get_metavar(self) + if not var: + var = self.name.upper() # type: ignore + if not self.required: + var = f"[{var}]" + if self.nargs != 1: + var += "..." + return var + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + if not decls: + if not expose_value: + return None, [], [] + raise TypeError("Could not determine name for argument") + if len(decls) == 1: + name = arg = decls[0] + name = name.replace("-", "_").lower() + else: + raise TypeError( + "Arguments take exactly one parameter declaration, got" + f" {len(decls)}." 
+ ) + return name, [arg], [] + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [self.make_metavar()] + + def get_error_hint(self, ctx: Context) -> str: + return f"'{self.make_metavar()}'" + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/.venv/lib/python3.12/site-packages/click/decorators.py b/.venv/lib/python3.12/site-packages/click/decorators.py new file mode 100644 index 00000000..d9bba950 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/decorators.py @@ -0,0 +1,561 @@ +import inspect +import types +import typing as t +from functools import update_wrapper +from gettext import gettext as _ + +from .core import Argument +from .core import Command +from .core import Context +from .core import Group +from .core import Option +from .core import Parameter +from .globals import get_current_context +from .utils import echo + +if t.TYPE_CHECKING: + import typing_extensions as te + + P = te.ParamSpec("P") + +R = t.TypeVar("R") +T = t.TypeVar("T") +_AnyCallable = t.Callable[..., t.Any] +FC = t.TypeVar("FC", bound=t.Union[_AnyCallable, Command]) + + +def pass_context(f: "t.Callable[te.Concatenate[Context, P], R]") -> "t.Callable[P, R]": + """Marks a callback as wanting to receive the current context + object as first argument. + """ + + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + return f(get_current_context(), *args, **kwargs) + + return update_wrapper(new_func, f) + + +def pass_obj(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": + """Similar to :func:`pass_context`, but only pass the object on the + context onwards (:attr:`Context.obj`). This is useful if that object + represents the state of a nested system. + """ + + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + return f(get_current_context().obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + +def make_pass_decorator( + object_type: t.Type[T], ensure: bool = False +) -> t.Callable[["t.Callable[te.Concatenate[T, P], R]"], "t.Callable[P, R]"]: + """Given an object type this creates a decorator that will work + similar to :func:`pass_obj` but instead of passing the object of the + current context, it will find the innermost context of type + :func:`object_type`. + + This generates a decorator that works roughly like this:: + + from functools import update_wrapper + + def decorator(f): + @pass_context + def new_func(ctx, *args, **kwargs): + obj = ctx.find_object(object_type) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + :param object_type: the type of the object to pass. + :param ensure: if set to `True`, a new object will be created and + remembered on the context if it's not there yet. + """ + + def decorator(f: "t.Callable[te.Concatenate[T, P], R]") -> "t.Callable[P, R]": + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + ctx = get_current_context() + + obj: t.Optional[T] + if ensure: + obj = ctx.ensure_object(object_type) + else: + obj = ctx.find_object(object_type) + + if obj is None: + raise RuntimeError( + "Managed to invoke callback without a context" + f" object of type {object_type.__name__!r}" + " existing." 
+ ) + + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + return decorator # type: ignore[return-value] + + +def pass_meta_key( + key: str, *, doc_description: t.Optional[str] = None +) -> "t.Callable[[t.Callable[te.Concatenate[t.Any, P], R]], t.Callable[P, R]]": + """Create a decorator that passes a key from + :attr:`click.Context.meta` as the first argument to the decorated + function. + + :param key: Key in ``Context.meta`` to pass. + :param doc_description: Description of the object being passed, + inserted into the decorator's docstring. Defaults to "the 'key' + key from Context.meta". + + .. versionadded:: 8.0 + """ + + def decorator(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> R: + ctx = get_current_context() + obj = ctx.meta[key] + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + if doc_description is None: + doc_description = f"the {key!r} key from :attr:`click.Context.meta`" + + decorator.__doc__ = ( + f"Decorator that passes {doc_description} as the first argument" + " to the decorated function." + ) + return decorator # type: ignore[return-value] + + +CmdType = t.TypeVar("CmdType", bound=Command) + + +# variant: no call, directly as decorator for a function. +@t.overload +def command(name: _AnyCallable) -> Command: + ... + + +# variant: with positional name and with positional or keyword cls argument: +# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...) +@t.overload +def command( + name: t.Optional[str], + cls: t.Type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: + ... + + +# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...) +@t.overload +def command( + name: None = None, + *, + cls: t.Type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: + ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def command( + name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Command]: + ... + + +def command( + name: t.Union[t.Optional[str], _AnyCallable] = None, + cls: t.Optional[t.Type[CmdType]] = None, + **attrs: t.Any, +) -> t.Union[Command, t.Callable[[_AnyCallable], t.Union[Command, CmdType]]]: + r"""Creates a new :class:`Command` and uses the decorated function as + callback. This will also automatically attach all decorated + :func:`option`\s and :func:`argument`\s as parameters to the command. + + The name of the command defaults to the name of the function with + underscores replaced by dashes. If you want to change that, you can + pass the intended name as the first argument. + + All keyword arguments are forwarded to the underlying command class. + For the ``params`` argument, any decorated params are appended to + the end of the list. + + Once decorated the function turns into a :class:`Command` instance + that can be invoked as a command line utility or be attached to a + command :class:`Group`. + + :param name: the name of the command. This defaults to the function + name with underscores replaced by dashes. + :param cls: the command class to instantiate. This defaults to + :class:`Command`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.1 + The ``params`` argument can be used. Decorated params are + appended to the end of the list. 
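+
+    A minimal usage sketch (the ``hello`` command below is illustrative,
+    not part of Click itself)::
+
+        import click
+
+        @click.command()
+        @click.option("--count", default=1, help="Number of greetings.")
+        def hello(count):
+            for _ in range(count):
+                click.echo("Hello!")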
+ """ + + func: t.Optional[t.Callable[[_AnyCallable], t.Any]] = None + + if callable(name): + func = name + name = None + assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." + assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." + + if cls is None: + cls = t.cast(t.Type[CmdType], Command) + + def decorator(f: _AnyCallable) -> CmdType: + if isinstance(f, Command): + raise TypeError("Attempted to convert a callback into a command twice.") + + attr_params = attrs.pop("params", None) + params = attr_params if attr_params is not None else [] + + try: + decorator_params = f.__click_params__ # type: ignore + except AttributeError: + pass + else: + del f.__click_params__ # type: ignore + params.extend(reversed(decorator_params)) + + if attrs.get("help") is None: + attrs["help"] = f.__doc__ + + if t.TYPE_CHECKING: + assert cls is not None + assert not callable(name) + + cmd = cls( + name=name or f.__name__.lower().replace("_", "-"), + callback=f, + params=params, + **attrs, + ) + cmd.__doc__ = f.__doc__ + return cmd + + if func is not None: + return decorator(func) + + return decorator + + +GrpType = t.TypeVar("GrpType", bound=Group) + + +# variant: no call, directly as decorator for a function. +@t.overload +def group(name: _AnyCallable) -> Group: + ... + + +# variant: with positional name and with positional or keyword cls argument: +# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...) +@t.overload +def group( + name: t.Optional[str], + cls: t.Type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: + ... + + +# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...) +@t.overload +def group( + name: None = None, + *, + cls: t.Type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: + ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def group( + name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Group]: + ... + + +def group( + name: t.Union[str, _AnyCallable, None] = None, + cls: t.Optional[t.Type[GrpType]] = None, + **attrs: t.Any, +) -> t.Union[Group, t.Callable[[_AnyCallable], t.Union[Group, GrpType]]]: + """Creates a new :class:`Group` with a function as callback. This + works otherwise the same as :func:`command` just that the `cls` + parameter is set to :class:`Group`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + """ + if cls is None: + cls = t.cast(t.Type[GrpType], Group) + + if callable(name): + return command(cls=cls, **attrs)(name) + + return command(name, cls, **attrs) + + +def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None: + if isinstance(f, Command): + f.params.append(param) + else: + if not hasattr(f, "__click_params__"): + f.__click_params__ = [] # type: ignore + + f.__click_params__.append(param) # type: ignore + + +def argument( + *param_decls: str, cls: t.Optional[t.Type[Argument]] = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an argument to the command. All positional arguments are + passed as parameter declarations to :class:`Argument`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Argument` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default argument class, refer to :class:`Argument` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the argument class to instantiate. 
This defaults to + :class:`Argument`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Argument + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def option( + *param_decls: str, cls: t.Optional[t.Type[Option]] = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an option to the command. All positional arguments are + passed as parameter declarations to :class:`Option`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Option` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default option class, refer to :class:`Option` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the option class to instantiate. This defaults to + :class:`Option`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Option + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--yes`` option which shows a prompt before continuing if + not passed. If the prompt is declined, the program will exit. + + :param param_decls: One or more option names. Defaults to the single + value ``"--yes"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value: + ctx.abort() + + if not param_decls: + param_decls = ("--yes",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("callback", callback) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("prompt", "Do you want to continue?") + kwargs.setdefault("help", "Confirm the action without prompting.") + return option(*param_decls, **kwargs) + + +def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--password`` option which prompts for a password, hiding + input and asking to enter the value again for confirmation. + + :param param_decls: One or more option names. Defaults to the single + value ``"--password"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + if not param_decls: + param_decls = ("--password",) + + kwargs.setdefault("prompt", True) + kwargs.setdefault("confirmation_prompt", True) + kwargs.setdefault("hide_input", True) + return option(*param_decls, **kwargs) + + +def version_option( + version: t.Optional[str] = None, + *param_decls: str, + package_name: t.Optional[str] = None, + prog_name: t.Optional[str] = None, + message: t.Optional[str] = None, + **kwargs: t.Any, +) -> t.Callable[[FC], FC]: + """Add a ``--version`` option which immediately prints the version + number and exits the program. + + If ``version`` is not provided, Click will try to detect it using + :func:`importlib.metadata.version` to get the version for the + ``package_name``. On Python < 3.8, the ``importlib_metadata`` + backport must be installed. + + If ``package_name`` is not provided, Click will try to detect it by + inspecting the stack frames. This will be used to detect the + version, so it must match the name of the installed package. + + :param version: The version number to show. 
If not provided, Click + will try to detect it. + :param param_decls: One or more option names. Defaults to the single + value ``"--version"``. + :param package_name: The package name to detect the version from. If + not provided, Click will try to detect it. + :param prog_name: The name of the CLI to show in the message. If not + provided, it will be detected from the command. + :param message: The message to show. The values ``%(prog)s``, + ``%(package)s``, and ``%(version)s`` are available. Defaults to + ``"%(prog)s, version %(version)s"``. + :param kwargs: Extra arguments are passed to :func:`option`. + :raise RuntimeError: ``version`` could not be detected. + + .. versionchanged:: 8.0 + Add the ``package_name`` parameter, and the ``%(package)s`` + value for messages. + + .. versionchanged:: 8.0 + Use :mod:`importlib.metadata` instead of ``pkg_resources``. The + version is detected based on the package name, not the entry + point name. The Python package name must match the installed + package name, or be passed with ``package_name=``. + """ + if message is None: + message = _("%(prog)s, version %(version)s") + + if version is None and package_name is None: + frame = inspect.currentframe() + f_back = frame.f_back if frame is not None else None + f_globals = f_back.f_globals if f_back is not None else None + # break reference cycle + # https://docs.python.org/3/library/inspect.html#the-interpreter-stack + del frame + + if f_globals is not None: + package_name = f_globals.get("__name__") + + if package_name == "__main__": + package_name = f_globals.get("__package__") + + if package_name: + package_name = package_name.partition(".")[0] + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + nonlocal prog_name + nonlocal version + + if prog_name is None: + prog_name = ctx.find_root().info_name + + if version is None and package_name is not None: + metadata: t.Optional[types.ModuleType] + + try: + from importlib import metadata # type: ignore + except ImportError: + # Python < 3.8 + import importlib_metadata as metadata # type: ignore + + try: + version = metadata.version(package_name) # type: ignore + except metadata.PackageNotFoundError: # type: ignore + raise RuntimeError( + f"{package_name!r} is not installed. Try passing" + " 'package_name' instead." + ) from None + + if version is None: + raise RuntimeError( + f"Could not determine the version for {package_name!r} automatically." + ) + + echo( + message % {"prog": prog_name, "package": package_name, "version": version}, + color=ctx.color, + ) + ctx.exit() + + if not param_decls: + param_decls = ("--version",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show the version and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) + + +def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--help`` option which immediately prints the help page + and exits the program. + + This is usually unnecessary, as the ``--help`` option is added to + each command automatically unless ``add_help_option=False`` is + passed. + + :param param_decls: One or more option names. Defaults to the single + value ``"--help"``. + :param kwargs: Extra arguments are passed to :func:`option`. 
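+
+    A sketch of renaming the flag (illustrative only)::
+
+        @click.command(add_help_option=False)
+        @click.help_option("-h", "--help")
+        def cli():
+            pass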
+ """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + if not param_decls: + param_decls = ("--help",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show this message and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) diff --git a/.venv/lib/python3.12/site-packages/click/exceptions.py b/.venv/lib/python3.12/site-packages/click/exceptions.py new file mode 100644 index 00000000..fe68a361 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/exceptions.py @@ -0,0 +1,288 @@ +import typing as t +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import get_text_stderr +from .utils import echo +from .utils import format_filename + +if t.TYPE_CHECKING: + from .core import Command + from .core import Context + from .core import Parameter + + +def _join_param_hints( + param_hint: t.Optional[t.Union[t.Sequence[str], str]] +) -> t.Optional[str]: + if param_hint is not None and not isinstance(param_hint, str): + return " / ".join(repr(x) for x in param_hint) + + return param_hint + + +class ClickException(Exception): + """An exception that Click can handle and show to the user.""" + + #: The exit code for this exception. + exit_code = 1 + + def __init__(self, message: str) -> None: + super().__init__(message) + self.message = message + + def format_message(self) -> str: + return self.message + + def __str__(self) -> str: + return self.message + + def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: + if file is None: + file = get_text_stderr() + + echo(_("Error: {message}").format(message=self.format_message()), file=file) + + +class UsageError(ClickException): + """An internal exception that signals a usage error. This typically + aborts any further handling. + + :param message: the error message to display. + :param ctx: optionally the context that caused this error. Click will + fill in the context automatically in some situations. + """ + + exit_code = 2 + + def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None: + super().__init__(message) + self.ctx = ctx + self.cmd: t.Optional["Command"] = self.ctx.command if self.ctx else None + + def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: + if file is None: + file = get_text_stderr() + color = None + hint = "" + if ( + self.ctx is not None + and self.ctx.command.get_help_option(self.ctx) is not None + ): + hint = _("Try '{command} {option}' for help.").format( + command=self.ctx.command_path, option=self.ctx.help_option_names[0] + ) + hint = f"{hint}\n" + if self.ctx is not None: + color = self.ctx.color + echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) + echo( + _("Error: {message}").format(message=self.format_message()), + file=file, + color=color, + ) + + +class BadParameter(UsageError): + """An exception that formats out a standardized error message for a + bad parameter. This is useful when thrown from a callback or type as + Click will attach contextual information to it (for instance, which + parameter it is). + + .. versionadded:: 2.0 + + :param param: the parameter object that caused this error. This can + be left out, and Click will attach this info itself + if possible. + :param param_hint: a string that shows up as parameter name. 
This + can be used as alternative to `param` in cases + where custom validation should happen. If it is + a string it's used as such, if it's a list then + each item is quoted and separated. + """ + + def __init__( + self, + message: str, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + ) -> None: + super().__init__(message, ctx) + self.param = param + self.param_hint = param_hint + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + return _("Invalid value: {message}").format(message=self.message) + + return _("Invalid value for {param_hint}: {message}").format( + param_hint=_join_param_hints(param_hint), message=self.message + ) + + +class MissingParameter(BadParameter): + """Raised if click required an option or argument but it was not + provided when invoking the script. + + .. versionadded:: 4.0 + + :param param_type: a string that indicates the type of the parameter. + The default is to inherit the parameter type from + the given `param`. Valid values are ``'parameter'``, + ``'option'`` or ``'argument'``. + """ + + def __init__( + self, + message: t.Optional[str] = None, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + param_type: t.Optional[str] = None, + ) -> None: + super().__init__(message or "", ctx, param, param_hint) + self.param_type = param_type + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint: t.Optional[str] = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + param_hint = None + + param_hint = _join_param_hints(param_hint) + param_hint = f" {param_hint}" if param_hint else "" + + param_type = self.param_type + if param_type is None and self.param is not None: + param_type = self.param.param_type_name + + msg = self.message + if self.param is not None: + msg_extra = self.param.type.get_missing_message(self.param) + if msg_extra: + if msg: + msg += f". {msg_extra}" + else: + msg = msg_extra + + msg = f" {msg}" if msg else "" + + # Translate param_type for known types. + if param_type == "argument": + missing = _("Missing argument") + elif param_type == "option": + missing = _("Missing option") + elif param_type == "parameter": + missing = _("Missing parameter") + else: + missing = _("Missing {param_type}").format(param_type=param_type) + + return f"{missing}{param_hint}.{msg}" + + def __str__(self) -> str: + if not self.message: + param_name = self.param.name if self.param else None + return _("Missing parameter: {param_name}").format(param_name=param_name) + else: + return self.message + + +class NoSuchOption(UsageError): + """Raised if click attempted to handle an option that does not + exist. + + .. 
versionadded:: 4.0 + """ + + def __init__( + self, + option_name: str, + message: t.Optional[str] = None, + possibilities: t.Optional[t.Sequence[str]] = None, + ctx: t.Optional["Context"] = None, + ) -> None: + if message is None: + message = _("No such option: {name}").format(name=option_name) + + super().__init__(message, ctx) + self.option_name = option_name + self.possibilities = possibilities + + def format_message(self) -> str: + if not self.possibilities: + return self.message + + possibility_str = ", ".join(sorted(self.possibilities)) + suggest = ngettext( + "Did you mean {possibility}?", + "(Possible options: {possibilities})", + len(self.possibilities), + ).format(possibility=possibility_str, possibilities=possibility_str) + return f"{self.message} {suggest}" + + +class BadOptionUsage(UsageError): + """Raised if an option is generally supplied but the use of the option + was incorrect. This is for instance raised if the number of arguments + for an option is not correct. + + .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. + """ + + def __init__( + self, option_name: str, message: str, ctx: t.Optional["Context"] = None + ) -> None: + super().__init__(message, ctx) + self.option_name = option_name + + +class BadArgumentUsage(UsageError): + """Raised if an argument is generally supplied but the use of the argument + was incorrect. This is for instance raised if the number of values + for an argument is not correct. + + .. versionadded:: 6.0 + """ + + +class FileError(ClickException): + """Raised if a file cannot be opened.""" + + def __init__(self, filename: str, hint: t.Optional[str] = None) -> None: + if hint is None: + hint = _("unknown error") + + super().__init__(hint) + self.ui_filename: str = format_filename(filename) + self.filename = filename + + def format_message(self) -> str: + return _("Could not open file {filename!r}: {message}").format( + filename=self.ui_filename, message=self.message + ) + + +class Abort(RuntimeError): + """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. + """ + + __slots__ = ("exit_code",) + + def __init__(self, code: int = 0) -> None: + self.exit_code: int = code diff --git a/.venv/lib/python3.12/site-packages/click/formatting.py b/.venv/lib/python3.12/site-packages/click/formatting.py new file mode 100644 index 00000000..ddd2a2f8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/formatting.py @@ -0,0 +1,301 @@ +import typing as t +from contextlib import contextmanager +from gettext import gettext as _ + +from ._compat import term_len +from .parser import split_opt + +# Can force a width. 
This is used by the test system +FORCED_WIDTH: t.Optional[int] = None + + +def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]: + widths: t.Dict[int, int] = {} + + for row in rows: + for idx, col in enumerate(row): + widths[idx] = max(widths.get(idx, 0), term_len(col)) + + return tuple(y for x, y in sorted(widths.items())) + + +def iter_rows( + rows: t.Iterable[t.Tuple[str, str]], col_count: int +) -> t.Iterator[t.Tuple[str, ...]]: + for row in rows: + yield row + ("",) * (col_count - len(row)) + + +def wrap_text( + text: str, + width: int = 78, + initial_indent: str = "", + subsequent_indent: str = "", + preserve_paragraphs: bool = False, +) -> str: + """A helper function that intelligently wraps text. By default, it + assumes that it operates on a single paragraph of text but if the + `preserve_paragraphs` parameter is provided it will intelligently + handle paragraphs (defined by two empty lines). + + If paragraphs are handled, a paragraph can be prefixed with an empty + line containing the ``\\b`` character (``\\x08``) to indicate that + no rewrapping should happen in that block. + + :param text: the text that should be rewrapped. + :param width: the maximum width for the text. + :param initial_indent: the initial indent that should be placed on the + first line as a string. + :param subsequent_indent: the indent string that should be placed on + each consecutive line. + :param preserve_paragraphs: if this flag is set then the wrapping will + intelligently handle paragraphs. + """ + from ._textwrap import TextWrapper + + text = text.expandtabs() + wrapper = TextWrapper( + width, + initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + replace_whitespace=False, + ) + if not preserve_paragraphs: + return wrapper.fill(text) + + p: t.List[t.Tuple[int, bool, str]] = [] + buf: t.List[str] = [] + indent = None + + def _flush_par() -> None: + if not buf: + return + if buf[0].strip() == "\b": + p.append((indent or 0, True, "\n".join(buf[1:]))) + else: + p.append((indent or 0, False, " ".join(buf))) + del buf[:] + + for line in text.splitlines(): + if not line: + _flush_par() + indent = None + else: + if indent is None: + orig_len = term_len(line) + line = line.lstrip() + indent = orig_len - term_len(line) + buf.append(line) + _flush_par() + + rv = [] + for indent, raw, text in p: + with wrapper.extra_indent(" " * indent): + if raw: + rv.append(wrapper.indent_only(text)) + else: + rv.append(wrapper.fill(text)) + + return "\n\n".join(rv) + + +class HelpFormatter: + """This class helps with formatting text-based help pages. It's + usually just needed for very special internal cases, but it's also + exposed so that developers can write their own fancy outputs. + + At present, it always writes into memory. + + :param indent_increment: the additional increment for each level. + :param width: the width for the text. This defaults to the terminal + width clamped to a maximum of 78. 
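+
+    A small sketch of driving the formatter directly (Click normally
+    does this for you)::
+
+        formatter = HelpFormatter(width=60)
+        formatter.write_usage("tool", "[OPTIONS] FILE")
+        print(formatter.getvalue())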
+ """ + + def __init__( + self, + indent_increment: int = 2, + width: t.Optional[int] = None, + max_width: t.Optional[int] = None, + ) -> None: + import shutil + + self.indent_increment = indent_increment + if max_width is None: + max_width = 80 + if width is None: + width = FORCED_WIDTH + if width is None: + width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) + self.width = width + self.current_indent = 0 + self.buffer: t.List[str] = [] + + def write(self, string: str) -> None: + """Writes a unicode string into the internal buffer.""" + self.buffer.append(string) + + def indent(self) -> None: + """Increases the indentation.""" + self.current_indent += self.indent_increment + + def dedent(self) -> None: + """Decreases the indentation.""" + self.current_indent -= self.indent_increment + + def write_usage( + self, prog: str, args: str = "", prefix: t.Optional[str] = None + ) -> None: + """Writes a usage line into the buffer. + + :param prog: the program name. + :param args: whitespace separated list of arguments. + :param prefix: The prefix for the first line. Defaults to + ``"Usage: "``. + """ + if prefix is None: + prefix = f"{_('Usage:')} " + + usage_prefix = f"{prefix:>{self.current_indent}}{prog} " + text_width = self.width - self.current_indent + + if text_width >= (term_len(usage_prefix) + 20): + # The arguments will fit to the right of the prefix. + indent = " " * term_len(usage_prefix) + self.write( + wrap_text( + args, + text_width, + initial_indent=usage_prefix, + subsequent_indent=indent, + ) + ) + else: + # The prefix is too long, put the arguments on the next line. + self.write(usage_prefix) + self.write("\n") + indent = " " * (max(self.current_indent, term_len(prefix)) + 4) + self.write( + wrap_text( + args, text_width, initial_indent=indent, subsequent_indent=indent + ) + ) + + self.write("\n") + + def write_heading(self, heading: str) -> None: + """Writes a heading into the buffer.""" + self.write(f"{'':>{self.current_indent}}{heading}:\n") + + def write_paragraph(self) -> None: + """Writes a paragraph into the buffer.""" + if self.buffer: + self.write("\n") + + def write_text(self, text: str) -> None: + """Writes re-indented text into the buffer. This rewraps and + preserves paragraphs. + """ + indent = " " * self.current_indent + self.write( + wrap_text( + text, + self.width, + initial_indent=indent, + subsequent_indent=indent, + preserve_paragraphs=True, + ) + ) + self.write("\n") + + def write_dl( + self, + rows: t.Sequence[t.Tuple[str, str]], + col_max: int = 30, + col_spacing: int = 2, + ) -> None: + """Writes a definition list into the buffer. This is how options + and commands are usually formatted. + + :param rows: a list of two item tuples for the terms and values. + :param col_max: the maximum width of the first column. + :param col_spacing: the number of spaces between the first and + second column. 
+ """ + rows = list(rows) + widths = measure_table(rows) + if len(widths) != 2: + raise TypeError("Expected two columns for definition list") + + first_col = min(widths[0], col_max) + col_spacing + + for first, second in iter_rows(rows, len(widths)): + self.write(f"{'':>{self.current_indent}}{first}") + if not second: + self.write("\n") + continue + if term_len(first) <= first_col - col_spacing: + self.write(" " * (first_col - term_len(first))) + else: + self.write("\n") + self.write(" " * (first_col + self.current_indent)) + + text_width = max(self.width - first_col - 2, 10) + wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) + lines = wrapped_text.splitlines() + + if lines: + self.write(f"{lines[0]}\n") + + for line in lines[1:]: + self.write(f"{'':>{first_col + self.current_indent}}{line}\n") + else: + self.write("\n") + + @contextmanager + def section(self, name: str) -> t.Iterator[None]: + """Helpful context manager that writes a paragraph, a heading, + and the indents. + + :param name: the section name that is written as heading. + """ + self.write_paragraph() + self.write_heading(name) + self.indent() + try: + yield + finally: + self.dedent() + + @contextmanager + def indentation(self) -> t.Iterator[None]: + """A context manager that increases the indentation.""" + self.indent() + try: + yield + finally: + self.dedent() + + def getvalue(self) -> str: + """Returns the buffer contents.""" + return "".join(self.buffer) + + +def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]: + """Given a list of option strings this joins them in the most appropriate + way and returns them in the form ``(formatted_string, + any_prefix_is_slash)`` where the second item in the tuple is a flag that + indicates if any of the option prefixes was a slash. + """ + rv = [] + any_prefix_is_slash = False + + for opt in options: + prefix = split_opt(opt)[0] + + if prefix == "/": + any_prefix_is_slash = True + + rv.append((len(prefix), opt)) + + rv.sort(key=lambda x: x[0]) + return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/.venv/lib/python3.12/site-packages/click/globals.py b/.venv/lib/python3.12/site-packages/click/globals.py new file mode 100644 index 00000000..480058f1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/globals.py @@ -0,0 +1,68 @@ +import typing as t +from threading import local + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + +_local = local() + + +@t.overload +def get_current_context(silent: "te.Literal[False]" = False) -> "Context": + ... + + +@t.overload +def get_current_context(silent: bool = ...) -> t.Optional["Context"]: + ... + + +def get_current_context(silent: bool = False) -> t.Optional["Context"]: + """Returns the current click context. This can be used as a way to + access the current context object from anywhere. This is a more implicit + alternative to the :func:`pass_context` decorator. This function is + primarily useful for helpers such as :func:`echo` which might be + interested in changing its behavior based on the current context. + + To push the current context, :meth:`Context.scope` can be used. + + .. versionadded:: 5.0 + + :param silent: if set to `True` the return value is `None` if no context + is available. The default behavior is to raise a + :exc:`RuntimeError`. 
+ """ + try: + return t.cast("Context", _local.stack[-1]) + except (AttributeError, IndexError) as e: + if not silent: + raise RuntimeError("There is no active click context.") from e + + return None + + +def push_context(ctx: "Context") -> None: + """Pushes a new context to the current stack.""" + _local.__dict__.setdefault("stack", []).append(ctx) + + +def pop_context() -> None: + """Removes the top level from the stack.""" + _local.stack.pop() + + +def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]: + """Internal helper to get the default value of the color flag. If a + value is passed it's returned unchanged, otherwise it's looked up from + the current context. + """ + if color is not None: + return color + + ctx = get_current_context(silent=True) + + if ctx is not None: + return ctx.color + + return None diff --git a/.venv/lib/python3.12/site-packages/click/parser.py b/.venv/lib/python3.12/site-packages/click/parser.py new file mode 100644 index 00000000..5fa7adfa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/parser.py @@ -0,0 +1,529 @@ +""" +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). + +The plan is to remove more and more from here over time. + +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. + +Click uses parts of optparse written by Gregory P. Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright 2001-2006 Gregory P. Ward. All rights reserved. +Copyright 2002-2006 Python Software Foundation. All rights reserved. +""" +# This code uses parts of optparse written by Gregory P. Ward and +# maintained by the Python Software Foundation. +# Copyright 2001-2006 Gregory P. Ward +# Copyright 2002-2006 Python Software Foundation +import typing as t +from collections import deque +from gettext import gettext as _ +from gettext import ngettext + +from .exceptions import BadArgumentUsage +from .exceptions import BadOptionUsage +from .exceptions import NoSuchOption +from .exceptions import UsageError + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Argument as CoreArgument + from .core import Context + from .core import Option as CoreOption + from .core import Parameter as CoreParameter + +V = t.TypeVar("V") + +# Sentinel value that indicates an option was passed as a flag without a +# value but is not a flag option. Option.consume_value uses this to +# prompt or use the flag_value. +_flag_needs_value = object() + + +def _unpack_args( + args: t.Sequence[str], nargs_spec: t.Sequence[int] +) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]: + """Given an iterable of arguments and an iterable of nargs specifications, + it returns a tuple with all the unpacked arguments at the first index + and all remaining arguments as the second. + + The nargs specification is the number of arguments that should be consumed + or `-1` to indicate that this position should eat up all the remainders. + + Missing items are filled with `None`. 
+ """ + args = deque(args) + nargs_spec = deque(nargs_spec) + rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = [] + spos: t.Optional[int] = None + + def _fetch(c: "te.Deque[V]") -> t.Optional[V]: + try: + if spos is None: + return c.popleft() + else: + return c.pop() + except IndexError: + return None + + while nargs_spec: + nargs = _fetch(nargs_spec) + + if nargs is None: + continue + + if nargs == 1: + rv.append(_fetch(args)) + elif nargs > 1: + x = [_fetch(args) for _ in range(nargs)] + + # If we're reversed, we're pulling in the arguments in reverse, + # so we need to turn them around. + if spos is not None: + x.reverse() + + rv.append(tuple(x)) + elif nargs < 0: + if spos is not None: + raise TypeError("Cannot have two nargs < 0") + + spos = len(rv) + rv.append(None) + + # spos is the position of the wildcard (star). If it's not `None`, + # we fill it with the remainder. + if spos is not None: + rv[spos] = tuple(args) + args = [] + rv[spos + 1 :] = reversed(rv[spos + 1 :]) + + return tuple(rv), list(args) + + +def split_opt(opt: str) -> t.Tuple[str, str]: + first = opt[:1] + if first.isalnum(): + return "", opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str: + if ctx is None or ctx.token_normalize_func is None: + return opt + prefix, opt = split_opt(opt) + return f"{prefix}{ctx.token_normalize_func(opt)}" + + +def split_arg_string(string: str) -> t.List[str]: + """Split an argument string as with :func:`shlex.split`, but don't + fail if the string is incomplete. Ignores a missing closing quote or + incomplete escape sequence and uses the partial token as-is. + + .. code-block:: python + + split_arg_string("example 'my file") + ["example", "my file"] + + split_arg_string("example my\\") + ["example", "my"] + + :param string: String to split. + """ + import shlex + + lex = shlex.shlex(string, posix=True) + lex.whitespace_split = True + lex.commenters = "" + out = [] + + try: + for token in lex: + out.append(token) + except ValueError: + # Raised when end-of-string is reached in an invalid state. Use + # the partial token as-is. The quote or escape character is in + # lex.state, not lex.token. 
+ out.append(lex.token) + + return out + + +class Option: + def __init__( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ): + self._short_opts = [] + self._long_opts = [] + self.prefixes: t.Set[str] = set() + + for opt in opts: + prefix, value = split_opt(opt) + if not prefix: + raise ValueError(f"Invalid start character for option ({opt})") + self.prefixes.add(prefix[0]) + if len(prefix) == 1 and len(value) == 1: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + self.prefixes.add(prefix) + + if action is None: + action = "store" + + self.dest = dest + self.action = action + self.nargs = nargs + self.const = const + self.obj = obj + + @property + def takes_value(self) -> bool: + return self.action in ("store", "append") + + def process(self, value: t.Any, state: "ParsingState") -> None: + if self.action == "store": + state.opts[self.dest] = value # type: ignore + elif self.action == "store_const": + state.opts[self.dest] = self.const # type: ignore + elif self.action == "append": + state.opts.setdefault(self.dest, []).append(value) # type: ignore + elif self.action == "append_const": + state.opts.setdefault(self.dest, []).append(self.const) # type: ignore + elif self.action == "count": + state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore + else: + raise ValueError(f"unknown action '{self.action}'") + state.order.append(self.obj) + + +class Argument: + def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1): + self.dest = dest + self.nargs = nargs + self.obj = obj + + def process( + self, + value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]], + state: "ParsingState", + ) -> None: + if self.nargs > 1: + assert value is not None + holes = sum(1 for x in value if x is None) + if holes == len(value): + value = None + elif holes != 0: + raise BadArgumentUsage( + _("Argument {name!r} takes {nargs} values.").format( + name=self.dest, nargs=self.nargs + ) + ) + + if self.nargs == -1 and self.obj.envvar is not None and value == (): + # Replace empty tuple with None so that a value from the + # environment may be tried. + value = None + + state.opts[self.dest] = value # type: ignore + state.order.append(self.obj) + + +class ParsingState: + def __init__(self, rargs: t.List[str]) -> None: + self.opts: t.Dict[str, t.Any] = {} + self.largs: t.List[str] = [] + self.rargs = rargs + self.order: t.List["CoreParameter"] = [] + + +class OptionParser: + """The option parser is an internal class that is ultimately used to + parse options and arguments. It's modelled after optparse and brings + a similar but vastly simplified API. It should generally not be used + directly as the high level Click classes wrap it for you. + + It's not nearly as extensible as optparse or argparse as it does not + implement features that are implemented on a higher level (such as + types or defaults). + + :param ctx: optionally the :class:`~click.Context` where this parser + should go with. + """ + + def __init__(self, ctx: t.Optional["Context"] = None) -> None: + #: The :class:`~click.Context` for this parser. This might be + #: `None` for some advanced use cases. + self.ctx = ctx + #: This controls how the parser deals with interspersed arguments. + #: If this is set to `False`, the parser will stop on the first + #: non-option. Click uses this to implement nested subcommands + #: safely. 
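+        #: E.g. for ``tool run --fast`` with this set to `False`,
+        #: parsing stops at ``run`` and ``--fast`` is left for the
+        #: ``run`` subcommand to parse.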
+ self.allow_interspersed_args: bool = True + #: This tells the parser how to deal with unknown options. By + #: default it will error out (which is sensible), but there is a + #: second mode where it will ignore it and continue processing + #: after shifting all the unknown options into the resulting args. + self.ignore_unknown_options: bool = False + + if ctx is not None: + self.allow_interspersed_args = ctx.allow_interspersed_args + self.ignore_unknown_options = ctx.ignore_unknown_options + + self._short_opt: t.Dict[str, Option] = {} + self._long_opt: t.Dict[str, Option] = {} + self._opt_prefixes = {"-", "--"} + self._args: t.List[Argument] = [] + + def add_option( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ) -> None: + """Adds a new option named `dest` to the parser. The destination + is not inferred (unlike with optparse) and needs to be explicitly + provided. Action can be any of ``store``, ``store_const``, + ``append``, ``append_const`` or ``count``. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + opts = [normalize_opt(opt, self.ctx) for opt in opts] + option = Option(obj, opts, dest, action=action, nargs=nargs, const=const) + self._opt_prefixes.update(option.prefixes) + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + def add_argument( + self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1 + ) -> None: + """Adds a positional argument named `dest` to the parser. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + self._args.append(Argument(obj, dest=dest, nargs=nargs)) + + def parse_args( + self, args: t.List[str] + ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]: + """Parses positional arguments and returns ``(values, args, order)`` + for the parsed options and arguments as well as the leftover + arguments if there are any. The order is a list of objects as they + appear on the command line. If arguments appear multiple times they + will be memorized multiple times as well. + """ + state = ParsingState(args) + try: + self._process_args_for_options(state) + self._process_args_for_args(state) + except UsageError: + if self.ctx is None or not self.ctx.resilient_parsing: + raise + return state.opts, state.largs, state.order + + def _process_args_for_args(self, state: ParsingState) -> None: + pargs, args = _unpack_args( + state.largs + state.rargs, [x.nargs for x in self._args] + ) + + for idx, arg in enumerate(self._args): + arg.process(pargs[idx], state) + + state.largs = args + state.rargs = [] + + def _process_args_for_options(self, state: ParsingState) -> None: + while state.rargs: + arg = state.rargs.pop(0) + arglen = len(arg) + # Double dashes always handled explicitly regardless of what + # prefixes are valid. + if arg == "--": + return + elif arg[:1] in self._opt_prefixes and arglen > 1: + self._process_opts(arg, state) + elif self.allow_interspersed_args: + state.largs.append(arg) + else: + state.rargs.insert(0, arg) + return + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). 
+ # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt( + self, opt: str, explicit_value: t.Optional[str], state: ParsingState + ) -> None: + if opt not in self._long_opt: + from difflib import get_close_matches + + possibilities = get_close_matches(opt, self._long_opt) + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) + + option = self._long_opt[opt] + if option.takes_value: + # At this point it's safe to modify rargs by injecting the + # explicit value, because no exception is raised in this + # branch. This means that the inserted value will be fully + # consumed. + if explicit_value is not None: + state.rargs.insert(0, explicit_value) + + value = self._get_value_from_state(opt, option, state) + + elif explicit_value is not None: + raise BadOptionUsage( + opt, _("Option {name!r} does not take a value.").format(name=opt) + ) + + else: + value = None + + option.process(value, state) + + def _match_short_opt(self, arg: str, state: ParsingState) -> None: + stop = False + i = 1 + prefix = arg[0] + unknown_options = [] + + for ch in arg[1:]: + opt = normalize_opt(f"{prefix}{ch}", self.ctx) + option = self._short_opt.get(opt) + i += 1 + + if not option: + if self.ignore_unknown_options: + unknown_options.append(ch) + continue + raise NoSuchOption(opt, ctx=self.ctx) + if option.takes_value: + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. + if i < len(arg): + state.rargs.insert(0, arg[i:]) + stop = True + + value = self._get_value_from_state(opt, option, state) + + else: + value = None + + option.process(value, state) + + if stop: + break + + # If we got any unknown options we recombine the string of the + # remaining options and re-attach the prefix, then report that + # to the state as new larg. This way there is basic combinatorics + # that can be achieved while still ignoring unknown arguments. + if self.ignore_unknown_options and unknown_options: + state.largs.append(f"{prefix}{''.join(unknown_options)}") + + def _get_value_from_state( + self, option_name: str, option: Option, state: ParsingState + ) -> t.Any: + nargs = option.nargs + + if len(state.rargs) < nargs: + if option.obj._flag_needs_value: + # Option allows omitting the value. + value = _flag_needs_value + else: + raise BadOptionUsage( + option_name, + ngettext( + "Option {name!r} requires an argument.", + "Option {name!r} requires {nargs} arguments.", + nargs, + ).format(name=option_name, nargs=nargs), + ) + elif nargs == 1: + next_rarg = state.rargs[0] + + if ( + option.obj._flag_needs_value + and isinstance(next_rarg, str) + and next_rarg[:1] in self._opt_prefixes + and len(next_rarg) > 1 + ): + # The next arg looks like the start of an option, don't + # use it as the value if omitting the value is allowed. 
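+                # For example, with ``--name`` declared so that its value
+                # may be omitted, ``--name --other`` leaves ``--other`` in
+                # rargs to be parsed as an option, and ``--name`` falls
+                # back to its flag value rather than consuming it.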
+ value = _flag_needs_value + else: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + return value + + def _process_opts(self, arg: str, state: ParsingState) -> None: + explicit_value = None + # Long option handling happens in two parts. The first part is + # supporting explicitly attached values. In any case, we will try + # to long match the option first. + if "=" in arg: + long_opt, explicit_value = arg.split("=", 1) + else: + long_opt = arg + norm_long_opt = normalize_opt(long_opt, self.ctx) + + # At this point we will match the (assumed) long option through + # the long option matching code. Note that this allows options + # like "-foo" to be matched as long options. + try: + self._match_long_opt(norm_long_opt, explicit_value, state) + except NoSuchOption: + # At this point the long option matching failed, and we need + # to try with short options. However there is a special rule + # which says, that if we have a two character options prefix + # (applies to "--foo" for instance), we do not dispatch to the + # short option code and will instead raise the no option + # error. + if arg[:2] not in self._opt_prefixes: + self._match_short_opt(arg, state) + return + + if not self.ignore_unknown_options: + raise + + state.largs.append(arg) diff --git a/.venv/lib/python3.12/site-packages/click/py.typed b/.venv/lib/python3.12/site-packages/click/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/.venv/lib/python3.12/site-packages/click/shell_completion.py b/.venv/lib/python3.12/site-packages/click/shell_completion.py new file mode 100644 index 00000000..dc9e00b9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/shell_completion.py @@ -0,0 +1,596 @@ +import os +import re +import typing as t +from gettext import gettext as _ + +from .core import Argument +from .core import BaseCommand +from .core import Context +from .core import MultiCommand +from .core import Option +from .core import Parameter +from .core import ParameterSource +from .parser import split_arg_string +from .utils import echo + + +def shell_complete( + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + instruction: str, +) -> int: + """Perform shell completion for the given CLI program. + + :param cli: Command being called. + :param ctx_args: Extra arguments to pass to + ``cli.make_context``. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + :param instruction: Value of ``complete_var`` with the completion + instruction and shell, in the form ``instruction_shell``. + :return: Status code to exit with. + """ + shell, _, instruction = instruction.partition("_") + comp_cls = get_completion_class(shell) + + if comp_cls is None: + return 1 + + comp = comp_cls(cli, ctx_args, prog_name, complete_var) + + if instruction == "source": + echo(comp.source()) + return 0 + + if instruction == "complete": + echo(comp.complete()) + return 0 + + return 1 + + +class CompletionItem: + """Represents a completion value and metadata about the value. The + default metadata is ``type`` to indicate special shell handling, + and ``help`` if a shell supports showing a help string next to the + value. + + Arbitrary parameters can be passed when creating the object, and + accessed using ``item.attr``. If an attribute wasn't passed, + accessing it returns ``None``. + + :param value: The completion suggestion. 
+ :param type: Tells the shell script to provide special completion + support for the type. Click uses ``"dir"`` and ``"file"``. + :param help: String shown next to the value if supported. + :param kwargs: Arbitrary metadata. The built-in implementations + don't use this, but custom type completions paired with custom + shell support could use it. + """ + + __slots__ = ("value", "type", "help", "_info") + + def __init__( + self, + value: t.Any, + type: str = "plain", + help: t.Optional[str] = None, + **kwargs: t.Any, + ) -> None: + self.value: t.Any = value + self.type: str = type + self.help: t.Optional[str] = help + self._info = kwargs + + def __getattr__(self, name: str) -> t.Any: + return self._info.get(name) + + +# Only Bash >= 4.4 has the nosort option. +_SOURCE_BASH = """\ +%(complete_func)s() { + local IFS=$'\\n' + local response + + response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ +%(complete_var)s=bash_complete $1) + + for completion in $response; do + IFS=',' read type value <<< "$completion" + + if [[ $type == 'dir' ]]; then + COMPREPLY=() + compopt -o dirnames + elif [[ $type == 'file' ]]; then + COMPREPLY=() + compopt -o default + elif [[ $type == 'plain' ]]; then + COMPREPLY+=($value) + fi + done + + return 0 +} + +%(complete_func)s_setup() { + complete -o nosort -F %(complete_func)s %(prog_name)s +} + +%(complete_func)s_setup; +""" + +_SOURCE_ZSH = """\ +#compdef %(prog_name)s + +%(complete_func)s() { + local -a completions + local -a completions_with_descriptions + local -a response + (( ! $+commands[%(prog_name)s] )) && return 1 + + response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ +%(complete_var)s=zsh_complete %(prog_name)s)}") + + for type key descr in ${response}; do + if [[ "$type" == "plain" ]]; then + if [[ "$descr" == "_" ]]; then + completions+=("$key") + else + completions_with_descriptions+=("$key":"$descr") + fi + elif [[ "$type" == "dir" ]]; then + _path_files -/ + elif [[ "$type" == "file" ]]; then + _path_files -f + fi + done + + if [ -n "$completions_with_descriptions" ]; then + _describe -V unsorted completions_with_descriptions -U + fi + + if [ -n "$completions" ]; then + compadd -U -V unsorted -a completions + fi +} + +if [[ $zsh_eval_context[-1] == loadautofunc ]]; then + # autoload from fpath, call function directly + %(complete_func)s "$@" +else + # eval/source/. command, register function for later + compdef %(complete_func)s %(prog_name)s +fi +""" + +_SOURCE_FISH = """\ +function %(complete_func)s; + set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ +COMP_CWORD=(commandline -t) %(prog_name)s); + + for completion in $response; + set -l metadata (string split "," $completion); + + if test $metadata[1] = "dir"; + __fish_complete_directories $metadata[2]; + else if test $metadata[1] = "file"; + __fish_complete_path $metadata[2]; + else if test $metadata[1] = "plain"; + echo $metadata[2]; + end; + end; +end; + +complete --no-files --command %(prog_name)s --arguments \ +"(%(complete_func)s)"; +""" + + +class ShellComplete: + """Base class for providing shell completion support. A subclass for + a given shell will override attributes and methods to implement the + completion instructions (``source`` and ``complete``). + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + + .. 
versionadded:: 8.0 + """ + + name: t.ClassVar[str] + """Name to register the shell as with :func:`add_completion_class`. + This is used in completion instructions (``{name}_source`` and + ``{name}_complete``). + """ + + source_template: t.ClassVar[str] + """Completion script template formatted by :meth:`source`. This must + be provided by subclasses. + """ + + def __init__( + self, + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + ) -> None: + self.cli = cli + self.ctx_args = ctx_args + self.prog_name = prog_name + self.complete_var = complete_var + + @property + def func_name(self) -> str: + """The name of the shell function defined by the completion + script. + """ + safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII) + return f"_{safe_name}_completion" + + def source_vars(self) -> t.Dict[str, t.Any]: + """Vars for formatting :attr:`source_template`. + + By default this provides ``complete_func``, ``complete_var``, + and ``prog_name``. + """ + return { + "complete_func": self.func_name, + "complete_var": self.complete_var, + "prog_name": self.prog_name, + } + + def source(self) -> str: + """Produce the shell script that defines the completion + function. By default this ``%``-style formats + :attr:`source_template` with the dict returned by + :meth:`source_vars`. + """ + return self.source_template % self.source_vars() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + """Use the env vars defined by the shell script to return a + tuple of ``args, incomplete``. This must be implemented by + subclasses. + """ + raise NotImplementedError + + def get_completions( + self, args: t.List[str], incomplete: str + ) -> t.List[CompletionItem]: + """Determine the context and last complete command or parameter + from the complete args. Call that object's ``shell_complete`` + method to get the completions for the incomplete value. + + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) + obj, incomplete = _resolve_incomplete(ctx, args, incomplete) + return obj.shell_complete(ctx, incomplete) + + def format_completion(self, item: CompletionItem) -> str: + """Format a completion item into the form recognized by the + shell script. This must be implemented by subclasses. + + :param item: Completion item to format. + """ + raise NotImplementedError + + def complete(self) -> str: + """Produce the completion data to send back to the shell. + + By default this calls :meth:`get_completion_args`, gets the + completions, then calls :meth:`format_completion` for each + completion. + """ + args, incomplete = self.get_completion_args() + completions = self.get_completions(args, incomplete) + out = [self.format_completion(item) for item in completions] + return "\n".join(out) + + +class BashComplete(ShellComplete): + """Shell completion for Bash.""" + + name = "bash" + source_template = _SOURCE_BASH + + @staticmethod + def _check_version() -> None: + import subprocess + + output = subprocess.run( + ["bash", "-c", 'echo "${BASH_VERSION}"'], stdout=subprocess.PIPE + ) + match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) + + if match is not None: + major, minor = match.groups() + + if major < "4" or major == "4" and minor < "4": + echo( + _( + "Shell completion is not supported for Bash" + " versions older than 4.4." 
+ ), + err=True, + ) + else: + echo( + _("Couldn't detect Bash version, shell completion is not supported."), + err=True, + ) + + def source(self) -> str: + self._check_version() + return super().source() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type},{item.value}" + + +class ZshComplete(ShellComplete): + """Shell completion for Zsh.""" + + name = "zsh" + source_template = _SOURCE_ZSH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" + + +class FishComplete(ShellComplete): + """Shell completion for Fish.""" + + name = "fish" + source_template = _SOURCE_FISH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + incomplete = os.environ["COMP_CWORD"] + args = cwords[1:] + + # Fish stores the partial word in both COMP_WORDS and + # COMP_CWORD, remove it from complete args. + if incomplete and args and args[-1] == incomplete: + args.pop() + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + if item.help: + return f"{item.type},{item.value}\t{item.help}" + + return f"{item.type},{item.value}" + + +ShellCompleteType = t.TypeVar("ShellCompleteType", bound=t.Type[ShellComplete]) + + +_available_shells: t.Dict[str, t.Type[ShellComplete]] = { + "bash": BashComplete, + "fish": FishComplete, + "zsh": ZshComplete, +} + + +def add_completion_class( + cls: ShellCompleteType, name: t.Optional[str] = None +) -> ShellCompleteType: + """Register a :class:`ShellComplete` subclass under the given name. + The name will be provided by the completion instruction environment + variable during completion. + + :param cls: The completion class that will handle completion for the + shell. + :param name: Name to register the class under. Defaults to the + class's ``name`` attribute. + """ + if name is None: + name = cls.name + + _available_shells[name] = cls + + return cls + + +def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]: + """Look up a registered :class:`ShellComplete` subclass by the name + provided by the completion instruction environment variable. If the + name isn't registered, returns ``None``. + + :param shell: Name the class is registered under. + """ + return _available_shells.get(shell) + + +def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: + """Determine if the given parameter is an argument that can still + accept values. + + :param ctx: Invocation context for the command represented by the + parsed complete args. + :param param: Argument object being checked. + """ + if not isinstance(param, Argument): + return False + + assert param.name is not None + # Will be None if expose_value is False. 
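+    # An argument can still accept values when it is unbounded
+    # (``nargs=-1``), when its current value did not come from the
+    # command line (a default or env value can still be overridden), or
+    # when it is a bounded multi-value argument that is not yet full.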
+ value = ctx.params.get(param.name) + return ( + param.nargs == -1 + or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE + or ( + param.nargs > 1 + and isinstance(value, (tuple, list)) + and len(value) < param.nargs + ) + ) + + +def _start_of_option(ctx: Context, value: str) -> bool: + """Check if the value looks like the start of an option.""" + if not value: + return False + + c = value[0] + return c in ctx._opt_prefixes + + +def _is_incomplete_option(ctx: Context, args: t.List[str], param: Parameter) -> bool: + """Determine if the given parameter is an option that needs a value. + + :param args: List of complete args before the incomplete value. + :param param: Option object being checked. + """ + if not isinstance(param, Option): + return False + + if param.is_flag or param.count: + return False + + last_option = None + + for index, arg in enumerate(reversed(args)): + if index + 1 > param.nargs: + break + + if _start_of_option(ctx, arg): + last_option = arg + + return last_option is not None and last_option in param.opts + + +def _resolve_context( + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + args: t.List[str], +) -> Context: + """Produce the context hierarchy starting with the command and + traversing the complete arguments. This only follows the commands, + it doesn't trigger input prompts or callbacks. + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param args: List of complete args before the incomplete value. + """ + ctx_args["resilient_parsing"] = True + ctx = cli.make_context(prog_name, args.copy(), **ctx_args) + args = ctx.protected_args + ctx.args + + while args: + command = ctx.command + + if isinstance(command, MultiCommand): + if not command.chain: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True) + args = ctx.protected_args + ctx.args + else: + sub_ctx = ctx + + while args: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + sub_ctx = cmd.make_context( + name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + resilient_parsing=True, + ) + args = sub_ctx.args + + ctx = sub_ctx + args = [*sub_ctx.protected_args, *sub_ctx.args] + else: + break + + return ctx + + +def _resolve_incomplete( + ctx: Context, args: t.List[str], incomplete: str +) -> t.Tuple[t.Union[BaseCommand, Parameter], str]: + """Find the Click object that will handle the completion of the + incomplete value. Return the object and the incomplete value. + + :param ctx: Invocation context for the command represented by + the parsed complete args. + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + # Different shells treat an "=" between a long option name and + # value differently. Might keep the value joined, return the "=" + # as a separate item, or return the split name and value. Always + # split and discard the "=" to make completion easier. + if incomplete == "=": + incomplete = "" + elif "=" in incomplete and _start_of_option(ctx, incomplete): + name, _, incomplete = incomplete.partition("=") + args.append(name) + + # The "--" marker tells Click to stop treating values as options + # even if they start with the option character. 
If it hasn't been + # given and the incomplete arg looks like an option, the current + # command will provide option name completions. + if "--" not in args and _start_of_option(ctx, incomplete): + return ctx.command, incomplete + + params = ctx.command.get_params(ctx) + + # If the last complete arg is an option name with an incomplete + # value, the option will provide value completions. + for param in params: + if _is_incomplete_option(ctx, args, param): + return param, incomplete + + # It's not an option name or value. The first argument without a + # parsed value will provide value completions. + for param in params: + if _is_incomplete_argument(ctx, param): + return param, incomplete + + # There were no unparsed arguments, the command may be a group that + # will provide command name completions. + return ctx.command, incomplete diff --git a/.venv/lib/python3.12/site-packages/click/termui.py b/.venv/lib/python3.12/site-packages/click/termui.py new file mode 100644 index 00000000..db7a4b28 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/termui.py @@ -0,0 +1,784 @@ +import inspect +import io +import itertools +import sys +import typing as t +from gettext import gettext as _ + +from ._compat import isatty +from ._compat import strip_ansi +from .exceptions import Abort +from .exceptions import UsageError +from .globals import resolve_color_default +from .types import Choice +from .types import convert_type +from .types import ParamType +from .utils import echo +from .utils import LazyFile + +if t.TYPE_CHECKING: + from ._termui_impl import ProgressBar + +V = t.TypeVar("V") + +# The prompt functions to use. The doc tools currently override these +# functions to customize how they work. +visible_prompt_func: t.Callable[[str], str] = input + +_ansi_colors = { + "black": 30, + "red": 31, + "green": 32, + "yellow": 33, + "blue": 34, + "magenta": 35, + "cyan": 36, + "white": 37, + "reset": 39, + "bright_black": 90, + "bright_red": 91, + "bright_green": 92, + "bright_yellow": 93, + "bright_blue": 94, + "bright_magenta": 95, + "bright_cyan": 96, + "bright_white": 97, +} +_ansi_reset_all = "\033[0m" + + +def hidden_prompt_func(prompt: str) -> str: + import getpass + + return getpass.getpass(prompt) + + +def _build_prompt( + text: str, + suffix: str, + show_default: bool = False, + default: t.Optional[t.Any] = None, + show_choices: bool = True, + type: t.Optional[ParamType] = None, +) -> str: + prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += f" ({', '.join(map(str, type.choices))})" + if default is not None and show_default: + prompt = f"{prompt} [{_format_default(default)}]" + return f"{prompt}{suffix}" + + +def _format_default(default: t.Any) -> t.Any: + if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): + return default.name + + return default + + +def prompt( + text: str, + default: t.Optional[t.Any] = None, + hide_input: bool = False, + confirmation_prompt: t.Union[bool, str] = False, + type: t.Optional[t.Union[ParamType, t.Any]] = None, + value_proc: t.Optional[t.Callable[[str], t.Any]] = None, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, + show_choices: bool = True, +) -> t.Any: + """Prompts a user for input. This is a convenience function that can + be used to prompt a user for input later. + + If the user aborts the input by sending an interrupt signal, this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the text to show for the prompt. 
+ :param default: the default value to use if no input happens. If this + is not given it will prompt until it's aborted. + :param hide_input: if this is set to true then the input value will + be hidden. + :param confirmation_prompt: Prompt a second time to confirm the + value. Can be set to a string instead of ``True`` to customize + the message. + :param type: the type to use to check the value against. + :param value_proc: if this parameter is provided it's a function that + is invoked instead of the type conversion to + convert a value. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + :param show_choices: Show or hide choices if the passed type is a Choice. + For example if type is a Choice of either day or week, + show_choices is true and text is "Group by" then the + prompt will be "Group by (day, week): ". + + .. versionadded:: 8.0 + ``confirmation_prompt`` can be a custom string. + + .. versionadded:: 7.0 + Added the ``show_choices`` parameter. + + .. versionadded:: 6.0 + Added unicode support for cmd.exe on Windows. + + .. versionadded:: 4.0 + Added the `err` parameter. + + """ + + def prompt_func(text: str) -> str: + f = hidden_prompt_func if hide_input else visible_prompt_func + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(text.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. + return f(" ") + except (KeyboardInterrupt, EOFError): + # getpass doesn't print a newline if the user aborts input with ^C. + # Allegedly this behavior is inherited from getpass(3). + # A doc bug has been filed at https://bugs.python.org/issue24711 + if hide_input: + echo(None, err=err) + raise Abort() from None + + if value_proc is None: + value_proc = convert_type(type, default) + + prompt = _build_prompt( + text, prompt_suffix, show_default, default, show_choices, type + ) + + if confirmation_prompt: + if confirmation_prompt is True: + confirmation_prompt = _("Repeat for confirmation") + + confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) + + while True: + while True: + value = prompt_func(prompt) + if value: + break + elif default is not None: + value = default + break + try: + result = value_proc(value) + except UsageError as e: + if hide_input: + echo(_("Error: The value you entered was invalid."), err=err) + else: + echo(_("Error: {e.message}").format(e=e), err=err) # noqa: B306 + continue + if not confirmation_prompt: + return result + while True: + value2 = prompt_func(confirmation_prompt) + is_empty = not value and not value2 + if value2 or is_empty: + break + if value == value2: + return result + echo(_("Error: The two entered values do not match."), err=err) + + +def confirm( + text: str, + default: t.Optional[bool] = False, + abort: bool = False, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, +) -> bool: + """Prompts for confirmation (yes/no question). + + If the user aborts the input by sending a interrupt signal this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the question to ask. + :param default: The default value to use when no input is given. If + ``None``, repeat until input is given. 
+ :param abort: if this is set to `True` a negative answer aborts the + exception by raising :exc:`Abort`. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + + .. versionchanged:: 8.0 + Repeat until input is given if ``default`` is ``None``. + + .. versionadded:: 4.0 + Added the ``err`` parameter. + """ + prompt = _build_prompt( + text, + prompt_suffix, + show_default, + "y/n" if default is None else ("Y/n" if default else "y/N"), + ) + + while True: + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(prompt.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. + value = visible_prompt_func(" ").lower().strip() + except (KeyboardInterrupt, EOFError): + raise Abort() from None + if value in ("y", "yes"): + rv = True + elif value in ("n", "no"): + rv = False + elif default is not None and value == "": + rv = default + else: + echo(_("Error: invalid input"), err=err) + continue + break + if abort and not rv: + raise Abort() + return rv + + +def echo_via_pager( + text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str], + color: t.Optional[bool] = None, +) -> None: + """This function takes a text and shows it via an environment specific + pager on stdout. + + .. versionchanged:: 3.0 + Added the `color` flag. + + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. + :param color: controls if the pager supports ANSI colors or not. The + default is autodetection. + """ + color = resolve_color_default(color) + + if inspect.isgeneratorfunction(text_or_generator): + i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)() + elif isinstance(text_or_generator, str): + i = [text_or_generator] + else: + i = iter(t.cast(t.Iterable[str], text_or_generator)) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, str) else str(el) for el in i) + + from ._termui_impl import pager + + return pager(itertools.chain(text_generator, "\n"), color) + + +def progressbar( + iterable: t.Optional[t.Iterable[V]] = None, + length: t.Optional[int] = None, + label: t.Optional[str] = None, + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, +) -> "ProgressBar[V]": + """This function creates an iterable context manager that can be used + to iterate over something while showing a progress bar. It will + either iterate over the `iterable` or `length` items (that are counted + up). While iteration happens, this function will print a rendered + progress bar to the given `file` (defaults to stdout) and will attempt + to calculate remaining time and more. By default, this progress bar + will not be rendered if the file is not a terminal. + + The context manager creates the progress bar. When the context + manager is entered the progress bar is already created. 
With every + iteration over the progress bar, the iterable passed to the bar is + advanced and the bar is updated. When the context manager exits, + a newline is printed and the progress bar is finalized on screen. + + Note: The progress bar is currently designed for use cases where the + total progress can be expected to take at least several seconds. + Because of this, the ProgressBar class object won't display + progress that is considered too fast, and progress where the time + between steps is less than a second. + + No printing must happen or the progress bar will be unintentionally + destroyed. + + Example usage:: + + with progressbar(items) as bar: + for item in bar: + do_something_with(item) + + Alternatively, if no iterable is specified, one can manually update the + progress bar through the `update()` method instead of directly + iterating over the progress bar. The update method accepts the number + of steps to increment the bar with:: + + with progressbar(length=chunks.total_bytes) as bar: + for chunk in chunks: + process_chunk(chunk) + bar.update(chunks.bytes) + + The ``update()`` method also takes an optional value specifying the + ``current_item`` at the new position. This is useful when used + together with ``item_show_func`` to customize the output for each + manual step:: + + with click.progressbar( + length=total_size, + label='Unzipping archive', + item_show_func=lambda a: a.filename + ) as bar: + for archive in zip_file: + archive.extract() + bar.update(archive.size, archive) + + :param iterable: an iterable to iterate over. If not provided the length + is required. + :param length: the number of items to iterate over. By default the + progressbar will attempt to ask the iterator about its + length, which might or might not work. If an iterable is + also provided this parameter can be used to override the + length. If an iterable is not provided the progress bar + will iterate over a range of that length. + :param label: the label to show next to the progress bar. + :param show_eta: enables or disables the estimated time display. This is + automatically disabled if the length cannot be + determined. + :param show_percent: enables or disables the percentage display. The + default is `True` if the iterable has a length or + `False` if not. + :param show_pos: enables or disables the absolute position display. The + default is `False`. + :param item_show_func: A function called with the current item which + can return a string to show next to the progress bar. If the + function returns ``None`` nothing is shown. The current item can + be ``None``, such as when entering and exiting the bar. + :param fill_char: the character to use to show the filled part of the + progress bar. + :param empty_char: the character to use to show the non-filled part of + the progress bar. + :param bar_template: the format string to use as template for the bar. + The parameters in it are ``label`` for the label, + ``bar`` for the progress bar and ``info`` for the + info section. + :param info_sep: the separator between multiple info items (eta etc.) + :param width: the width of the progress bar in characters, 0 means full + terminal width + :param file: The file to write to. If this is not a terminal then + only the label is printed. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are included anywhere in the progress bar output + which is not the case by default. 
+ :param update_min_steps: Render only when this many updates have + completed. This allows tuning for very fast iterators. + + .. versionchanged:: 8.0 + Output is shown even if execution time is less than 0.5 seconds. + + .. versionchanged:: 8.0 + ``item_show_func`` shows the current item, not the previous one. + + .. versionchanged:: 8.0 + Labels are echoed if the output is not a TTY. Reverts a change + in 7.0 that removed all output. + + .. versionadded:: 8.0 + Added the ``update_min_steps`` parameter. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. Added the ``update`` method to + the object. + + .. versionadded:: 2.0 + """ + from ._termui_impl import ProgressBar + + color = resolve_color_default(color) + return ProgressBar( + iterable=iterable, + length=length, + show_eta=show_eta, + show_percent=show_percent, + show_pos=show_pos, + item_show_func=item_show_func, + fill_char=fill_char, + empty_char=empty_char, + bar_template=bar_template, + info_sep=info_sep, + file=file, + label=label, + width=width, + color=color, + update_min_steps=update_min_steps, + ) + + +def clear() -> None: + """Clears the terminal screen. This will have the effect of clearing + the whole visible space of the terminal and moving the cursor to the + top left. This does not do anything if not connected to a terminal. + + .. versionadded:: 2.0 + """ + if not isatty(sys.stdout): + return + + # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor + echo("\033[2J\033[1;1H", nl=False) + + +def _interpret_color( + color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0 +) -> str: + if isinstance(color, int): + return f"{38 + offset};5;{color:d}" + + if isinstance(color, (tuple, list)): + r, g, b = color + return f"{38 + offset};2;{r:d};{g:d};{b:d}" + + return str(_ansi_colors[color] + offset) + + +def style( + text: t.Any, + fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bold: t.Optional[bool] = None, + dim: t.Optional[bool] = None, + underline: t.Optional[bool] = None, + overline: t.Optional[bool] = None, + italic: t.Optional[bool] = None, + blink: t.Optional[bool] = None, + reverse: t.Optional[bool] = None, + strikethrough: t.Optional[bool] = None, + reset: bool = True, +) -> str: + """Styles a text with ANSI styles and returns the new string. By + default the styling is self contained which means that at the end + of the string a reset code is issued. This can be prevented by + passing ``reset=False``. + + Examples:: + + click.echo(click.style('Hello World!', fg='green')) + click.echo(click.style('ATTENTION!', blink=True)) + click.echo(click.style('Some things', reverse=True, fg='cyan')) + click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) + + Supported color names: + + * ``black`` (might be a gray) + * ``red`` + * ``green`` + * ``yellow`` (might be an orange) + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` + * ``reset`` (reset the color code only) + + If the terminal supports it, color may also be specified as: + + - An integer in the interval [0, 255]. The terminal must support + 8-bit/256-color mode. + - An RGB tuple of three integers in [0, 255]. The terminal must + support 24-bit/true-color mode. 
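+
+    For instance, per the interpretation above, ``fg=196`` selects 8-bit
+    color 196 via the SGR parameters ``38;5;196``, while ``fg=(255, 0, 0)``
+    uses ``38;2;255;0;0``; background colors add 10 to the introducer
+    (``48;...``).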
+ + See https://en.wikipedia.org/wiki/ANSI_color and + https://gist.github.com/XVilka/8346728 for more information. + + :param text: the string to style with ansi codes. + :param fg: if provided this will become the foreground color. + :param bg: if provided this will become the background color. + :param bold: if provided this will enable or disable bold mode. + :param dim: if provided this will enable or disable dim mode. This is + badly supported. + :param underline: if provided this will enable or disable underline. + :param overline: if provided this will enable or disable overline. + :param italic: if provided this will enable or disable italic. + :param blink: if provided this will enable or disable blinking. + :param reverse: if provided this will enable or disable inverse + rendering (foreground becomes background and the + other way round). + :param strikethrough: if provided this will enable or disable + striking through text. + :param reset: by default a reset-all code is added at the end of the + string which means that styles do not carry over. This + can be disabled to compose styles. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. + + .. versionchanged:: 8.0 + Added support for 256 and RGB color codes. + + .. versionchanged:: 8.0 + Added the ``strikethrough``, ``italic``, and ``overline`` + parameters. + + .. versionchanged:: 7.0 + Added support for bright colors. + + .. versionadded:: 2.0 + """ + if not isinstance(text, str): + text = str(text) + + bits = [] + + if fg: + try: + bits.append(f"\033[{_interpret_color(fg)}m") + except KeyError: + raise TypeError(f"Unknown color {fg!r}") from None + + if bg: + try: + bits.append(f"\033[{_interpret_color(bg, 10)}m") + except KeyError: + raise TypeError(f"Unknown color {bg!r}") from None + + if bold is not None: + bits.append(f"\033[{1 if bold else 22}m") + if dim is not None: + bits.append(f"\033[{2 if dim else 22}m") + if underline is not None: + bits.append(f"\033[{4 if underline else 24}m") + if overline is not None: + bits.append(f"\033[{53 if overline else 55}m") + if italic is not None: + bits.append(f"\033[{3 if italic else 23}m") + if blink is not None: + bits.append(f"\033[{5 if blink else 25}m") + if reverse is not None: + bits.append(f"\033[{7 if reverse else 27}m") + if strikethrough is not None: + bits.append(f"\033[{9 if strikethrough else 29}m") + bits.append(text) + if reset: + bits.append(_ansi_reset_all) + return "".join(bits) + + +def unstyle(text: str) -> str: + """Removes ANSI styling information from a string. Usually it's not + necessary to use this function as Click's echo function will + automatically remove styling if necessary. + + .. versionadded:: 2.0 + + :param text: the text to remove style information from. + """ + return strip_ansi(text) + + +def secho( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO[t.AnyStr]] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, + **styles: t.Any, +) -> None: + """This function combines :func:`echo` and :func:`style` into one + call. As such the following two calls are the same:: + + click.secho('Hello World!', fg='green') + click.echo(click.style('Hello World!', fg='green')) + + All keyword arguments are forwarded to the underlying functions + depending on which one they go with. + + Non-string types will be converted to :class:`str`. However, + :class:`bytes` are passed directly to :meth:`echo` without applying + style. 
If you want to style bytes that represent text, call + :meth:`bytes.decode` first. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. Bytes are + passed through without style applied. + + .. versionadded:: 2.0 + """ + if message is not None and not isinstance(message, (bytes, bytearray)): + message = style(message, **styles) + + return echo(message, file=file, nl=nl, err=err, color=color) + + +def edit( + text: t.Optional[t.AnyStr] = None, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + filename: t.Optional[str] = None, +) -> t.Optional[t.AnyStr]: + r"""Edits the given text in the defined editor. If an editor is given + (should be the full path to the executable but the regular operating + system search path is used for finding the executable) it overrides + the detected editor. Optionally, some environment variables can be + used. If the editor is closed without changes, `None` is returned. In + case a file is edited directly the return value is always `None` and + `require_save` and `extension` are ignored. + + If the editor cannot be opened a :exc:`UsageError` is raised. + + Note for Windows: to simplify cross-platform usage, the newlines are + automatically converted from POSIX to Windows and vice versa. As such, + the message here will have ``\n`` as newline markers. + + :param text: the text to edit. + :param editor: optionally the editor to use. Defaults to automatic + detection. + :param env: environment variables to forward to the editor. + :param require_save: if this is true, then not saving in the editor + will make the return value become `None`. + :param extension: the extension to tell the editor about. This defaults + to `.txt` but changing this might change syntax + highlighting. + :param filename: if provided it will edit this file instead of the + provided text contents. It will not use a temporary + file as an indirection in that case. + """ + from ._termui_impl import Editor + + ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) + + if filename is None: + return ed.edit(text) + + ed.edit_file(filename) + return None + + +def launch(url: str, wait: bool = False, locate: bool = False) -> int: + """This function launches the given URL (or filename) in the default + viewer application for this file type. If this is an executable, it + might launch the executable in a new session. The return value is + the exit code of the launched application. Usually, ``0`` indicates + success. + + Examples:: + + click.launch('https://click.palletsprojects.com/') + click.launch('/my/downloaded/file', locate=True) + + .. versionadded:: 2.0 + + :param url: URL or filename of the thing to launch. + :param wait: Wait for the program to exit before returning. This + only works if the launched program blocks. In particular, + ``xdg-open`` on Linux does not block. + :param locate: if this is set to `True` then instead of launching the + application associated with the URL it will attempt to + launch a file manager with the file located. This + might have weird effects if the URL does not point to + the filesystem. + """ + from ._termui_impl import open_url + + return open_url(url, wait=wait, locate=locate) + + +# If this is provided, getchar() calls into this instead. This is used +# for unittesting purposes. 
+_getchar: t.Optional[t.Callable[[bool], str]] = None + + +def getchar(echo: bool = False) -> str: + """Fetches a single character from the terminal and returns it. This + will always return a unicode character and under certain rare + circumstances this might return more than one character. The + situations which more than one character is returned is when for + whatever reason multiple characters end up in the terminal buffer or + standard input was not actually a terminal. + + Note that this will always read from the terminal, even if something + is piped into the standard input. + + Note for Windows: in rare cases when typing non-ASCII characters, this + function might wait for a second character and then return both at once. + This is because certain Unicode characters look like special-key markers. + + .. versionadded:: 2.0 + + :param echo: if set to `True`, the character read will also show up on + the terminal. The default is to not show it. + """ + global _getchar + + if _getchar is None: + from ._termui_impl import getchar as f + + _getchar = f + + return _getchar(echo) + + +def raw_terminal() -> t.ContextManager[int]: + from ._termui_impl import raw_terminal as f + + return f() + + +def pause(info: t.Optional[str] = None, err: bool = False) -> None: + """This command stops execution and waits for the user to press any + key to continue. This is similar to the Windows batch "pause" + command. If the program is not run through a terminal, this command + will instead do nothing. + + .. versionadded:: 2.0 + + .. versionadded:: 4.0 + Added the `err` parameter. + + :param info: The message to print before pausing. Defaults to + ``"Press any key to continue..."``. + :param err: if set to message goes to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + if not isatty(sys.stdin) or not isatty(sys.stdout): + return + + if info is None: + info = _("Press any key to continue...") + + try: + if info: + echo(info, nl=False, err=err) + try: + getchar() + except (KeyboardInterrupt, EOFError): + pass + finally: + if info: + echo(err=err) diff --git a/.venv/lib/python3.12/site-packages/click/testing.py b/.venv/lib/python3.12/site-packages/click/testing.py new file mode 100644 index 00000000..e0df0d2a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/testing.py @@ -0,0 +1,479 @@ +import contextlib +import io +import os +import shlex +import shutil +import sys +import tempfile +import typing as t +from types import TracebackType + +from . import formatting +from . import termui +from . 
import utils +from ._compat import _find_binary_reader + +if t.TYPE_CHECKING: + from .core import BaseCommand + + +class EchoingStdin: + def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: + self._input = input + self._output = output + self._paused = False + + def __getattr__(self, x: str) -> t.Any: + return getattr(self._input, x) + + def _echo(self, rv: bytes) -> bytes: + if not self._paused: + self._output.write(rv) + + return rv + + def read(self, n: int = -1) -> bytes: + return self._echo(self._input.read(n)) + + def read1(self, n: int = -1) -> bytes: + return self._echo(self._input.read1(n)) # type: ignore + + def readline(self, n: int = -1) -> bytes: + return self._echo(self._input.readline(n)) + + def readlines(self) -> t.List[bytes]: + return [self._echo(x) for x in self._input.readlines()] + + def __iter__(self) -> t.Iterator[bytes]: + return iter(self._echo(x) for x in self._input) + + def __repr__(self) -> str: + return repr(self._input) + + +@contextlib.contextmanager +def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]: + if stream is None: + yield + else: + stream._paused = True + yield + stream._paused = False + + +class _NamedTextIOWrapper(io.TextIOWrapper): + def __init__( + self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any + ) -> None: + super().__init__(buffer, **kwargs) + self._name = name + self._mode = mode + + @property + def name(self) -> str: + return self._name + + @property + def mode(self) -> str: + return self._mode + + +def make_input_stream( + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]], charset: str +) -> t.BinaryIO: + # Is already an input stream. + if hasattr(input, "read"): + rv = _find_binary_reader(t.cast(t.IO[t.Any], input)) + + if rv is not None: + return rv + + raise TypeError("Could not find binary reader for input stream.") + + if input is None: + input = b"" + elif isinstance(input, str): + input = input.encode(charset) + + return io.BytesIO(input) + + +class Result: + """Holds the captured result of an invoked CLI script.""" + + def __init__( + self, + runner: "CliRunner", + stdout_bytes: bytes, + stderr_bytes: t.Optional[bytes], + return_value: t.Any, + exit_code: int, + exception: t.Optional[BaseException], + exc_info: t.Optional[ + t.Tuple[t.Type[BaseException], BaseException, TracebackType] + ] = None, + ): + #: The runner that created the result + self.runner = runner + #: The standard output as bytes. + self.stdout_bytes = stdout_bytes + #: The standard error as bytes, or None if not available + self.stderr_bytes = stderr_bytes + #: The value returned from the invoked command. + #: + #: .. versionadded:: 8.0 + self.return_value = return_value + #: The exit code as integer. + self.exit_code = exit_code + #: The exception that happened if one did. 
+ self.exception = exception + #: The traceback + self.exc_info = exc_info + + @property + def output(self) -> str: + """The (standard) output as unicode string.""" + return self.stdout + + @property + def stdout(self) -> str: + """The standard output as unicode string.""" + return self.stdout_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + @property + def stderr(self) -> str: + """The standard error as unicode string.""" + if self.stderr_bytes is None: + raise ValueError("stderr not separately captured") + return self.stderr_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + def __repr__(self) -> str: + exc_str = repr(self.exception) if self.exception else "okay" + return f"<{type(self).__name__} {exc_str}>" + + +class CliRunner: + """The CLI runner provides functionality to invoke a Click command line + script for unittesting purposes in a isolated environment. This only + works in single-threaded systems without any concurrency as it changes the + global interpreter state. + + :param charset: the character set for the input and output data. + :param env: a dictionary with environment variables for overriding. + :param echo_stdin: if this is set to `True`, then reading from stdin writes + to stdout. This is useful for showing examples in + some circumstances. Note that regular prompts + will automatically echo the input. + :param mix_stderr: if this is set to `False`, then stdout and stderr are + preserved as independent streams. This is useful for + Unix-philosophy apps that have predictable stdout and + noisy stderr, such that each may be measured + independently + """ + + def __init__( + self, + charset: str = "utf-8", + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + echo_stdin: bool = False, + mix_stderr: bool = True, + ) -> None: + self.charset = charset + self.env: t.Mapping[str, t.Optional[str]] = env or {} + self.echo_stdin = echo_stdin + self.mix_stderr = mix_stderr + + def get_default_prog_name(self, cli: "BaseCommand") -> str: + """Given a command object it will return the default program name + for it. The default is the `name` attribute or ``"root"`` if not + set. + """ + return cli.name or "root" + + def make_env( + self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None + ) -> t.Mapping[str, t.Optional[str]]: + """Returns the environment overrides for invoking a script.""" + rv = dict(self.env) + if overrides: + rv.update(overrides) + return rv + + @contextlib.contextmanager + def isolation( + self, + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + color: bool = False, + ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]: + """A context manager that sets up the isolation for invoking of a + command line tool. This sets up stdin with the given input data + and `os.environ` with the overrides from the given dictionary. + This also rebinds some internals in Click to be mocked (like the + prompt functionality). + + This is automatically done in the :meth:`invoke` method. + + :param input: the input stream to put into sys.stdin. + :param env: the environment overrides as dictionary. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + ``stderr`` is opened with ``errors="backslashreplace"`` + instead of the default ``"strict"``. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. 
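+
+        A direct use might look like this (sketch; :meth:`invoke`
+        normally drives this for you)::
+
+            runner = CliRunner()
+            with runner.isolation(input="hello") as (out, _err):
+                line = sys.stdin.readline()  # reads "hello"
+            data = out.getvalue()  # captured stdout as bytes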
+ """ + bytes_input = make_input_stream(input, self.charset) + echo_input = None + + old_stdin = sys.stdin + old_stdout = sys.stdout + old_stderr = sys.stderr + old_forced_width = formatting.FORCED_WIDTH + formatting.FORCED_WIDTH = 80 + + env = self.make_env(env) + + bytes_output = io.BytesIO() + + if self.echo_stdin: + bytes_input = echo_input = t.cast( + t.BinaryIO, EchoingStdin(bytes_input, bytes_output) + ) + + sys.stdin = text_input = _NamedTextIOWrapper( + bytes_input, encoding=self.charset, name="", mode="r" + ) + + if self.echo_stdin: + # Force unbuffered reads, otherwise TextIOWrapper reads a + # large chunk which is echoed early. + text_input._CHUNK_SIZE = 1 # type: ignore + + sys.stdout = _NamedTextIOWrapper( + bytes_output, encoding=self.charset, name="", mode="w" + ) + + bytes_error = None + if self.mix_stderr: + sys.stderr = sys.stdout + else: + bytes_error = io.BytesIO() + sys.stderr = _NamedTextIOWrapper( + bytes_error, + encoding=self.charset, + name="", + mode="w", + errors="backslashreplace", + ) + + @_pause_echo(echo_input) # type: ignore + def visible_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(prompt or "") + val = text_input.readline().rstrip("\r\n") + sys.stdout.write(f"{val}\n") + sys.stdout.flush() + return val + + @_pause_echo(echo_input) # type: ignore + def hidden_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(f"{prompt or ''}\n") + sys.stdout.flush() + return text_input.readline().rstrip("\r\n") + + @_pause_echo(echo_input) # type: ignore + def _getchar(echo: bool) -> str: + char = sys.stdin.read(1) + + if echo: + sys.stdout.write(char) + + sys.stdout.flush() + return char + + default_color = color + + def should_strip_ansi( + stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None + ) -> bool: + if color is None: + return not default_color + return not color + + old_visible_prompt_func = termui.visible_prompt_func + old_hidden_prompt_func = termui.hidden_prompt_func + old__getchar_func = termui._getchar + old_should_strip_ansi = utils.should_strip_ansi # type: ignore + termui.visible_prompt_func = visible_input + termui.hidden_prompt_func = hidden_input + termui._getchar = _getchar + utils.should_strip_ansi = should_strip_ansi # type: ignore + + old_env = {} + try: + for key, value in env.items(): + old_env[key] = os.environ.get(key) + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + yield (bytes_output, bytes_error) + finally: + for key, value in old_env.items(): + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + sys.stdout = old_stdout + sys.stderr = old_stderr + sys.stdin = old_stdin + termui.visible_prompt_func = old_visible_prompt_func + termui.hidden_prompt_func = old_hidden_prompt_func + termui._getchar = old__getchar_func + utils.should_strip_ansi = old_should_strip_ansi # type: ignore + formatting.FORCED_WIDTH = old_forced_width + + def invoke( + self, + cli: "BaseCommand", + args: t.Optional[t.Union[str, t.Sequence[str]]] = None, + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + catch_exceptions: bool = True, + color: bool = False, + **extra: t.Any, + ) -> Result: + """Invokes a command in an isolated environment. The arguments are + forwarded directly to the command line script, the `extra` keyword + arguments are passed to the :meth:`~clickpkg.Command.main` function of + the command. 
+ + This returns a :class:`Result` object. + + :param cli: the command to invoke + :param args: the arguments to invoke. It may be given as an iterable + or a string. When given as string it will be interpreted + as a Unix shell command. More details at + :func:`shlex.split`. + :param input: the input data for `sys.stdin`. + :param env: the environment overrides. + :param catch_exceptions: Whether to catch any other exceptions than + ``SystemExit``. + :param extra: the keyword arguments to pass to :meth:`main`. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + The result object has the ``return_value`` attribute with + the value returned from the invoked command. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionchanged:: 3.0 + Added the ``catch_exceptions`` parameter. + + .. versionchanged:: 3.0 + The result object has the ``exc_info`` attribute with the + traceback if available. + """ + exc_info = None + with self.isolation(input=input, env=env, color=color) as outstreams: + return_value = None + exception: t.Optional[BaseException] = None + exit_code = 0 + + if isinstance(args, str): + args = shlex.split(args) + + try: + prog_name = extra.pop("prog_name") + except KeyError: + prog_name = self.get_default_prog_name(cli) + + try: + return_value = cli.main(args=args or (), prog_name=prog_name, **extra) + except SystemExit as e: + exc_info = sys.exc_info() + e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code) + + if e_code is None: + e_code = 0 + + if e_code != 0: + exception = e + + if not isinstance(e_code, int): + sys.stdout.write(str(e_code)) + sys.stdout.write("\n") + e_code = 1 + + exit_code = e_code + + except Exception as e: + if not catch_exceptions: + raise + exception = e + exit_code = 1 + exc_info = sys.exc_info() + finally: + sys.stdout.flush() + stdout = outstreams[0].getvalue() + if self.mix_stderr: + stderr = None + else: + stderr = outstreams[1].getvalue() # type: ignore + + return Result( + runner=self, + stdout_bytes=stdout, + stderr_bytes=stderr, + return_value=return_value, + exit_code=exit_code, + exception=exception, + exc_info=exc_info, # type: ignore + ) + + @contextlib.contextmanager + def isolated_filesystem( + self, temp_dir: t.Optional[t.Union[str, "os.PathLike[str]"]] = None + ) -> t.Iterator[str]: + """A context manager that creates a temporary directory and + changes the current working directory to it. This isolates tests + that affect the contents of the CWD to prevent them from + interfering with each other. + + :param temp_dir: Create the temporary directory under this + directory. If given, the created directory is not removed + when exiting. + + .. versionchanged:: 8.0 + Added the ``temp_dir`` parameter. 
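# Sketch of how invoke() reports failures (hypothetical ``boom`` command):
import click
from click.testing import CliRunner

@click.command()
def boom():
    raise RuntimeError("kaboom")

runner = CliRunner()
result = runner.invoke(boom)  # the exception is caught by default
assert result.exit_code == 1
assert isinstance(result.exception, RuntimeError)
# With catch_exceptions=False the RuntimeError would propagate instead:
#     runner.invoke(boom, catch_exceptions=False)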
+ """ + cwd = os.getcwd() + dt = tempfile.mkdtemp(dir=temp_dir) + os.chdir(dt) + + try: + yield dt + finally: + os.chdir(cwd) + + if temp_dir is None: + try: + shutil.rmtree(dt) + except OSError: # noqa: B014 + pass diff --git a/.venv/lib/python3.12/site-packages/click/types.py b/.venv/lib/python3.12/site-packages/click/types.py new file mode 100644 index 00000000..2b1d1797 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/types.py @@ -0,0 +1,1089 @@ +import os +import stat +import sys +import typing as t +from datetime import datetime +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import _get_argv_encoding +from ._compat import open_stream +from .exceptions import BadParameter +from .utils import format_filename +from .utils import LazyFile +from .utils import safecall + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + from .core import Parameter + from .shell_completion import CompletionItem + + +class ParamType: + """Represents the type of a parameter. Validates and converts values + from the command line or Python into the correct type. + + To implement a custom type, subclass and implement at least the + following: + + - The :attr:`name` class attribute must be set. + - Calling an instance of the type with ``None`` must return + ``None``. This is already implemented by default. + - :meth:`convert` must convert string values to the correct type. + - :meth:`convert` must accept values that are already the correct + type. + - It must be able to convert a value if the ``ctx`` and ``param`` + arguments are ``None``. This can occur when converting prompt + input. + """ + + is_composite: t.ClassVar[bool] = False + arity: t.ClassVar[int] = 1 + + #: the descriptive name of this type + name: str + + #: if a list of this type is expected and the value is pulled from a + #: string environment variable, this is what splits it up. `None` + #: means any whitespace. For all parameters the general rule is that + #: whitespace splits them up. The exception are paths and files which + #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on + #: Windows). + envvar_list_splitter: t.ClassVar[t.Optional[str]] = None + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + # The class name without the "ParamType" suffix. + param_type = type(self).__name__.partition("ParamType")[0] + param_type = param_type.partition("ParameterType")[0] + + # Custom subclasses might not remember to set a name. + if hasattr(self, "name"): + name = self.name + else: + name = param_type + + return {"param_type": param_type, "name": name} + + def __call__( + self, + value: t.Any, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> t.Any: + if value is not None: + return self.convert(value, param, ctx) + + def get_metavar(self, param: "Parameter") -> t.Optional[str]: + """Returns the metavar default for this param if it provides one.""" + + def get_missing_message(self, param: "Parameter") -> t.Optional[str]: + """Optionally might return extra information about a missing + parameter. + + .. versionadded:: 2.0 + """ + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + """Convert the value to the correct type. 
This is not called if + the value is ``None`` (the missing value). + + This must accept string values from the command line, as well as + values that are already the correct type. It may also convert + other compatible types. + + The ``param`` and ``ctx`` arguments may be ``None`` in certain + situations, such as when converting prompt input. + + If the value cannot be converted, call :meth:`fail` with a + descriptive message. + + :param value: The value to convert. + :param param: The parameter that is using this type to convert + its value. May be ``None``. + :param ctx: The current context that arrived at this value. May + be ``None``. + """ + return value + + def split_envvar_value(self, rv: str) -> t.Sequence[str]: + """Given a value from an environment variable this splits it up + into small chunks depending on the defined envvar list splitter. + + If the splitter is set to `None`, which means that whitespace splits, + then leading and trailing whitespace is ignored. Otherwise, leading + and trailing splitters usually lead to empty items being included. + """ + return (rv or "").split(self.envvar_list_splitter) + + def fail( + self, + message: str, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> "t.NoReturn": + """Helper method to fail with an invalid value message.""" + raise BadParameter(message, ctx=ctx, param=param) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a list of + :class:`~click.shell_completion.CompletionItem` objects for the + incomplete value. Most types do not provide completions, but + some do, and this allows custom types to provide custom + completions as well. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + return [] + + +class CompositeParamType(ParamType): + is_composite = True + + @property + def arity(self) -> int: # type: ignore + raise NotImplementedError() + + +class FuncParamType(ParamType): + def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: + self.name: str = func.__name__ + self.func = func + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["func"] = self.func + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self.func(value) + except ValueError: + try: + value = str(value) + except UnicodeError: + value = value.decode("utf-8", "replace") + + self.fail(value, param, ctx) + + +class UnprocessedParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + return value + + def __repr__(self) -> str: + return "UNPROCESSED" + + +class StringParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, bytes): + enc = _get_argv_encoding() + try: + value = value.decode(enc) + except UnicodeError: + fs_enc = sys.getfilesystemencoding() + if fs_enc != enc: + try: + value = value.decode(fs_enc) + except UnicodeError: + value = value.decode("utf-8", "replace") + else: + value = value.decode("utf-8", "replace") + return value + return str(value) + + def __repr__(self) -> str: + return "STRING" + + +class Choice(ParamType): + """The choice type allows a value to be checked against a fixed set + of supported values. All of these values have to be strings. + + You should only pass a list or tuple of choices. Other iterables + (like generators) may lead to surprising results. + + The resulting value will always be one of the originally passed choices + regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` + being specified. + + See :ref:`choice-opts` for an example. + + :param case_sensitive: Set to false to make choices case + insensitive. Defaults to true. + """ + + name = "choice" + + def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None: + self.choices = choices + self.case_sensitive = case_sensitive + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["choices"] = self.choices + info_dict["case_sensitive"] = self.case_sensitive + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + choices_str = "|".join(self.choices) + + # Use curly braces to indicate a required argument. + if param.required and param.param_type_name == "argument": + return f"{{{choices_str}}}" + + # Use square braces to indicate an option or optional argument. 
+ return f"[{choices_str}]" + + def get_missing_message(self, param: "Parameter") -> str: + return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices)) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + # Match through normalization and case sensitivity + # first do token_normalize_func, then lowercase + # preserve original `value` to produce an accurate message in + # `self.fail` + normed_value = value + normed_choices = {choice: choice for choice in self.choices} + + if ctx is not None and ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(value) + normed_choices = { + ctx.token_normalize_func(normed_choice): original + for normed_choice, original in normed_choices.items() + } + + if not self.case_sensitive: + normed_value = normed_value.casefold() + normed_choices = { + normed_choice.casefold(): original + for normed_choice, original in normed_choices.items() + } + + if normed_value in normed_choices: + return normed_choices[normed_value] + + choices_str = ", ".join(map(repr, self.choices)) + self.fail( + ngettext( + "{value!r} is not {choice}.", + "{value!r} is not one of {choices}.", + len(self.choices), + ).format(value=value, choice=choices_str, choices=choices_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return f"Choice({list(self.choices)})" + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Complete choices that start with the incomplete value. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + str_choices = map(str, self.choices) + + if self.case_sensitive: + matched = (c for c in str_choices if c.startswith(incomplete)) + else: + incomplete = incomplete.lower() + matched = (c for c in str_choices if c.lower().startswith(incomplete)) + + return [CompletionItem(c) for c in matched] + + +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. + + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. + Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. 
+ """ + + name = "datetime" + + def __init__(self, formats: t.Optional[t.Sequence[str]] = None): + self.formats: t.Sequence[str] = formats or [ + "%Y-%m-%d", + "%Y-%m-%dT%H:%M:%S", + "%Y-%m-%d %H:%M:%S", + ] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["formats"] = self.formats + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + return f"[{'|'.join(self.formats)}]" + + def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]: + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, datetime): + return value + + for format in self.formats: + converted = self._try_to_convert_date(value, format) + + if converted is not None: + return converted + + formats_str = ", ".join(map(repr, self.formats)) + self.fail( + ngettext( + "{value!r} does not match the format {format}.", + "{value!r} does not match the formats {formats}.", + len(self.formats), + ).format(value=value, format=formats_str, formats=formats_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return "DateTime" + + +class _NumberParamTypeBase(ParamType): + _number_class: t.ClassVar[t.Type[t.Any]] + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self._number_class(value) + except ValueError: + self.fail( + _("{value!r} is not a valid {number_type}.").format( + value=value, number_type=self.name + ), + param, + ctx, + ) + + +class _NumberRangeBase(_NumberParamTypeBase): + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + self.min = min + self.max = max + self.min_open = min_open + self.max_open = max_open + self.clamp = clamp + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + min=self.min, + max=self.max, + min_open=self.min_open, + max_open=self.max_open, + clamp=self.clamp, + ) + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import operator + + rv = super().convert(value, param, ctx) + lt_min: bool = self.min is not None and ( + operator.le if self.min_open else operator.lt + )(rv, self.min) + gt_max: bool = self.max is not None and ( + operator.ge if self.max_open else operator.gt + )(rv, self.max) + + if self.clamp: + if lt_min: + return self._clamp(self.min, 1, self.min_open) # type: ignore + + if gt_max: + return self._clamp(self.max, -1, self.max_open) # type: ignore + + if lt_min or gt_max: + self.fail( + _("{value} is not in the range {range}.").format( + value=rv, range=self._describe_range() + ), + param, + ctx, + ) + + return rv + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + """Find the valid value to clamp to bound in the given + direction. + + :param bound: The boundary value. + :param dir: 1 or -1 indicating the direction to move. + :param open: If true, the range does not include the bound. 
+ """ + raise NotImplementedError + + def _describe_range(self) -> str: + """Describe the range for use in help text.""" + if self.min is None: + op = "<" if self.max_open else "<=" + return f"x{op}{self.max}" + + if self.max is None: + op = ">" if self.min_open else ">=" + return f"x{op}{self.min}" + + lop = "<" if self.min_open else "<=" + rop = "<" if self.max_open else "<=" + return f"{self.min}{lop}x{rop}{self.max}" + + def __repr__(self) -> str: + clamp = " clamped" if self.clamp else "" + return f"<{type(self).__name__} {self._describe_range()}{clamp}>" + + +class IntParamType(_NumberParamTypeBase): + name = "integer" + _number_class = int + + def __repr__(self) -> str: + return "INT" + + +class IntRange(_NumberRangeBase, IntParamType): + """Restrict an :data:`click.INT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "integer range" + + def _clamp( # type: ignore + self, bound: int, dir: "te.Literal[1, -1]", open: bool + ) -> int: + if not open: + return bound + + return bound + dir + + +class FloatParamType(_NumberParamTypeBase): + name = "float" + _number_class = float + + def __repr__(self) -> str: + return "FLOAT" + + +class FloatRange(_NumberRangeBase, FloatParamType): + """Restrict a :data:`click.FLOAT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. This is not supported if either + boundary is marked ``open``. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "float range" + + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + super().__init__( + min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp + ) + + if (min_open or max_open) and clamp: + raise TypeError("Clamping is not supported for open bounds.") + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + if not open: + return bound + + # Could use Python 3.9's math.nextafter here, but clamping an + # open float range doesn't seem to be particularly useful. It's + # left up to the user to write a callback to do it if needed. 
+ raise RuntimeError("Clamping is not supported for open bounds.") + + +class BoolParamType(ParamType): + name = "boolean" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if value in {False, True}: + return bool(value) + + norm = value.strip().lower() + + if norm in {"1", "true", "t", "yes", "y", "on"}: + return True + + if norm in {"0", "false", "f", "no", "n", "off"}: + return False + + self.fail( + _("{value!r} is not a valid boolean.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "BOOL" + + +class UUIDParameterType(ParamType): + name = "uuid" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import uuid + + if isinstance(value, uuid.UUID): + return value + + value = value.strip() + + try: + return uuid.UUID(value) + except ValueError: + self.fail( + _("{value!r} is not a valid UUID.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "UUID" + + +class File(ParamType): + """Declares a parameter to be a file for reading or writing. The file + is automatically closed once the context tears down (after the command + finished working). + + Files can be opened for reading or writing. The special value ``-`` + indicates stdin or stdout depending on the mode. + + By default, the file is opened for reading text data, but it can also be + opened in binary mode or for writing. The encoding parameter can be used + to force a specific encoding. + + The `lazy` flag controls if the file should be opened immediately or upon + first IO. The default is to be non-lazy for standard input and output + streams as well as files opened for reading, `lazy` otherwise. When opening a + file lazily for reading, it is still opened temporarily for validation, but + will not be held open until first IO. lazy is mainly useful when opening + for writing to avoid creating the file until it is needed. + + Starting with Click 2.0, files can also be opened atomically in which + case all writes go into a separate file in the same folder and upon + completion the file will be moved over to the original location. This + is useful if a file regularly read by other users is modified. + + See :ref:`file-args` for more information. 
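# Sketch of File in practice: "-" selects stdin/stdout, and lazy+atomic
# defers creation and swaps a temp file into place on teardown.
import click

@click.command()
@click.argument("src", type=click.File("r"))
@click.argument("dst", type=click.File("w", lazy=True, atomic=True))
def copy(src, dst):
    dst.write(src.read())
# "copy - out.txt" reads stdin; out.txt only appears once the command finishes.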
+ """ + + name = "filename" + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: t.Optional[bool] = None, + atomic: bool = False, + ) -> None: + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update(mode=self.mode, encoding=self.encoding) + return info_dict + + def resolve_lazy_flag(self, value: "t.Union[str, os.PathLike[str]]") -> bool: + if self.lazy is not None: + return self.lazy + if os.fspath(value) == "-": + return False + elif "w" in self.mode: + return True + return False + + def convert( + self, + value: t.Union[str, "os.PathLike[str]", t.IO[t.Any]], + param: t.Optional["Parameter"], + ctx: t.Optional["Context"], + ) -> t.IO[t.Any]: + if _is_file_like(value): + return value + + value = t.cast("t.Union[str, os.PathLike[str]]", value) + + try: + lazy = self.resolve_lazy_flag(value) + + if lazy: + lf = LazyFile( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + if ctx is not None: + ctx.call_on_close(lf.close_intelligently) + + return t.cast(t.IO[t.Any], lf) + + f, should_close = open_stream( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + # If a context is provided, we automatically close the file + # at the end of the context execution (or flush out). If a + # context does not exist, it's the caller's responsibility to + # properly close the file. This for instance happens when the + # type is used with prompts. + if ctx is not None: + if should_close: + ctx.call_on_close(safecall(f.close)) + else: + ctx.call_on_close(safecall(f.flush)) + + return f + except OSError as e: # noqa: B014 + self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide file path completions. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + return [CompletionItem(incomplete, type="file")] + + +def _is_file_like(value: t.Any) -> "te.TypeGuard[t.IO[t.Any]]": + return hasattr(value, "read") or hasattr(value, "write") + + +class Path(ParamType): + """The ``Path`` type is similar to the :class:`File` type, but + returns the filename instead of an open file. Various checks can be + enabled to validate the type of file and permissions. + + :param exists: The file or directory needs to exist for the value to + be valid. If this is not set to ``True``, and the file does not + exist, then all further checks are silently skipped. + :param file_okay: Allow a file as a value. + :param dir_okay: Allow a directory as a value. + :param readable: if true, a readable check is performed. + :param writable: if true, a writable check is performed. + :param executable: if true, an executable check is performed. + :param resolve_path: Make the value absolute and resolve any + symlinks. A ``~`` is not expanded, as this is supposed to be + done by the shell only. 
+ :param allow_dash: Allow a single dash as a value, which indicates + a standard stream (but does not open it). Use + :func:`~click.open_file` to handle opening this value. + :param path_type: Convert the incoming path value to this type. If + ``None``, keep Python's default, which is ``str``. Useful to + convert to :class:`pathlib.Path`. + + .. versionchanged:: 8.1 + Added the ``executable`` parameter. + + .. versionchanged:: 8.0 + Allow passing ``path_type=pathlib.Path``. + + .. versionchanged:: 6.0 + Added the ``allow_dash`` parameter. + """ + + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: t.Optional[t.Type[t.Any]] = None, + executable: bool = False, + ): + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.readable = readable + self.writable = writable + self.executable = executable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.type = path_type + + if self.file_okay and not self.dir_okay: + self.name: str = _("file") + elif self.dir_okay and not self.file_okay: + self.name = _("directory") + else: + self.name = _("path") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + exists=self.exists, + file_okay=self.file_okay, + dir_okay=self.dir_okay, + writable=self.writable, + readable=self.readable, + allow_dash=self.allow_dash, + ) + return info_dict + + def coerce_path_result( + self, value: "t.Union[str, os.PathLike[str]]" + ) -> "t.Union[str, bytes, os.PathLike[str]]": + if self.type is not None and not isinstance(value, self.type): + if self.type is str: + return os.fsdecode(value) + elif self.type is bytes: + return os.fsencode(value) + else: + return t.cast("os.PathLike[str]", self.type(value)) + + return value + + def convert( + self, + value: "t.Union[str, os.PathLike[str]]", + param: t.Optional["Parameter"], + ctx: t.Optional["Context"], + ) -> "t.Union[str, bytes, os.PathLike[str]]": + rv = value + + is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") + + if not is_dash: + if self.resolve_path: + # os.path.realpath doesn't resolve symlinks on Windows + # until Python 3.8. Use pathlib for now. 
+ import pathlib + + rv = os.fsdecode(pathlib.Path(rv).resolve()) + + try: + st = os.stat(rv) + except OSError: + if not self.exists: + return self.coerce_path_result(rv) + self.fail( + _("{name} {filename!r} does not exist.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if not self.file_okay and stat.S_ISREG(st.st_mode): + self.fail( + _("{name} {filename!r} is a file.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + if not self.dir_okay and stat.S_ISDIR(st.st_mode): + self.fail( + _("{name} '{filename}' is a directory.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.readable and not os.access(rv, os.R_OK): + self.fail( + _("{name} {filename!r} is not readable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.writable and not os.access(rv, os.W_OK): + self.fail( + _("{name} {filename!r} is not writable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.executable and not os.access(value, os.X_OK): + self.fail( + _("{name} {filename!r} is not executable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + return self.coerce_path_result(rv) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide path completions for only + directories or any paths. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + type = "dir" if self.dir_okay and not self.file_okay else "file" + return [CompletionItem(incomplete, type=type)] + + +class Tuple(CompositeParamType): + """The default behavior of Click is to apply a type on a value directly. + This works well in most cases, except for when `nargs` is set to a fixed + count and different types should be used for different items. In this + case the :class:`Tuple` type can be used. This type can only be used + if `nargs` is set to a fixed number. + + For more information see :ref:`tuple-type`. + + This can be selected by using a Python tuple literal as a type. + + :param types: a list of types that should be used for the tuple items. 
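# Sketch: a tuple literal as the type selects Tuple; each position gets its
# own converter and the arity must match nargs.
import click

@click.command()
@click.option("--point", type=(str, int), nargs=2)
def locate(point):
    name, x = point
    click.echo(f"{name}={x}")
# "--point origin 3" yields ("origin", 3).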
+ """ + + def __init__(self, types: t.Sequence[t.Union[t.Type[t.Any], ParamType]]) -> None: + self.types: t.Sequence[ParamType] = [convert_type(ty) for ty in types] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["types"] = [t.to_info_dict() for t in self.types] + return info_dict + + @property + def name(self) -> str: # type: ignore + return f"<{' '.join(ty.name for ty in self.types)}>" + + @property + def arity(self) -> int: # type: ignore + return len(self.types) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + len_type = len(self.types) + len_value = len(value) + + if len_value != len_type: + self.fail( + ngettext( + "{len_type} values are required, but {len_value} was given.", + "{len_type} values are required, but {len_value} were given.", + len_value, + ).format(len_type=len_type, len_value=len_value), + param=param, + ctx=ctx, + ) + + return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) + + +def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType: + """Find the most appropriate :class:`ParamType` for the given Python + type. If the type isn't provided, it can be inferred from a default + value. + """ + guessed_type = False + + if ty is None and default is not None: + if isinstance(default, (tuple, list)): + # If the default is empty, ty will remain None and will + # return STRING. + if default: + item = default[0] + + # A tuple of tuples needs to detect the inner types. + # Can't call convert recursively because that would + # incorrectly unwind the tuple to a single type. + if isinstance(item, (tuple, list)): + ty = tuple(map(type, item)) + else: + ty = type(item) + else: + ty = type(default) + + guessed_type = True + + if isinstance(ty, tuple): + return Tuple(ty) + + if isinstance(ty, ParamType): + return ty + + if ty is str or ty is None: + return STRING + + if ty is int: + return INT + + if ty is float: + return FLOAT + + if ty is bool: + return BOOL + + if guessed_type: + return STRING + + if __debug__: + try: + if issubclass(ty, ParamType): + raise AssertionError( + f"Attempted to use an uninstantiated parameter type ({ty})." + ) + except TypeError: + # ty is an instance (correct), so issubclass fails. + pass + + return FuncParamType(ty) + + +#: A dummy parameter type that just does nothing. From a user's +#: perspective this appears to just be the same as `STRING` but +#: internally no string conversion takes place if the input was bytes. +#: This is usually useful when working with file paths as they can +#: appear in bytes and unicode. +#: +#: For path related uses the :class:`Path` type is a better choice but +#: there are situations where an unprocessed type is useful which is why +#: it is is provided. +#: +#: .. versionadded:: 4.0 +UNPROCESSED = UnprocessedParamType() + +#: A unicode string parameter type which is the implicit default. This +#: can also be selected by using ``str`` as type. +STRING = StringParamType() + +#: An integer parameter. This can also be selected by using ``int`` as +#: type. +INT = IntParamType() + +#: A floating point value parameter. This can also be selected by using +#: ``float`` as type. +FLOAT = FloatParamType() + +#: A boolean parameter. This is the default for boolean flags. This can +#: also be selected by using ``bool`` as a type. +BOOL = BoolParamType() + +#: A UUID parameter. 
+UUID = UUIDParameterType() diff --git a/.venv/lib/python3.12/site-packages/click/utils.py b/.venv/lib/python3.12/site-packages/click/utils.py new file mode 100644 index 00000000..d536434f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/utils.py @@ -0,0 +1,624 @@ +import os +import re +import sys +import typing as t +from functools import update_wrapper +from types import ModuleType +from types import TracebackType + +from ._compat import _default_text_stderr +from ._compat import _default_text_stdout +from ._compat import _find_binary_writer +from ._compat import auto_wrap_for_ansi +from ._compat import binary_streams +from ._compat import open_stream +from ._compat import should_strip_ansi +from ._compat import strip_ansi +from ._compat import text_streams +from ._compat import WIN +from .globals import resolve_color_default + +if t.TYPE_CHECKING: + import typing_extensions as te + + P = te.ParamSpec("P") + +R = t.TypeVar("R") + + +def _posixify(name: str) -> str: + return "-".join(name.split()).lower() + + +def safecall(func: "t.Callable[P, R]") -> "t.Callable[P, t.Optional[R]]": + """Wraps a function so that it swallows exceptions.""" + + def wrapper(*args: "P.args", **kwargs: "P.kwargs") -> t.Optional[R]: + try: + return func(*args, **kwargs) + except Exception: + pass + return None + + return update_wrapper(wrapper, func) + + +def make_str(value: t.Any) -> str: + """Converts a value into a valid string.""" + if isinstance(value, bytes): + try: + return value.decode(sys.getfilesystemencoding()) + except UnicodeError: + return value.decode("utf-8", "replace") + return str(value) + + +def make_default_short_help(help: str, max_length: int = 45) -> str: + """Returns a condensed version of help string.""" + # Consider only the first paragraph. + paragraph_end = help.find("\n\n") + + if paragraph_end != -1: + help = help[:paragraph_end] + + # Collapse newlines, tabs, and spaces. + words = help.split() + + if not words: + return "" + + # The first paragraph started with a "no rewrap" marker, ignore it. + if words[0] == "\b": + words = words[1:] + + total_length = 0 + last_index = len(words) - 1 + + for i, word in enumerate(words): + total_length += len(word) + (i > 0) + + if total_length > max_length: # too long, truncate + break + + if word[-1] == ".": # sentence end, truncate without "..." + return " ".join(words[: i + 1]) + + if total_length == max_length and i != last_index: + break # not at sentence end, truncate with "..." + else: + return " ".join(words) # no truncation needed + + # Account for the length of the suffix. + total_length += len("...") + + # remove words until the length is short enough + while i > 0: + total_length -= len(words[i]) + (i > 0) + + if total_length <= max_length: + break + + i -= 1 + + return " ".join(words[:i]) + "..." + + +class LazyFile: + """A lazy file works like a regular file but it does not fully open + the file but it does perform some basic checks early to see if the + filename parameter does make sense. This is useful for safely opening + files for writing. 
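# Sketch of LazyFile's deferred open (the file name is illustrative):
from click.utils import LazyFile

lf = LazyFile("out.txt", mode="w")  # nothing is created on disk yet
lf.write("data\n")                  # __getattr__ triggers the real open() here
lf.close_intelligently()            # closes only files the wrapper opened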
+ """ + + def __init__( + self, + filename: t.Union[str, "os.PathLike[str]"], + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, + ): + self.name: str = os.fspath(filename) + self.mode = mode + self.encoding = encoding + self.errors = errors + self.atomic = atomic + self._f: t.Optional[t.IO[t.Any]] + self.should_close: bool + + if self.name == "-": + self._f, self.should_close = open_stream(filename, mode, encoding, errors) + else: + if "r" in mode: + # Open and close the file in case we're opening it for + # reading so that we can catch at least some errors in + # some cases early. + open(filename, mode).close() + self._f = None + self.should_close = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self.open(), name) + + def __repr__(self) -> str: + if self._f is not None: + return repr(self._f) + return f"" + + def open(self) -> t.IO[t.Any]: + """Opens the file if it's not yet open. This call might fail with + a :exc:`FileError`. Not handling this error will produce an error + that Click shows. + """ + if self._f is not None: + return self._f + try: + rv, self.should_close = open_stream( + self.name, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + except OSError as e: # noqa: E402 + from .exceptions import FileError + + raise FileError(self.name, hint=e.strerror) from e + self._f = rv + return rv + + def close(self) -> None: + """Closes the underlying file, no matter what.""" + if self._f is not None: + self._f.close() + + def close_intelligently(self) -> None: + """This function only closes the file if it was opened by the lazy + file wrapper. For instance this will never close stdin. + """ + if self.should_close: + self.close() + + def __enter__(self) -> "LazyFile": + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.close_intelligently() + + def __iter__(self) -> t.Iterator[t.AnyStr]: + self.open() + return iter(self._f) # type: ignore + + +class KeepOpenFile: + def __init__(self, file: t.IO[t.Any]) -> None: + self._file: t.IO[t.Any] = file + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._file, name) + + def __enter__(self) -> "KeepOpenFile": + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + pass + + def __repr__(self) -> str: + return repr(self._file) + + def __iter__(self) -> t.Iterator[t.AnyStr]: + return iter(self._file) + + +def echo( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO[t.Any]] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, +) -> None: + """Print a message and newline to stdout or a file. This should be + used instead of :func:`print` because it provides better support + for different data, files, and environments. + + Compared to :func:`print`, this does the following: + + - Ensures that the output encoding is not misconfigured on Linux. + - Supports Unicode in the Windows console. + - Supports writing to binary outputs, and supports writing bytes + to text outputs. + - Supports colors and styles on Windows. + - Removes ANSI color and style codes if the output does not look + like an interactive terminal. + - Always flushes the output. + + :param message: The string or bytes to output. Other objects are + converted to strings. 
+ :param file: The file to write to. Defaults to ``stdout``. + :param err: Write to ``stderr`` instead of ``stdout``. + :param nl: Print a newline after the message. Enabled by default. + :param color: Force showing or hiding colors and other styles. By + default Click will remove color if the output does not look like + an interactive terminal. + + .. versionchanged:: 6.0 + Support Unicode output on the Windows console. Click does not + modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` + will still not support Unicode. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionadded:: 3.0 + Added the ``err`` parameter. + + .. versionchanged:: 2.0 + Support colors on Windows if colorama is installed. + """ + if file is None: + if err: + file = _default_text_stderr() + else: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + return + + # Convert non bytes/text into the native string type. + if message is not None and not isinstance(message, (str, bytes, bytearray)): + out: t.Optional[t.Union[str, bytes]] = str(message) + else: + out = message + + if nl: + out = out or "" + if isinstance(out, str): + out += "\n" + else: + out += b"\n" + + if not out: + file.flush() + return + + # If there is a message and the value looks like bytes, we manually + # need to find the binary stream and write the message in there. + # This is done separately so that most stream types will work as you + # would expect. Eg: you can write to StringIO for other cases. + if isinstance(out, (bytes, bytearray)): + binary_file = _find_binary_writer(file) + + if binary_file is not None: + file.flush() + binary_file.write(out) + binary_file.flush() + return + + # ANSI style code support. For no message or bytes, nothing happens. + # When outputting to a file instead of a terminal, strip codes. + else: + color = resolve_color_default(color) + + if should_strip_ansi(file, color): + out = strip_ansi(out) + elif WIN: + if auto_wrap_for_ansi is not None: + file = auto_wrap_for_ansi(file) # type: ignore + elif not color: + out = strip_ansi(out) + + file.write(out) # type: ignore + file.flush() + + +def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO: + """Returns a system stream for byte processing. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + """ + opener = binary_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener() + + +def get_text_stream( + name: "te.Literal['stdin', 'stdout', 'stderr']", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", +) -> t.TextIO: + """Returns a system stream for text processing. This usually returns + a wrapped stream around a binary stream returned from + :func:`get_binary_stream` but it also can take shortcuts for already + correctly configured streams. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + :param encoding: overrides the detected default encoding. + :param errors: overrides the default error mode. 
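# Sketch of echo() adapting to the data and destination:
import click

click.echo("status: ok")                    # text to stdout, always flushed
click.echo(b"\x00\x01", nl=False)           # bytes are routed to the binary stream
click.echo(click.style("failed", fg="red"), err=True)  # styled text on stderr
# ANSI codes are stripped automatically when the target is not a terminal.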
+ """ + opener = text_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener(encoding, errors) + + +def open_file( + filename: str, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: bool = False, + atomic: bool = False, +) -> t.IO[t.Any]: + """Open a file, with extra behavior to handle ``'-'`` to indicate + a standard stream, lazy open on write, and atomic write. Similar to + the behavior of the :class:`~click.File` param type. + + If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is + wrapped so that using it in a context manager will not close it. + This makes it possible to use the function without accidentally + closing a standard stream: + + .. code-block:: python + + with open_file(filename) as f: + ... + + :param filename: The name of the file to open, or ``'-'`` for + ``stdin``/``stdout``. + :param mode: The mode in which to open the file. + :param encoding: The encoding to decode or encode a file opened in + text mode. + :param errors: The error handling mode. + :param lazy: Wait to open the file until it is accessed. For read + mode, the file is temporarily opened to raise access errors + early, then closed until it is read again. + :param atomic: Write to a temporary file and replace the given file + on close. + + .. versionadded:: 3.0 + """ + if lazy: + return t.cast( + t.IO[t.Any], LazyFile(filename, mode, encoding, errors, atomic=atomic) + ) + + f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) + + if not should_close: + f = t.cast(t.IO[t.Any], KeepOpenFile(f)) + + return f + + +def format_filename( + filename: "t.Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]", + shorten: bool = False, +) -> str: + """Format a filename as a string for display. Ensures the filename can be + displayed by replacing any invalid bytes or surrogate escapes in the name + with the replacement character ``�``. + + Invalid bytes or surrogate escapes will raise an error when written to a + stream with ``errors="strict". This will typically happen with ``stdout`` + when the locale is something like ``en_GB.UTF-8``. + + Many scenarios *are* safe to write surrogates though, due to PEP 538 and + PEP 540, including: + + - Writing to ``stderr``, which uses ``errors="backslashreplace"``. + - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens + stdout and stderr with ``errors="surrogateescape"``. + - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``. + - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``. + Python opens stdout and stderr with ``errors="surrogateescape"``. + + :param filename: formats a filename for UI display. This will also convert + the filename into unicode without failing. + :param shorten: this optionally shortens the filename to strip of the + path that leads up to it. + """ + if shorten: + filename = os.path.basename(filename) + else: + filename = os.fspath(filename) + + if isinstance(filename, bytes): + filename = filename.decode(sys.getfilesystemencoding(), "replace") + else: + filename = filename.encode("utf-8", "surrogateescape").decode( + "utf-8", "replace" + ) + + return filename + + +def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str: + r"""Returns the config folder for the application. The default behavior + is to return whatever is most appropriate for the operating system. 
+
+    To give you an idea, for an app called ``"Foo Bar"``, something like
+    the following folders could be returned:
+
+    Mac OS X:
+      ``~/Library/Application Support/Foo Bar``
+    Mac OS X (POSIX):
+      ``~/.foo-bar``
+    Unix:
+      ``~/.config/foo-bar``
+    Unix (POSIX):
+      ``~/.foo-bar``
+    Windows (roaming):
+      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
+    Windows (not roaming):
+      ``C:\Users\<user>\AppData\Local\Foo Bar``
+
+    .. versionadded:: 2.0
+
+    :param app_name: the application name. This should be properly capitalized
+        and can contain whitespace.
+    :param roaming: controls if the folder should be roaming or not on Windows.
+        Has no effect otherwise.
+    :param force_posix: if this is set to `True` then on any POSIX system the
+        folder will be stored in the home folder with a leading
+        dot instead of the XDG config home or darwin's
+        application support folder.
+    """
+    if WIN:
+        key = "APPDATA" if roaming else "LOCALAPPDATA"
+        folder = os.environ.get(key)
+        if folder is None:
+            folder = os.path.expanduser("~")
+        return os.path.join(folder, app_name)
+    if force_posix:
+        return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}"))
+    if sys.platform == "darwin":
+        return os.path.join(
+            os.path.expanduser("~/Library/Application Support"), app_name
+        )
+    return os.path.join(
+        os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
+        _posixify(app_name),
+    )
+
+
+class PacifyFlushWrapper:
+    """This wrapper is used to catch and suppress BrokenPipeErrors resulting
+    from ``.flush()`` being called on broken pipe during the shutdown/final-GC
+    of the Python interpreter. Notably ``.flush()`` is always called on
+    ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
+    other cleanup code, and the case where the underlying file is not a broken
+    pipe, all calls and attributes are proxied.
+    """
+
+    def __init__(self, wrapped: t.IO[t.Any]) -> None:
+        self.wrapped = wrapped
+
+    def flush(self) -> None:
+        try:
+            self.wrapped.flush()
+        except OSError as e:
+            import errno
+
+            if e.errno != errno.EPIPE:
+                raise
+
+    def __getattr__(self, attr: str) -> t.Any:
+        return getattr(self.wrapped, attr)
+
+
+def _detect_program_name(
+    path: t.Optional[str] = None, _main: t.Optional[ModuleType] = None
+) -> str:
+    """Determine the command used to run the program, for use in help
+    text. If a file or entry point was executed, the file name is
+    returned. If ``python -m`` was used to execute a module or package,
+    ``python -m name`` is returned.
+
+    This doesn't try to be too precise, the goal is to give a concise
+    name for help text. Files are only shown as their name without the
+    path. ``python`` is only shown for modules, and the full path to
+    ``sys.executable`` is not shown.
+
+    :param path: The Python file being executed. Python puts this in
+        ``sys.argv[0]``, which is used by default.
+    :param _main: The ``__main__`` module. This should only be passed
+        during internal testing.
+
+    .. versionadded:: 8.0
+        Based on command args detection in the Werkzeug reloader.
+
+    :meta private:
+    """
+    if _main is None:
+        _main = sys.modules["__main__"]
+
+    if not path:
+        path = sys.argv[0]
+
+    # The value of __package__ indicates how Python was called. It may
+    # not exist if a setuptools script is installed as an egg. It may be
+    # set incorrectly for entry points created with pip on Windows.
+    # It is set to "" inside a Shiv or PEX zipapp.
+ if getattr(_main, "__package__", None) in {None, ""} or ( + os.name == "nt" + and _main.__package__ == "" + and not os.path.exists(path) + and os.path.exists(f"{path}.exe") + ): + # Executed a file, like "python app.py". + return os.path.basename(path) + + # Executed a module, like "python -m example". + # Rewritten by Python from "-m script" to "/path/to/script.py". + # Need to look at main module to determine how it was executed. + py_module = t.cast(str, _main.__package__) + name = os.path.splitext(os.path.basename(path))[0] + + # A submodule like "example.cli". + if name != "__main__": + py_module = f"{py_module}.{name}" + + return f"python -m {py_module.lstrip('.')}" + + +def _expand_args( + args: t.Iterable[str], + *, + user: bool = True, + env: bool = True, + glob_recursive: bool = True, +) -> t.List[str]: + """Simulate Unix shell expansion with Python functions. + + See :func:`glob.glob`, :func:`os.path.expanduser`, and + :func:`os.path.expandvars`. + + This is intended for use on Windows, where the shell does not do any + expansion. It may not exactly match what a Unix shell would do. + + :param args: List of command line arguments to expand. + :param user: Expand user home directory. + :param env: Expand environment variables. + :param glob_recursive: ``**`` matches directories recursively. + + .. versionchanged:: 8.1 + Invalid glob patterns are treated as empty expansions rather + than raising an error. + + .. versionadded:: 8.0 + + :meta private: + """ + from glob import glob + + out = [] + + for arg in args: + if user: + arg = os.path.expanduser(arg) + + if env: + arg = os.path.expandvars(arg) + + try: + matches = glob(arg, recursive=glob_recursive) + except re.error: + matches = [] + + if not matches: + out.append(arg) + else: + out.extend(matches) + + return out diff --git a/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/AUTHORS.txt b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/AUTHORS.txt new file mode 100644 index 00000000..17b68caa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/AUTHORS.txt @@ -0,0 +1,5 @@ +Authors +======= + +Kevin Wurster +Sean Gillies diff --git a/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/LICENSE.txt b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/LICENSE.txt new file mode 100644 index 00000000..8fbd3537 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/LICENSE.txt @@ -0,0 +1,29 @@ +New BSD License + +Copyright (c) 2015-2019, Kevin D. Wurster, Sean C. Gillies +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+
+* Neither click-plugins nor the names of its contributors may not be used to
+  endorse or promote products derived from this software without specific prior
+  written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/METADATA b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/METADATA
new file mode 100644
index 00000000..11df8ed8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/METADATA
@@ -0,0 +1,210 @@
+Metadata-Version: 2.1
+Name: click-plugins
+Version: 1.1.1
+Summary: An extension module for click to enable registering CLI commands via setuptools entry-points.
+Home-page: https://github.com/click-contrib/click-plugins
+Author: Kevin Wurster, Sean Gillies
+Author-email: wursterk@gmail.com, sean.gillies@gmail.com
+License: New BSD
+Keywords: click plugin setuptools entry-point
+Platform: UNKNOWN
+Classifier: Topic :: Utilities
+Classifier: Intended Audience :: Developers
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Requires-Dist: click (>=4.0)
+Provides-Extra: dev
+Requires-Dist: pytest (>=3.6) ; extra == 'dev'
+Requires-Dist: pytest-cov ; extra == 'dev'
+Requires-Dist: wheel ; extra == 'dev'
+Requires-Dist: coveralls ; extra == 'dev'
+
+=============
+click-plugins
+=============
+
+.. image:: https://travis-ci.org/click-contrib/click-plugins.svg?branch=master
+   :target: https://travis-ci.org/click-contrib/click-plugins?branch=master
+
+.. image:: https://coveralls.io/repos/click-contrib/click-plugins/badge.svg?branch=master&service=github
+   :target: https://coveralls.io/github/click-contrib/click-plugins?branch=master
+
+An extension module for click to register external CLI commands via setuptools
+entry-points.
+
+
+Why?
+----
+
+Let's say you develop a command line interface and someone requests a new feature
+that is absolutely related to your project but would have negative consequences
+like additional dependencies, major refactoring, or maybe it's just too domain
+specific to be supported directly. Rather than developing a separate standalone
+utility you could offer up a setuptools entry point that allows others to use
+your command line utility as a home for their related sub-commands. You get to
+choose where these sub-commands or sub-groups CAN be registered but the plugin
+developer gets to choose where they ARE registered. You could have all plugins
+register alongside the core commands, in a special sub-group, across multiple
+sub-groups, or some combination.
+
+
+Enabling Plugins
+----------------
+
+For a more detailed example see the `examples `_ section.
+
+The only requirement is decorating ``click.group()`` with
+``click_plugins.with_plugins()``, which handles attaching external commands and
+groups. In this case the core CLI developer registers CLI plugins from
+``core_package.cli_plugins``.
+
+.. code-block:: python
+
+    from pkg_resources import iter_entry_points
+
+    import click
+    from click_plugins import with_plugins
+
+
+    @with_plugins(iter_entry_points('core_package.cli_plugins'))
+    @click.group()
+    def cli():
+        """Commandline interface for yourpackage."""
+
+    @cli.command()
+    def subcommand():
+        """Subcommand that does something."""
+
+
+Developing Plugins
+------------------
+
+Plugin developers need to register their sub-commands or sub-groups to an
+entry-point in their ``setup.py`` that is loaded by the core package.
+
+.. code-block:: python
+
+    from setuptools import setup
+
+    setup(
+        name='yourscript',
+        version='0.1',
+        py_modules=['yourscript'],
+        install_requires=[
+            'click',
+        ],
+        entry_points='''
+            [core_package.cli_plugins]
+            cool_subcommand=yourscript.cli:cool_subcommand
+            another_subcommand=yourscript.cli:another_subcommand
+        ''',
+    )
+
+
+Broken and Incompatible Plugins
+-------------------------------
+
+Any sub-command or sub-group that cannot be loaded is caught and converted to
+a ``click_plugins.core.BrokenCommand()`` rather than just crashing the entire
+CLI. The short-help is converted to a warning message like:
+
+.. code-block:: console
+
+    Warning: could not load plugin. See `` --help``.
+
+and if the sub-command or group is executed the entire traceback is printed.
+
+
+Best Practices and Extra Credit
+-------------------------------
+
+Opening a CLI to plugins encourages other developers to independently extend
+functionality, but there is no guarantee these new features will be "on brand".
+Plugin developers are almost certainly already using features in the core
+package the CLI belongs to, so defining commonly used arguments and options in
+one place lets plugin developers reuse these flags to produce a more cohesive
+CLI. If the CLI is simple, these can be defined at the top of
+``yourpackage/cli.py``; for more complex packages, something like
+``yourpackage/cli/options.py`` works well. These common options need to be easy
+to find and well documented so that plugin developers know what variable to
+give to their sub-command's function and what object they can expect to
+receive. Don't forget to document non-obvious callbacks.
+
+Keep in mind that plugin developers also have access to the parent group's
+``ctx.obj``, which is very useful for passing things like verbosity levels or
+config values around to sub-commands.
+
+Here's some code that sub-commands could re-use:
+
+.. code-block:: python
+
+    from multiprocessing import cpu_count
+
+    import click
+
+    jobs_opt = click.option(
+        '-j', '--jobs', metavar='CORES', type=click.IntRange(min=1, max=cpu_count()), default=1,
+        show_default=True, help="Process data across N cores."
+    )
+
+Plugin developers can access this with:
+
+.. code-block:: python
+
+    import click
+    import parent_cli_package.cli.options
+
+
+    @click.command()
+    @parent_cli_package.cli.options.jobs_opt
+    def subcommand(jobs):
+        """I do something domain specific."""
+
+
+Installation
+------------
+
+With ``pip``:
+
+.. code-block:: console
+
+    $ pip install click-plugins
+
+From source:
+
+..
code-block:: console + + $ git clone https://github.com/click-contrib/click-plugins.git + $ cd click-plugins + $ python setup.py install + + +Developing +---------- + +.. code-block:: console + + $ git clone https://github.com/click-contrib/click-plugins.git + $ cd click-plugins + $ pip install -e .\[dev\] + $ pytest tests --cov click_plugins --cov-report term-missing + + +Changelog +--------- + +See ``CHANGES.txt`` + + +Authors +------- + +See ``AUTHORS.txt`` + + +License +------- + +See ``LICENSE.txt`` + diff --git a/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/RECORD b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/RECORD new file mode 100644 index 00000000..66777ff5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/RECORD @@ -0,0 +1,12 @@ +click_plugins-1.1.1.dist-info/AUTHORS.txt,sha256=FUhD9wZxX5--d9KS7hUB-wnHgyS67pdnWvADk8lrLeE,90 +click_plugins-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +click_plugins-1.1.1.dist-info/LICENSE.txt,sha256=ovxTmp55udvfNAMB8D-Wci6bCbFy-kiV9mnwwmQrj3o,1517 +click_plugins-1.1.1.dist-info/METADATA,sha256=LFOtPppAX0RN1Wwn7A2Pq46juwOppLlvUGR2VNRgAPk,6390 +click_plugins-1.1.1.dist-info/RECORD,, +click_plugins-1.1.1.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 +click_plugins-1.1.1.dist-info/top_level.txt,sha256=oB_GDZcOeOKX1eKKCfqSMR4tfJS6iL3zJshaJJPSQUI,14 +click_plugins-1.1.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +click_plugins/__init__.py,sha256=lAwJ0n4PqZCv7hk5Fz6yNL7TRrXKuhynDBGiaNSUNvo,2247 +click_plugins/__pycache__/__init__.cpython-312.pyc,, +click_plugins/__pycache__/core.cpython-312.pyc,, +click_plugins/core.py,sha256=4hhmUpFi6MSYsvxogksNu5dlKEWNscbiE9ynUy5dPdE,2475 diff --git a/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/WHEEL new file mode 100644 index 00000000..c8240f03 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/top_level.txt new file mode 100644 index 00000000..22e5b9b9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +click_plugins diff --git a/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/zip-safe b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/zip-safe new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click_plugins-1.1.1.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/.venv/lib/python3.12/site-packages/click_plugins/__init__.py b/.venv/lib/python3.12/site-packages/click_plugins/__init__.py new file mode 100644 index 00000000..6bdfe38e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click_plugins/__init__.py @@ -0,0 +1,61 @@ +""" +An extension module for click to enable registering CLI commands via setuptools +entry-points. 
+ + + from pkg_resources import iter_entry_points + + import click + from click_plugins import with_plugins + + + @with_plugins(iter_entry_points('entry_point.name')) + @click.group() + def cli(): + '''Commandline interface for something.''' + + @cli.command() + @click.argument('arg') + def subcommand(arg): + '''A subcommand for something else''' +""" + + +from click_plugins.core import with_plugins + + +__version__ = '1.1.1' +__author__ = 'Kevin Wurster, Sean Gillies' +__email__ = 'wursterk@gmail.com, sean.gillies@gmail.com' +__source__ = 'https://github.com/click-contrib/click-plugins' +__license__ = ''' +New BSD License + +Copyright (c) 2015-2019, Kevin D. Wurster, Sean C. Gillies +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither click-plugins nor the names of its contributors may not be used to + endorse or promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +''' diff --git a/.venv/lib/python3.12/site-packages/click_plugins/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click_plugins/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..f5d7ee9b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click_plugins/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click_plugins/__pycache__/core.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click_plugins/__pycache__/core.cpython-312.pyc new file mode 100644 index 00000000..020c41c9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/click_plugins/__pycache__/core.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/click_plugins/core.py b/.venv/lib/python3.12/site-packages/click_plugins/core.py new file mode 100644 index 00000000..0d7f5e97 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click_plugins/core.py @@ -0,0 +1,92 @@ +""" +Core components for click_plugins +""" + + +import click + +import os +import sys +import traceback + + +def with_plugins(plugins): + + """ + A decorator to register external CLI commands to an instance of + `click.Group()`. + + Parameters + ---------- + plugins : iter + An iterable producing one `pkg_resources.EntryPoint()` per iteration. 
+
+    Returns
+    -------
+    click.Group()
+    """
+
+    def decorator(group):
+        if not isinstance(group, click.Group):
+            raise TypeError("Plugins can only be attached to an instance of click.Group()")
+
+        for entry_point in plugins or ():
+            try:
+                group.add_command(entry_point.load())
+            except Exception:
+                # Catch this so a busted plugin doesn't take down the CLI.
+                # Handled by registering a dummy command that does nothing
+                # other than explain the error.
+                group.add_command(BrokenCommand(entry_point.name))
+
+        return group
+
+    return decorator
+
+
+class BrokenCommand(click.Command):
+
+    """
+    Rather than completely crash the CLI when a broken plugin is loaded, this
+    class provides a modified help message informing the user that the plugin is
+    broken and they should contact the owner.  If the user executes the plugin
+    or specifies `--help`, a traceback is reported showing the exception the
+    plugin loader encountered.
+    """
+
+    def __init__(self, name):
+
+        """
+        Define the special help messages after instantiating a `click.Command()`.
+        """
+
+        click.Command.__init__(self, name)
+
+        util_name = os.path.basename(sys.argv and sys.argv[0] or __file__)
+
+        if os.environ.get('CLICK_PLUGINS_HONESTLY'):  # pragma: no cover
+            icon = u'\U0001F4A9'
+        else:
+            icon = u'\u2020'
+
+        self.help = (
+            "\nWarning: entry point could not be loaded. Contact "
+            "its author for help.\n\n\b\n"
+            + traceback.format_exc())
+        self.short_help = (
+            icon + " Warning: could not load plugin. See `%s %s --help`."
+            % (util_name, self.name))
+
+    def invoke(self, ctx):
+
+        """
+        Print the traceback instead of doing nothing.
+        """
+
+        click.echo(self.help, color=ctx.color)
+        ctx.exit(1)
+
+    def parse_args(self, ctx, args):
+        return args
diff --git a/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/LICENSE b/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/LICENSE
new file mode 100644
index 00000000..effb9456
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2014, Mapbox
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+* Neither the name of cligj nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/METADATA b/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/METADATA
new file mode 100644
index 00000000..a6e35161
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/METADATA
@@ -0,0 +1,170 @@
+Metadata-Version: 2.1
+Name: cligj
+Version: 0.7.2
+Summary: Click params for command line interfaces to GeoJSON
+Home-page: https://github.com/mapbox/cligj
+Author: Sean Gillies
+Author-email: sean@mapbox.com
+License: BSD
+Platform: UNKNOWN
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, <4
+License-File: LICENSE
+Requires-Dist: click (>=4.0)
+Provides-Extra: test
+Requires-Dist: pytest-cov ; extra == 'test'
+
+cligj
+======
+
+.. image:: https://travis-ci.com/mapbox/cligj.svg
+   :target: https://travis-ci.com/mapbox/cligj
+
+.. image:: https://coveralls.io/repos/mapbox/cligj/badge.png?branch=master
+   :target: https://coveralls.io/r/mapbox/cligj?branch=master
+
+Common arguments and options for GeoJSON processing commands, using Click.
+
+`cligj` is for Python developers who create command line interfaces for geospatial data.
+`cligj` allows you to quickly build consistent, well-tested and interoperable CLIs for handling GeoJSON.
+
+
+Arguments
+---------
+
+``files_in_arg``
+Multiple files
+
+``files_inout_arg``
+Multiple files, last of which is an output file.
+
+``features_in_arg``
+GeoJSON Features input which accepts multiple representations of GeoJSON features
+and returns the input data as an iterable of GeoJSON Feature-like dictionaries.
+
+Options
+--------
+
+``verbose_opt``
+
+``quiet_opt``
+
+``format_opt``
+
+JSON formatting options
+~~~~~~~~~~~~~~~~~~~~~~~
+
+``indent_opt``
+
+``compact_opt``
+
+Coordinate precision option
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+``precision_opt``
+
+Geographic (default), projected, or Mercator switch
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+``projection_geographic_opt``
+
+``projection_projected_opt``
+
+``projection_mercator_opt``
+
+Feature collection or feature sequence switch
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+``sequence_opt``
+
+``use_rs_opt``
+
+GeoJSON output mode option
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+``geojson_type_collection_opt``
+
+``geojson_type_feature_opt``
+
+``geojson_type_bbox_opt``
+
+Example
+-------
+
+Here's an example of a command that writes out GeoJSON features as a collection
+or, optionally, a sequence of individual features. Since most software that
+reads and writes GeoJSON expects a text containing a single feature collection,
+that's the default; an LF-delimited sequence of texts containing one GeoJSON
+feature each can be selected with the ``--sequence`` option.
+To write sequences of feature texts that conform to the `GeoJSON Text Sequences
+standard `__ (and might contain
+pretty-printed JSON) with the ASCII Record Separator (0x1e) as a delimiter, use
+the ``--rs`` option.
+
+..
warning:: Future change warning
+   GeoJSON sequences (`--sequence`), not collections (`--no-sequence`), will be
+   the default in version 1.0.0.
+
+
+.. code-block:: python
+
+    import click
+    import cligj
+    import json
+
+    def process_features(features):
+        for feature in features:
+            # TODO process feature here
+            yield feature
+
+    @click.command()
+    @cligj.features_in_arg
+    @cligj.sequence_opt
+    @cligj.use_rs_opt
+    def pass_features(features, sequence, use_rs):
+        if sequence:
+            for feature in process_features(features):
+                if use_rs:
+                    click.echo(u'\x1e', nl=False)
+                click.echo(json.dumps(feature))
+        else:
+            click.echo(json.dumps(
+                {'type': 'FeatureCollection',
+                 'features': list(process_features(features))}))
+
+On the command line, the generated help text explains the usage:
+
+.. code-block:: console
+
+    Usage: pass_features [OPTIONS] FEATURES...
+
+    Options:
+      --sequence / --no-sequence  Write a LF-delimited sequence of texts
+                                  containing individual objects or write a single
+                                  JSON text containing a feature collection object
+                                  (the default).
+      --rs / --no-rs              Use RS (0x1E) as a prefix for individual texts
+                                  in a sequence as per http://tools.ietf.org/html
+                                  /draft-ietf-json-text-sequence-13 (default is
+                                  False).
+      --help                      Show this message and exit.
+
+The command can then be used like this:
+
+.. code-block:: console
+
+    $ cat data.geojson
+    {"type": "FeatureCollection", "features": [{"type": "Feature", "id": "1"}, {"type": "Feature", "id": "2"}]}
+
+    $ pass_features data.geojson
+    {"type": "FeatureCollection", "features": [{"type": "Feature", "id": "1"}, {"type": "Feature", "id": "2"}]}
+
+    $ cat data.geojson | pass_features
+    {"type": "FeatureCollection", "features": [{"type": "Feature", "id": "1"}, {"type": "Feature", "id": "2"}]}
+
+    $ cat data.geojson | pass_features --sequence
+    {"type": "Feature", "id": "1"}
+    {"type": "Feature", "id": "2"}
+
+    $ cat data.geojson | pass_features --sequence --rs
+    ^^{"type": "Feature", "id": "1"}
+    ^^{"type": "Feature", "id": "2"}
+
+In this example, ``^^`` represents 0x1e.
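+
+For readers of such sequences: below is a minimal sketch of consuming an
+RS-delimited GeoJSON text sequence from stdin. This helper is not part of
+cligj; it only illustrates the framing described above.
+
+.. code-block:: python
+
+    import json
+    import sys
+
+    # Each text in the sequence is prefixed with RS (0x1e), so splitting on
+    # that character yields one JSON text per element; empty pieces are
+    # skipped. Pretty-printed texts parse fine because we split on RS, not
+    # on newlines.
+    for text in sys.stdin.read().split(u'\x1e'):
+        text = text.strip()
+        if text:
+            feature = json.loads(text)
+            print(feature['type'])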
+ + diff --git a/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/RECORD b/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/RECORD new file mode 100644 index 00000000..55b2d483 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/RECORD @@ -0,0 +1,10 @@ +cligj-0.7.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cligj-0.7.2.dist-info/LICENSE,sha256=WQLXFlRN35o0hJs0XDkauvfCHKFu0icG1qwvVQNxJZQ,1469 +cligj-0.7.2.dist-info/METADATA,sha256=0M7veLSbCNJVF57jt5ah44S43avjjTu9wuDC5qr91tQ,5002 +cligj-0.7.2.dist-info/RECORD,, +cligj-0.7.2.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 +cligj-0.7.2.dist-info/top_level.txt,sha256=Hvy1tviiMzKbB1D3AYXJVeozEJV6SgCs-EHFnhGlwMA,6 +cligj/__init__.py,sha256=zvD8Kc5PcY-AopHqGIY-Iekjv53BUhRCD6FHiF0k8uM,3876 +cligj/__pycache__/__init__.cpython-312.pyc,, +cligj/__pycache__/features.cpython-312.pyc,, +cligj/features.py,sha256=FwVHj0iAdqtOOy0uCFllC6H0EVNqsf3Djj99VEBYqCU,6905 diff --git a/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/WHEEL new file mode 100644 index 00000000..385faab0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/top_level.txt new file mode 100644 index 00000000..7a4158e8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/cligj-0.7.2.dist-info/top_level.txt @@ -0,0 +1 @@ +cligj diff --git a/.venv/lib/python3.12/site-packages/cligj/__init__.py b/.venv/lib/python3.12/site-packages/cligj/__init__.py new file mode 100644 index 00000000..d7578940 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/cligj/__init__.py @@ -0,0 +1,154 @@ +"""cligj + +A package of arguments, options, and parsers for the Python GeoJSON +ecosystem. +""" + +import sys +from warnings import warn + +import click + +from .features import normalize_feature_inputs + +__version__ = "0.7.2" + +if sys.version_info < (3, 6): + warn("cligj 1.0.0 will require Python >= 3.6", FutureWarning) + + +# Multiple input files. +files_in_arg = click.argument( + 'files', + nargs=-1, + type=click.Path(resolve_path=True), + required=True, + metavar="INPUTS...") + + +# Multiple files, last of which is an output file. +files_inout_arg = click.argument( + 'files', + nargs=-1, + type=click.Path(resolve_path=True), + required=True, + metavar="INPUTS... OUTPUT") + + +# Features from files, command line args, or stdin. +# Returns the input data as an iterable of GeoJSON Feature-like +# dictionaries. +features_in_arg = click.argument( + 'features', + nargs=-1, + callback=normalize_feature_inputs, + metavar="FEATURES...") + + +# Options. +verbose_opt = click.option( + '--verbose', '-v', + count=True, + help="Increase verbosity.") + +quiet_opt = click.option( + '--quiet', '-q', + count=True, + help="Decrease verbosity.") + +# Format driver option. +format_opt = click.option( + '-f', '--format', '--driver', 'driver', + default='GTiff', + help="Output format driver") + +# JSON formatting options. 
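+# These options do not format anything themselves; commands that use them
+# typically pass the values through to json.dumps() (for example, as its
+# indent and separators arguments).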
+indent_opt = click.option( + '--indent', + type=int, + default=None, + help="Indentation level for JSON output") + +compact_opt = click.option( + '--compact/--not-compact', + default=False, + help="Use compact separators (',', ':').") + +# Coordinate precision option. +precision_opt = click.option( + '--precision', + type=int, + default=-1, + help="Decimal precision of coordinates.") + +# Geographic (default), projected, or Mercator switch. +projection_geographic_opt = click.option( + '--geographic', + 'projection', + flag_value='geographic', + default=True, + help="Output in geographic coordinates (the default).") + +projection_projected_opt = click.option( + '--projected', + 'projection', + flag_value='projected', + help="Output in dataset's own, projected coordinates.") + +projection_mercator_opt = click.option( + '--mercator', + 'projection', + flag_value='mercator', + help="Output in Web Mercator coordinates.") + +# Feature collection or feature sequence switch. +sequence_opt = click.option( + '--sequence/--no-sequence', + default=False, + help="Write a LF-delimited sequence of texts containing individual " + "objects or write a single JSON text containing a feature " + "collection object (the default).", + callback=lambda ctx, param, value: warn( + "Sequences of Features, not FeatureCollections, will be the default in version 1.0.0", + FutureWarning, + ) + or value, +) + +use_rs_opt = click.option( + '--rs/--no-rs', + 'use_rs', + default=False, + help="Use RS (0x1E) as a prefix for individual texts in a sequence " + "as per http://tools.ietf.org/html/draft-ietf-json-text-sequence-13 " + "(default is False).") + + +def geojson_type_collection_opt(default=False): + """GeoJSON FeatureCollection output mode""" + return click.option( + '--collection', + 'geojson_type', + flag_value='collection', + default=default, + help="Output as GeoJSON feature collection(s).") + + +def geojson_type_feature_opt(default=False): + """GeoJSON Feature or Feature sequence output mode""" + return click.option( + '--feature', + 'geojson_type', + flag_value='feature', + default=default, + help="Output as GeoJSON feature(s).") + + +def geojson_type_bbox_opt(default=False): + """GeoJSON bbox output mode""" + return click.option( + '--bbox', + 'geojson_type', + flag_value='bbox', + default=default, + help="Output as GeoJSON bounding box array(s).") diff --git a/.venv/lib/python3.12/site-packages/cligj/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/cligj/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..61f782bc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/cligj/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/cligj/__pycache__/features.cpython-312.pyc b/.venv/lib/python3.12/site-packages/cligj/__pycache__/features.cpython-312.pyc new file mode 100644 index 00000000..cb7d7271 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/cligj/__pycache__/features.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/cligj/features.py b/.venv/lib/python3.12/site-packages/cligj/features.py new file mode 100644 index 00000000..a7ecc064 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/cligj/features.py @@ -0,0 +1,214 @@ +"""Feature parsing and normalization""" + +from itertools import chain +import json +import re + +import click + + +def normalize_feature_inputs(ctx, param, value): + """Click callback that normalizes feature input values. 
+
+    Returns a generator over features from the input value.
+
+    Parameters
+    ----------
+    ctx: a Click context
+    param: the name of the argument or option
+    value: object
+        The value argument may be one of the following:
+
+        1. A list of paths to files containing GeoJSON feature
+           collections or feature sequences.
+        2. A list of string-encoded coordinate pairs of the form
+           "[lng, lat]", or "lng, lat", or "lng lat".
+
+        If no value is provided, features will be read from stdin.
+
+    Yields
+    ------
+    Mapping
+        A GeoJSON Feature represented by a Python mapping
+
+    """
+    for feature_like in value or ('-',):
+        try:
+            with click.open_file(feature_like, encoding="utf-8") as src:
+                for feature in iter_features(iter(src)):
+                    yield feature
+        except IOError:
+            coords = list(coords_from_query(feature_like))
+            yield {
+                'type': 'Feature',
+                'properties': {},
+                'geometry': {
+                    'type': 'Point',
+                    'coordinates': coords}}
+
+
+def iter_features(geojsonfile, func=None):
+    """Extract GeoJSON features from a text file object.
+
+    Given a file-like object containing a single GeoJSON feature
+    collection text or a sequence of GeoJSON features, iter_features()
+    iterates over lines of the file and yields GeoJSON features.
+
+    Parameters
+    ----------
+    geojsonfile: a file-like object
+        The geojsonfile implements the iterator protocol and yields
+        lines of JSON text.
+    func: function, optional
+        A function that will be applied to each extracted feature. It
+        takes a feature object and may return a replacement feature or
+        None -- in which case iter_features does not yield.
+
+    Yields
+    ------
+    Mapping
+        A GeoJSON Feature represented by a Python mapping
+
+    """
+    func = func or (lambda x: x)
+    first_line = next(geojsonfile)
+
+    # Does the geojsonfile contain RS-delimited JSON sequences?
+    if first_line.startswith(u'\x1e'):
+        text_buffer = first_line.strip(u'\x1e')
+        for line in geojsonfile:
+            if line.startswith(u'\x1e'):
+                if text_buffer:
+                    obj = json.loads(text_buffer)
+                    if 'coordinates' in obj:
+                        obj = to_feature(obj)
+                    newfeat = func(obj)
+                    if newfeat:
+                        yield newfeat
+                text_buffer = line.strip(u'\x1e')
+            else:
+                text_buffer += line
+        # complete our parsing with a for-else clause.
+        else:
+            obj = json.loads(text_buffer)
+            if 'coordinates' in obj:
+                obj = to_feature(obj)
+            newfeat = func(obj)
+            if newfeat:
+                yield newfeat
+
+    # If not, it may contain LF-delimited GeoJSON objects or a single
+    # multi-line pretty-printed GeoJSON object.
+    else:
+        # Try to parse LF-delimited sequences of features or feature
+        # collections produced by, e.g., `jq -c ...`.
+        try:
+            obj = json.loads(first_line)
+            if obj['type'] == 'Feature':
+                newfeat = func(obj)
+                if newfeat:
+                    yield newfeat
+                for line in geojsonfile:
+                    newfeat = func(json.loads(line))
+                    if newfeat:
+                        yield newfeat
+            elif obj['type'] == 'FeatureCollection':
+                for feat in obj['features']:
+                    newfeat = func(feat)
+                    if newfeat:
+                        yield newfeat
+            elif 'coordinates' in obj:
+                newfeat = func(to_feature(obj))
+                if newfeat:
+                    yield newfeat
+                for line in geojsonfile:
+                    newfeat = func(to_feature(json.loads(line)))
+                    if newfeat:
+                        yield newfeat
+
+        # Indented or pretty-printed GeoJSON features or feature
+        # collections will fail out of the try clause above since
+        # they'll have no complete JSON object on their first line.
+        # To handle these, we slurp in the entire file and parse its
+        # text.
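+        # For example, json.dumps(obj, indent=2) puts only "{" on its first
+        # line, so the json.loads(first_line) call above raises ValueError
+        # and execution lands in the except clause below.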
+ except ValueError: + text = "".join(chain([first_line], geojsonfile)) + obj = json.loads(text) + if obj['type'] == 'Feature': + newfeat = func(obj) + if newfeat: + yield newfeat + elif obj['type'] == 'FeatureCollection': + for feat in obj['features']: + newfeat = func(feat) + if newfeat: + yield newfeat + elif 'coordinates' in obj: + newfeat = func(to_feature(obj)) + if newfeat: + yield newfeat + + +def to_feature(obj): + """Converts an object to a GeoJSON Feature + + Returns feature verbatim or wraps geom in a feature with empty + properties. + + Raises + ------ + ValueError + + Returns + ------- + Mapping + A GeoJSON Feature represented by a Python mapping + + """ + if obj['type'] == 'Feature': + return obj + elif 'coordinates' in obj: + return { + 'type': 'Feature', + 'properties': {}, + 'geometry': obj} + else: + raise ValueError("Object is not a feature or geometry") + + +def iter_query(query): + """Accept a filename, stream, or string. + Returns an iterator over lines of the query.""" + try: + itr = click.open_file(query).readlines() + except IOError: + itr = [query] + return itr + + +def coords_from_query(query): + """Transform a query line into a (lng, lat) pair of coordinates.""" + try: + coords = json.loads(query) + except ValueError: + query = query.replace(',', ' ') + vals = query.split() + coords = [float(v) for v in vals] + return tuple(coords[:2]) + + +def normalize_feature_objects(feature_objs): + """Takes an iterable of GeoJSON-like Feature mappings or + an iterable of objects with a geo interface and + normalizes it to the former.""" + for obj in feature_objs: + if ( + hasattr(obj, "__geo_interface__") + and "type" in obj.__geo_interface__.keys() + and obj.__geo_interface__["type"] == "Feature" + ): + yield obj.__geo_interface__ + elif isinstance(obj, dict) and "type" in obj and obj["type"] == "Feature": + yield obj + else: + raise ValueError("Did not recognize object as GeoJSON Feature") diff --git a/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/LICENSE.txt b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/LICENSE.txt new file mode 100644 index 00000000..75091686 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/LICENSE.txt @@ -0,0 +1,28 @@ + +Copyright (c) 2007, Sean C. Gillies +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Sean C. Gillies nor the names of + its contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + diff --git a/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/METADATA b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/METADATA new file mode 100644 index 00000000..bf808d0c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/METADATA @@ -0,0 +1,1550 @@ +Metadata-Version: 2.1 +Name: fiona +Version: 1.10.1 +Summary: Fiona reads and writes spatial data files +Author: Sean Gillies +Maintainer: Fiona contributors +License: BSD 3-Clause +Project-URL: Documentation, https://fiona.readthedocs.io/ +Project-URL: Repository, https://github.com/Toblerity/Fiona +Keywords: gis,vector,feature,data +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Scientific/Engineering :: GIS +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE.txt +Requires-Dist: attrs >=19.2.0 +Requires-Dist: certifi +Requires-Dist: click ~=8.0 +Requires-Dist: click-plugins >=1.0 +Requires-Dist: cligj >=0.5 +Requires-Dist: importlib-metadata ; python_version < "3.10" +Provides-Extra: all +Requires-Dist: fiona[calc,s3,test] ; extra == 'all' +Provides-Extra: calc +Requires-Dist: pyparsing ; extra == 'calc' +Requires-Dist: shapely ; extra == 'calc' +Provides-Extra: s3 +Requires-Dist: boto3 >=1.3.1 ; extra == 's3' +Provides-Extra: test +Requires-Dist: aiohttp ; extra == 'test' +Requires-Dist: fsspec ; extra == 'test' +Requires-Dist: fiona[s3] ; extra == 'test' +Requires-Dist: pytest >=7 ; extra == 'test' +Requires-Dist: pytest-cov ; extra == 'test' +Requires-Dist: pytz ; extra == 'test' + +===== +Fiona +===== + +.. image:: https://github.com/Toblerity/Fiona/actions/workflows/tests.yml/badge.svg + :target: https://github.com/Toblerity/Fiona/actions/workflows/tests.yml +.. image:: https://github.com/Toblerity/Fiona/actions/workflows/test_gdal_latest.yml/badge.svg + :target: https://github.com/Toblerity/Fiona/actions/workflows/test_gdal_latest.yml +.. image:: https://img.shields.io/pypi/v/fiona + :target: https://pypi.org/project/fiona/ +.. image:: https://api.securityscorecards.dev/projects/github.com/Toblerity/Fiona/badge + :target: https://securityscorecards.dev/viewer/?uri=github.com/Toblerity/Fiona + +Fiona streams simple feature data to and from GIS formats like GeoPackage and +Shapefile. + +Fiona can read and write real-world data using multi-layered GIS formats, +zipped and in-memory virtual file systems, from files on your hard drive or in +cloud storage. This project includes Python modules and a command line +interface (CLI). 
+
+Fiona depends on `GDAL `__ but is different from GDAL's own
+`bindings `__. Fiona is designed to
+be highly productive and to make it easy to write code which is easy to read.
+
+Installation
+============
+
+Fiona has several `extension modules
+`__ which link against
+libgdal. This complicates installation. Binary distributions (wheels)
+containing libgdal and its own dependencies are available from the Python
+Package Index and can be installed using pip.
+
+.. code-block:: console
+
+    pip install fiona
+
+These wheels are mainly intended to make installation easy for simple
+applications, not so much for production. They are not tested for compatibility
+with all other binary wheels, conda packages, or QGIS, and omit many of GDAL's
+optional format drivers. If you need, for example, GML support you will need to
+build and install Fiona from a source distribution. It is possible to install
+Fiona from source using pip (version >= 22.3) and the `--no-binary` option. A
+specific GDAL installation can be selected by setting the GDAL_CONFIG
+environment variable.
+
+.. code-block:: console
+
+    pip install -U pip
+    pip install --no-binary fiona fiona
+
+Many users find Anaconda and conda-forge a good way to install Fiona and get
+access to more optional format drivers (like GML).
+
+Fiona 1.10 requires Python 3.8 or higher and GDAL 3.4 or higher.
+
+Python Usage
+============
+
+Features are read from and written to file-like ``Collection`` objects returned
+from the ``fiona.open()`` function. Features are data classes modeled on the
+GeoJSON format. They don't have any spatial methods of their own, so if you
+want to transform them you will need Shapely or something like it. Here is an
+example of using Fiona to read some features from one data file, change their
+geometry attributes using Shapely, and write them to a new data file.
+
+.. code-block:: python
+
+    import fiona
+    from fiona import Feature, Geometry
+    from shapely.geometry import mapping, shape
+
+    # Open a file for reading. We'll call this the source.
+    with fiona.open(
+        "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip"
+    ) as src:
+
+        # The file we'll write to must be initialized with a coordinate
+        # system, a format driver name, and a record schema. We can get
+        # initial values from the open source's profile property and then
+        # modify them as we need.
+        profile = src.profile
+        profile["schema"]["geometry"] = "Point"
+        profile["driver"] = "GPKG"
+
+        # Open an output file, using the same format driver and coordinate
+        # reference system as the source. The profile mapping fills in the
+        # keyword parameters of fiona.open.
+        with fiona.open("centroids.gpkg", "w", **profile) as dst:
+
+            # Process only the feature records intersecting a box.
+            for feat in src.filter(bbox=(-107.0, 37.0, -105.0, 39.0)):
+
+                # Get the feature's centroid.
+                centroid_shp = shape(feat.geometry).centroid
+                new_geom = Geometry.from_dict(centroid_shp)
+
+                # Write the feature out.
+                dst.write(
+                    Feature(geometry=new_geom, properties=feat.properties)
+                )
+
+    # The destination's contents are flushed to disk and the file is
+    # closed when its with block ends. This effectively
+    # executes ``dst.flush(); dst.close()``.
+
+CLI Usage
+=========
+
+Fiona's command line interface, named "fio", is documented at `docs/cli.rst
+`__. The CLI has a
+number of different commands. Its ``fio cat`` command streams GeoJSON features
+from any dataset.
+
+.. code-block:: console
+
+    $ fio cat --compact tests/data/coutwildrnp.shp | jq -c '.'
+    {"geometry":{"coordinates":[[[-111.73527526855469,41.995094299316406],...]]}}
+    ...
+
+Documentation
+=============
+
+For more details about this project, please see:
+
+* Fiona `home page `__
+* `Docs and manual `__
+* `Examples `__
+* Main `user discussion group `__
+* `Developers discussion group `__
+
+Changes
+=======
+
+All issue numbers are relative to https://github.com/Toblerity/Fiona/issues.
+
+1.10.1 (2024-09-16)
+-------------------
+
+Bug fixes:
+
+- Logging in the CRS class no longer tries to print representations of objects
+  that may be NULL when searching for authority matches (#1445).
+
+1.10.0 (2024-09-03)
+-------------------
+
+The package version, credits, and citation file have been updated. There have
+been no other changes since 1.10.0rc1. Fiona is the work of 73 contributors,
+including 25 new contributors since 1.9.0.
+
+1.10.0rc1 (2024-08-21)
+----------------------
+
+This is the first release candidate for 1.10.0.
+
+Changes:
+
+- Mutable item access to Feature, Geometry, and Properties instances has been
+  restored (reported in #1430). This usage should be avoided as instances of
+  these classes will be immutable in a future version.
+- The setup.cfg duplicates project configuration in pyproject.toml and has been
+  removed.
+
+1.10b3 (2024-07-29)
+-------------------
+
+Bug fixes:
+
+- The sketchy, semi-private Python opener interfaces of version 1.10b2 have
+  been replaced by ABCs that are exported from fiona.abc (#1415).
+- The truncate VSI plugin callback has been implemented (#1413).
+
+1.10b2 (2024-07-10)
+-------------------
+
+Bug fixes:
+
+- The Pyopener registry and VSI plugin have been rewritten to avoid filename
+  conflicts and to be compatible with multithreading. Now, a new plugin handler
+  is registered for each instance of using an opener (#1408). Before GDAL 3.9.0,
+  plugin handlers cannot be removed, and so it may be observed that the size of
+  the Pyopener registry grows during the execution of a program.
+- A CSLConstList ctypedef has been added and is used where appropriate (#1404).
+- Fiona model objects have an informative, printable representation again
+  (#1380).
+
+Packaging:
+
+- PyPI wheels include GDAL 3.9.1 and curl 8.8.0.
+
+1.10b1 (2024-04-16)
+-------------------
+
+Bug fixes:
+
+- Fiona can again set fields with values that are instances of classes derived
+  from date, time, and datetime (#1377). This was broken by changes in 1.10a2.
+
+1.10a2 (2024-04-05)
+-------------------
+
+Deprecations:
+
+- The FIELD_TYPES, FIELD_TYPES_MAP, and FIELD_TYPES_MAP_REV attributes of
+  fiona.schema are no longer used by the project and will be removed in version
+  2.0 (#1366).
+- The Python style of rio-filter expressions introduced in version 1.0 is
+  deprecated. Only the parenthesized list type of expression will be supported
+  by version 2.0.
+
+New features:
+
+- All supported Fiona field types are now represented by classes in
+  fiona.schema. These classes are mapped in FIELD_TYPES_MAP2 and
+  FIELD_TYPES_MAP2_REV to OGR field type and field subtype pairs (#1366).
+- The filter, map, and reduce CLI commands from the public domain version 1.1.0
+  of fio-planet have been incorporated into Fiona's core set of commands
+  (#1362). These commands are only available if pyparsing and shapely (each of
+  these are declared in the "calc" set of extra requirements) are installed.
+
+Bug fixes:
+
+- Fiona's python opener VSI plugin prefix has been changed to "vsifiopener" to
+  not conflict with Rasterio (#1368).
+- Add a 16-bit integer type "int16" based on OGR's OFSTInt16 integer sub-type
+  (#1358).
+- Allow a GeoJSON collection's layer name to be set on opening in write mode
+  (#1352).
+- The legacy crs.py module which was shadowed by the new crs.pyx module has
+  been deleted (#1344).
+- Python 3.8 has been added back to the list of supported versions and
+  a dependency on Numpy added in 1.10a1 has been removed.
+- An implementation of the VSI flush callback has been added to _vsiopener.pyx.
+- Openers are now registered only by urlpath. The mode is no longer considered
+  as OGR drivers may use a mix of modes when creating a new dataset.
+
+Other changes:
+
+- Feature builder and field getter/setter instances are reused when reading and
+  writing features (#1366).
+
+1.10a1 (2024-03-01)
+-------------------
+
+Python version:
+
+Fiona 1.10 will require Python version 3.9 or higher.
+
+Deprecations:
+
+The fiona.path module will be removed in version 2.0 and a deprecation warning
+is issued when the module is imported (#1334). Additionally, members of that
+module are no longer exported from the top level module.
+
+New features:
+
+Python openers can now support discovery of auxiliary "sidecar" files like
+.aux.xml, .msk, and .tfw files for GeoTIFFs (#1331). Additionally, filesystem
+objects, such as those from fsspec, can be used as openers. This will become
+the recommended usage, supplanting the use of single file openers.
+
+Bug fixes:
+
+- Use of pkg_resources in test_rio_info.py has been eliminated.
+- gzip, tar, and zip archive URIs containing drive letters were not always
+  parsed properly on Windows, but are now (#1334).
+
+1.9.6 (2024-03-07)
+------------------
+
+- Ensure that geometry types in a schema are translated to a linear type, as
+  geometry instances are (#1313).
+- Fix broken stable API documentation on Read The Docs (#).
+- Remove install requirement of setuptools, a regression introduced in 1.9.5.
+
+1.9.5 (2023-10-11)
+------------------
+
+Bug fixes:
+
+- Expand keys in schema mismatch exception, resolving #1278.
+- Preserve the null properties and geometry of a Feature when serializing
+  (#1276).
+
+Packaging:
+
+- The distribution name is now officially "fiona", not "Fiona". The import
+  name remains "fiona".
+- Builds now require Cython >= 3.0.2 (#1276).
+- PyPI wheels include GDAL 3.6.4, PROJ 9.0.1, and GEOS 3.11.2.
+- PyPI wheels include curl 8.4.0, addressing CVE-2023-38545 and CVE-2023-38546.
+- PyPI wheels are now available for Python 3.12.
+
+1.9.4.post1 (2023-05-23)
+------------------------
+
+Extraneous files were unintentionally packaged in the 1.9.4 wheels. This post1
+release excludes them so that wheel contents are as in version 1.9.3.
+
+1.9.4 (2023-05-16)
+------------------
+
+- The performance of Feature.from_dict() has been improved (#1267).
+- Several sources of meaningless log messages from fiona._geometry about NULL
+  geometries are avoided (#1264).
+- The Parquet driver has been added to the list of supported drivers and will
+  be available if your system's GDAL library links libarrow. Note that fiona
+  wheels on PyPI do not include libarrow as it is rather large.
+- Ensure that fiona._vendor modules are found and included.
+- Bytes type feature properties are now hex encoded when serializing to GeoJSON
+  (#1263).
+- Docstrings for listdir and listlayers have been clarified and harmonized.
+- Nose-style test cases have been converted to unittest.TestCase (#1256).
+- The munch package used by fio-filter and fio-calc is now vendored and patched + to remove usage of the deprecated pkg_resources module (#1255). + +1.9.3 (2023-04-10) +------------------ + +- Rasterio CRS objects are compatible with the Collection constructor and are + now accepted (#1248). +- Enable append mode for fio-load (#1237). +- Reading a GeoJSON with an empty array property can result in a segmentation + fault since version 1.9.0. This has been fixed (#1228). + +1.9.2 (2023-03-20) +------------------ + +- Get command entry points using importlib.metadata (#1220). +- Instead of warning, transform_geom() raises an exception when some points + can't be reprojected unless the caller opts in to partial reprojection. This + restores the behavior of version 1.8.22. +- Add support for open options to all CLI commands that call fiona.open + (#1215). +- Fix a memory leak that can occur when iterating over a dataset using strides + (#1205). +- ZipMemoryFile now supports zipped GDB data (#1203). + +1.9.1 (2023-02-09) +------------------ + +- Log a warning message when identically named fields are encountered (#1201). +- Avoid dependence on listdir order in tests (#1193). +- Prevent empty geometries arrays from appearing in __geo_interface__ (#1197). +- setuptools added to pyproject.toml. Its pkg_resources module is used by the + CLI (#1191). + +1.9.0 (2023-01-30) +------------------ + +- CITATION.txt has been replaced by a new CITATION.cff file and the credits + have been updated. +- In setup.py the distutils (deprecated) logger is no longer used. + +1.9b2 (2023-01-22) +------------------ + +- Add Feature.__geo_interface__ property (#1181). +- Invalid creation options are filtered and ignored (#1180). +- The readme doc has been shortened and freshened up, with a modern example for + version 1.9.0 (#1174). +- The Geometry class now provides and looks for __geo_interface__ (#1174). +- The top level fiona module now exports Feature, Geometry, and Properties + (#1174). +- Functions that take Feature or Geometry objects will continue to take dicts + or objects that provide __geo_interface__ (#1177). This reverses the + deprecation introduced in 1.9a2. +- Python ignores SIGPIPE by default. By never catching BrokenPipeError via + `except Exception` when, for example, piping the output of rio-shapes to + the Unix head program, we avoid getting an unhandled BrokenPipeError message + when the interpreter shuts down (#2689). + +1.9b1 (2022-12-13) +------------------ + +New features: + +* Add listdir and listlayers method to io.MemoryFile (resolving #754). +* Add support for TIN and triangle geometries (#1163). +* Add an allow_unsupported_drivers option to fiona.open() (#1126). +* Added support for the OGR StringList field type (#1141). + +Changes and bug fixes: + +* Missing and unused imports have been added or removed. +* Make sure that errors aren't lost when a collection can't be saved properly + (#1169). +* Ensure that ZipMemoryFile have the proper GDAL name after creation so that we + can use listdir() (#1092). +* The fiona._loading module, which supports DLL loading on Windows, + has been moved into __init__.py and is no longer used anywhere else (#1168). +* Move project metadata to pyproject.toml (#1165). +* Update drvsupport.py to reflect new format capabilities in GDAL 3.6.0 + (#1122). +* Remove debug logging from env and _env modules. 
+ +1.9a3 (2022-10-17) +------------------ + +Packaging: + +* Builds now require Cython >= 0.29.29 because of +* https://github.com/cython/cython/issues/4609 (see #1143). +* PyPI wheels now include GDAL 3.5.2, PROJ 9.0.1, and GEOS 3.11.0. +* PyPI wheels are now available for Python 3.11. + +1.9a2 (2022-06-10) +------------------ + +Deprecations: + +- Fiona's API methods will accept feature and geometry dicts in 1.9.0, but this + usage is deprecated. Instances of Feature and Geometry will be required in + 2.0. +- The precision keyword argument of fiona.transform.transform_geom is + deprecated and will be removed in version 2.0. +- Deprecated usage has been eliminated in the project. Fiona's tests pass when + run with a -Werror::DeprecationWarning filter. + +Changes: + +- Fiona's FionaDeprecationWarning now sub-classes DeprecationWarning. +- Some test modules have been re-formatted using black. + +New features: + +- Fiona Collections now carry a context exit stack into which we can push fiona + Envs and MemoryFiles (#1059). +- Fiona has a new CRS class, like rasterio's, which is compatible with the CRS + dicts of previous versions (#714). + +1.9a1 (2022-05-19) +------------------ + +Deprecations: + +- The fiona.drivers() function has been deprecated and will be removed in + version 2.0. It should be replaced by fiona.Env(). +- The new fiona.meta module will be renamed to fiona.drivers in version 2.0. + +Packaging: + +- Source distributions contain no C source files and require Cython to create + them from .pyx files (#1096). + +Changes: + +- Shims for various versions of GDAL have been removed and are replaced by + Cython compilation conditions (#1093). +- Use of CURL_CA_BUNDLE environment variable is replaced by a more specific + GDAL/PROJ_CURL_CA_BUNDLE (#1095). +- Fiona's feature accessors now return instances of fiona.model.Feature instead + of Python dicts (#787). The Feature class is compatible with code that + expects GeoJSON-like dicts but also provides id, geometry, and properties + attributes. The last two of these are instances of fiona.model.Geometry and + fiona.model.Properties. +- GDAL 3.1.0 is the minimum GDAL version. +- Drop Python 2, and establish Python 3.7 as the minimum version (#1079). +- Remove six and reduce footprint of fiona.compat (#985). + +New features: + +- The appropriate format driver can be detected from filename in write mode (#948). +- Driver metadata including dataset open and dataset and layer creations + options are now exposed through methods of the fiona.meta module (#950). +- CRS WKT format support (#979). +- Add 'where' SQL clause to set attribute filter (#961, #1097). + +Bug fixes: + +- Env and Session classes have been updated for parity with rasterio and to + resolve a credential refresh bug (#1055). + +1.8.22 (2022-10-14) +------------------- + +Builds now require Cython >= 0.29.29 because of +https://github.com/cython/cython/issues/4609 (#1143). + +1.8.21 (2022-02-07) +------------------- + +Changes: + +- Driver mode support tests have been made more general and less susceptible to + driver quirks involving feature fields and coordinate values (#1060). +- OSError is raised on attempts to open a dataset in a Python file object in + "a" mode (see #1027). +- Upgrade attrs, cython, etc to open up Python 3.10 support (#1049). + +Bug fixes: + +- Allow FieldSkipLogFilter to handle exception messages as well as strings + (reported in #1035). +- Clean up VSI files left by MemoryFileBase, resolving #1041. 
+- Hard-coded "utf-8" collection encoding added in #423 has been removed
+  (#1057).
+
+1.8.20 (2021-05-31)
+-------------------
+
+Packaging:
+
+- Wheels include GDAL 3.3.0 and GEOS 3.9.1.
+
+Bug fixes:
+
+- Allow use with click 8 and higher (#1015).
+
+1.8.19 (2021-04-07)
+-------------------
+
+Packaging:
+
+- Wheels include GDAL 3.2.1 and PROJ 7.2.1.
+
+Bug fixes:
+
+- In fiona/env.py the GDAL data path is now configured using set_gdal_config
+  instead of by setting the GDAL_DATA environment variable (#1007).
+- Spurious iterator reset warnings have been eliminated (#987).
+
+1.8.18 (2020-11-17)
+-------------------
+
+- The precision option of transform has been fixed for the case of
+  GeometryCollections (#971, #972).
+- Added missing --co (creation) option to fio-load (#390).
+- If the certifi package can be imported, its certificate store location will
+  be passed to GDAL during import of fiona._env unless CURL_CA_BUNDLE is
+  already set.
+- Warn when feature fields named "" are found (#955).
+
+1.8.17 (2020-09-09)
+-------------------
+
+- To fix issue #952 the fio-cat command no longer cuts feature geometries at
+  the anti-meridian by default. A --cut-at-antimeridian option has been added
+  to allow cutting of geometries in a geographic destination coordinate
+  reference system.
+
+1.8.16 (2020-09-04)
+-------------------
+
+- More OGR errors and warnings arising in calls to GDAL C API functions are
+  surfaced (#946).
+- A circular import introduced in some cases in 1.8.15 has been fixed (#945).
+
+1.8.15 (2020-09-03)
+-------------------
+
+- Change shim functions to not return tuples (#942) as a solution for the
+  packaging problem reported in #941.
+- Raise a Python exception when VSIFOpenL fails (#937).
+
+1.8.14 (2020-08-31)
+-------------------
+
+- When creating a new Collection in a MemoryFile with a default (random) name
+  Fiona will attempt to use a format driver-supported file extension (#934).
+  When initializing a MemoryFile with bytes of data formatted for a vector
+  driver that requires a certain file name or extension, the user should
+  continue to pass an appropriate filename and/or extension.
+- Read support for FlatGeobuf has been enabled in the drvsupport module.
+- The MemoryFile implementation has been improved so that it can support multi-part
+  S3 downloads (#906). This is largely a port of code from rasterio.
+- Axis ordering for results of fiona.transform was wrong when CRS were passed
+  in the "EPSG:dddd" form (#919). This has been fixed by #926.
+- Allow implicit access to the only dataset in a ZipMemoryFile. The path
+  argument of ZipMemoryFile.open() is now optional (#928).
+- Improve support for datetime types: support milliseconds (#744), timezones (#914)
+  and improve warnings if type is not supported by driver (#572).
+- Fix "Failed to commit transaction" TransactionError for FileGDB driver.
+- Load GDAL DLL dependencies on Python 3.8+ / Windows with add_dll_directory() (#851).
+- Do not require optional properties (#848).
+- Ensure that slice does not overflow available data (#884).
+- Resolve issue when "ERROR 4: Unable to open EPSG support file gcs.csv." is raised on
+  importing fiona (#897).
+- Resolve issue resulting in possible mixed-up field names (affecting only DXF, GPX,
+  GPSTrackMaker and DGN driver) (#916).
+- Ensure crs_wkt is passed when writing to MemoryFile (#907).
+
+
+1.8.13.post1 (2020-02-21)
+-------------------------
+
+- This release is being made to improve binary wheel compatibility with shapely
+  1.7.0.
+
+1.8.13 (2019-12-05)
+-------------------
+
+- The Python version specs for argparse and ordereddict in 1.8.12 were wrong
+  and have been corrected (#843).
+
+1.8.12 (2019-12-04)
+-------------------
+
+- Specify Python versions for argparse, enum34, and ordereddict requirements
+  (#842).
+
+1.8.11 (2019-11-07)
+-------------------
+
+- Fix an access violation on Windows (#826).
+
+1.8.10 (2019-11-07)
+-------------------
+
+Deprecations:
+
+- Use of the vfs keyword argument with open or listlayers was previously noted
+  as deprecated, but now triggers a deprecation warning.
+
+Bug fixes:
+
+- fiona.open() can now create new datasets using CRS URNs (#823).
+- listlayers() now accepts file and Path objects, like open() (#825).
+- Use new set_proj_search_path() function to set the PROJ data search path. For
+  GDAL versions before 3.0 this sets the PROJ_LIB environment variable. For
+  GDAL version 3.0 this calls OSRSetPROJSearchPaths(), which overrides
+  PROJ_LIB.
+- Remove old and unused _drivers extension module.
+- Check for header.dxf file instead of pcs.csv when looking for installed GDAL
+  data. The latter is gone with GDAL 3.0 but the former remains (#818).
+
+1.8.9.post2 (2019-10-22)
+------------------------
+
+- The 1.8.9.post1 release introduced a bug affecting builds of the package from
+  a source distribution using GDAL 2.x. This bug has been fixed in commit
+  960568d.
+
+1.8.9.post1 (2019-10-22)
+------------------------
+
+- A change has been made to the package setup script so that the shim module
+  for GDAL 3 is used when building the package from a source distribution.
+  There are no other changes to the package.
+
+1.8.9 (2019-10-21)
+------------------
+
+- A shim module and support for GDAL 3.0 has been added. The package can now be
+  built and used with GDAL 3.0 and PROJ 6.1 or 6.2. Note that the 1.8.9 wheels
+  we will upload to PyPI will contain GDAL 2.4.2 and PROJ 4.9.3 as in the 1.8.8
+  wheels.
+
+1.8.8 (2019-09-25)
+------------------
+
+- The schema of geopackage files with a geometry type code of 3000 could not be
+  reported using Fiona 1.8.7. This bug is fixed.
+
+1.8.7 (2019-09-24)
+------------------
+
+Bug fixes:
+
+- The regression in handling of polygons with M values noted under version
+  1.8.5 below was in fact not fixed then (see new report #789), but is fixed in
+  version 1.8.7.
+- Windows filenames containing "!" are now parsed correctly, fixing issue #742.
+
+Upcoming changes:
+
+- In version 1.9.0, the objects yielded when a Collection is iterated will be
+  mutable mappings but will no longer be instances of Python's dict. Version
+  1.9 is intended to be backwards compatible with 1.8 except where user code
+  tests `isinstance(feature, dict)`. In version 2.0 the new Feature, Geometry,
+  and Properties classes will become immutable mappings. See
+  https://github.com/Toblerity/fiona-rfc/blob/main/rfc/0001-fiona-2-0-changes.md
+  for more discussion of the upcoming changes for version 2.0.
+
+1.8.6 (2019-03-18)
+------------------
+
+- The advertisement for JSON driver enablement in 1.8.5 was false (#176), but
+  in this release these drivers are ready for use.
+
+1.8.5 (2019-03-15)
+------------------
+
+- GDAL seems to work best if GDAL_DATA is set as early as possible. Ideally it
+  is set when building the library or in the environment before importing
+  Fiona, but for wheels we patch GDAL_DATA into os.environ when fiona.env
+  is imported. This resolves #731.
+- A combination of bugs which allowed .cpg files to be overlooked has been
+  fixed (#726).
+- On entering a collection context (Collection.__enter__) a new anonymous GDAL
+  environment is created if needed and entered. This makes `with
+  fiona.open(...) as collection:` roughly equivalent to `with fiona.open(...)
+  as collection, Env():`. This helps prevent bugs when Collections are created
+  and then used later or in different scopes. (A sketch of this equivalence
+  follows the 1.8.0 notes below.)
+- Missing GDAL support for TopoJSON, GeoJSONSeq, and ESRIJSON has been enabled
+  (#721).
+- A regression in handling of polygons with M values (#724) has been fixed.
+- Per-feature debug logging calls in OGRFeatureBuilder methods have been
+  eliminated to improve feature writing performance (#718).
+- Native support for datasets in Google Cloud Storage identified by "gs"
+  resource names has been added (#709).
+- Support has been added for triangle, polyhedral surface, and TIN geometry
+  types (#679).
+- Notes about using the MemoryFile and ZipMemoryFile classes have been added to
+  the manual (#674).
+
+1.8.4 (2018-12-10)
+------------------
+
+- 3D geometries can now be transformed with a specified precision (#523).
+- A bug producing a spurious DriverSupportError for Shapefiles with a "time"
+  field (#692) has been fixed.
+- Patching of the GDAL_DATA environment variable was accidentally left in place
+  in 1.8.3 and now has been removed.
+
+1.8.3 (2018-11-30)
+------------------
+
+- The RASTERIO_ENV config environment marker this project picked up from
+  Rasterio has been renamed to FIONA_ENV (#665).
+- Options --gdal-data and --proj-data have been added to the fio-env command so
+  that users of Fiona wheels can get paths to set GDAL_DATA and PROJ_LIB
+  environment variables.
+- The unsuccessful attempt to make GDAL and PROJ support file discovery and
+  configuration automatic within a collection's crs and crs_wkt properties has
+  been reverted. Users must execute such code inside a `with Env()` block or
+  set the GDAL_DATA and PROJ_LIB environment variables needed by GDAL.
+
+1.8.2 (2018-11-19)
+------------------
+
+Bug fixes:
+
+- Raise FionaValueError when an iterator's __next__ is called and the session
+  is found to be missing or inactive instead of passing a null pointer to
+  OGR_L_GetNextFeature (#687).
+
+1.8.1 (2018-11-15)
+------------------
+
+Bug fixes:
+
+- Add checks around OSRGetAuthorityName and OSRGetAuthorityCode calls that will
+  log problems with looking up these items.
+- Opened data sources are now released before we raise exceptions in
+  WritingSession.start (#676). This fixes an issue with locked files on
+  Windows.
+- We now ensure that an Env instance exists when getting the crs or crs_wkt
+  properties of a Collection (#673, #690). Otherwise, required GDAL and PROJ
+  data files included in Fiona wheels cannot be found.
+- GDAL and PROJ data search has been refactored to improve testability (#678).
+- In the project's Cython code, void* pointers have been replaced with proper
+  GDAL types (#672).
+- Pervasive warning level log messages about ENCODING creation options (#668)
+  have been eliminated.
+
+1.8.0 (2018-10-31)
+------------------
+
+This is the final 1.8.0 release. Thanks, everyone!
+
+Bug fixes:
+
+- We cpdef Session.stop so that it has a C version that can be called safely
+  from __dealloc__, fixing a PyPy issue (#659, #553).
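+
+The following is a minimal sketch of the environment equivalence noted under
+1.8.5 above. ``example.shp`` is a hypothetical local dataset, and the
+``CPL_DEBUG`` option is shown only for illustration::
+
+    import fiona
+    from fiona import Env
+
+    # Explicit form: GDAL configuration is scoped to the Env block.
+    with Env(CPL_DEBUG=True):
+        with fiona.open("example.shp") as colxn:
+            print(len(colxn))
+
+    # Since 1.8.5, entering the collection context creates an anonymous
+    # Env if none is active, so this is roughly equivalent:
+    with fiona.open("example.shp") as colxn:
+        print(len(colxn))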
+
+1.8rc1 (2018-10-26)
+-------------------
+
+There are no changes in 1.8rc1 other than more test standardization and the
+introduction of a temporary test_collection_legacy.py module to support the
+build of fully tested Python 2.7 macosx wheels on Travis-CI.
+
+1.8b2 (2018-10-23)
+------------------
+
+Bug fixes:
+
+- The ensure_env_with_credentials decorator will no longer clobber credentials
+  of the outer environment. This fixes a bug reported to the Rasterio project
+  and which also existed in Fiona.
+- An unused import of the packaging module and the dependency have been
+  removed (#653).
+- The Env class logged to the 'rasterio' hierarchy instead of 'fiona'. This
+  mistake has been corrected (#646).
+- The Mapping abstract base class is imported from collections.abc when
+  possible (#647).
+
+Refactoring:
+
+- Standardization of the tests on pytest functions and fixtures continues and
+  is nearing completion (#648, #649, #650, #651, #652).
+
+1.8b1 (2018-10-15)
+------------------
+
+Deprecations:
+
+- Collection slicing has been deprecated and will be prohibited in a future
+  version.
+
+Bug fixes:
+
+- Rasterio CRS objects passed to transform module methods will be converted
+  to dicts as needed (#590).
+- Implicitly convert curve geometries to their linear approximations rather
+  than failing (#617).
+- Migrated unittest test cases in test_collection.py and test_layer.py to the
+  use of the standard data_dir and path_coutwildrnp_shp fixtures (#616).
+- Root logger configuration has been removed from all test scripts (#615).
+- An AWS session is created for the CLI context Env only if explicitly
+  requested, matching the behavior of Rasterio's CLI (#635).
+- Dependency on attrs is made explicit.
+- Other dependencies are pinned to known good versions in requirements files.
+- Unused arguments have been removed from the Env constructor (#637).
+
+Refactoring:
+
+- A with_context_env decorator has been added and used to set up the GDAL
+  environment for CLI commands. The command functions themselves are now
+  simplified.
+
+1.8a3 (2018-10-01)
+------------------
+
+Deprecations:
+
+- The ``fiona.drivers()`` context manager is officially deprecated. All
+  users should switch to ``fiona.Env()``, which registers format drivers and
+  manages GDAL configuration in a reversible manner.
+
+Bug fixes:
+
+- The Collection class now filters log messages about skipped fields to
+  a maximum of one warning message per field (#627).
+- The boto3 module is only imported when needed (#507, #629).
+- Compatibility with Click 7.0 is achieved (#633).
+- Use of %r instead of %s in a debug() call prevents UnicodeDecodeErrors
+  (#620).
+
+1.8a2 (2018-07-24)
+------------------
+
+New features:
+
+- 64-bit integers are now the default for int type fields (#562, #564).
+- 'http', 's3', 'zip+http', and 'zip+s3' URI schemes for datasets are now
+  supported (#425, #426).
+- We've added a ``MemoryFile`` class which supports formatted in-memory
+  feature collections (#501). A sketch of its use follows this list.
+- Added support for GDAL 2.x boolean field sub-type (#531).
+- A new ``fio rm`` command makes it possible to cleanly remove multi-file
+  datasets (#538).
+- The geometry type in a feature collection is more flexible. We can now
+  specify not only a single geometry type, but a sequence of permissible types,
+  or "Any" to permit any geometry type (#539).
+- Support for GDAL 2.2+ null fields has been added (#554).
+- The new ``gdal_open_vector()`` function of our internal API provides much
+  improved error handling (#557).
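+
+A minimal sketch of reading features through the ``MemoryFile`` class noted
+in the list above. The GeoJSON payload is a made-up example::
+
+    import fiona
+    from fiona.io import MemoryFile
+
+    data = (
+        b'{"type": "FeatureCollection", "features": [{"type": "Feature",'
+        b' "properties": {"name": "spot"}, "geometry":'
+        b' {"type": "Point", "coordinates": [0.0, 0.0]}}]}'
+    )
+
+    # MemoryFile wraps the bytes; open() yields an ordinary Collection.
+    with MemoryFile(data) as memfile:
+        with memfile.open() as colxn:
+            for feat in colxn:
+                print(feat["properties"]["name"])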
+
+Bug fixes:
+
+- The bug involving OrderedDict import on Python 2.7 has been fixed (#533).
+- An ``AttributeError`` raised when the ``--bbox`` option of fio-cat is used
+  with more than one input file has been fixed (#543, #544).
+- Obsolete and derelict fiona.tool module has been removed.
+- Revert the change in 0a2bc7c that discards Z in geometry types when a
+  collection's schema is reported (#541).
+- Require six version 1.7 or higher (#550).
+- A regression related to "zip+s3" URIs has been fixed.
+- Debian's GDAL data locations are now searched by default (#583).
+
+1.8a1 (2017-11-06)
+------------------
+
+New features:
+
+- Each call of ``writerecords()`` involves one or more transactions of up to
+  20,000 features each. This improves performance when writing GeoPackage files
+  as the previous transaction size was only 200 features (#476, #491).
+
+Packaging:
+
+- Fiona's Cython source files have been refactored so that there are no longer
+  separate extension modules for GDAL 1.x and GDAL 2.x. Instead there is a base
+  extension module based on GDAL 2.x and shim modules for installations that
+  use GDAL 1.x.
+
+1.7.11.post1 (2018-01-08)
+-------------------------
+
+- This post-release adds missing expat (and thereby GPX format) support to
+  the included GDAL library (still version 2.2.2).
+
+1.7.11 (2017-12-14)
+-------------------
+
+- The ``encoding`` keyword argument for ``fiona.open()``, which is intended
+  to allow a caller to override a data source's own and possibly erroneous
+  encoding, has not been working (#510, #512). The problem is that we weren't
+  always setting GDAL open or config options before opening the data sources.
+  This bug is resolved by a number of commits in the maint-1.7 branch and
+  the fix is demonstrated in tests/test_encoding.py.
+- An ``--encoding`` option has been added to fio-load to enable creation of
+  encoded shapefiles with an accompanying .cpg file (#499, #517).
+
+1.7.10.post1 (2017-10-30)
+-------------------------
+
+- A post-release has been made to fix a problem with macosx wheels uploaded
+  to PyPI.
+
+1.7.10 (2017-10-26)
+-------------------
+
+Bug fixes:
+
+- An extraneous printed line from the ``fio cat --layers`` validator has been
+  removed (#478).
+
+Packaging:
+
+- Official OS X and Manylinux1 wheels (on PyPI) for this release will be
+  compatible with Shapely 1.6.2 and Rasterio 1.0a10 wheels.
+
+1.7.9.post1 (2017-08-21)
+------------------------
+
+This release introduces no changes in the Fiona package. It upgrades GDAL
+from 2.2.0 to 2.2.1 in wheels that we publish to the Python Package Index.
+
+1.7.9 (2017-08-17)
+------------------
+
+Bug fixes:
+
+- Acquire the GIL for GDAL error callback functions to prevent crashes when
+  GDAL errors occur when the GIL has been released by user code.
+- Sync and flush layers when closing even when the number of features is not
+  precisely known (#467).
+
+1.7.8 (2017-06-20)
+------------------
+
+Bug fixes:
+
+- Provide all arguments needed by CPLError based exceptions (#456).
+
+1.7.7 (2017-06-05)
+------------------
+
+Bug fixes:
+
+- Switch logger `warn()` (deprecated) calls to `warning()`.
+- Replace all relative imports and cimports in Cython modules with absolute
+  imports (#450).
+- Avoid setting `PROJ_LIB` to a non-existent directory (#439).
+
+1.7.6 (2017-04-26)
+------------------
+
+Bug fixes:
+
+- Fall back to `share/proj` for PROJ_LIB (#440).
+- Replace every call to `OSRDestroySpatialReference()` with `OSRRelease()`,
+  fixing the GPKG driver crasher reported in #441 (#443).
+- Add a `DriverIOError` derived from `IOError` to use for driver-specific
+  errors such as the GeoJSON driver's refusal to overwrite existing files.
+  Also we now ensure that when this error is raised by `fiona.open()` any
+  created read or write session is deleted; this eliminates spurious
+  exceptions on teardown of broken `Collection` objects (#437, #444).
+
+1.7.5 (2017-03-20)
+------------------
+
+Bug fixes:
+
+- Opening a data file in read (the default) mode with `fiona.open()` using
+  the `driver` or `drivers` keyword arguments (to specify certain format
+  drivers) would sometimes cause a crash on Windows due to improperly
+  terminated lists of strings (#428). The fix: Fiona's buggy `string_list()`
+  has been replaced by GDAL's `CSLAddString()`.
+
+1.7.4 (2017-02-20)
+------------------
+
+Bug fixes:
+
+- OGR's EsriJSON detection fails when certain keys aren't found in the first
+  6000 bytes of data passed to `BytesCollection` (#422). A .json file extension
+  is now explicitly given to the in-memory file behind `BytesCollection` when
+  the `driver='GeoJSON'` keyword argument is given (#423).
+
+1.7.3 (2017-02-14)
+------------------
+
+Roses are red.
+Tan is a pug.
+Software regression's
+the most embarrassing bug.
+
+Bug fixes:
+
+- Use __stdcall for GDAL error handling callback on Windows as in Rasterio.
+- Turn on latent support for zip:// URLs in fio-cat and fio-info (#421).
+- The 1.7.2 release broke support for zip files with absolute paths (#418).
+  This regression has been fixed with tests to confirm.
+
+1.7.2 (2017-01-27)
+------------------
+
+Future Deprecation:
+
+- `Collection.__next__()` is buggy in that it can lead to duplication of
+  features when used in combination with `Collection.filter()` or
+  `Collection.__iter__()`. It will be removed in Fiona 2.0. Please check for
+  usage of this deprecated feature by running your tests or programs with
+  `PYTHONWARNINGS="always:::fiona"` or `-W"always:::fiona"` and switch from
+  `next(collection)` to `next(iter(collection))` (#301). A sketch of the
+  recommended usage follows the 1.7.0post2 notes below.
+
+Bug fix:
+
+- Zipped streams of bytes can be accessed by `BytesCollection` (#318).
+
+1.7.1.post1 (2016-12-23)
+------------------------
+
+- New binary wheels using version 1.2.0 of sgillies/frs-wheel-builds. See
+  https://github.com/sgillies/frs-wheel-builds/blob/master/CHANGES.txt.
+
+1.7.1 (2016-11-16)
+------------------
+
+Bug Fixes:
+
+- Prevent Fiona from stumbling over 'Z', 'M', and 'ZM' geometry types
+  introduced in GDAL 2.1 (#384). Fiona 1.7.1 doesn't add explicit support for
+  these types; they are coerced to geometry types 1-7 ('Point', 'LineString',
+  etc.).
+- Raise an `UnsupportedGeometryTypeError` when a bogus or unsupported
+  geometry type is encountered in a new collection's schema or elsewhere
+  (#340).
+- Enable `--precision 0` for fio-cat (#370).
+- Prevent datetime exceptions from unnecessarily stopping collection iteration
+  by yielding `None` (#385).
+- Replace log.warn calls with log.warning calls (#379).
+- Print an error message if neither gdal-config nor `--gdalversion` indicates
+  a GDAL C API version when running `setup.py` (#364).
+- Let dict-like subclasses through CRS type checks (#367).
+
+1.7.0post2 (2016-06-15)
+-----------------------
+
+Packaging: define extension modules for 'clean' and 'config' targets (#363).
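+
+A minimal sketch of the iteration advice given under 1.7.2 above.
+``example.shp`` is a hypothetical dataset::
+
+    import fiona
+
+    with fiona.open("example.shp") as colxn:
+        # Deprecated: next(colxn) can duplicate features when mixed
+        # with filter() or ordinary iteration.
+        # Preferred: take features from an explicit iterator.
+        first = next(iter(colxn))
+        print(first["id"])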
+
+1.7.0post1 (2016-06-15)
+-----------------------
+
+Packaging: No files are copied for the 'clean' setup target (#361, #362).
+
+1.7.0 (2016-06-14)
+------------------
+
+The C extension modules in this library can now be built and used with either
+a 1.x or 2.x release of the GDAL library. Big thanks to René Buffat for
+leading this effort.
+
+Refactoring:
+
+- The `ogrext1.pyx` and `ogrext2.pyx` files now use separate
+  C APIs defined in `ogrext1.pxd` and `ogrext2.pxd`. The other extension
+  modules have been refactored so that they do not depend on either of these
+  modules and use subsets of the GDAL/OGR API compatible with both GDAL 1.x and
+  2.x (#359).
+
+Packaging:
+
+- Source distributions now contain two different sources for the
+  `ogrext` extension module. The `ogrext1.c` file will be used with GDAL 1.x
+  and the `ogrext2.c` file will be used with GDAL 2.x.
+
+1.7b2 (2016-06-13)
+------------------
+
+- New feature: enhancement of the `--layer` option for fio-cat and fio-dump
+  to allow separate layers of one or more multi-layer input files to be
+  selected (#349).
+
+1.7b1 (2016-06-10)
+------------------
+
+- New feature: support for GDAL version 2+ (#259).
+- New feature: a new fio-calc CLI command (#273).
+- New feature: `--layer` options for fio-info (#316) and fio-load (#299).
+- New feature: a `--no-parse` option for fio-collect that lets a careful user
+  avoid extra JSON serialization and deserialization (#306).
+- Bug fix: `+wktext` is now preserved when serializing CRS from WKT to PROJ.4
+  dicts (#352).
+- Bug fix: a small memory leak when opening a collection has been fixed (#337).
+- Bug fix: internal unicode errors now result in a log message and a
+  `UnicodeError` exception, not a `TypeError` (#356).
+
+1.6.4 (2016-05-06)
+------------------
+
+- Raise ImportError if the active GDAL library version is >= 2.0 instead of
+  failing unpredictably (#338, #341). Support for GDAL>=2.0 is coming in
+  Fiona 1.7.
+
+1.6.3.post1 (2016-03-27)
+------------------------
+
+- No changes to the library in this post-release version, but there is a
+  significant change to the distributions on PyPI: to help make Fiona more
+  compatible with Shapely on OS X, the GDAL shared library included in the
+  macosx (only) binary wheels now statically links the GEOS library. See
+  https://github.com/sgillies/frs-wheel-builds/issues/5.
+
+1.6.3 (2015-12-22)
+------------------
+
+- Daytime has been decreasing in the Northern Hemisphere, but is now
+  increasing again as it should.
+- Non-UTF strings were being passed into OGR functions in some situations
+  and on Windows this would sometimes crash a Python process (#303). Fiona
+  now raises errors derived from UnicodeError when field names or field
+  values can't be encoded.
+
+1.6.2 (2015-09-22)
+------------------
+
+- Providing only PROJ4 representations in the dataset meta property resulted in
+  loss of CRS information when using the `with fiona.open(..., **src.meta) as
+  dst` pattern (#265). This bug has been addressed by adding a crs_wkt item to
+  the meta property and extending `fiona.open()` and the collection constructor
+  to look for and prioritize this keyword argument.
+
+1.6.1 (2015-08-12)
+------------------
+
+- Bug fix: Fiona now deserializes JSON-encoded string properties provided by
+  the OGR GeoJSON driver (#244, #245, #246).
+- Bug fix: proj4 data was not copied properly into binary distributions due to
+  a typo (#254).
+
+Special thanks to WFMU DJ Liz Berg for the awesome playlist that's fueling my
+release sprint. Check it out at https://wfmu.org/playlists/shows/62083. You
+can't unhear Love Coffin.
+
+1.6.0 (2015-07-21)
+------------------
+
+- Upgrade Cython requirement to 0.22 (#214).
+- New BytesCollection class (#215). A sketch of its use follows the 1.1.4
+  notes below.
+- Add GDAL's OpenFileGDB driver to registered drivers (#221).
+- Implement CLI commands as plugins (#228).
+- Raise click.Abort instead of calling sys.exit, preventing surprising exits
+  (#236).
+
+1.5.1 (2015-03-19)
+------------------
+
+- Restore test data to sdists by fixing MANIFEST.in (#216).
+
+1.5.0 (2015-02-02)
+------------------
+
+- Finalize GeoJSON feature sequence options (#174).
+- Fix for reading of datasets that don't support feature counting (#190).
+- New test dataset (#188).
+- Fix for encoding error (#191).
+- Remove confusing warning (#195).
+- Add data files for binary wheels (#196).
+- Add control over drivers enabled when reading datasets (#203).
+- Use cligj for CLI options involving GeoJSON (#204).
+- Fix fio-info --bounds help (#206).
+
+1.4.8 (2014-11-02)
+------------------
+
+- Add missing crs_wkt property as in Rasterio (#182).
+
+1.4.7 (2014-10-28)
+------------------
+
+- Fix setting of CRS from EPSG codes (#149).
+
+1.4.6 (2014-10-21)
+------------------
+
+- Handle 3D coordinates in bounds() (#178).
+
+1.4.5 (2014-10-18)
+------------------
+
+- Add --bbox option to fio-cat (#163).
+- Skip geopackage tests if run from an sdist (#167).
+- Add fio-bounds and fio-distrib.
+- Restore fio-dump to working order.
+
+1.4.4 (2014-10-13)
+------------------
+
+- Fix accidental requirement on GDAL 1.11 introduced in 1.4.3 (#164).
+
+1.4.3 (2014-10-10)
+------------------
+
+- Add support for geopackage format (#160).
+- Add -f and --format aliases for --driver in CLI (#162).
+- Add --version option and env command to CLI.
+
+1.4.2 (2014-10-03)
+------------------
+
+- --dst-crs and --src-crs options for fio cat and collect (#159).
+
+1.4.1 (2014-09-30)
+------------------
+
+- Fix encoding bug in collection's __getitem__ (#153).
+
+1.4.0 (2014-09-22)
+------------------
+
+- Add fio cat and fio collect commands (#150).
+- Return of Python 2.6 compatibility (#148).
+- Improved CRS support (#149).
+
+1.3.0 (2014-09-17)
+------------------
+
+- Add single metadata item accessors to fio info (#142).
+- Move fio to setuptools entry point (#142).
+- Add fio dump and load commands (#143).
+- Remove fio translate command.
+
+1.2.0 (2014-09-02)
+------------------
+
+- Always show property width and precision in schema (#123).
+- Write datetime properties of features (#125).
+- Reset spatial filtering in filter() (#129).
+- Accept datetime.date objects as feature properties (#130).
+- Add slicing to collection iterators (#132).
+- Add geometry object masks to collection iterators (#136).
+- Change source layout to match Shapely and Rasterio (#138).
+
+1.1.6 (2014-07-23)
+------------------
+
+- Implement Collection __getitem__() (#112).
+- Leave GDAL finalization to the DLL's destructor (#113).
+- Add Collection keys(), values(), items(), __contains__() (#114).
+- CRS bug fix (#116).
+- Add fio CLI program.
+
+1.1.5 (2014-05-21)
+------------------
+
+- Addition of cpl_errs context manager (#108).
+- Check for NULLs with '==' test instead of 'is' (#109).
+- Open auxiliary files with encoding='utf-8' in setup for Python 3 (#110).
+
+1.1.4 (2014-04-03)
+------------------
+
+- Convert 'long' in schemas to 'int' (#101).
+- Carefully map Python schema to the possibly munged internal schema (#105).
+- Allow writing of features with geometry: None (#71).
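+
+A minimal sketch of the ``BytesCollection`` class noted under 1.6.0 above,
+also showing the mapping-style Collection access added in 1.1.6. The GeoJSON
+payload is a made-up example::
+
+    import fiona
+
+    data = (
+        b'{"type": "FeatureCollection", "features": [{"type": "Feature",'
+        b' "properties": {"name": "spot"}, "geometry":'
+        b' {"type": "Point", "coordinates": [0.0, 0.0]}}]}'
+    )
+
+    # BytesCollection opens a feature collection directly from bytes.
+    with fiona.BytesCollection(data) as colxn:
+        print(colxn.driver)        # "GeoJSON"
+        feat = colxn[0]            # __getitem__ (1.1.6)
+        print(list(colxn.keys()))  # mapping-style access (1.1.6)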
+
+1.1.3 (2014-03-23)
+------------------
+
+- Always register all GDAL and OGR drivers when entering the DriverManager
+  context (#80, #92).
+- Skip unsupported field types with a warning (#91).
+- Allow OGR config options to be passed to fiona.drivers() (#90, #93).
+- Add a bounds() function (#100).
+- Turn on GPX driver.
+
+1.1.2 (2014-02-14)
+------------------
+
+- Remove collection slice left in dumpgj (#88).
+
+1.1.1 (2014-02-02)
+------------------
+
+- Add an interactive file inspector like the one in rasterio.
+- CRS to_string bug fix (#83).
+
+1.1 (2014-01-22)
+----------------
+
+- Use a context manager to manage drivers (#78), a backwards compatible but
+  big change. Fiona is now compatible with rasterio and plays better with the
+  osgeo package.
+
+1.0.3 (2014-01-21)
+------------------
+
+- Fix serialization of +init projections (#69).
+
+1.0.2 (2013-09-09)
+------------------
+
+- Smarter, better test setup (#65, #66, #67).
+- Add type='Feature' to records read from a Collection (#68).
+- Skip geometry validation when using GeoJSON driver (#61).
+- Dumpgj file description reports record properties as a list (as in
+  dict.items()) instead of a dict.
+
+1.0.1 (2013-08-16)
+------------------
+
+- Allow ordering of written fields and preservation of field order when
+  reading (#57).
+
+1.0 (2013-07-30)
+----------------
+
+- Add prop_type() function.
+- Allow UTF-8 encoded paths for Python 2 (#51). For Python 3, paths must
+  always be str, never bytes.
+- Remove encoding from collection.meta; it's a file creation option only.
+- Support for linking GDAL frameworks (#54).
+
+0.16.1 (2013-07-02)
+-------------------
+
+- Add listlayers, open, prop_width to __init__.py:__all__.
+- Reset reading of OGR layer whenever we ask for a collection iterator (#49).
+
+0.16 (2013-06-24)
+-----------------
+
+- Add support for writing layers to multi-layer files.
+- Add tests to reach 100% Python code coverage.
+
+0.15 (2013-06-06)
+-----------------
+
+- Get and set numeric field widths (#42).
+- Add support for multi-layer data sources (#17).
+- Add support for zip and tar virtual filesystems (#45).
+- Add listlayers() function.
+- Add GeoJSON to list of supported formats (#47).
+- Allow selection of layers by index or name.
+
+0.14 (2013-05-04)
+-----------------
+
+- Add option to add JSON-LD in the dumpgj program.
+- Compare values to six.string_types in Collection constructor.
+- Add encoding to Collection.meta.
+- Document dumpgj in README.
+
+0.13 (2013-04-30)
+-----------------
+
+- Python 2/3 compatibility in a single package. Pythons 2.6, 2.7, 3.3 now
+  supported.
+
+0.12.1 (2013-04-16)
+-------------------
+
+- Fix messed up linking of README in sdist (#39).
+
+0.12 (2013-04-15)
+-----------------
+
+- Fix broken installation of extension modules (#35).
+- Log CPL errors at their matching Python log levels.
+- Use upper case for encoding names within OGR, lower case in Python.
+
+0.11 (2013-04-14)
+-----------------
+
+- Cythonize .pyx files (#34).
+- Work with or around OGR's internal recoding of record data (#35).
+- Fix bug in serialization of int/float PROJ.4 params.
+
+0.10 (2013-03-23)
+-----------------
+
+- Add function to get the width of str type properties.
+- Handle validation and schema representation of 3D geometry types (#29).
+- Return {'geometry': None} in the case of a NULL geometry (#31).
+
+0.9.1 (2013-03-07)
+------------------
+
+- Silence the logger in ogrext.so (can be overridden).
+- Allow user specification of record field encoding (like 'Windows-1252' for
+  Natural Earth shapefiles) to help when OGR can't detect it.
+
+0.9 (2013-03-06)
+----------------
+
+- Accessing file metadata (crs, schema, bounds) on closed files that were never
+  inspected returns None without exceptions.
+- Add a dict of supported_drivers and their supported modes.
+- Raise ValueError for unsupported drivers and modes.
+- Remove asserts from ogrext.pyx.
+- Add validate_record method to collections.
+- Add helpful coordinate system functions to fiona.crs.
+- Promote use of fiona.open over fiona.collection.
+- Handle Shapefile's mix of LineString/Polygon and multis (#18).
+- Allow users to specify width of shapefile text fields (#20).
+
+0.8 (2012-02-21)
+----------------
+
+- Replaced .opened attribute with .closed (product of collection() is always
+  opened). Also added a __del__() which will close a Collection, but it is
+  still not to be depended upon.
+- Added writerecords method.
+- Added a record buffer and better counting of records in a collection.
+- Manage one iterator per collection/session.
+- Added a read-only bounds property.
+
+0.7 (2012-01-29)
+----------------
+
+- Initial timezone-naive support for date, time, and datetime fields. Don't use
+  these field types if you can avoid them. RFC 3339 datetimes in a string field
+  are much better.
+
+0.6.2 (2012-01-10)
+------------------
+
+- Diagnose and set the driver property of collection in read mode.
+- Fail if collection paths do not point to files. Multi-collection workspaces
+  are a (maybe) TODO.
+
+0.6.1 (2012-01-06)
+------------------
+
+- Handle the case of undefined crs for disk collections.
+
+0.6 (2012-01-05)
+----------------
+
+- Support for collection coordinate reference systems based on Proj4.
+- Redirect OGR warnings and errors to the Fiona log.
+- Assert that pointers returned from the ograpi functions are not NULL before
+  using.
+
+0.5 (2011-12-19)
+----------------
+
+- Support for reading and writing collections of any geometry type.
+- Feature and Geometry classes replaced by mappings (dicts).
+- Removal of Workspace class.
+
+0.2 (2011-09-16)
+----------------
+
+- Rename WorldMill to Fiona.
+
+0.1.1 (2008-12-04)
+------------------
+
+- Support for features with no geometry.
+
+
+Credits
+=======
+
+Fiona is written by:
+
+- Adam J. Stewart
+- Alan D. Snow
+- Alexandre Detiste
+- Ariel Nunez
+- Ariki
+- Bas Couwenberg
+- Brandon Liu
+- Brendan Ward
+- Chad Hawkins
+- Chris Mutel
+- Christoph Gohlke
+- Dan "Ducky" Little
+- daryl herzmann
+- Denis
+- Denis Rykov
+- dimlev
+- Efrén
+- Egor Fedorov
+- Elliott Sales de Andrade
+- Even Rouault
+- Ewout ter Hoeven
+- Filipe Fernandes
+- fredj
+- Gavin S
+- Géraud
+- Hannes Gräuler
+- Hao Lyu <20434183+IncubatorShokuhou@users.noreply.github.com>
+- Herz
+- Ian Rose
+- Jacob Wasserman
+- James McBride
+- James Wilshaw
+- Jelle van der Waa
+- Jesse Crocker
+- joehuanguf <51337028+joehuanguf@users.noreply.github.com>
+- Johan Van de Wauw
+- Joris Van den Bossche
+- Joshua Arnott
+- Juan Luis Cano Rodríguez
+- Keith Jenkins
+- Kelsey Jordahl
+- Kevin Wurster
+- lgolston <30876419+lgolston@users.noreply.github.com>
+- Loïc Dutrieux
+- Ludovic Delauné
+- Martijn Visser
+- Matthew Perry
+- Micah Cochran
+- Michael Weisman
+- Michele Citterio
+- Mike Taves
+- Miro Hrončok
+- Oliver Tonnhofer
+- Patrick Young
+- Phillip Cloud <417981+cpcloud@users.noreply.github.com>
+- pmav99
+- qinfeng
+- René Buffat
+- Reuben Fletcher-Costin
+- Ryan Grout
+- Ryan Munro
+- Sandro Mani
+- Sean Gillies
+- Sid Kapur
+- Simon Norris
+- Stefan Brand
+- Stefano Costa
+- Stephane Poss
+- Tim Tröndle
+- wilsaj
+- Yann-Sebastien Tremblay-Johnston
+
+The GeoPandas project (Joris Van den Bossche et al.) has been a major driver
+for new features in 1.8.0.
+
+Fiona would not be possible without the great work of Frank Warmerdam and other
+GDAL/OGR developers.
+
+Some portions of this work were supported by a grant (for Pleiades_) from the
+U.S. National Endowment for the Humanities (https://www.neh.gov).
+
+.. _Pleiades: https://pleiades.stoa.org
diff --git a/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/RECORD b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/RECORD
new file mode 100644
index 00000000..9d28d989
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/RECORD
@@ -0,0 +1,279 @@
+../../../bin/fio,sha256=MgjwOIe3xJQjncRMINtuED6yQ6uNWHvm2jKWRj6v2iA,365
+fiona-1.10.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+fiona-1.10.1.dist-info/LICENSE.txt,sha256=8hUThE5dJ0xiUNbW3FpWenWoGKuXghiWxBbcNDr4r1E,1519
+fiona-1.10.1.dist-info/METADATA,sha256=I9BLJkNMGNHTtP5SVOjt_kY_B6sddVGM7YsFdk2HwFs,56554
+fiona-1.10.1.dist-info/RECORD,,
+fiona-1.10.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+fiona-1.10.1.dist-info/WHEEL,sha256=7B4nnId14TToQHuAKpxbDLCJbNciqBsV-mvXE2hVLJc,151
+fiona-1.10.1.dist-info/entry_points.txt,sha256=4tU8abfsG7w57AOUTz0Zbpr2yd9hmW1xvLjW9JTd_Z0,50
+fiona-1.10.1.dist-info/top_level.txt,sha256=y4VuMwkgZXN48JxZuwWNUtIXZp0iGHDUMioEOiHdX1A,6
+fiona.libs/libcrypto-fiona-2769ca46.so.1.1,sha256=LhEKBkiSSRtYdzP4JU-bJRjcEHWIfgCcC8bYnyCudIk,3477177
+fiona.libs/libcurl-fiona-1d984654.so.4.8.0,sha256=K7CJwmhgXNEEcD_HCPwYKLbFw7UgfOsbuWv7Xt-BS9c,745577
+fiona.libs/libgdal-fiona-e8f6bdb0.so.35.3.9.2,sha256=-NzeqzsJhvT4aB3HojV1fEifwRrnwVUtUJtc_BqyJic,20476873
+fiona.libs/libgeos-fiona-d914d573.so.3.11.2,sha256=sQG3J01xFLwFuz5GOZoO0t2pd4glr8zpgWqkPeebFks,3281225
+fiona.libs/libgeos_c-fiona-3b303efa.so.1.17.2,sha256=uyUmFBFgccFPkw_afWXF4dIA9-A98swp1n_Qqbezmkc,365505
+fiona.libs/libjpeg-fiona-320f4797.so.9.6.0,sha256=9ppY1xFilNsZpz8_p528pcF8Xv4k0H_VI7EIDJaIQVM,344705
+fiona.libs/libjson-c-fiona-b8129721.so.5.1.0,sha256=bAGA0W2lF3chgjIL5RvY0QRLNJ1hmn3NhXDwsZUOP7I,94913
+fiona.libs/liblzma-fiona-c949e524.so.5.2.2,sha256=THa-FoCwxaGfffs22B7hEG-XZFVhrDHTNktCFEiotkg,220761 +fiona.libs/libnghttp2-fiona-e183d352.so.14.21.1,sha256=PeY7s9cK6f2lo1WJv0VuArYLcher4PLc6OoFRiGhff8,215929 +fiona.libs/libpcre2-8-fiona-221be7d6.so.0.13.0,sha256=TR450_GhLU3OTzKEGirEDiT0jB-k3nhtDdkCt2GDzno,404465 +fiona.libs/libpng16-fiona-8ebcc106.so.16.35.0,sha256=Zsn6VpH7E4aFOpmYMh-_JK7zZ5fpgkMYCxJKU-1kock,277825 +fiona.libs/libproj-fiona-3ec30893.so.25.9.4.1,sha256=8w9esMqt60JmHBJPOtRg_shpm3pTnJlsEEOuAyhu5Gs,4555089 +fiona.libs/libsqlite3-fiona-83998bda.so.0.8.6,sha256=CTM1gpxin3_Af-SpHjQtwkIbVHtkv2ohiTjuYiiCufA,1421513 +fiona.libs/libssl-fiona-6c758070.so.1.1,sha256=BXfVRy59ekjphQcRFYj3H1kRpLaXV45qxA9roOBOHPI,764849 +fiona.libs/libtiff-fiona-c967de8d.so.5.7.0,sha256=m3hdCjFFYbUQ4GtZ2QtxfwIGQFzsxPSVaRI8QxemKLU,673553 +fiona/__init__.py,sha256=auljvJG6o3Uydmw-rcvkoS6G2n0nG8sNDxDk-ApEPTk,22584 +fiona/__pycache__/__init__.cpython-312.pyc,, +fiona/__pycache__/_path.cpython-312.pyc,, +fiona/__pycache__/_show_versions.cpython-312.pyc,, +fiona/__pycache__/abc.cpython-312.pyc,, +fiona/__pycache__/collection.cpython-312.pyc,, +fiona/__pycache__/compat.cpython-312.pyc,, +fiona/__pycache__/drvsupport.cpython-312.pyc,, +fiona/__pycache__/enums.cpython-312.pyc,, +fiona/__pycache__/env.cpython-312.pyc,, +fiona/__pycache__/errors.cpython-312.pyc,, +fiona/__pycache__/features.cpython-312.pyc,, +fiona/__pycache__/inspector.cpython-312.pyc,, +fiona/__pycache__/io.cpython-312.pyc,, +fiona/__pycache__/logutils.cpython-312.pyc,, +fiona/__pycache__/meta.cpython-312.pyc,, +fiona/__pycache__/model.cpython-312.pyc,, +fiona/__pycache__/path.cpython-312.pyc,, +fiona/__pycache__/rfc3339.cpython-312.pyc,, +fiona/__pycache__/session.cpython-312.pyc,, +fiona/__pycache__/transform.cpython-312.pyc,, +fiona/__pycache__/vfs.cpython-312.pyc,, +fiona/_cpl.pxd,sha256=qYoraBfv5raDd52HpMVzzDAlAtTWR-PZXN41Ps-Psks,733 +fiona/_csl.pxd,sha256=7I_eBBweSvZit5nXJZapzvQZp-xOlRpYGeAsBY0Idso,229 +fiona/_env.cpython-312-x86_64-linux-gnu.so,sha256=iZqLHHPgSoFZK4PaCcTKVN0n5wNQjlJ6TtVfPw6eG9U,241721 +fiona/_env.pxd,sha256=OL0uBA9UOjsDGr8RWdffe7cgxKnMubw_3J0QfmRwbG0,214 +fiona/_err.cpython-312-x86_64-linux-gnu.so,sha256=M62mAc9M-PJ0MwGEVMvcviwuHeuFlyAZ_tj78ZBxPgU,188225 +fiona/_err.pxd,sha256=FuX6uT762TskCC0D73VHnB533kiYuiSy4ub5-bgARnc,448 +fiona/_geometry.cpython-312-x86_64-linux-gnu.so,sha256=oF2Gx54-cmneu_1ptB19kmuqDseTb0iOfy-GCIMXLnU,188713 +fiona/_geometry.pxd,sha256=LoTN3u1MX8vsGPZ737-3h3BQJgGWW1h44dT5u1AgK5s,4667 +fiona/_path.py,sha256=ZN191TgwUuwGqQL2bc-fy3CRrZ1_VqS9JZzsla35WsY,5487 +fiona/_show_versions.py,sha256=ZRPiGi_PTSDINMpaXG5YvTUBUxf6euSyI40yCsbgx5o,532 +fiona/_transform.cpython-312-x86_64-linux-gnu.so,sha256=nBoMBbLNwiZn_Ks10532-FEDhL2IuGrsYY3_am6e4oI,311113 +fiona/_vendor/__pycache__/snuggs.cpython-312.pyc,, +fiona/_vendor/munch/__init__.py,sha256=TjDYzSRTw9KRwBCwdx9QEdFNtaHTX7ru48Zn2HJRuzI,17380 +fiona/_vendor/munch/__pycache__/__init__.cpython-312.pyc,, +fiona/_vendor/snuggs.py,sha256=2iJbaVre2ORMqM7XYmepoivjmLKg2Z72b-ig2n0dun4,7821 +fiona/_vsiopener.cpython-312-x86_64-linux-gnu.so,sha256=ftwoJVazYJhAKx80QvGRq4oZvvlwfVd9jhUwmICb9Bg,443001 +fiona/_vsiopener.pxd,sha256=By-V65UcAiFY_sY6x4yvpfw5lAMjJqcEvRZfRgGMIRo,19 +fiona/abc.py,sha256=yP3LHHKNcNwO4ZseY0NaKu2r2DS6kMLBqFUBYxlheUI,106 +fiona/collection.py,sha256=piB8cu69rxXKqMVAcpMGG_AvBO3mxulYuWNBDvFLx00,27566 +fiona/compat.py,sha256=y4F-P696tYnrIhJ3dx8-f3M73UA6Xf-1drdXBu625xA,278 
+fiona/crs.cpython-312-x86_64-linux-gnu.so,sha256=8MFQ02i8oJzxcpJjZMapNuXIkZ2kR29gXFcGcHJsNTM,324121 +fiona/crs.pxd,sha256=InSp_T8n6CgfIV7dIOMYOFcE8ODwOOXirMnYbsSMfOQ,218 +fiona/drvsupport.py,sha256=cGkluDdICE5yATJRauQ754rpTsk9OVxRgGOXzQOYxX4,14071 +fiona/enums.py,sha256=2J94YSBHUQbiRqJSYwk9d9u-vjkbs4-SmiuXX-7BVSM,779 +fiona/env.py,sha256=z44GbJvlDJiXl8HH8k1aviswTVbWIpc0MShbyFldwK4,20951 +fiona/errors.py,sha256=XmPGXlui33iPNI-8zM8PNg26Q5YTbh6NtXbv4FY6dI8,2184 +fiona/features.py,sha256=8jPo2YKY-h8M_6cgAaIHQX4DNCHrmhpyGH_3XjCW0KA,8539 +fiona/fio/__init__.py,sha256=dmK16m-xo61uxZj3EjSrXGbe9RlGLotPMwmDjed9Xi8,510 +fiona/fio/__pycache__/__init__.cpython-312.pyc,, +fiona/fio/__pycache__/bounds.cpython-312.pyc,, +fiona/fio/__pycache__/calc.cpython-312.pyc,, +fiona/fio/__pycache__/cat.cpython-312.pyc,, +fiona/fio/__pycache__/collect.cpython-312.pyc,, +fiona/fio/__pycache__/distrib.cpython-312.pyc,, +fiona/fio/__pycache__/dump.cpython-312.pyc,, +fiona/fio/__pycache__/env.cpython-312.pyc,, +fiona/fio/__pycache__/features.cpython-312.pyc,, +fiona/fio/__pycache__/helpers.cpython-312.pyc,, +fiona/fio/__pycache__/info.cpython-312.pyc,, +fiona/fio/__pycache__/insp.cpython-312.pyc,, +fiona/fio/__pycache__/load.cpython-312.pyc,, +fiona/fio/__pycache__/ls.cpython-312.pyc,, +fiona/fio/__pycache__/main.cpython-312.pyc,, +fiona/fio/__pycache__/options.cpython-312.pyc,, +fiona/fio/__pycache__/rm.cpython-312.pyc,, +fiona/fio/bounds.py,sha256=Mm4as4ySKBbwoxlm3GCLJGtYNnfJlDP57ammzsPpIoc,2873 +fiona/fio/calc.py,sha256=v256U6FZyc0cp2pGBeyoBdeLkfcZk3WcWLt1-19fm2k,2064 +fiona/fio/cat.py,sha256=HxMauK01UGFq9ejtrsWm9W9R5trU9GJJcwMJS0Pc8uA,3926 +fiona/fio/collect.py,sha256=yE8xrvgsEYDgyrLErBEJ6ORWEFtjq6_9B-13xpsNmpY,7729 +fiona/fio/distrib.py,sha256=l8trUttEx_rntSmZXobYKoAla6AImQ_ZATy1dq9uOrQ,941 +fiona/fio/dump.py,sha256=BXbAlo025O7v9rx1sKNwtSpgPBawDPWnjWHoflG-xP4,6909 +fiona/fio/env.py,sha256=0zyzD9n3xzHQZRYElCSjtCocjEPgoxPbsazmzbBa0tg,1477 +fiona/fio/features.py,sha256=jpF1M0fs4naCEazi48V94LX3NKO33FOIK8qdrbX-JA8,9028 +fiona/fio/helpers.py,sha256=BzkMrT4GqwlgBhwseTzj0AkplU4Qp4gCCvnlwCor6Bw,4082 +fiona/fio/info.py,sha256=xSjOvwZQOnn0cifQKY6OlR1griFXHA0_-dJny4BvLB0,2559 +fiona/fio/insp.py,sha256=BCFcHmOrLyp-1WQzsp0yCW6LM4HseLRne7vdXGbC5aI,1253 +fiona/fio/load.py,sha256=rmv_ZzdnO4eAtIPuqSbkk1W8vPAvMHdQkD3ER3XQ8Rk,2997 +fiona/fio/ls.py,sha256=mI-P3IEwCycBVMPN8BDZcQniWeV_K4h-KuwFcIl9asg,457 +fiona/fio/main.py,sha256=u2nBMO-jCk5heLdL8459MgJ0jSfEZsCIG_40EBnCVq8,3067 +fiona/fio/options.py,sha256=8Esjbuo8oDCRwi9o4LlJk7UVoMuPZqqiqfkVxqs66as,2555 +fiona/fio/rm.py,sha256=HnNFQf61cDKZgVY00qzawyBjXNthX515PGwZxZjVzMg,769 +fiona/gdal.pxi,sha256=8os6hqVJEGRN3EysiZ5LsisBBw9neXLSmQd2YC6DFjk,35961 +fiona/gdal_data/GDALLogoBW.svg,sha256=qsnasz1HnguDK4vXyeVRDcvvNKfGzJrqlrviqrUWHFM,13022 +fiona/gdal_data/GDALLogoColor.svg,sha256=LBAqMcbpEWqpuYeH2L9c9kjbzCuTCu5lPNNd9l7RGNA,12305 +fiona/gdal_data/GDALLogoGS.svg,sha256=Gubi2S5rBDnzuu4MtV2ti5er-0_MWMkYmyO2PIGK7so,12305 +fiona/gdal_data/LICENSE.TXT,sha256=Ha40aOgdANpW4pNvdNM7izrQnXJkN_Gc4gml2r6kH3c,21841 +fiona/gdal_data/MM_m_idofic.csv,sha256=acV9u5XhcnROPCNrvnMulxBaApZ45wEd9nzjGoWylgQ,32388 +fiona/gdal_data/cubewerx_extra.wkt,sha256=HMo_o-JJ9Mx9G_D_sKcaGir63RCv337QqiktvF5PB2g,11977 +fiona/gdal_data/default.rsc,sha256=nhydcIHVJ3XnUNQVXUklUlKwTKgf7yfLKZdnXTzZYms,463632 +fiona/gdal_data/ecw_cs.wkt,sha256=1DGJPrRGsmXSBu6ohj3NzOO2XHwqcH2qtY6J5YOvAz4,364032 +fiona/gdal_data/epsg.wkt,sha256=5QiV6QYaum3RWI8GoIiHKqViqNmHFliLwaQgGJ3LJUw,27 
+fiona/gdal_data/esri_StatePlane_extra.wkt,sha256=aC3xsjHLbxKHaF1MZL8bJhHAr0PDBINHO7s2bOG9jTM,332546 +fiona/gdal_data/gdalicon.png,sha256=2Q9QqPdMMlCGyGe_AGqSJmwlmicNkWay9B9Ya2d1Vyk,2021 +fiona/gdal_data/gdalinfo_output.schema.json,sha256=TDBG6nQXoUlzsZ0U0IY2Kgj3SqGCSueQwwXQXxrfJzM,8241 +fiona/gdal_data/gdalmdiminfo_output.schema.json,sha256=uYqJkLuuqtCA6jwdnNGeNRF7XeeI6fy2Y-ybzZf7VaQ,6543 +fiona/gdal_data/gdaltileindex.xsd,sha256=9dobaUOIXxQDJbE6IZF7iWkp9TZqeSF5FyNNSlau35g,11827 +fiona/gdal_data/gdalvrt.xsd,sha256=ns4HrkpnUhYdmn5b6C89jdaXzK0lGDhIsWh5K1olqmU,38694 +fiona/gdal_data/gfs.xsd,sha256=TCLwaVONVD6Fh94o6Hw-NClD8-2erBsz3V9dgozmG5I,16469 +fiona/gdal_data/gml_registry.xml,sha256=2rVxsqdOS23ieg_eYtNIhxQcpFbRHKrN11oWay3lp0M,6643 +fiona/gdal_data/gml_registry.xsd,sha256=75C2JexYEv3asxO4seOHmJzP6glAnFFfMTm21jSRKXo,6462 +fiona/gdal_data/grib2_center.csv,sha256=9qwbZ4W8m8-6dZG6LFskca3rCgl42bMLfzg0yQI_QjM,4171 +fiona/gdal_data/grib2_process.csv,sha256=5t64qqEuz435_VQfKj6jZrPpi9GUJPL-XS32V9VECg8,4926 +fiona/gdal_data/grib2_subcenter.csv,sha256=H1NnC7PusHTUbS3cFTfnwFyHskqvdgJNApd1FDUClFs,2328 +fiona/gdal_data/grib2_table_4_2_0_0.csv,sha256=7lcXiYZHBVvyTtJuzN7_N7bcCTaT98t9pGZ1qCAClPs,10363 +fiona/gdal_data/grib2_table_4_2_0_1.csv,sha256=Islx1uwOrtFhUgZcq0urhO9eG1bFDWjFieujFdQJq3M,16505 +fiona/gdal_data/grib2_table_4_2_0_13.csv,sha256=FiAWXbiJem9ePbrtXLu7chBm2H-PhZ39DNI_c7CXE3I,9596 +fiona/gdal_data/grib2_table_4_2_0_14.csv,sha256=FX7I5iVqwq7iZITOHtI74X-ifJWuBDvpv2qW18jkZ2k,9551 +fiona/gdal_data/grib2_table_4_2_0_15.csv,sha256=7eBU5WigNMmSwRD7SKmCMpMzvqoBJJgcJvT55LIowhI,9846 +fiona/gdal_data/grib2_table_4_2_0_16.csv,sha256=mut7PcQsX0ZPxMjU_oHfuTTGwPUAbxkJy2Yp-ybLV-0,9671 +fiona/gdal_data/grib2_table_4_2_0_17.csv,sha256=A4I5bH537GYR-TnRfb22wWfzM7nLp4OBcXSExvrLv18,923 +fiona/gdal_data/grib2_table_4_2_0_18.csv,sha256=UGnoCo2YiZQj9wJJE4oajSegwv7jMxebvWU69-TeVfY,10224 +fiona/gdal_data/grib2_table_4_2_0_19.csv,sha256=GzGUGbbmLLm1y0qMaRi0GOVeinZ_W0XkOJ_EUlyS9q0,11874 +fiona/gdal_data/grib2_table_4_2_0_190.csv,sha256=xF_PY2JAidyUTzFRSakOV3SITD5OD2LgF-2WT_vG0bc,9507 +fiona/gdal_data/grib2_table_4_2_0_191.csv,sha256=nb6bmMA90CcIbRmdWTwC_BWcEo4M46hoJSHQnzsEHDA,9620 +fiona/gdal_data/grib2_table_4_2_0_2.csv,sha256=Hcr_tN5FR17RXjlBWHh1N-fHS7opzUxThapntte7Bd4,10892 +fiona/gdal_data/grib2_table_4_2_0_20.csv,sha256=IUriHiSfaj_jzgaTT_lWVCZt-Uo_KSTinbaYDsQK8eY,12291 +fiona/gdal_data/grib2_table_4_2_0_21.csv,sha256=30k7pyX-dRE2KdZzZ4lE4tNliimuRzYJevqXoRU9UvM,10262 +fiona/gdal_data/grib2_table_4_2_0_3.csv,sha256=VyZN8AA5z_yyp6bELgKOect5RZZWX27NWAjd5msJ9GI,10558 +fiona/gdal_data/grib2_table_4_2_0_4.csv,sha256=KVlHHOG2-UdYSWk2F3e0f1AtEmQJGvuhy-bNhmwItvQ,10311 +fiona/gdal_data/grib2_table_4_2_0_5.csv,sha256=Acxx9cLX729oggWNVsmfPc9Lqq1nNTWWvgnta66WfaE,9826 +fiona/gdal_data/grib2_table_4_2_0_6.csv,sha256=Dldt7q0TbnIkPdL6eqhiiiJU9jBzmIXCGNOsLvFXl2g,11642 +fiona/gdal_data/grib2_table_4_2_0_7.csv,sha256=8OAwltJUJVjKWW10OC-jGolnOn6i--r1Tot2rwfXF7A,10492 +fiona/gdal_data/grib2_table_4_2_10_0.csv,sha256=9P_7VCO55ZdL-Acj5lwY6ECnruzcgMAb7bmeQYIG7-c,11822 +fiona/gdal_data/grib2_table_4_2_10_1.csv,sha256=lWT-VZLPKNdqWmFE4TBRVncsUsqRkxCzcIIk7TNuOGk,9625 +fiona/gdal_data/grib2_table_4_2_10_191.csv,sha256=D1EVM9Eyzl1STFGdSwy-OjkzyVNqUJc4JvToQgIcscg,9634 +fiona/gdal_data/grib2_table_4_2_10_2.csv,sha256=OitdURbBcwCaOEMxzAy0kugXFRiYn2TdB9oBIvSmTq4,10112 +fiona/gdal_data/grib2_table_4_2_10_3.csv,sha256=LxrwVe-pgQTuqDSTGSarqfDaTqBED-L9uohVEyTeKIY,9989 
+fiona/gdal_data/grib2_table_4_2_10_4.csv,sha256=v11Tm6AQX7Y9rLhY7zjQeCR6jaO9lcTb6mV_SkV1Amk,10816 +fiona/gdal_data/grib2_table_4_2_1_0.csv,sha256=wk1sBMq1quhb3o6ROtT1ptqxnHV2a0TJYgspUqgERTw,10142 +fiona/gdal_data/grib2_table_4_2_1_1.csv,sha256=3FyIbZA43DVDSURtfTyKmbqw2bCyvB9K5ObuED1fIiU,9655 +fiona/gdal_data/grib2_table_4_2_1_2.csv,sha256=b_R3xjTgyeyJeKz_6Rm_TnqOGtsvHUBMPo8vJEZZuYM,9798 +fiona/gdal_data/grib2_table_4_2_20_0.csv,sha256=p4oIJ9cFOANpbQmbaOdm1OJtqOcgdtt2w0IO9oNvCGc,9643 +fiona/gdal_data/grib2_table_4_2_20_1.csv,sha256=PssQWXb7slKE9Lj3j6-7bzaT_MbaNYvW1Ye9svtdHRQ,9851 +fiona/gdal_data/grib2_table_4_2_20_2.csv,sha256=ZudVi5DPaFj3QMlMw6a0jkSnBbPzfkrxeOH6cGqDgrs,9506 +fiona/gdal_data/grib2_table_4_2_2_0.csv,sha256=VmMPvFRcEifRi8w6eXg9Z3i_JcxhnqAcdxM4ymWV1E0,12666 +fiona/gdal_data/grib2_table_4_2_2_3.csv,sha256=NhDBdFAucOn0CBT4N8txwupv58jwnpCY6ChnhGNjkfo,10634 +fiona/gdal_data/grib2_table_4_2_2_4.csv,sha256=FbmMUnZlbMWW-Fya4RsGe58rPjeAn02QFgRLlLuQGPM,11228 +fiona/gdal_data/grib2_table_4_2_2_5.csv,sha256=DMm7VDNZcLAKXwFA4SIgfExBb1d8iGFNMKpOoYmwY5U,9513 +fiona/gdal_data/grib2_table_4_2_2_6.csv,sha256=bUI0gAfG770GFRvGRwdLeMQ5EjRWhoymXwIgUeRU-L8,9599 +fiona/gdal_data/grib2_table_4_2_3_0.csv,sha256=lXurUVCwfKCGo8XSUdwGG0YWahBnd_LSU-LCwGULIVE,10951 +fiona/gdal_data/grib2_table_4_2_3_1.csv,sha256=_UjqsYLrMmAUhR1h2iJzqZJeTvhFuNfnBrelFgueB5c,10663 +fiona/gdal_data/grib2_table_4_2_3_2.csv,sha256=FWUq3hX244-kBMJor_98vRY7ujIaf_ZF6SzLHOv5X8M,3833 +fiona/gdal_data/grib2_table_4_2_3_3.csv,sha256=I4cT-Ad19soux0yIC8cQw-UPDHA8slwLynqEtOb-_PQ,784 +fiona/gdal_data/grib2_table_4_2_3_4.csv,sha256=Hu4t2JRqRwfmaM3AqgFhUXKfhb86y3A4e4qOYxVMrQw,1051 +fiona/gdal_data/grib2_table_4_2_3_5.csv,sha256=BPr_V_toW-rW2gBwOrD910yaad-Z6W97LA29E3IEjOw,920 +fiona/gdal_data/grib2_table_4_2_3_6.csv,sha256=GklA-e-8iI9GXxT-rAQ_fzEA9h4s13bKfVuUvLj2v9g,819 +fiona/gdal_data/grib2_table_4_2_4_0.csv,sha256=9kJY8m3csUYibz041_Whq1iA-6RdqYg1dzgarcPku_k,9553 +fiona/gdal_data/grib2_table_4_2_4_1.csv,sha256=9ZHp2TMk8z3QZUo4_G8DS-4MdgCHA0MZnFP-rzaRFXM,9686 +fiona/gdal_data/grib2_table_4_2_4_10.csv,sha256=LO-onEZ63bsCe_QuDb7HRmGf4QcCU_TrElpuKRCTjLg,9701 +fiona/gdal_data/grib2_table_4_2_4_2.csv,sha256=mRIkqJQ6Chbyavutcact38Scvqal5vK177DT8Mko6ag,9660 +fiona/gdal_data/grib2_table_4_2_4_3.csv,sha256=H6aiN0QNLVmYkallM7yVbCiR3g6x2TGNVaxHnONqa4I,9722 +fiona/gdal_data/grib2_table_4_2_4_4.csv,sha256=7ZOpoUdEUBKtzctagMGLREW6GMan6_WECFbywvFimSE,9690 +fiona/gdal_data/grib2_table_4_2_4_5.csv,sha256=UH1R3eJHZQ9Xejfrgr9km-ko28squEkiEdtdfqN9boE,9495 +fiona/gdal_data/grib2_table_4_2_4_6.csv,sha256=fn0fYAm3jDZsGeXwE8NCnEAo9ZJ4HoicnCEVwends0s,9633 +fiona/gdal_data/grib2_table_4_2_4_7.csv,sha256=T6hqPEH1cIRqbWuGUPrs3eiKFnUyHK-eaSrycm7V4yI,9556 +fiona/gdal_data/grib2_table_4_2_4_8.csv,sha256=TAwuXG7KGaAjs6yGFBT6yg7dnSzfcfVHEdHhbrnD59g,9686 +fiona/gdal_data/grib2_table_4_2_4_9.csv,sha256=zk5CHm-MgMuskaBqflKemoUY7aUSvK1tUTubKiFppxk,9536 +fiona/gdal_data/grib2_table_4_2_local_Canada.csv,sha256=m8GRxbypdPQvvdSIA38HTuHKu6gO_i2lC4_SVMDcv_U,333 +fiona/gdal_data/grib2_table_4_2_local_HPC.csv,sha256=uJ0Xy_y46b8uqS0X2P1VUpMjPbV85qh-OXe_SHd5o_4,87 +fiona/gdal_data/grib2_table_4_2_local_MRMS.csv,sha256=H_u9WF10JNULZrrg7fE4D0q-tFG0BT28Z8w3umpVDlM,15587 +fiona/gdal_data/grib2_table_4_2_local_NCEP.csv,sha256=3u547LbwYlq1zC48OFIOqpIZmHtobfkUtZgd-M6TRHs,27977 +fiona/gdal_data/grib2_table_4_2_local_NDFD.csv,sha256=-ax9PGvL20SKnBtm_yeUiXiV_N6CRx1PT_1C_iKDhsY,2659 +fiona/gdal_data/grib2_table_4_2_local_index.csv,sha256=iL7UDbsNDfcrQyeSujIUv93LopATsZpd_P9neBGjN7c,251 
+fiona/gdal_data/grib2_table_4_5.csv,sha256=JtMFPcnPHyzAwCeMZc9ZU8Gi2FmFqHSSiWrdx8AutNA,10013 +fiona/gdal_data/grib2_table_versions.csv,sha256=kmlTzSvGEG0hhPJwT9se8LVIVz61oufFbH-90NhFh_U,38 +fiona/gdal_data/header.dxf,sha256=9GpEP0k-Q3B7x5hbnyhEViwk_qV21xVw_yr-kTF0Sd4,6572 +fiona/gdal_data/inspire_cp_BasicPropertyUnit.gfs,sha256=KG17Z8L6OlwyHUOfgxk7wG4MagplNoOOefFRZ4q1Rxw,1740 +fiona/gdal_data/inspire_cp_CadastralBoundary.gfs,sha256=VZY8kxf3ZQj1tkcp5gV6Iw8fX8lHOFHxAx-ZxQsa64Y,1650 +fiona/gdal_data/inspire_cp_CadastralParcel.gfs,sha256=bxS0mxaUq_4ZHx1FyExuYmeIZuOUHcU_CV4Oxs-jDiQ,2450 +fiona/gdal_data/inspire_cp_CadastralZoning.gfs,sha256=e1GC-8pLjZ6gc-OY0IUDd5LL2URNHcazWW-i7QMLTPo,4812 +fiona/gdal_data/jpfgdgml_AdmArea.gfs,sha256=F8i9JfKPfW_hTQyCHb9rqmnqHhH2nGlVfAFsmF67-Rg,1640 +fiona/gdal_data/jpfgdgml_AdmBdry.gfs,sha256=XY7zmIi3gWDR5xIWx_wJon7IgWCfwc8Ex-oVylQCSAQ,1382 +fiona/gdal_data/jpfgdgml_AdmPt.gfs,sha256=zvbd8Fp_ZzXKSYDWradRuDrkjpJQtBPzT5FI1dwOKJk,1633 +fiona/gdal_data/jpfgdgml_BldA.gfs,sha256=AU71Y7NeeWHRmDB4tq7--DZWAQqsTV_XRNpJiBVULt0,1501 +fiona/gdal_data/jpfgdgml_BldL.gfs,sha256=9j7aVPTJLDF0I1sb6YzVloK1VD5pAV-e4aP4kpJH9bA,1503 +fiona/gdal_data/jpfgdgml_Cntr.gfs,sha256=wOA3_ml-DHFJVXGNFgI8KplOJfwOmM9aluWTXapDYJM,1501 +fiona/gdal_data/jpfgdgml_CommBdry.gfs,sha256=dLHB579iCR6aIbNNiFHaX_xpdiLs3xMqm_zrRZ1kACw,1384 +fiona/gdal_data/jpfgdgml_CommPt.gfs,sha256=A53IK4rYp1tK1fAYRcLi_AUpRCNyaV6C_iKTqyLgEVA,1635 +fiona/gdal_data/jpfgdgml_Cstline.gfs,sha256=cbzTD-Dbdi2QC-nO9Ep8i64E2CUW3m9nRhmKej6qzLU,1509 +fiona/gdal_data/jpfgdgml_ElevPt.gfs,sha256=9MRSGeU4EV1qAZ6Q2JEBsINXQXjxtlLg7JFgnFH3bIM,1500 +fiona/gdal_data/jpfgdgml_GCP.gfs,sha256=1bTmQNeaTceyuvv2p4QVBC19KFk3aLx4tgBsGeZQ0GQ,2523 +fiona/gdal_data/jpfgdgml_LeveeEdge.gfs,sha256=kQnqUPoyJfjqfvdnSAB2VvIN9FM3Jd-r9dCPOPqGiZ4,1386 +fiona/gdal_data/jpfgdgml_RailCL.gfs,sha256=WfcPfaAxyRwH0hGYR8cWo9KzkYaeVjdMpWdCj88L2Lc,1507 +fiona/gdal_data/jpfgdgml_RdASL.gfs,sha256=mg_FOmbuKivcTjWcIxVdf3KYzKv9x0m7DUGI1UHnFOk,1251 +fiona/gdal_data/jpfgdgml_RdArea.gfs,sha256=ldLwklTkt4-v--uIOd9E3T5evFxWeUDgUS6cd27sgMQ,1515 +fiona/gdal_data/jpfgdgml_RdCompt.gfs,sha256=e19AWCj_6GE7AR8sUE8WlRUZuOkDsfPgVrkRW4aZaK8,1646 +fiona/gdal_data/jpfgdgml_RdEdg.gfs,sha256=zFrWUKdRQpadmkTottrgNxknbGTc-87RTqTzhJ-Jt4U,1642 +fiona/gdal_data/jpfgdgml_RdMgtBdry.gfs,sha256=icSd5TDjjersUSthlHjPande-IFpCVAEJWWReCSo0vY,1386 +fiona/gdal_data/jpfgdgml_RdSgmtA.gfs,sha256=DrQoc9ubkSfkE8QL1Uvoq-EWklYvSIGeTw8cuJtZuZU,1644 +fiona/gdal_data/jpfgdgml_RvrMgtBdry.gfs,sha256=3mSGD0Rm2PFV97OzIDjK8Mb3dAom_uQADHtIKsSwNjM,1388 +fiona/gdal_data/jpfgdgml_SBAPt.gfs,sha256=6SXkKWcv4sOfsZUtRPBx3kSg2H8JgSLhK_fxaHhOxuE,1375 +fiona/gdal_data/jpfgdgml_SBArea.gfs,sha256=eXR1XphhUeJpoSOqqAbhKpnwcDaatIYa_Yap3xwFJFo,1507 +fiona/gdal_data/jpfgdgml_SBBdry.gfs,sha256=JDySSKChX3q0KetcBBC62Bvsr59qj8aq7_LWlAPOMys,1253 +fiona/gdal_data/jpfgdgml_WA.gfs,sha256=GBCluVns9HsNGQMnEb4VXaYYi-cNgIo9LDYMCINVlnM,1497 +fiona/gdal_data/jpfgdgml_WL.gfs,sha256=J9BKIPRJyY30wGjgQk9CYpguSk38EF4706xzYIqESKk,1499 +fiona/gdal_data/jpfgdgml_WStrA.gfs,sha256=ul-hFP9A9jVcIW4xKShR6C0yjXAq8HEPUkk8oLyH27A,1503 +fiona/gdal_data/jpfgdgml_WStrL.gfs,sha256=6_4tnRheyIf-c3JQwKYxSHA4i-I7ORkf3InxygGqyJA,1505 +fiona/gdal_data/ogrinfo_output.schema.json,sha256=KAg0erS175ZGsaHth8ZOga_PIzra4wWbjHC3gK1qE6s,11667 +fiona/gdal_data/ogrvrt.xsd,sha256=gmXOkcLkEBAb4rXNy5wMxXz0EHsRnYPq7bxSzA2JN-Q,26204 +fiona/gdal_data/osmconf.ini,sha256=2__o9tGkmEMQa8WXGnlOPmJQtefKxePj5HdypuDLOM8,5400 
+fiona/gdal_data/ozi_datum.csv,sha256=42eOPxRAhK6zSIwuGuIkfweoQfUBqJ5yJa6d2b-xmGg,8482 +fiona/gdal_data/ozi_ellips.csv,sha256=DINjv97cF4fVNGPXBeLhzo-9_NSdHjx3j11CgpgNdcY,1349 +fiona/gdal_data/pci_datum.txt,sha256=AvdrClRR4GgBukxN5mGsXHYAZaZfQtBsnTWcNJD_nIE,45587 +fiona/gdal_data/pci_ellips.txt,sha256=XPN-_epqhUBrVE5v1ihrjbXyP6TNo4xqHV-8tQQy17Y,6351 +fiona/gdal_data/plscenesconf.json,sha256=Vd__WXJyPouwHYD13vuYnTYDtyPD7wPk1xV4oDcQ-Gc,41372 +fiona/gdal_data/ruian_vf_ob_v1.gfs,sha256=FnvtM-za4grKUwHUGLzBE_wxXj_0iYMXSg9Uf02MpBs,46735 +fiona/gdal_data/ruian_vf_st_uvoh_v1.gfs,sha256=C8cSYgbhiXuJGE9aeBOoOfcR4mWkU-8hFmeZhv302dM,2596 +fiona/gdal_data/ruian_vf_st_v1.gfs,sha256=2SFO3C10aUwZita6Iy1yR-rKmNk1FF7eX1OtB6kIcy4,45900 +fiona/gdal_data/ruian_vf_v1.gfs,sha256=Tl4_j6ZZgk6Do3HDCmPLrMyvDcI0KZZ7cpIvTMh6FBU,67252 +fiona/gdal_data/s57agencies.csv,sha256=IS73IR7YPEOCulni7kQ3pzMXE7KYM77nZFwHMcq8QnA,13304 +fiona/gdal_data/s57attributes.csv,sha256=9Lbf4ugv6l5-5DPhNoMpKj-gm7ZnUEKOSxV4-ZpG0t4,20001 +fiona/gdal_data/s57expectedinput.csv,sha256=1bCaXEJI8-sJBqTjrGjXOAyG-BnXDerjZi5zbwWXvGw,20885 +fiona/gdal_data/s57objectclasses.csv,sha256=BqzXwn2LnMDgLbkTav9gyclc-tL1wMu9nN-KpNB9hxY,53425 +fiona/gdal_data/seed_2d.dgn,sha256=3YRl8YVp2SiYCengliEV02XQpW3gITk5UqXnoKILUnw,9216 +fiona/gdal_data/seed_3d.dgn,sha256=l8LwDubqloc7fRbl6Ji0hQ49NUSCmdfZ0359F5K1aJY,2048 +fiona/gdal_data/stateplane.csv,sha256=MMYhCHKwpMENQHOajLa-2mMKfvPrTLB6cSjibo6Dy-M,10360 +fiona/gdal_data/tms_LINZAntarticaMapTileGrid.json,sha256=QjJ8qJZPlc5-AfNPvDJ4AlCkqVJOSy14g7T37sUS5aA,4115 +fiona/gdal_data/tms_MapML_APSTILE.json,sha256=Co119fRol3Xlvp6Tv8OyfOPr8oLyd-hCQOXvK7KX5KM,6273 +fiona/gdal_data/tms_MapML_CBMTILE.json,sha256=tZaccbdXtciaiOyOkESWhCT1lGtiXtTwfSvgmNwm14A,7792 +fiona/gdal_data/tms_NZTM2000.json,sha256=irMxe-d_H9zzagr3FS4bsJZScfe-XLvEMmdxanJYOeo,5265 +fiona/gdal_data/trailer.dxf,sha256=UGuvDFk9eo_yT7zXg4hykr1tPI1QVHKeO_OLlGXRGfw,2275 +fiona/gdal_data/vdv452.xml,sha256=q1KF0BR-dDEIomwkRLhTksR4f_maMumayx4hQ2wju8k,25816 +fiona/gdal_data/vdv452.xsd,sha256=rCZLv6wROL-3FdB_OiimR6l5Jw37cV553UOiSE5K1Pg,2854 +fiona/inspector.py,sha256=QgRPh2Bt7G0drrPieJRzCSIuGCVezqy0REf1lSs0TYs,982 +fiona/io.py,sha256=s8sjmYR_BGAQSOF-wv0bbhOwVtUcztQcj8BwU_rYD-I,6262 +fiona/logutils.py,sha256=yNJKFMtdn7gNAvyIEovWJpdQzxOHx61hMoSe3bkBRvk,872 +fiona/meta.py,sha256=BBSQ9e02omv7Zv8DeOUbfrHtGRvN5dVuugUC3lrLZ2I,6807 +fiona/model.py,sha256=injqx8n3SrOEIErBh_PggSHq80T4JAmhR47z6cP2sJw,12018 +fiona/ogrext.cpython-312-x86_64-linux-gnu.so,sha256=ftCiitWN8HeL84u0MFSfjN3OlCaXNbF0qwXLWvr2go0,1035153 +fiona/ogrext1.pxd,sha256=zykNe48lVTpRbAQKCSIbJWBDm2jE_06egIb0-Vu-2Yw,11296 +fiona/ogrext2.pxd,sha256=h42rpX-lmkCJ2AuzGRpx8adf9BUd0yNLuNSXqJ-07cU,12875 +fiona/ogrext3.pxd,sha256=ibBOKu2q9Pcdi6glkWvTK7gUmSJD_Pu_wIDcM-7GCjs,12843 +fiona/path.py,sha256=oYcnDj2jeJ9qfW7g94-iTL99lqKWrtc1rRNuKd1ipMw,496 +fiona/proj_data/CH,sha256=bFPqQKLGAyW6bGuaK5wUO9FlZxw4gg7NjyPKq0omSrU,1097 +fiona/proj_data/GL27,sha256=hTdeYxXW9kTkV32t88XxV1KK0VtxXJmyh73asj1EGT0,728 +fiona/proj_data/ITRF2000,sha256=UiXDszZ6NwIeY5z9W9FSHSvV0Ay8R0zWs7OCdhWk_nI,2099 +fiona/proj_data/ITRF2008,sha256=3GmF1XIeLh0Y-smW3uXeyQJE7z4gw49yIF4y2IQq8uI,5682 +fiona/proj_data/ITRF2014,sha256=KYUwTjflfDYqYTl7YyZ9psYMN2HT5GOhH0xr0BPC19U,3489 +fiona/proj_data/deformation_model.schema.json,sha256=ugxZERFyB6k2_HOMHjHaABtFtRGTdzzgxnQhQ_NyuZ0,17671 +fiona/proj_data/nad.lst,sha256=Gijthy_RVQ9b97Wl5b_F51ALX5TJdFuEodFWsyA5kow,6385 +fiona/proj_data/nad27,sha256=C8IxkiRhrHWJIsanJR6W1-U-ZWYIsbTwa3hI-o_CVSA,19535 
+fiona/proj_data/nad83,sha256=mmJgyGgKvlIWyo_phZmPq6vBIbADKHmoIddcrjQR3JY,16593 +fiona/proj_data/other.extra,sha256=we90wKmz4fQqV2wVvENmbBNbfpNh21cno1MgTaJ5zLs,3915 +fiona/proj_data/proj.db,sha256=pZY8Nqdts-EGicz_rf1VT-sRXHmP5jUMVsXXiQcv6gY,9154560 +fiona/proj_data/proj.ini,sha256=SvO-E5l27NOlzFZWPKMhu1ASFjwLid8IB773in3StB4,2107 +fiona/proj_data/projjson.schema.json,sha256=Z_CQVWuumWUi7a1IJZfA-eG5GcpMkoisn4981BgPSbY,38418 +fiona/proj_data/triangulation.schema.json,sha256=sVKCZmZ9Tz6HByx043CycTUQFRWpASolKH_PMSzqF5o,8403 +fiona/proj_data/world,sha256=8nHNPlbHdZ0vz7u9OYcCZOuBBkFVcTwE_JLq3TCt60g,7079 +fiona/rfc3339.py,sha256=EJeAnPIXB-4c-3xNRRnS83WA0F9pOWQJ8136ZFWMh0g,3369 +fiona/schema.cpython-312-x86_64-linux-gnu.so,sha256=L-SmjnvtNuP5qJDBSkDS7oleUGu077YrnsCMwxahc58,228785 +fiona/session.py,sha256=xvDh96jYrylFiFeGNzzpcP6q3V72qwsQwq4-T53eL00,18022 +fiona/transform.py,sha256=ZiqMA-4PLo5GbZhM4dNgGoBZ_FXr6hv7BIKzk_YR1ZY,4248 +fiona/vfs.py,sha256=JVB2F4KBaCHEK2H2YM6M5qlp8M8mQlBO58nRPJiUnCc,2503 diff --git a/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/REQUESTED b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/WHEEL new file mode 100644 index 00000000..3e811828 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.1.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_17_x86_64 +Tag: cp312-cp312-manylinux2014_x86_64 + diff --git a/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/entry_points.txt b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/entry_points.txt new file mode 100644 index 00000000..3fa7d8c3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +fio = fiona.fio.main:main_group diff --git a/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/top_level.txt new file mode 100644 index 00000000..9d901a6c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona-1.10.1.dist-info/top_level.txt @@ -0,0 +1 @@ +fiona diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/libcrypto-fiona-2769ca46.so.1.1 b/.venv/lib/python3.12/site-packages/fiona.libs/libcrypto-fiona-2769ca46.so.1.1 new file mode 100755 index 00000000..a6cf1100 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libcrypto-fiona-2769ca46.so.1.1 differ diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/libcurl-fiona-1d984654.so.4.8.0 b/.venv/lib/python3.12/site-packages/fiona.libs/libcurl-fiona-1d984654.so.4.8.0 new file mode 100755 index 00000000..2f8d557a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libcurl-fiona-1d984654.so.4.8.0 differ diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/libgdal-fiona-e8f6bdb0.so.35.3.9.2 b/.venv/lib/python3.12/site-packages/fiona.libs/libgdal-fiona-e8f6bdb0.so.35.3.9.2 new file mode 100755 index 00000000..e6f508b5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libgdal-fiona-e8f6bdb0.so.35.3.9.2 differ diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/libgeos-fiona-d914d573.so.3.11.2 b/.venv/lib/python3.12/site-packages/fiona.libs/libgeos-fiona-d914d573.so.3.11.2 new file mode 100755 index 
00000000..8241dffc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libgeos-fiona-d914d573.so.3.11.2 differ diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/libgeos_c-fiona-3b303efa.so.1.17.2 b/.venv/lib/python3.12/site-packages/fiona.libs/libgeos_c-fiona-3b303efa.so.1.17.2 new file mode 100755 index 00000000..77de6ae2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libgeos_c-fiona-3b303efa.so.1.17.2 differ diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/libjpeg-fiona-320f4797.so.9.6.0 b/.venv/lib/python3.12/site-packages/fiona.libs/libjpeg-fiona-320f4797.so.9.6.0 new file mode 100755 index 00000000..0d736dba Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libjpeg-fiona-320f4797.so.9.6.0 differ diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/libjson-c-fiona-b8129721.so.5.1.0 b/.venv/lib/python3.12/site-packages/fiona.libs/libjson-c-fiona-b8129721.so.5.1.0 new file mode 100755 index 00000000..0babaf70 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libjson-c-fiona-b8129721.so.5.1.0 differ diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/liblzma-fiona-c949e524.so.5.2.2 b/.venv/lib/python3.12/site-packages/fiona.libs/liblzma-fiona-c949e524.so.5.2.2 new file mode 100755 index 00000000..56a71fde Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/liblzma-fiona-c949e524.so.5.2.2 differ diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/libnghttp2-fiona-e183d352.so.14.21.1 b/.venv/lib/python3.12/site-packages/fiona.libs/libnghttp2-fiona-e183d352.so.14.21.1 new file mode 100755 index 00000000..faa93ae6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libnghttp2-fiona-e183d352.so.14.21.1 differ diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/libpcre2-8-fiona-221be7d6.so.0.13.0 b/.venv/lib/python3.12/site-packages/fiona.libs/libpcre2-8-fiona-221be7d6.so.0.13.0 new file mode 100755 index 00000000..469a6c4d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libpcre2-8-fiona-221be7d6.so.0.13.0 differ diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/libpng16-fiona-8ebcc106.so.16.35.0 b/.venv/lib/python3.12/site-packages/fiona.libs/libpng16-fiona-8ebcc106.so.16.35.0 new file mode 100755 index 00000000..f59889e0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libpng16-fiona-8ebcc106.so.16.35.0 differ diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/libproj-fiona-3ec30893.so.25.9.4.1 b/.venv/lib/python3.12/site-packages/fiona.libs/libproj-fiona-3ec30893.so.25.9.4.1 new file mode 100755 index 00000000..3a07136f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libproj-fiona-3ec30893.so.25.9.4.1 differ diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/libsqlite3-fiona-83998bda.so.0.8.6 b/.venv/lib/python3.12/site-packages/fiona.libs/libsqlite3-fiona-83998bda.so.0.8.6 new file mode 100755 index 00000000..53987593 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libsqlite3-fiona-83998bda.so.0.8.6 differ diff --git a/.venv/lib/python3.12/site-packages/fiona.libs/libssl-fiona-6c758070.so.1.1 b/.venv/lib/python3.12/site-packages/fiona.libs/libssl-fiona-6c758070.so.1.1 new file mode 100755 index 00000000..3824e259 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libssl-fiona-6c758070.so.1.1 differ diff --git 
a/.venv/lib/python3.12/site-packages/fiona.libs/libtiff-fiona-c967de8d.so.5.7.0 b/.venv/lib/python3.12/site-packages/fiona.libs/libtiff-fiona-c967de8d.so.5.7.0 new file mode 100755 index 00000000..02ebc842 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona.libs/libtiff-fiona-c967de8d.so.5.7.0 differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__init__.py b/.venv/lib/python3.12/site-packages/fiona/__init__.py new file mode 100644 index 00000000..29711927 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/__init__.py @@ -0,0 +1,659 @@ +""" +Fiona is OGR's neat, nimble API. + +Fiona provides a minimal, uncomplicated Python interface to the open +source GIS community's most trusted geodata access library and +integrates readily with other Python GIS packages such as pyproj, Rtree +and Shapely. + +A Fiona feature is a Python mapping inspired by the GeoJSON format. It +has ``id``, ``geometry``, and ``properties`` attributes. The value of +``id`` is a string identifier unique within the feature's parent +collection. The ``geometry`` is another mapping with ``type`` and +``coordinates`` keys. The ``properties`` of a feature is another mapping +corresponding to its attribute table. + +Features are read and written using the ``Collection`` class. These +``Collection`` objects are a lot like Python ``file`` objects. A +``Collection`` opened in reading mode serves as an iterator over +features. One opened in a writing mode provides a ``write`` method. + +""" + +from contextlib import ExitStack +import glob +import logging +import os +from pathlib import Path +import platform +import warnings + +if platform.system() == "Windows": + _whl_dir = os.path.join(os.path.dirname(__file__), ".libs") + if os.path.exists(_whl_dir): + os.add_dll_directory(_whl_dir) + else: + if "PATH" in os.environ: + for p in os.environ["PATH"].split(os.pathsep): + if glob.glob(os.path.join(p, "gdal*.dll")): + os.add_dll_directory(os.path.abspath(p)) + + +from fiona._env import ( + calc_gdal_version_num, + get_gdal_release_name, + get_gdal_version_num, + get_gdal_version_tuple, +) +from fiona._env import driver_count +from fiona._path import _ParsedPath, _UnparsedPath, _parse_path, _vsi_path +from fiona._show_versions import show_versions +from fiona._vsiopener import _opener_registration +from fiona.collection import BytesCollection, Collection +from fiona.drvsupport import supported_drivers +from fiona.env import ensure_env_with_credentials, Env +from fiona.errors import FionaDeprecationWarning +from fiona.io import MemoryFile +from fiona.model import Feature, Geometry, Properties +from fiona.ogrext import _bounds, _listdir, _listlayers, _remove, _remove_layer +from fiona.schema import FIELD_TYPES_MAP, NAMED_FIELD_TYPES +from fiona.vfs import parse_paths as vfs_parse_paths + +# These modules are imported by fiona.ogrext, but are also import here to +# help tools like cx_Freeze find them automatically +from fiona import _geometry, _err, rfc3339 +import uuid + + +__all__ = [ + "Feature", + "Geometry", + "Properties", + "bounds", + "listlayers", + "listdir", + "open", + "prop_type", + "prop_width", + "remove", +] + +__version__ = "1.10.1" +__gdal_version__ = get_gdal_release_name() + +gdal_version = get_gdal_version_tuple() + +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) + + +@ensure_env_with_credentials +def open( + fp, + mode="r", + driver=None, + schema=None, + crs=None, + encoding=None, + layer=None, + vfs=None, + enabled_drivers=None, + crs_wkt=None, + 
ignore_fields=None, + ignore_geometry=False, + include_fields=None, + wkt_version=None, + allow_unsupported_drivers=False, + opener=None, + **kwargs +): + """Open a collection for read, append, or write + + In write mode, a driver name such as "ESRI Shapefile" or "GPX" (see + OGR docs or ``ogr2ogr --help`` on the command line) and a schema + mapping such as: + + {'geometry': 'Point', + 'properties': [('class', 'int'), ('label', 'str'), + ('value', 'float')]} + + must be provided. If a particular ordering of properties ("fields" + in GIS parlance) in the written file is desired, a list of (key, + value) pairs as above or an ordered dict is required. If no ordering + is needed, a standard dict will suffice. + + A coordinate reference system for collections in write mode can be + defined by the ``crs`` parameter. It takes Proj4 style mappings like + + {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84', + 'no_defs': True} + + shorthand strings like + + EPSG:4326 + + or WKT representations of coordinate reference systems. + + The drivers used by Fiona will try to detect the encoding of data + files. If they fail, you may provide the proper ``encoding``, such + as 'Windows-1252' for the original Natural Earth datasets. + + When the provided path is to a file containing multiple named layers + of data, a layer can be singled out by ``layer``. + + The drivers enabled for opening datasets may be restricted to those + listed in the ``enabled_drivers`` parameter. This and the ``driver`` + parameter afford much control over opening of files. + + # Trying only the GeoJSON driver when opening to read, the + # following raises ``DataIOError``: + fiona.open('example.shp', driver='GeoJSON') + + # Trying first the GeoJSON driver, then the Shapefile driver, + # the following succeeds: + fiona.open( + 'example.shp', enabled_drivers=['GeoJSON', 'ESRI Shapefile']) + + Some format drivers permit low-level filtering of fields. Specific + fields can be omitted by using the ``ignore_fields`` parameter. + Specific fields can be selected, excluding all others, by using the + ``include_fields`` parameter. + + Parameters + ---------- + fp : URI (str or pathlib.Path), or file-like object + A dataset resource identifier or file object. + mode : str + One of 'r', to read (the default); 'a', to append; or 'w', to + write. + driver : str + In 'w' mode a format driver name is required. In 'r' or 'a' + mode this parameter has no effect. + schema : dict + Required in 'w' mode, has no effect in 'r' or 'a' mode. + crs : str or dict + Required in 'w' mode, has no effect in 'r' or 'a' mode. + encoding : str + Name of the encoding used to encode or decode the dataset. + layer : int or str + The integer index or name of a layer in a multi-layer dataset. + vfs : str + This is a deprecated parameter. A URI scheme such as "zip://" + should be used instead. + enabled_drivers : list + An optional list of driver names to use when opening a + collection. + crs_wkt : str + An optional WKT representation of a coordinate reference + system. + ignore_fields : list[str], optional + List of field names to ignore on load. + include_fields : list[str], optional + List of a subset of field names to include on load. + ignore_geometry : bool + Ignore the geometry on load. + wkt_version : fiona.enums.WktVersion or str, optional + Version to use for the CRS WKT. + Defaults to GDAL's default (WKT1_GDAL for GDAL 3). + allow_unsupported_drivers : bool + If set to True, do not limit GDAL drivers to the set of drivers known to work.
+ opener : callable or obj, optional + A custom dataset opener which can serve GDAL's virtual + filesystem machinery via Python file-like objects. The + underlying file-like object is obtained by calling *opener* with + (*fp*, *mode*) or (*fp*, *mode* + "b") depending on the format + driver's native mode. *opener* must return a Python file-like + object that provides read, seek, tell, and close methods. Note: + only one opener at a time per fp, mode pair is allowed. + + Alternatively, opener may be a filesystem object from a package + like fsspec that provides the following methods: isdir(), + isfile(), ls(), mtime(), open(), and size(). The exact interface + is defined in the fiona._vsiopener._AbstractOpener class. + kwargs : mapping + Other driver-specific parameters that will be interpreted by + the OGR library as layer creation or opening options. + + Returns + ------- + Collection + + Raises + ------ + DriverError + When the selected format driver cannot provide requested + capabilities such as ignoring fields. + + """ + if mode == "r" and hasattr(fp, "read"): + memfile = MemoryFile(fp.read()) + colxn = memfile.open( + driver=driver, + crs=crs, + schema=schema, + layer=layer, + encoding=encoding, + ignore_fields=ignore_fields, + include_fields=include_fields, + ignore_geometry=ignore_geometry, + wkt_version=wkt_version, + enabled_drivers=enabled_drivers, + allow_unsupported_drivers=allow_unsupported_drivers, + **kwargs + ) + colxn._env.enter_context(memfile) + return colxn + + elif mode == "w" and hasattr(fp, "write"): + memfile = MemoryFile() + colxn = memfile.open( + driver=driver, + crs=crs, + schema=schema, + layer=layer, + encoding=encoding, + ignore_fields=ignore_fields, + include_fields=include_fields, + ignore_geometry=ignore_geometry, + wkt_version=wkt_version, + enabled_drivers=enabled_drivers, + allow_unsupported_drivers=allow_unsupported_drivers, + crs_wkt=crs_wkt, + **kwargs + ) + colxn._env.enter_context(memfile) + + # For the writing case we push an extra callback onto the + # ExitStack. It ensures that the MemoryFile's contents are + # copied to the open file object. + def func(*args, **kwds): + memfile.seek(0) + fp.write(memfile.read()) + + colxn._env.callback(func) + return colxn + + elif mode == "a" and hasattr(fp, "write"): + raise OSError( + "Append mode is not supported for datasets in a Python file object." + ) + + # TODO: test for a shared base class or abstract type. + elif isinstance(fp, MemoryFile): + if mode.startswith("r"): + colxn = fp.open( + driver=driver, + allow_unsupported_drivers=allow_unsupported_drivers, + **kwargs + ) + + # Note: FilePath does not support writing and an exception will + # result from this. + elif mode.startswith("w"): + colxn = fp.open( + driver=driver, + crs=crs, + schema=schema, + layer=layer, + encoding=encoding, + ignore_fields=ignore_fields, + include_fields=include_fields, + ignore_geometry=ignore_geometry, + wkt_version=wkt_version, + enabled_drivers=enabled_drivers, + allow_unsupported_drivers=allow_unsupported_drivers, + crs_wkt=crs_wkt, + **kwargs + ) + return colxn + + # At this point, the fp argument is a string or path-like object + # which can be converted to a string. 
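+ # For example: "example.shp", pathlib.Path("example.shp"), or an
+ # archive URI like "zip://data.zip!example.shp" (hypothetical names).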
+ else: + stack = ExitStack() + + if hasattr(fp, "path") and hasattr(fp, "fs"): + log.debug("Detected fp is an OpenFile: fp=%r", fp) + raw_dataset_path = fp.path + opener = fp.fs.open + else: + raw_dataset_path = os.fspath(fp) + + try: + if opener: + log.debug("Registering opener: raw_dataset_path=%r, opener=%r", raw_dataset_path, opener) + vsi_path_ctx = _opener_registration(raw_dataset_path, opener) + registered_vsi_path = stack.enter_context(vsi_path_ctx) + log.debug("Registered vsi path: registered_vsi_path=%r", registered_vsi_path) + path = _UnparsedPath(registered_vsi_path) + else: + if vfs: + warnings.warn( + "The vfs keyword argument is deprecated and will be removed in version 2.0.0. Instead, pass a URL that uses a zip or tar (for example) scheme.", + FionaDeprecationWarning, + stacklevel=2, + ) + path, scheme, archive = vfs_parse_paths(fp, vfs=vfs) + path = _ParsedPath(path, archive, scheme) + else: + path = _parse_path(fp) + + if mode in ("a", "r"): + colxn = Collection( + path, + mode, + driver=driver, + encoding=encoding, + layer=layer, + ignore_fields=ignore_fields, + include_fields=include_fields, + ignore_geometry=ignore_geometry, + wkt_version=wkt_version, + enabled_drivers=enabled_drivers, + allow_unsupported_drivers=allow_unsupported_drivers, + **kwargs + ) + elif mode == "w": + colxn = Collection( + path, + mode, + crs=crs, + driver=driver, + schema=schema, + encoding=encoding, + layer=layer, + ignore_fields=ignore_fields, + include_fields=include_fields, + ignore_geometry=ignore_geometry, + wkt_version=wkt_version, + enabled_drivers=enabled_drivers, + crs_wkt=crs_wkt, + allow_unsupported_drivers=allow_unsupported_drivers, + **kwargs + ) + else: + raise ValueError("mode string must be one of {'r', 'w', 'a'}") + + except Exception: + stack.close() + raise + + colxn._env = stack + return colxn + + +collection = open + + +@ensure_env_with_credentials +def remove(path_or_collection, driver=None, layer=None, opener=None): + """Delete an OGR data source or one of its layers. + + If no layer is specified, the entire dataset and all of its layers + and associated sidecar files will be deleted. + + Parameters + ---------- + path_or_collection : str, pathlib.Path, or Collection + The target Collection or its path. + opener : callable or obj, optional + A custom dataset opener which can serve GDAL's virtual + filesystem machinery via Python file-like objects. The + underlying file-like object is obtained by calling *opener* with + (*fp*, *mode*) or (*fp*, *mode* + "b") depending on the format + driver's native mode. *opener* must return a Python file-like + object that provides read, seek, tell, and close methods. Note: + only one opener at a time per fp, mode pair is allowed. + + Alternatively, opener may be a filesystem object from a package + like fsspec that provides the following methods: isdir(), + isfile(), ls(), mtime(), open(), and size(). The exact interface + is defined in the fiona._vsiopener._AbstractOpener class. + driver : str, optional + The name of a driver to be used for deletion, optional. Can + usually be detected. + layer : str or int, optional + The name or index of a specific layer. + + Returns + ------- + None + + Raises + ------ + DatasetDeleteError + If the data source cannot be deleted. 
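+
+ Examples
+ --------
+ Removing a hypothetical local shapefile (illustrative only; the
+ file name is made up):
+
+ >>> fiona.remove("example.shp") # doctest: +SKIP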
+ + """ + if isinstance(path_or_collection, Collection): + collection = path_or_collection + raw_dataset_path = collection.path + driver = collection.driver + collection.close() + + else: + fp = path_or_collection + if hasattr(fp, "path") and hasattr(fp, "fs"): + log.debug("Detected fp is an OpenFile: fp=%r", fp) + raw_dataset_path = fp.path + opener = fp.fs.open + else: + raw_dataset_path = os.fspath(fp) + + if opener: + log.debug("Registering opener: raw_dataset_path=%r, opener=%r", raw_dataset_path, opener) + with _opener_registration(raw_dataset_path, opener) as registered_vsi_path: + log.debug("Registered vsi path: registered_vsi_path=%r", registered_vsi_path) + if layer is None: + _remove(registered_vsi_path, driver) + else: + _remove_layer(registered_vsi_path, layer, driver) + else: + pobj = _parse_path(raw_dataset_path) + if layer is None: + _remove(_vsi_path(pobj), driver) + else: + _remove_layer(_vsi_path(pobj), layer, driver) + + +@ensure_env_with_credentials +def listdir(fp, opener=None): + """Lists the datasets in a directory or archive file. + + Archive files must be prefixed like "zip://" or "tar://". + + Parameters + ---------- + fp : str or pathlib.Path + Directory or archive path. + opener : callable or obj, optional + A custom dataset opener which can serve GDAL's virtual + filesystem machinery via Python file-like objects. The + underlying file-like object is obtained by calling *opener* with + (*fp*, *mode*) or (*fp*, *mode* + "b") depending on the format + driver's native mode. *opener* must return a Python file-like + object that provides read, seek, tell, and close methods. Note: + only one opener at a time per fp, mode pair is allowed. + + Alternatively, opener may be a filesystem object from a package + like fsspec that provides the following methods: isdir(), + isfile(), ls(), mtime(), open(), and size(). The exact interface + is defined in the fiona._vsiopener._AbstractOpener class. + + Returns + ------- + list of str + A list of datasets. + + Raises + ------ + TypeError + If the input is not a str or Path. + + """ + if hasattr(fp, "path") and hasattr(fp, "fs"): + log.debug("Detected fp is an OpenFile: fp=%r", fp) + raw_dataset_path = fp.path + opener = fp.fs.open + else: + raw_dataset_path = os.fspath(fp) + + if opener: + log.debug("Registering opener: raw_dataset_path=%r, opener=%r", raw_dataset_path, opener) + with _opener_registration(raw_dataset_path, opener) as registered_vsi_path: + log.debug("Registered vsi path: registered_vsi_path=%r", registered_vsi_path) + return _listdir(registered_vsi_path) + else: + pobj = _parse_path(raw_dataset_path) + return _listdir(_vsi_path(pobj)) + + +@ensure_env_with_credentials +def listlayers(fp, opener=None, vfs=None, **kwargs): + """Lists the layers (collections) in a dataset. + + Archive files must be prefixed like "zip://" or "tar://". + + Parameters + ---------- + fp : str, pathlib.Path, or file-like object + A dataset identifier or file object containing a dataset. + opener : callable or obj, optional + A custom dataset opener which can serve GDAL's virtual + filesystem machinery via Python file-like objects. The + underlying file-like object is obtained by calling *opener* with + (*fp*, *mode*) or (*fp*, *mode* + "b") depending on the format + driver's native mode. *opener* must return a Python file-like + object that provides read, seek, tell, and close methods. Note: + only one opener at a time per fp, mode pair is allowed. 
+ + Alternatively, opener may be a filesystem object from a package + like fsspec that provides the following methods: isdir(), + isfile(), ls(), mtime(), open(), and size(). The exact interface + is defined in the fiona._vsiopener._AbstractOpener class. + vfs : str + This is a deprecated parameter. A URI scheme such as "zip://" + should be used instead. + kwargs : dict + Dataset opening options and other keyword args. + + Returns + ------- + list of str + A list of layer name strings. + + Raises + ------ + TypeError + If the input is not a str, Path, or file object. + + """ + if vfs and not isinstance(vfs, str): + raise TypeError(f"invalid vfs: {vfs!r}") + + if hasattr(fp, 'read'): + with MemoryFile(fp.read()) as memfile: + return _listlayers(memfile.name, **kwargs) + + if hasattr(fp, "path") and hasattr(fp, "fs"): + log.debug("Detected fp is an OpenFile: fp=%r", fp) + raw_dataset_path = fp.path + opener = fp.fs.open + else: + raw_dataset_path = os.fspath(fp) + + if opener: + log.debug("Registering opener: raw_dataset_path=%r, opener=%r", raw_dataset_path, opener) + with _opener_registration(raw_dataset_path, opener) as registered_vsi_path: + log.debug("Registered vsi path: registered_vsi_path=%r", registered_vsi_path) + return _listlayers(registered_vsi_path, **kwargs) + else: + if vfs: + warnings.warn( + "The vfs keyword argument is deprecated and will be removed in 2.0. " + "Instead, pass a URL that uses a zip or tar (for example) scheme.", + FionaDeprecationWarning, + stacklevel=2, + ) + pobj_vfs = _parse_path(vfs) + pobj_path = _parse_path(raw_dataset_path) + pobj = _ParsedPath(pobj_path.path, pobj_vfs.path, pobj_vfs.scheme) + else: + pobj = _parse_path(raw_dataset_path) + + return _listlayers(_vsi_path(pobj), **kwargs) + + +def prop_width(val): + """Returns the width of a str type property. + + Undefined for non-str properties. + + Parameters + ---------- + val : str + A type:width string from a collection schema. + + Returns + ------- + int or None + + Examples + -------- + >>> prop_width('str:25') + 25 + >>> prop_width('str') + 80 + + """ + if val.startswith('str'): + return int((val.split(":")[1:] or ["80"])[0]) + return None + + +def prop_type(text): + """Returns a schema property's proper Python type. + + Parameters + ---------- + text : str + A type name, with or without width. + + Returns + ------- + obj + A Python class. + + Examples + -------- + >>> prop_type('int') + <class 'int'> + >>> prop_type('str:25') + <class 'str'> + + """ + key = text.split(':')[0] + return NAMED_FIELD_TYPES[key].type + + +def drivers(*args, **kwargs): + """Returns a context manager with registered drivers. + + DEPRECATED + """ + warnings.warn("Use fiona.Env() instead.", FionaDeprecationWarning, stacklevel=2) + + if driver_count == 0: + log.debug("Creating a chief GDALEnv in drivers()") + return Env(**kwargs) + else: + log.debug("Creating a not-responsible GDALEnv in drivers()") + return Env(**kwargs) + + +def bounds(ob): + """Returns a (minx, miny, maxx, maxy) bounding box.
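+
+ For a point, the min and max values coincide. A sketch of the
+ expected output (hypothetical coordinates; output form assumed):
+
+ >>> bounds({'type': 'Point', 'coordinates': [1.0, 2.0]}) # doctest: +SKIP
+ (1.0, 2.0, 1.0, 2.0)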
+ + The ``ob`` may be a feature record or geometry.""" + geom = ob.get('geometry') or ob + return _bounds(geom) diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..2a67a43b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/_path.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/_path.cpython-312.pyc new file mode 100644 index 00000000..712f7646 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/_path.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/_show_versions.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/_show_versions.cpython-312.pyc new file mode 100644 index 00000000..62fa904a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/_show_versions.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/abc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/abc.cpython-312.pyc new file mode 100644 index 00000000..91d8fce1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/abc.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/collection.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/collection.cpython-312.pyc new file mode 100644 index 00000000..4ce9f9ae Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/collection.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/compat.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/compat.cpython-312.pyc new file mode 100644 index 00000000..dcb07f3d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/compat.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/drvsupport.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/drvsupport.cpython-312.pyc new file mode 100644 index 00000000..85e5ad9f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/drvsupport.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/enums.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/enums.cpython-312.pyc new file mode 100644 index 00000000..1129902e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/enums.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/env.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/env.cpython-312.pyc new file mode 100644 index 00000000..26ea8b12 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/env.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/errors.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/errors.cpython-312.pyc new file mode 100644 index 00000000..7313823f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/errors.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/features.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/fiona/__pycache__/features.cpython-312.pyc new file mode 100644 index 00000000..c71689bb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/features.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/inspector.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/inspector.cpython-312.pyc new file mode 100644 index 00000000..3c6b134b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/inspector.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/io.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/io.cpython-312.pyc new file mode 100644 index 00000000..af278d7e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/io.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/logutils.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/logutils.cpython-312.pyc new file mode 100644 index 00000000..4b85f8a3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/logutils.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/meta.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/meta.cpython-312.pyc new file mode 100644 index 00000000..9cb18c3f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/meta.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/model.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/model.cpython-312.pyc new file mode 100644 index 00000000..6c579686 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/model.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/path.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/path.cpython-312.pyc new file mode 100644 index 00000000..781e0086 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/path.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/rfc3339.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/rfc3339.cpython-312.pyc new file mode 100644 index 00000000..1c187669 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/rfc3339.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/session.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/session.cpython-312.pyc new file mode 100644 index 00000000..fe229ec9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/session.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/transform.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/transform.cpython-312.pyc new file mode 100644 index 00000000..27ff8936 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/transform.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/__pycache__/vfs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/__pycache__/vfs.cpython-312.pyc new file mode 100644 index 00000000..09ecceb7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/__pycache__/vfs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/_cpl.pxd 
b/.venv/lib/python3.12/site-packages/fiona/_cpl.pxd new file mode 100644 index 00000000..609ee410 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/_cpl.pxd @@ -0,0 +1,24 @@ +# Cross-platform API functions. + +cdef extern from "cpl_conv.h": + void * CPLMalloc (size_t) + void CPLFree (void *ptr) + void CPLSetThreadLocalConfigOption (char *key, char *val) + const char *CPLGetConfigOption (char *, char *) + +cdef extern from "cpl_vsi.h": + ctypedef struct VSILFILE: + pass + int VSIFCloseL (VSILFILE *) + VSILFILE * VSIFileFromMemBuffer (const char * filename, + unsigned char * data, + int data_len, + int take_ownership) + int VSIUnlink (const char * pathname) + +ctypedef int OGRErr +ctypedef struct OGREnvelope: + double MinX + double MaxX + double MinY + double MaxY diff --git a/.venv/lib/python3.12/site-packages/fiona/_csl.pxd b/.venv/lib/python3.12/site-packages/fiona/_csl.pxd new file mode 100644 index 00000000..e4f3561e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/_csl.pxd @@ -0,0 +1,6 @@ +# String API functions. + +cdef extern from "cpl_string.h": + char ** CSLAddNameValue (char **list, char *name, char *value) + char ** CSLSetNameValue (char **list, char *name, char *value) + void CSLDestroy (char **list) diff --git a/.venv/lib/python3.12/site-packages/fiona/_env.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/fiona/_env.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 00000000..a1ece8c6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/_env.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/fiona/_env.pxd b/.venv/lib/python3.12/site-packages/fiona/_env.pxd new file mode 100644 index 00000000..4992f9c1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/_env.pxd @@ -0,0 +1,12 @@ +include "gdal.pxi" + + +cdef class ConfigEnv(object): + cdef public object options + + +cdef class GDALEnv(ConfigEnv): + cdef public object _have_registered_drivers + + +cdef _safe_osr_release(OGRSpatialReferenceH srs) diff --git a/.venv/lib/python3.12/site-packages/fiona/_err.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/fiona/_err.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 00000000..051a5436 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/_err.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/fiona/_err.pxd b/.venv/lib/python3.12/site-packages/fiona/_err.pxd new file mode 100644 index 00000000..28229751 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/_err.pxd @@ -0,0 +1,14 @@ +include "gdal.pxi" + +from libc.stdio cimport * + +cdef get_last_error_msg() +cdef int exc_wrap_int(int retval) except -1 +cdef OGRErr exc_wrap_ogrerr(OGRErr retval) except -1 +cdef void *exc_wrap_pointer(void *ptr) except NULL +cdef VSILFILE *exc_wrap_vsilfile(VSILFILE *f) except NULL + +cdef class StackChecker: + cdef object error_stack + cdef int exc_wrap_int(self, int retval) except -1 + cdef void *exc_wrap_pointer(self, void *ptr) except NULL diff --git a/.venv/lib/python3.12/site-packages/fiona/_geometry.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/fiona/_geometry.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 00000000..685c8808 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/_geometry.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/fiona/_geometry.pxd 
b/.venv/lib/python3.12/site-packages/fiona/_geometry.pxd new file mode 100644 index 00000000..82ba5e9c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/_geometry.pxd @@ -0,0 +1,144 @@ +# Geometry API functions. + +ctypedef int OGRErr + + +cdef extern from "ogr_core.h": + ctypedef enum OGRwkbGeometryType: + wkbUnknown + wkbPoint + wkbLineString + wkbPolygon + wkbMultiPoint + wkbMultiLineString + wkbMultiPolygon + wkbGeometryCollection + wkbCircularString + wkbCompoundCurve + wkbCurvePolygon + wkbMultiCurve + wkbMultiSurface + wkbCurve + wkbSurface + wkbPolyhedralSurface + wkbTIN + wkbTriangle + wkbNone + wkbLinearRing + wkbCircularStringZ + wkbCompoundCurveZ + wkbCurvePolygonZ + wkbMultiCurveZ + wkbMultiSurfaceZ + wkbCurveZ + wkbSurfaceZ + wkbPolyhedralSurfaceZ + wkbTINZ + wkbTriangleZ + wkbPointM + wkbLineStringM + wkbPolygonM + wkbMultiPointM + wkbMultiLineStringM + wkbMultiPolygonM + wkbGeometryCollectionM + wkbCircularStringM + wkbCompoundCurveM + wkbCurvePolygonM + wkbMultiCurveM + wkbMultiSurfaceM + wkbCurveM + wkbSurfaceM + wkbPolyhedralSurfaceM + wkbTINM + wkbTriangleM + wkbPointZM + wkbLineStringZM + wkbPolygonZM + wkbMultiPointZM + wkbMultiLineStringZM + wkbMultiPolygonZM + wkbGeometryCollectionZM + wkbCircularStringZM + wkbCompoundCurveZM + wkbCurvePolygonZM + wkbMultiCurveZM + wkbMultiSurfaceZM + wkbCurveZM + wkbSurfaceZM + wkbPolyhedralSurfaceZM + wkbTINZM + wkbTriangleZM + wkbPoint25D + wkbLineString25D + wkbPolygon25D + wkbMultiPoint25D + wkbMultiLineString25D + wkbMultiPolygon25D + wkbGeometryCollection25D + + +ctypedef struct OGREnvelope: + double MinX + double MaxX + double MinY + double MaxY + + +cdef extern from "ogr_api.h": + OGRErr OGR_G_AddGeometryDirectly (void *geometry, void *part) + void OGR_G_AddPoint (void *geometry, double x, double y, double z) + void OGR_G_AddPoint_2D (void *geometry, double x, double y) + void OGR_G_CloseRings (void *geometry) + void * OGR_G_CreateGeometry (OGRwkbGeometryType wkbtypecode) + void OGR_G_DestroyGeometry (void *geometry) + unsigned char * OGR_G_ExportToJson (void *geometry) + void OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) + int OGR_G_GetCoordinateDimension (void *geometry) + int OGR_G_GetGeometryCount (void *geometry) + unsigned char * OGR_G_GetGeometryName (void *geometry) + int OGR_G_GetGeometryType (void *geometry) + void * OGR_G_GetGeometryRef (void *geometry, int n) + int OGR_G_GetPointCount (void *geometry) + double OGR_G_GetX (void *geometry, int n) + double OGR_G_GetY (void *geometry, int n) + double OGR_G_GetZ (void *geometry, int n) + OGRErr OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) + int OGR_G_WkbSize (void *geometry) + + +cdef class GeomBuilder: + cdef object ndims + cdef list _buildCoords(self, void *geom) + cdef dict _buildPoint(self, void *geom) + cdef dict _buildLineString(self, void *geom) + cdef dict _buildLinearRing(self, void *geom) + cdef list _buildParts(self, void *geom) + cdef dict _buildPolygon(self, void *geom) + cdef dict _buildMultiPoint(self, void *geom) + cdef dict _buildMultiLineString(self, void *geom) + cdef dict _buildMultiPolygon(self, void *geom) + cdef dict _buildGeometryCollection(self, void *geom) + cdef object build_from_feature(self, void *feature) + cdef object build(self, void *geom) + cpdef build_wkb(self, object wkb) + + +cdef class OGRGeomBuilder: + cdef void * _createOgrGeometry(self, int geom_type) except NULL + cdef _addPointToGeometry(self, void *cogr_geometry, object coordinate) + cdef void * _buildPoint(self, 
object coordinates) except NULL + cdef void * _buildLineString(self, object coordinates) except NULL + cdef void * _buildLinearRing(self, object coordinates) except NULL + cdef void * _buildPolygon(self, object coordinates) except NULL + cdef void * _buildMultiPoint(self, object coordinates) except NULL + cdef void * _buildMultiLineString(self, object coordinates) except NULL + cdef void * _buildMultiPolygon(self, object coordinates) except NULL + cdef void * _buildGeometryCollection(self, object coordinates) except NULL + cdef void * build(self, object geom) except NULL + + +cdef unsigned int geometry_type_code(object name) except? 9999 +cdef object normalize_geometry_type_code(unsigned int code) +cdef unsigned int base_geometry_type_code(unsigned int code) + diff --git a/.venv/lib/python3.12/site-packages/fiona/_path.py b/.venv/lib/python3.12/site-packages/fiona/_path.py new file mode 100644 index 00000000..24dbf860 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/_path.py @@ -0,0 +1,215 @@ +"""Dataset paths, identifiers, and filenames + +Note: this module is not part of Rasterio's API. It is for internal use +only. + +""" + +import os +import pathlib +import re +import sys +from urllib.parse import urlparse + +import attr + +from fiona.errors import PathError + +# Supported URI schemes and their mapping to GDAL's VSI suffix. +# TODO: extend for other cloud platforms. +SCHEMES = { + 'ftp': 'curl', + 'gzip': 'gzip', + 'http': 'curl', + 'https': 'curl', + 's3': 's3', + 'tar': 'tar', + 'zip': 'zip', + 'file': 'file', + 'oss': 'oss', + 'gs': 'gs', + 'az': 'az', +} + +ARCHIVESCHEMES = set +CURLSCHEMES = set([k for k, v in SCHEMES.items() if v == 'curl']) + +# TODO: extend for other cloud platforms. +REMOTESCHEMES = set([k for k, v in SCHEMES.items() if v in ('curl', 's3', 'oss', 'gs', 'az',)]) + + +class _Path: + """Base class for dataset paths""" + + def as_vsi(self): + return _vsi_path(self) + + +@attr.s(slots=True) +class _ParsedPath(_Path): + """Result of parsing a dataset URI/Path + + Attributes + ---------- + path : str + Parsed path. Includes the hostname and query string in the case + of a URI. + archive : str + Parsed archive path. + scheme : str + URI scheme such as "https" or "zip+s3". + """ + path = attr.ib() + archive = attr.ib() + scheme = attr.ib() + + @classmethod + def from_uri(cls, uri): + parts = urlparse(uri) + if sys.platform == "win32" and re.match(r"^[a-zA-Z]\:", parts.netloc): + parsed_path = f"{parts.netloc}{parts.path}" + parsed_netloc = None + else: + parsed_path = parts.path + parsed_netloc = parts.netloc + + path = parsed_path + scheme = parts.scheme or None + + if parts.query: + path += "?" 
+ parts.query + + if scheme and scheme.startswith(("gzip", "tar", "zip")): + path_parts = path.split('!') + path = path_parts.pop() if path_parts else None + archive = path_parts.pop() if path_parts else None + else: + archive = None + + if scheme and parsed_netloc: + if archive: + archive = parsed_netloc + archive + else: + path = parsed_netloc + path + + return _ParsedPath(path, archive, scheme) + + @property + def name(self): + """The parsed path's original URI""" + if not self.scheme: + return self.path + elif self.archive: + return "{}://{}!{}".format(self.scheme, self.archive, self.path) + else: + return "{}://{}".format(self.scheme, self.path) + + @property + def is_remote(self): + """Test if the path is a remote, network URI""" + return bool(self.scheme) and self.scheme.split("+")[-1] in REMOTESCHEMES + + @property + def is_local(self): + """Test if the path is a local URI""" + return not self.scheme or (self.scheme and self.scheme.split('+')[-1] not in REMOTESCHEMES) + + +@attr.s(slots=True) +class _UnparsedPath(_Path): + """Encapsulates legacy GDAL filenames + + Attributes + ---------- + path : str + The legacy GDAL filename. + """ + path = attr.ib() + + @property + def name(self): + """The unparsed path's original path""" + return self.path + + +def _parse_path(path): + """Parse a dataset's identifier or path into its parts + + Parameters + ---------- + path : str or path-like object + The path to be parsed. + + Returns + ------- + ParsedPath or UnparsedPath + + Notes + ----- + When legacy GDAL filenames are encountered, they will be returned + in a UnparsedPath. + + """ + if isinstance(path, _Path): + return path + elif isinstance(path, pathlib.PurePath): + return _ParsedPath(os.fspath(path), None, None) + elif isinstance(path, str): + if sys.platform == "win32" and re.match(r"^[a-zA-Z]\:", path): + return _ParsedPath(path, None, None) + elif path.startswith('/vsi'): + return _UnparsedPath(path) + else: + parts = urlparse(path) + else: + raise PathError("invalid path '{!r}'".format(path)) + + # if the scheme is not one of Rasterio's supported schemes, we + # return an UnparsedPath. + if parts.scheme: + if all(p in SCHEMES for p in parts.scheme.split('+')): + return _ParsedPath.from_uri(path) + + return _UnparsedPath(path) + + +def _vsi_path(path): + """Convert a parsed path to a GDAL VSI path + + Parameters + ---------- + path : Path + A ParsedPath or UnparsedPath object. 
+ + Returns + ------- + str + + """ + if isinstance(path, _UnparsedPath): + return path.path + + elif isinstance(path, _ParsedPath): + + if not path.scheme: + return path.path + + else: + if path.scheme.split('+')[-1] in CURLSCHEMES: + suffix = '{}://'.format(path.scheme.split('+')[-1]) + else: + suffix = '' + + prefix = '/'.join('vsi{0}'.format(SCHEMES[p]) for p in path.scheme.split('+') if p != 'file') + + if prefix: + if path.archive: + result = '/{}/{}{}/{}'.format(prefix, suffix, path.archive, path.path.lstrip('/')) + else: + result = '/{}/{}{}'.format(prefix, suffix, path.path) + else: + result = path.path + return result + + else: + raise ValueError("path must be a ParsedPath or UnparsedPath object") diff --git a/.venv/lib/python3.12/site-packages/fiona/_show_versions.py b/.venv/lib/python3.12/site-packages/fiona/_show_versions.py new file mode 100644 index 00000000..ecbfc34a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/_show_versions.py @@ -0,0 +1,19 @@ +import platform +import sys + +import fiona +from fiona._env import get_gdal_release_name, get_proj_version_tuple + + +def show_versions(): + """ + Prints information useful for bug reports + """ + + print("Fiona version:", fiona.__version__) + print("GDAL version:", get_gdal_release_name()) + print("PROJ version:", ".".join(map(str, get_proj_version_tuple()))) + print() + print("OS:", platform.system(), platform.release()) + print("Python:", platform.python_version()) + print("Python executable:", sys.executable) diff --git a/.venv/lib/python3.12/site-packages/fiona/_transform.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/fiona/_transform.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 00000000..51305f95 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/_transform.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/fiona/_vendor/__pycache__/snuggs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/_vendor/__pycache__/snuggs.cpython-312.pyc new file mode 100644 index 00000000..4c5045c0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/_vendor/__pycache__/snuggs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/_vendor/munch/__init__.py b/.venv/lib/python3.12/site-packages/fiona/_vendor/munch/__init__.py new file mode 100644 index 00000000..f43e1292 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/_vendor/munch/__init__.py @@ -0,0 +1,534 @@ +""" Munch is a subclass of dict with attribute-style access. + + >>> b = Munch() + >>> b.hello = 'world' + >>> b.hello + 'world' + >>> b['hello'] += "!" + >>> b.hello + 'world!' + >>> b.foo = Munch(lol=True) + >>> b.foo.lol + True + >>> b.foo is b['foo'] + True + + It is safe to import * from this module: + + __all__ = ('Munch', 'munchify','unmunchify') + + un/munchify provide dictionary conversion; Munches can also be + converted via Munch.to/fromDict(). +""" + +from collections.abc import Mapping + +__version__ = "2.5.0" +VERSION = tuple(map(int, __version__.split('.')[:3])) + +__all__ = ('Munch', 'munchify', 'DefaultMunch', 'DefaultFactoryMunch', 'unmunchify') + + + +class Munch(dict): + """ A dictionary that provides attribute-style access. + + >>> b = Munch() + >>> b.hello = 'world' + >>> b.hello + 'world' + >>> b['hello'] += "!" + >>> b.hello + 'world!' + >>> b.foo = Munch(lol=True) + >>> b.foo.lol + True + >>> b.foo is b['foo'] + True + + A Munch is a subclass of dict; it supports all the methods a dict does... 
+ + >>> sorted(b.keys()) + ['foo', 'hello'] + + Including update()... + + >>> b.update({ 'ponies': 'are pretty!' }, hello=42) + >>> print (repr(b)) + Munch({'ponies': 'are pretty!', 'foo': Munch({'lol': True}), 'hello': 42}) + + As well as iteration... + + >>> sorted([ (k,b[k]) for k in b ]) + [('foo', Munch({'lol': True})), ('hello', 42), ('ponies', 'are pretty!')] + + And "splats". + + >>> "The {knights} who say {ni}!".format(**Munch(knights='lolcats', ni='can haz')) + 'The lolcats who say can haz!' + + See unmunchify/Munch.toDict, munchify/Munch.fromDict for notes about conversion. + """ + def __init__(self, *args, **kwargs): # pylint: disable=super-init-not-called + self.update(*args, **kwargs) + + # only called if k not found in normal places + def __getattr__(self, k): + """ Gets key if it exists, otherwise throws AttributeError. + + nb. __getattr__ is only called if key is not found in normal places. + + >>> b = Munch(bar='baz', lol={}) + >>> b.foo + Traceback (most recent call last): + ... + AttributeError: foo + + >>> b.bar + 'baz' + >>> getattr(b, 'bar') + 'baz' + >>> b['bar'] + 'baz' + + >>> b.lol is b['lol'] + True + >>> b.lol is getattr(b, 'lol') + True + """ + try: + # Throws exception if not in prototype chain + return object.__getattribute__(self, k) + except AttributeError: + try: + return self[k] + except KeyError: + raise AttributeError(k) + + def __setattr__(self, k, v): + """ Sets attribute k if it exists, otherwise sets key k. A KeyError + raised by set-item (only likely if you subclass Munch) will + propagate as an AttributeError instead. + + >>> b = Munch(foo='bar', this_is='useful when subclassing') + >>> hasattr(b.values, '__call__') + True + >>> b.values = 'uh oh' + >>> b.values + 'uh oh' + >>> b['values'] + Traceback (most recent call last): + ... + KeyError: 'values' + """ + try: + # Throws exception if not in prototype chain + object.__getattribute__(self, k) + except AttributeError: + try: + self[k] = v + except: + raise AttributeError(k) + else: + object.__setattr__(self, k, v) + + def __delattr__(self, k): + """ Deletes attribute k if it exists, otherwise deletes key k. A KeyError + raised by deleting the key--such as when the key is missing--will + propagate as an AttributeError instead. + + >>> b = Munch(lol=42) + >>> del b.lol + >>> b.lol + Traceback (most recent call last): + ... + AttributeError: lol + """ + try: + # Throws exception if not in prototype chain + object.__getattribute__(self, k) + except AttributeError: + try: + del self[k] + except KeyError: + raise AttributeError(k) + else: + object.__delattr__(self, k) + + def toDict(self): + """ Recursively converts a munch back into a dictionary. + + >>> b = Munch(foo=Munch(lol=True), hello=42, ponies='are pretty!') + >>> sorted(b.toDict().items()) + [('foo', {'lol': True}), ('hello', 42), ('ponies', 'are pretty!')] + + See unmunchify for more info. + """ + return unmunchify(self) + + @property + def __dict__(self): + return self.toDict() + + def __repr__(self): + """ Invertible* string-form of a Munch. 
+ + >>> b = Munch(foo=Munch(lol=True), hello=42, ponies='are pretty!') + >>> print (repr(b)) + Munch({'ponies': 'are pretty!', 'foo': Munch({'lol': True}), 'hello': 42}) + >>> eval(repr(b)) + Munch({'ponies': 'are pretty!', 'foo': Munch({'lol': True}), 'hello': 42}) + + >>> with_spaces = Munch({1: 2, 'a b': 9, 'c': Munch({'simple': 5})}) + >>> print (repr(with_spaces)) + Munch({'a b': 9, 1: 2, 'c': Munch({'simple': 5})}) + >>> eval(repr(with_spaces)) + Munch({'a b': 9, 1: 2, 'c': Munch({'simple': 5})}) + + (*) Invertible so long as collection contents are each repr-invertible. + """ + return '{0}({1})'.format(self.__class__.__name__, dict.__repr__(self)) + + def __dir__(self): + return list(self.keys()) + + def __getstate__(self): + """ Implement a serializable interface used for pickling. + + See https://docs.python.org/3.6/library/pickle.html. + """ + return {k: v for k, v in self.items()} + + def __setstate__(self, state): + """ Implement a serializable interface used for pickling. + + See https://docs.python.org/3.6/library/pickle.html. + """ + self.clear() + self.update(state) + + __members__ = __dir__ # for python2.x compatibility + + @classmethod + def fromDict(cls, d): + """ Recursively transforms a dictionary into a Munch via copy. + + >>> b = Munch.fromDict({'urmom': {'sez': {'what': 'what'}}}) + >>> b.urmom.sez.what + 'what' + + See munchify for more info. + """ + return munchify(d, cls) + + def copy(self): + return type(self).fromDict(self) + + def update(self, *args, **kwargs): + """ + Override built-in method to call custom __setitem__ method that may + be defined in subclasses. + """ + for k, v in dict(*args, **kwargs).items(): + self[k] = v + + def get(self, k, d=None): + """ + D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None. + """ + if k not in self: + return d + return self[k] + + def setdefault(self, k, d=None): + """ + D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D + """ + if k not in self: + self[k] = d + return self[k] + + +class AutoMunch(Munch): + def __setattr__(self, k, v): + """ Works the same as Munch.__setattr__ but if you supply + a dictionary as value it will convert it to another Munch. + """ + if isinstance(v, Mapping) and not isinstance(v, (AutoMunch, Munch)): + v = munchify(v, AutoMunch) + super(AutoMunch, self).__setattr__(k, v) + + +class DefaultMunch(Munch): + """ + A Munch that returns a user-specified value for missing keys. + """ + + def __init__(self, *args, **kwargs): + """ Construct a new DefaultMunch. Like collections.defaultdict, the + first argument is the default value; subsequent arguments are the + same as those for dict. + """ + # Mimic collections.defaultdict constructor + if args: + default = args[0] + args = args[1:] + else: + default = None + super(DefaultMunch, self).__init__(*args, **kwargs) + self.__default__ = default + + def __getattr__(self, k): + """ Gets key if it exists, otherwise returns the default value.""" + try: + return super(DefaultMunch, self).__getattr__(k) + except AttributeError: + return self.__default__ + + def __setattr__(self, k, v): + if k == '__default__': + object.__setattr__(self, k, v) + else: + super(DefaultMunch, self).__setattr__(k, v) + + def __getitem__(self, k): + """ Gets key if it exists, otherwise returns the default value.""" + try: + return super(DefaultMunch, self).__getitem__(k) + except KeyError: + return self.__default__ + + def __getstate__(self): + """ Implement a serializable interface used for pickling. + + See https://docs.python.org/3.6/library/pickle.html. 
+ """ + return (self.__default__, {k: v for k, v in self.items()}) + + def __setstate__(self, state): + """ Implement a serializable interface used for pickling. + + See https://docs.python.org/3.6/library/pickle.html. + """ + self.clear() + default, state_dict = state + self.update(state_dict) + self.__default__ = default + + @classmethod + def fromDict(cls, d, default=None): + # pylint: disable=arguments-differ + return munchify(d, factory=lambda d_: cls(default, d_)) + + def copy(self): + return type(self).fromDict(self, default=self.__default__) + + def __repr__(self): + return '{0}({1!r}, {2})'.format( + type(self).__name__, self.__undefined__, dict.__repr__(self)) + + +class DefaultFactoryMunch(Munch): + """ A Munch that calls a user-specified function to generate values for + missing keys like collections.defaultdict. + + >>> b = DefaultFactoryMunch(list, {'hello': 'world!'}) + >>> b.hello + 'world!' + >>> b.foo + [] + >>> b.bar.append('hello') + >>> b.bar + ['hello'] + """ + + def __init__(self, default_factory, *args, **kwargs): + super(DefaultFactoryMunch, self).__init__(*args, **kwargs) + self.default_factory = default_factory + + @classmethod + def fromDict(cls, d, default_factory): + # pylint: disable=arguments-differ + return munchify(d, factory=lambda d_: cls(default_factory, d_)) + + def copy(self): + return type(self).fromDict(self, default_factory=self.default_factory) + + def __repr__(self): + factory = self.default_factory.__name__ + return '{0}({1}, {2})'.format( + type(self).__name__, factory, dict.__repr__(self)) + + def __setattr__(self, k, v): + if k == 'default_factory': + object.__setattr__(self, k, v) + else: + super(DefaultFactoryMunch, self).__setattr__(k, v) + + def __missing__(self, k): + self[k] = self.default_factory() + return self[k] + + +# While we could convert abstract types like Mapping or Iterable, I think +# munchify is more likely to "do what you mean" if it is conservative about +# casting (ex: isinstance(str,Iterable) == True ). +# +# Should you disagree, it is not difficult to duplicate this function with +# more aggressive coercion to suit your own purposes. + +def munchify(x, factory=Munch): + """ Recursively transforms a dictionary into a Munch via copy. + + >>> b = munchify({'urmom': {'sez': {'what': 'what'}}}) + >>> b.urmom.sez.what + 'what' + + munchify can handle intermediary dicts, lists and tuples (as well as + their subclasses), but ymmv on custom datatypes. + + >>> b = munchify({ 'lol': ('cats', {'hah':'i win again'}), + ... 'hello': [{'french':'salut', 'german':'hallo'}] }) + >>> b.hello[0].french + 'salut' + >>> b.lol[1].hah + 'i win again' + + nb. As dicts are not hashable, they cannot be nested in sets/frozensets. 
+ """ + # Munchify x, using `seen` to track object cycles + seen = dict() + + def munchify_cycles(obj): + # If we've already begun munchifying obj, just return the already-created munchified obj + try: + return seen[id(obj)] + except KeyError: + pass + + # Otherwise, first partly munchify obj (but without descending into any lists or dicts) and save that + seen[id(obj)] = partial = pre_munchify(obj) + # Then finish munchifying lists and dicts inside obj (reusing munchified obj if cycles are encountered) + return post_munchify(partial, obj) + + def pre_munchify(obj): + # Here we return a skeleton of munchified obj, which is enough to save for later (in case + # we need to break cycles) but it needs to filled out in post_munchify + if isinstance(obj, Mapping): + return factory({}) + elif isinstance(obj, list): + return type(obj)() + elif isinstance(obj, tuple): + type_factory = getattr(obj, "_make", type(obj)) + return type_factory(munchify_cycles(item) for item in obj) + else: + return obj + + def post_munchify(partial, obj): + # Here we finish munchifying the parts of obj that were deferred by pre_munchify because they + # might be involved in a cycle + if isinstance(obj, Mapping): + partial.update((k, munchify_cycles(obj[k])) for k in obj.keys()) + elif isinstance(obj, list): + partial.extend(munchify_cycles(item) for item in obj) + elif isinstance(obj, tuple): + for (item_partial, item) in zip(partial, obj): + post_munchify(item_partial, item) + + return partial + + return munchify_cycles(x) + + +def unmunchify(x): + """ Recursively converts a Munch into a dictionary. + + >>> b = Munch(foo=Munch(lol=True), hello=42, ponies='are pretty!') + >>> sorted(unmunchify(b).items()) + [('foo', {'lol': True}), ('hello', 42), ('ponies', 'are pretty!')] + + unmunchify will handle intermediary dicts, lists and tuples (as well as + their subclasses), but ymmv on custom datatypes. + + >>> b = Munch(foo=['bar', Munch(lol=True)], hello=42, + ... ponies=('are pretty!', Munch(lies='are trouble!'))) + >>> sorted(unmunchify(b).items()) #doctest: +NORMALIZE_WHITESPACE + [('foo', ['bar', {'lol': True}]), ('hello', 42), ('ponies', ('are pretty!', {'lies': 'are trouble!'}))] + + nb. As dicts are not hashable, they cannot be nested in sets/frozensets. 
+    """
+
+    # Unmunchify x, using `seen` to track object cycles
+    seen = dict()
+
+    def unmunchify_cycles(obj):
+        # If we've already begun unmunchifying obj, just return the already-created unmunchified obj
+        try:
+            return seen[id(obj)]
+        except KeyError:
+            pass
+
+        # Otherwise, first partly unmunchify obj (but without descending into any lists or dicts) and save that
+        seen[id(obj)] = partial = pre_unmunchify(obj)
+        # Then finish unmunchifying lists and dicts inside obj (reusing unmunchified obj if cycles are encountered)
+        return post_unmunchify(partial, obj)
+
+    def pre_unmunchify(obj):
+        # Here we return a skeleton of unmunchified obj, which is enough to save for later (in case
+        # we need to break cycles) but it needs to be filled out in post_unmunchify
+        if isinstance(obj, Mapping):
+            return dict()
+        elif isinstance(obj, list):
+            return type(obj)()
+        elif isinstance(obj, tuple):
+            type_factory = getattr(obj, "_make", type(obj))
+            return type_factory(unmunchify_cycles(item) for item in obj)
+        else:
+            return obj
+
+    def post_unmunchify(partial, obj):
+        # Here we finish unmunchifying the parts of obj that were deferred by pre_unmunchify because they
+        # might be involved in a cycle
+        if isinstance(obj, Mapping):
+            partial.update((k, unmunchify_cycles(obj[k])) for k in obj.keys())
+        elif isinstance(obj, list):
+            partial.extend(unmunchify_cycles(v) for v in obj)
+        elif isinstance(obj, tuple):
+            for (value_partial, value) in zip(partial, obj):
+                post_unmunchify(value_partial, value)
+
+        return partial
+
+    return unmunchify_cycles(x)
+
+
+# Serialization
+
+try:
+    try:
+        import json
+    except ImportError:
+        import simplejson as json
+
+    def toJSON(self, **options):
+        """ Serializes this Munch to JSON. Accepts the same keyword options as `json.dumps()`.
+
+            >>> b = Munch(foo=Munch(lol=True), hello=42, ponies='are pretty!')
+            >>> json.dumps(b) == b.toJSON()
+            True
+        """
+        return json.dumps(self, **options)
+
+    def fromJSON(cls, stream, *args, **kwargs):
+        """ Deserializes JSON to Munch or any of its subclasses.
+        """
+        factory = lambda d: cls(*(args + (d,)), **kwargs)
+        return munchify(json.loads(stream), factory=factory)
+
+    Munch.toJSON = toJSON
+    Munch.fromJSON = classmethod(fromJSON)
+
+except ImportError:
+    pass
diff --git a/.venv/lib/python3.12/site-packages/fiona/_vendor/munch/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/fiona/_vendor/munch/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..28732613
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/_vendor/munch/__pycache__/__init__.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/fiona/_vendor/snuggs.py b/.venv/lib/python3.12/site-packages/fiona/_vendor/snuggs.py
new file mode 100644
index 00000000..638f3979
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/_vendor/snuggs.py
@@ -0,0 +1,298 @@
+"""Snuggs are s-expressions for Numpy."""
+
+# This file is a modified version of snuggs 1.4.7. The numpy
+# requirement has been removed and support for keyword arguments in
+# expressions has been added.
+#
+# The original license follows.
+# +# Copyright (c) 2014 Mapbox +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from collections import OrderedDict +import functools +import operator +import re +from typing import Mapping + +from pyparsing import ( # type: ignore + Keyword, + oneOf, + Literal, + QuotedString, + ParseException, + Forward, + Group, + OneOrMore, + ParseResults, + Regex, + ZeroOrMore, + alphanums, + pyparsing_common, + replace_with, +) + +__all__ = ["eval"] +__version__ = "1.4.7" + + +class Context(object): + def __init__(self): + self._data = OrderedDict() + + def add(self, name, val): + self._data[name] = val + + def get(self, name): + return self._data[name] + + def lookup(self, index, subindex=None): + s = list(self._data.values())[int(index) - 1] + if subindex: + return s[int(subindex) - 1] + else: + return s + + def clear(self): + self._data = OrderedDict() + + +_ctx = Context() + + +class ctx(object): + def __init__(self, kwd_dict=None, **kwds): + self.kwds = kwd_dict or kwds + + def __enter__(self): + _ctx.clear() + for k, v in self.kwds.items(): + _ctx.add(k, v) + return self + + def __exit__(self, exc_type=None, exc_val=None, exc_tb=None): + self.kwds = None + _ctx.clear() + + +class ExpressionError(SyntaxError): + """A Snuggs-specific syntax error.""" + + filename = "" + lineno = 1 + + +op_map = { + "*": lambda *args: functools.reduce(lambda x, y: operator.mul(x, y), args), + "+": lambda *args: functools.reduce(lambda x, y: operator.add(x, y), args), + "/": lambda *args: functools.reduce(lambda x, y: operator.truediv(x, y), args), + "-": lambda *args: functools.reduce(lambda x, y: operator.sub(x, y), args), + "&": lambda *args: functools.reduce(lambda x, y: operator.and_(x, y), args), + "|": lambda *args: functools.reduce(lambda x, y: operator.or_(x, y), args), + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, + "truth": operator.truth, + "is": operator.is_, + "not": operator.not_, +} + + +def compose(f, g): + """Compose two functions. + + compose(f, g)(x) = f(g(x)). 
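+
+    For example (an illustrative doctest):
+
+    >>> compose(str, abs)(-1)
+    '1'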
+
+    """
+    return lambda x, *args, **kwds: f(g(x))
+
+
+func_map: Mapping = {}
+
+higher_func_map: Mapping = {
+    "compose": compose,
+    "map": map,
+    "partial": functools.partial,
+    "reduce": functools.reduce,
+    "attrgetter": operator.attrgetter,
+    "methodcaller": operator.methodcaller,
+    "itemgetter": operator.itemgetter,
+}
+
+nil = Keyword("null").set_parse_action(replace_with(None))
+true = Keyword("true").set_parse_action(replace_with(True))
+false = Keyword("false").set_parse_action(replace_with(False))
+
+
+def resolve_var(source, loc, toks):
+    try:
+        return _ctx.get(toks[0])
+    except KeyError:
+        err = ExpressionError("name '{}' is not defined".format(toks[0]))
+        err.text = source
+        err.offset = loc + 1
+        raise err
+
+
+var = pyparsing_common.identifier.set_parse_action(resolve_var)
+string = QuotedString("'") | QuotedString('"')
+lparen = Literal("(").suppress()
+rparen = Literal(")").suppress()
+op = oneOf(" ".join(op_map.keys())).set_parse_action(
+    lambda source, loc, toks: op_map[toks[0]]
+)
+
+
+def resolve_func(source, loc, toks):
+    try:
+        return func_map[toks[0]]
+    except (AttributeError, KeyError):
+        err = ExpressionError("'{}' is not a function or operator".format(toks[0]))
+        err.text = source
+        err.offset = loc + 1
+        raise err
+
+
+# The look-behind assertion is to disambiguate between functions and
+# variables.
+func = Regex(r"(?<=\()[{}]+".format(alphanums + "_")).set_parse_action(resolve_func)
+
+higher_func = oneOf(" ".join(higher_func_map.keys())).set_parse_action(
+    lambda source, loc, toks: higher_func_map[toks[0]]
+)
+
+func_expr = Forward()
+higher_func_expr = Forward()
+expr = higher_func_expr | func_expr
+
+
+class KeywordArg:
+    def __init__(self, name):
+        self.name = name
+
+
+kwarg = Regex(r":[{}]+".format(alphanums + "_")).set_parse_action(
+    lambda source, loc, toks: KeywordArg(toks[0][1:])
+)
+
+operand = (
+    higher_func_expr
+    | func_expr
+    | true
+    | false
+    | nil
+    | var
+    | kwarg
+    | pyparsing_common.sci_real
+    | pyparsing_common.real
+    | pyparsing_common.signed_integer
+    | string
+)
+
+func_expr << Group(
+    lparen + (higher_func_expr | op | func) + OneOrMore(operand) + rparen
+)
+
+higher_func_expr << Group(
+    lparen
+    + higher_func
+    + (nil | higher_func_expr | op | func | OneOrMore(operand))
+    + ZeroOrMore(operand)
+    + rparen
+)
+
+
+def processArg(arg):
+    if isinstance(arg, ParseResults):
+        return processList(arg)
+    else:
+        return arg
+
+
+def processList(lst):
+    items = [processArg(x) for x in lst[1:]]
+    args = []
+    kwds = {}
+
+    # An iterator is used instead of implicit iteration to allow
+    # skipping ahead in the keyword argument case.
+    itemitr = iter(items)
+
+    for item in itemitr:
+        if isinstance(item, KeywordArg):
+            # The next item after the keyword arg marker is its value.
+            # This advances the iterator in a way that is compatible
+            # with the for loop.
+            val = next(itemitr)
+            key = item.name
+            kwds[key] = val
+        else:
+            args.append(item)
+
+    func = processArg(lst[0])
+
+    # list and tuple are two builtins that take a single argument,
+    # whereas args is a list. On a TypeError, the call is retried
+    # without arg unpacking.
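+    # For example, (+ 1 2 3) parses to [op_map['+'], 1, 2, 3] and
+    # evaluates as op_map['+'](1, 2, 3) == 6, while (partial + 1)
+    # builds functools.partial(op_map['+'], 1). A keyword marker such
+    # as :name is paired with the operand that follows it and passed
+    # on as name=value.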
+    try:
+        return func(*args, **kwds)
+    except TypeError:
+        return func(args, **kwds)
+
+
+def handleLine(line):
+    try:
+        result = expr.parseString(line)
+        return processList(result[0])
+    except ParseException as exc:
+        text = str(exc)
+        m = re.search(r"(Expected .+) \(at char (\d+)\), \(line:(\d+)", text)
+        msg = m.group(1)
+        if "map|partial" in msg:
+            msg = "expected a function or operator"
+        err = ExpressionError(msg)
+        err.text = line
+        err.offset = int(m.group(2)) + 1
+        raise err
+
+
+def eval(source, kwd_dict=None, **kwds):
+    """Evaluate a snuggs expression.
+
+    Parameters
+    ----------
+    source : str
+        Expression source.
+    kwd_dict : dict
+        A dict of items that form the evaluation context. Deprecated.
+    kwds : dict
+        A dict of items that form the evaluation context.
+
+    Returns
+    -------
+    object
+
+    """
+    kwd_dict = kwd_dict or kwds
+    with ctx(kwd_dict):
+        return handleLine(source)
diff --git a/.venv/lib/python3.12/site-packages/fiona/_vsiopener.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/fiona/_vsiopener.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 00000000..f3069f70
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/_vsiopener.cpython-312-x86_64-linux-gnu.so differ
diff --git a/.venv/lib/python3.12/site-packages/fiona/_vsiopener.pxd b/.venv/lib/python3.12/site-packages/fiona/_vsiopener.pxd
new file mode 100644
index 00000000..a91d7931
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/_vsiopener.pxd
@@ -0,0 +1 @@
+include "gdal.pxi"
diff --git a/.venv/lib/python3.12/site-packages/fiona/abc.py b/.venv/lib/python3.12/site-packages/fiona/abc.py
new file mode 100644
index 00000000..3bdbbf9b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/abc.py
@@ -0,0 +1,3 @@
+"""Abstract base classes."""
+
+from fiona._vsiopener import FileContainer, MultiByteRangeResourceContainer
diff --git a/.venv/lib/python3.12/site-packages/fiona/collection.py b/.venv/lib/python3.12/site-packages/fiona/collection.py
new file mode 100644
index 00000000..c57531fb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/collection.py
@@ -0,0 +1,782 @@
+"""Collections provide file-like access to feature data."""
+
+from contextlib import ExitStack
+import logging
+from pathlib import Path
+import warnings
+
+from fiona import compat, vfs
+from fiona.ogrext import Iterator, ItemsIterator, KeysIterator
+from fiona.ogrext import Session, WritingSession
+from fiona.ogrext import buffer_to_virtual_file, remove_virtual_file, GEOMETRY_TYPES
+from fiona.errors import (
+    DriverError,
+    DriverSupportError,
+    GDALVersionError,
+    SchemaError,
+    UnsupportedGeometryTypeError,
+    UnsupportedOperation,
+)
+from fiona.logutils import FieldSkipLogFilter
+from fiona.crs import CRS
+from fiona._env import get_gdal_release_name, get_gdal_version_tuple
+from fiona.env import env_ctx_if_needed
+from fiona.errors import FionaDeprecationWarning
+from fiona.drvsupport import (
+    driver_from_extension,
+    supported_drivers,
+    driver_mode_mingdal,
+    _driver_converts_field_type_silently_to_str,
+    _driver_supports_field,
+)
+from fiona._path import _Path, _vsi_path, _parse_path
+
+
+_GDAL_VERSION_TUPLE = get_gdal_version_tuple()
+_GDAL_RELEASE_NAME = get_gdal_release_name()
+
+log = logging.getLogger(__name__)
+
+
+class Collection:
+
+    """A file-like interface to features of a vector dataset
+
+    Python text file objects are iterators over lines of a file. Fiona
+    Collections are similar iterators (not lists!) over features
+    represented as GeoJSON-like mappings.
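+
+    For example (an illustrative sketch; the path is hypothetical):
+
+    >>> import fiona
+    >>> with fiona.open("/data/test_uk.shp") as src:  # doctest: +SKIP
+    ...     print(src.driver, len(src))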
+    """
+
+    def __init__(
+        self,
+        path,
+        mode="r",
+        driver=None,
+        schema=None,
+        crs=None,
+        encoding=None,
+        layer=None,
+        vsi=None,
+        archive=None,
+        enabled_drivers=None,
+        crs_wkt=None,
+        ignore_fields=None,
+        ignore_geometry=False,
+        include_fields=None,
+        wkt_version=None,
+        allow_unsupported_drivers=False,
+        **kwargs
+    ):
+
+        """The required ``path`` is the absolute or relative path to
+        a file, such as '/data/test_uk.shp'. In ``mode`` 'r', data can
+        be read only. In ``mode`` 'a', data can be appended to a file.
+        In ``mode`` 'w', data overwrites the existing contents of
+        a file.
+
+        In ``mode`` 'w', an OGR ``driver`` name and a ``schema`` are
+        required. A Proj4 ``crs`` string is recommended. If both ``crs``
+        and ``crs_wkt`` keyword arguments are passed, the latter will
+        trump the former.
+
+        In 'w' mode, kwargs will be mapped to OGR layer creation
+        options.
+
+        """
+        self._closed = True
+
+        if not isinstance(path, (str, _Path)):
+            raise TypeError(f"invalid path: {path!r}")
+        if not isinstance(mode, str) or mode not in ("r", "w", "a"):
+            raise TypeError(f"invalid mode: {mode!r}")
+        if driver and not isinstance(driver, str):
+            raise TypeError(f"invalid driver: {driver!r}")
+        if schema and not hasattr(schema, "get"):
+            raise TypeError("invalid schema: %r" % schema)
+
+        # Rasterio's CRS is compatible with Fiona. This class
+        # constructor only requires that the crs value have a to_wkt()
+        # method.
+        if (
+            crs
+            and not isinstance(crs, compat.DICT_TYPES + (str, CRS))
+            and not (hasattr(crs, "to_wkt") and callable(crs.to_wkt))
+        ):
+            raise TypeError("invalid crs: %r" % crs)
+
+        if crs_wkt and not isinstance(crs_wkt, str):
+            raise TypeError(f"invalid crs_wkt: {crs_wkt!r}")
+        if encoding and not isinstance(encoding, str):
+            raise TypeError(f"invalid encoding: {encoding!r}")
+        if layer and not isinstance(layer, (str, int)):
+            raise TypeError(f"invalid name: {layer!r}")
+        if vsi:
+            if not isinstance(vsi, str) or not vfs.valid_vsi(vsi):
+                raise TypeError(f"invalid vsi: {vsi!r}")
+        if archive and not isinstance(archive, str):
+            raise TypeError(f"invalid archive: {archive!r}")
+        if ignore_fields is not None and include_fields is not None:
+            raise ValueError("Cannot specify both 'ignore_fields' and 'include_fields'")
+
+        if mode == "w" and driver is None:
+            driver = driver_from_extension(path)
+
+        # Check GDAL version against drivers
+        if (
+            driver in driver_mode_mingdal[mode]
+            and get_gdal_version_tuple() < driver_mode_mingdal[mode][driver]
+        ):
+            min_gdal_version = ".".join(
+                list(map(str, driver_mode_mingdal[mode][driver]))
+            )
+
+            raise DriverError(
+                f"{driver} driver requires at least GDAL {min_gdal_version} "
+                f"for mode '{mode}', "
+                f"Fiona was compiled against: {get_gdal_release_name()}"
+            )
+
+        self.session = None
+        self.iterator = None
+        self._len = 0
+        self._bounds = None
+        self._driver = None
+        self._schema = None
+        self._crs = None
+        self._crs_wkt = None
+        self.enabled_drivers = enabled_drivers
+        self.include_fields = include_fields
+        self.ignore_fields = ignore_fields
+        self.ignore_geometry = bool(ignore_geometry)
+        self._allow_unsupported_drivers = allow_unsupported_drivers
+        self._closed = True
+
+        if vsi:
+            self.path = vfs.vsi_path(path, vsi, archive)
+            path = _parse_path(self.path)
+        else:
+            path = _parse_path(path)
+            self.path = _vsi_path(path)
+
+        self.layer = layer or 0
+
+        if mode == "w":
+            if layer and not isinstance(layer, str):
+                raise ValueError("in 'w' mode, layer names must be strings")
+            self.name = layer or Path(self.path).stem
+        else:
+            self.name = 0 if layer is None else layer or Path(self.path).stem
+
+        self.mode = mode
+
+        if self.mode == "w":
+            if driver == "Shapefile":
+                driver = "ESRI Shapefile"
+            if not driver:
+                raise DriverError("no driver")
+            if not allow_unsupported_drivers:
+                if driver not in supported_drivers:
+                    raise DriverError(f"unsupported driver: {driver!r}")
+                if self.mode not in supported_drivers[driver]:
+                    raise DriverError(f"unsupported mode: {self.mode!r}")
+            self._driver = driver
+
+            if not schema:
+                raise SchemaError("no schema")
+            if "properties" in schema:
+                # Store the schema's properties as a built-in dict.
+                this_schema = schema.copy()
+                this_schema["properties"] = dict(schema["properties"])
+                schema = this_schema
+            else:
+                schema["properties"] = {}
+            if "geometry" not in schema:
+                schema["geometry"] = None
+            self._schema = schema
+
+            self._check_schema_driver_support()
+
+            if crs_wkt or crs:
+                self._crs_wkt = CRS.from_user_input(crs_wkt or crs).to_wkt(
+                    version=wkt_version
+                )
+
+        self._driver = driver
+        kwargs.update(encoding=encoding)
+        self.encoding = encoding
+
+        try:
+            if self.mode == "r":
+                self.session = Session()
+                self.session.start(self, **kwargs)
+            elif self.mode in ("a", "w"):
+                self.session = WritingSession()
+                self.session.start(self, **kwargs)
+        except OSError:
+            self.session = None
+            raise
+
+        if self.session is not None:
+            self.guard_driver_mode()
+
+        if self.mode in ("a", "w"):
+            self._valid_geom_types = _get_valid_geom_types(self.schema, self.driver)
+
+        self.field_skip_log_filter = FieldSkipLogFilter()
+        self._env = ExitStack()
+        self._closed = False
+
+    def __repr__(self):
+        return "<{} Collection '{}', mode '{}' at {}>".format(
+            self.closed and "closed" or "open",
+            self.path + ":" + str(self.name),
+            self.mode,
+            hex(id(self)),
+        )
+
+    def guard_driver_mode(self):
+        if not self._allow_unsupported_drivers:
+            driver = self.session.get_driver()
+            if driver not in supported_drivers:
+                raise DriverError(f"unsupported driver: {driver!r}")
+            if self.mode not in supported_drivers[driver]:
+                raise DriverError(f"unsupported mode: {self.mode!r}")
+
+    @property
+    def driver(self):
+        """Returns the name of the proper OGR driver."""
+        if not self._driver and self.mode in ("a", "r") and self.session:
+            self._driver = self.session.get_driver()
+        return self._driver
+
+    @property
+    def schema(self):
+        """Returns a mapping describing the data schema.
+
+        The mapping has 'geometry' and 'properties' items. The former is a
+        string such as 'Point' and the latter is an ordered mapping that
+        follows the order of fields in the data file.
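+
+        For example (illustrative):
+
+            {'geometry': 'Point',
+             'properties': {'name': 'str', 'population': 'int'}}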
+        """
+        if not self._schema and self.mode in ("a", "r") and self.session:
+            self._schema = self.session.get_schema()
+        return self._schema
+
+    @property
+    def crs(self):
+        """The coordinate reference system (CRS) of the Collection."""
+        if self._crs is None and self.session:
+            self._crs = self.session.get_crs()
+        return self._crs
+
+    @property
+    def crs_wkt(self):
+        """Returns a WKT string."""
+        if self._crs_wkt is None and self.session:
+            self._crs_wkt = self.session.get_crs_wkt()
+        return self._crs_wkt
+
+    def tags(self, ns=None):
+        """Returns a dict containing copies of the dataset or layer's
+        tags. Tags are pairs of key and value strings. Tags belong to
+        namespaces. The standard namespaces are: default (None) and
+        'IMAGE_STRUCTURE'. Applications can create their own additional
+        namespaces.
+
+        Parameters
+        ----------
+        ns: str, optional
+            Can be used to select a namespace other than the default.
+
+        Returns
+        -------
+        dict
+        """
+        if _GDAL_VERSION_TUPLE.major < 2:
+            raise GDALVersionError(
+                "tags requires GDAL 2+, fiona was compiled "
+                f"against: {_GDAL_RELEASE_NAME}"
+            )
+        if self.session:
+            return self.session.tags(ns=ns)
+        return None
+
+    def get_tag_item(self, key, ns=None):
+        """Returns tag item value
+
+        Parameters
+        ----------
+        key: str
+            The key for the metadata item to fetch.
+        ns: str, optional
+            Used to select a namespace other than the default.
+
+        Returns
+        -------
+        str
+        """
+        if _GDAL_VERSION_TUPLE.major < 2:
+            raise GDALVersionError(
+                "get_tag_item requires GDAL 2+, fiona was compiled "
+                f"against: {_GDAL_RELEASE_NAME}"
+            )
+        if self.session:
+            return self.session.get_tag_item(key=key, ns=ns)
+        return None
+
+    def update_tags(self, tags, ns=None):
+        """Writes a dict containing the dataset or layer's tags.
+        Tags are pairs of key and value strings. Tags belong to
+        namespaces. The standard namespaces are: default (None) and
+        'IMAGE_STRUCTURE'. Applications can create their own additional
+        namespaces.
+
+        Parameters
+        ----------
+        tags: dict
+            The dict of metadata items to set.
+        ns: str, optional
+            Used to select a namespace other than the default.
+
+        Returns
+        -------
+        int
+        """
+        if _GDAL_VERSION_TUPLE.major < 2:
+            raise GDALVersionError(
+                "update_tags requires GDAL 2+, fiona was compiled "
+                f"against: {_GDAL_RELEASE_NAME}"
+            )
+        if not isinstance(self.session, WritingSession):
+            raise UnsupportedOperation("Unable to update tags as not in writing mode.")
+        return self.session.update_tags(tags, ns=ns)
+
+    def update_tag_item(self, key, tag, ns=None):
+        """Updates the tag item value
+
+        Parameters
+        ----------
+        key: str
+            The key for the metadata item to set.
+        tag: str
+            The value of the metadata item to set.
+        ns: str, optional
+            Used to select a namespace other than the default.
+
+        Returns
+        -------
+        int
+        """
+        if _GDAL_VERSION_TUPLE.major < 2:
+            raise GDALVersionError(
+                "update_tag_item requires GDAL 2+, fiona was compiled "
+                f"against: {_GDAL_RELEASE_NAME}"
+            )
+        if not isinstance(self.session, WritingSession):
+            raise UnsupportedOperation("Unable to update tag as not in writing mode.")
+        return self.session.update_tag_item(key=key, tag=tag, ns=ns)
+
+    @property
+    def meta(self):
+        """Returns a mapping with the driver, schema, crs, and additional
+        properties."""
+        return {
+            "driver": self.driver,
+            "schema": self.schema,
+            "crs": self.crs,
+            "crs_wkt": self.crs_wkt,
+        }
+
+    profile = meta
+
+    def filter(self, *args, **kwds):
+        """Returns an iterator over records, but filtered by a test for
+        spatial intersection with the provided ``bbox``, a (minx, miny,
+        maxx, maxy) tuple or a geometry ``mask``. An attribute filter can
+        be set using an SQL ``where`` clause, which uses the `OGR SQL dialect
+        <https://gdal.org/user/ogr_sql_dialect.html>`__.
+
+        Positional arguments ``stop`` or ``start, stop[, step]`` allow
+        iteration to skip over items or stop at a specific item.
+
+        Note: spatial filtering using ``mask`` may be inaccurate,
+        returning all features overlapping the envelope of ``mask``.
+
+        """
+        if self.closed:
+            raise ValueError("I/O operation on closed collection")
+        elif self.mode != "r":
+            raise OSError("collection not open for reading")
+        if args:
+            s = slice(*args)
+            start = s.start
+            stop = s.stop
+            step = s.step
+        else:
+            start = stop = step = None
+        bbox = kwds.get("bbox")
+        mask = kwds.get("mask")
+        if bbox and mask:
+            raise ValueError("mask and bbox can not be set together")
+        where = kwds.get("where")
+        self.iterator = Iterator(self, start, stop, step, bbox, mask, where)
+        return self.iterator
+
+    def items(self, *args, **kwds):
+        """Returns an iterator over FID, record pairs, optionally
+        filtered by a test for spatial intersection with the provided
+        ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry
+        ``mask``. An attribute filter can be set using an SQL ``where``
+        clause, which uses the `OGR SQL dialect
+        <https://gdal.org/user/ogr_sql_dialect.html>`__.
+
+        Positional arguments ``stop`` or ``start, stop[, step]`` allow
+        iteration to skip over items or stop at a specific item.
+
+        Note: spatial filtering using ``mask`` may be inaccurate,
+        returning all features overlapping the envelope of ``mask``.
+
+        """
+        if self.closed:
+            raise ValueError("I/O operation on closed collection")
+        elif self.mode != "r":
+            raise OSError("collection not open for reading")
+        if args:
+            s = slice(*args)
+            start = s.start
+            stop = s.stop
+            step = s.step
+        else:
+            start = stop = step = None
+        bbox = kwds.get("bbox")
+        mask = kwds.get("mask")
+        if bbox and mask:
+            raise ValueError("mask and bbox can not be set together")
+        where = kwds.get("where")
+        self.iterator = ItemsIterator(self, start, stop, step, bbox, mask, where)
+        return self.iterator
+
+    def keys(self, *args, **kwds):
+        """Returns an iterator over FIDs, optionally
+        filtered by a test for spatial intersection with the provided
+        ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry
+        ``mask``. An attribute filter can be set using an SQL ``where``
+        clause, which uses the `OGR SQL dialect
+        <https://gdal.org/user/ogr_sql_dialect.html>`__.
+
+        Positional arguments ``stop`` or ``start, stop[, step]`` allow
+        iteration to skip over items or stop at a specific item.
+
+        Note: spatial filtering using ``mask`` may be inaccurate,
+        returning all features overlapping the envelope of ``mask``.
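+
+        For example (illustrative; the path and bbox are hypothetical):
+
+        >>> with fiona.open("/data/roads.shp") as src:  # doctest: +SKIP
+        ...     fids = list(src.keys(bbox=(-105.0, 39.0, -104.0, 40.0)))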
+ """ + if self.closed: + raise ValueError("I/O operation on closed collection") + elif self.mode != "r": + raise OSError("collection not open for reading") + if args: + s = slice(*args) + start = s.start + stop = s.stop + step = s.step + else: + start = stop = step = None + bbox = kwds.get("bbox") + mask = kwds.get("mask") + if bbox and mask: + raise ValueError("mask and bbox can not be set together") + where = kwds.get("where") + self.iterator = KeysIterator(self, start, stop, step, bbox, mask, where) + return self.iterator + + def __contains__(self, fid): + return self.session.has_feature(fid) + + values = filter + + def __iter__(self): + """Returns an iterator over records.""" + return self.filter() + + def __next__(self): + """Returns next record from iterator.""" + warnings.warn( + "Collection.__next__() is buggy and will be removed in " + "Fiona 2.0. Switch to `next(iter(collection))`.", + FionaDeprecationWarning, + stacklevel=2, + ) + if not self.iterator: + iter(self) + return next(self.iterator) + + next = __next__ + + def __getitem__(self, item): + return self.session.__getitem__(item) + + def get(self, item): + return self.session.get(item) + + def writerecords(self, records): + """Stages multiple records for writing to disk.""" + if self.closed: + raise ValueError("I/O operation on closed collection") + if self.mode not in ("a", "w"): + raise OSError("collection not open for writing") + self.session.writerecs(records, self) + self._len = self.session.get_length() + self._bounds = None + + def write(self, record): + """Stages a record for writing to disk. + + Note: Each call of this method will start and commit a + unique transaction with the data source. + """ + self.writerecords([record]) + + def validate_record(self, record): + """Compares the record to the collection's schema. + + Returns ``True`` if the record matches, else ``False``. + """ + # Currently we only compare keys of properties, not the types of + # values. + return set(record["properties"].keys()) == set( + self.schema["properties"].keys() + ) and self.validate_record_geometry(record) + + def validate_record_geometry(self, record): + """Compares the record's geometry to the collection's schema. + + Returns ``True`` if the record matches, else ``False``. + """ + # Shapefiles welcome mixes of line/multis and polygon/multis. + # OGR reports these mixed files as type "Polygon" or "LineString" + # but will return either these or their multi counterparts when + # reading features. + if ( + self.driver == "ESRI Shapefile" + and "Point" not in record["geometry"]["type"] + ): + return record["geometry"]["type"].lstrip("Multi") == self.schema[ + "geometry" + ].lstrip("3D ").lstrip("Multi") + else: + return record["geometry"]["type"] == self.schema["geometry"].lstrip("3D ") + + def __len__(self): + if self._len <= 0 and self.session is not None: + self._len = self.session.get_length() + if self._len < 0: + # Raise TypeError when we don't know the length so that Python + # will treat Collection as a generator + raise TypeError("Layer does not support counting") + return self._len + + @property + def bounds(self): + """Returns (minx, miny, maxx, maxy).""" + if self._bounds is None and self.session is not None: + self._bounds = self.session.get_extent() + return self._bounds + + def _check_schema_driver_support(self): + """Check support for the schema against the driver + + See GH#572 for discussion. 
+ """ + gdal_version_major = _GDAL_VERSION_TUPLE.major + + for field in self._schema["properties"].values(): + field_type = field.split(":")[0] + + if not _driver_supports_field(self.driver, field_type): + if ( + self.driver == "GPKG" + and gdal_version_major < 2 + and field_type == "datetime" + ): + raise DriverSupportError( + "GDAL 1.x GPKG driver does not support datetime fields" + ) + else: + raise DriverSupportError( + f"{self.driver} does not support {field_type} fields" + ) + elif ( + field_type + in { + "time", + "datetime", + "date", + } + and _driver_converts_field_type_silently_to_str(self.driver, field_type) + ): + if ( + self._driver == "GeoJSON" + and gdal_version_major < 2 + and field_type in {"datetime", "date"} + ): + warnings.warn( + "GeoJSON driver in GDAL 1.x silently converts " + f"{field_type} to string in non-standard format" + ) + else: + warnings.warn( + f"{self.driver} driver silently converts {field_type} " + "to string" + ) + + def flush(self): + """Flush the buffer.""" + if self.session is not None: + self.session.sync(self) + new_len = self.session.get_length() + self._len = new_len > self._len and new_len or self._len + self._bounds = None + + def close(self): + """In append or write mode, flushes data to disk, then ends access.""" + if not self._closed: + if self.session is not None and self.session.isactive(): + if self.mode in ("a", "w"): + self.flush() + log.debug("Flushed buffer") + self.session.stop() + log.debug("Stopped session") + self.session = None + self.iterator = None + if self._env: + self._env.close() + self._env = None + self._closed = True + + @property + def closed(self): + """``False`` if data can be accessed, otherwise ``True``.""" + return self._closed + + def __enter__(self): + self._env.enter_context(env_ctx_if_needed()) + logging.getLogger("fiona.ogrext").addFilter(self.field_skip_log_filter) + return self + + def __exit__(self, type, value, traceback): + logging.getLogger("fiona.ogrext").removeFilter(self.field_skip_log_filter) + self.close() + + def __del__(self): + # Note: you can't count on this being called. Call close() explicitly + # or use the context manager protocol ("with"). + if not self._closed: + self.close() + + +ALL_GEOMETRY_TYPES = { + geom_type + for geom_type in GEOMETRY_TYPES.values() + if "3D " not in geom_type and geom_type != "None" +} +ALL_GEOMETRY_TYPES.add("None") + + +def _get_valid_geom_types(schema, driver): + """Returns a set of geometry types the schema will accept""" + schema_geom_type = schema["geometry"] + if isinstance(schema_geom_type, str) or schema_geom_type is None: + schema_geom_type = (schema_geom_type,) + valid_types = set() + for geom_type in schema_geom_type: + geom_type = str(geom_type).lstrip("3D ") + if geom_type == "Unknown" or geom_type == "Any": + valid_types.update(ALL_GEOMETRY_TYPES) + else: + if geom_type not in ALL_GEOMETRY_TYPES: + raise UnsupportedGeometryTypeError(geom_type) + valid_types.add(geom_type) + + # shapefiles don't differentiate between single/multi geometries, except points + if driver == "ESRI Shapefile" and "Point" not in valid_types: + for geom_type in list(valid_types): + if not geom_type.startswith("Multi"): + valid_types.add("Multi" + geom_type) + + return valid_types + + +def get_filetype(bytesbuf): + """Detect compression type of bytesbuf. + + ZIP only. 
TODO: add others relevant to GDAL/OGR."""
+    if bytesbuf[:4].startswith(b"PK\x03\x04"):
+        return "zip"
+    else:
+        return ""
+
+
+class BytesCollection(Collection):
+    """BytesCollection takes a buffer of bytes and maps that to
+    a virtual file that can then be opened by fiona.
+    """
+
+    def __init__(self, bytesbuf, **kwds):
+        """Takes a buffer of bytes whose contents are something we'd like
+        to open with Fiona and maps it to a virtual file.
+
+        """
+        self._closed = True
+
+        if not isinstance(bytesbuf, bytes):
+            raise ValueError("input buffer must be bytes")
+
+        # Hold a reference to the buffer, as bad things will happen if
+        # it is garbage collected while in use.
+        self.bytesbuf = bytesbuf
+
+        # Map the buffer to a file. If the buffer contains a zipfile
+        # we take extra steps in naming the buffer and in opening
+        # it. If the requested driver is for GeoJSON, we append an
+        # appropriate extension to ensure the driver reads it.
+        filetype = get_filetype(self.bytesbuf)
+        ext = ""
+        if filetype == "zip":
+            ext = ".zip"
+        elif kwds.get("driver") == "GeoJSON":
+            ext = ".json"
+        self.virtual_file = buffer_to_virtual_file(self.bytesbuf, ext=ext)
+
+        # Instantiate the parent class.
+        super().__init__(self.virtual_file, vsi=filetype, **kwds)
+        self._closed = False
+
+    def close(self):
+        """Removes the virtual file associated with the class."""
+        super().close()
+        if self.virtual_file:
+            remove_virtual_file(self.virtual_file)
+            self.virtual_file = None
+        self.bytesbuf = None
+
+    def __repr__(self):
+        return "<{} BytesCollection '{}', mode '{}' at {}>".format(
+            self.closed and "closed" or "open",
+            self.path + ":" + str(self.name),
+            self.mode,
+            hex(id(self)),
+        )
diff --git a/.venv/lib/python3.12/site-packages/fiona/compat.py b/.venv/lib/python3.12/site-packages/fiona/compat.py
new file mode 100644
index 00000000..79f67cef
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/compat.py
@@ -0,0 +1,12 @@
+from collections import UserDict
+from collections.abc import Mapping
+
+DICT_TYPES = (dict, Mapping, UserDict)
+
+
+def strencode(instr, encoding="utf-8"):
+    try:
+        instr = instr.encode(encoding)
+    except (UnicodeDecodeError, AttributeError):
+        pass
+    return instr
diff --git a/.venv/lib/python3.12/site-packages/fiona/crs.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/fiona/crs.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 00000000..d986bc99
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/crs.cpython-312-x86_64-linux-gnu.so differ
diff --git a/.venv/lib/python3.12/site-packages/fiona/crs.pxd b/.venv/lib/python3.12/site-packages/fiona/crs.pxd
new file mode 100644
index 00000000..8c61a23a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/crs.pxd
@@ -0,0 +1,11 @@
+include "gdal.pxi"
+
+
+cdef class CRS:
+    cdef OGRSpatialReferenceH _osr
+    cdef object _data
+    cdef object _epsg
+    cdef object _wkt
+
+
+cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs)
diff --git a/.venv/lib/python3.12/site-packages/fiona/drvsupport.py b/.venv/lib/python3.12/site-packages/fiona/drvsupport.py
new file mode 100644
index 00000000..e4790558
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/drvsupport.py
@@ -0,0 +1,415 @@
+import os
+
+from fiona.env import Env
+from fiona._env import get_gdal_version_tuple
+
+
+_GDAL_VERSION = get_gdal_version_tuple()
+
+# Here is the list of available drivers as (name, modes) tuples. Currently,
+# we only expose the defaults (excepting FileGDB). Might still exclude a few
+# more of these after making a pass through the entries for each at
+# https://gdal.org/drivers/vector/index.html to screen out the multi-layer
+# formats.
+
+supported_drivers = dict(
+    [
+        # OGR Vector Formats
+        # Format Name  Code  Creation  Georeferencing  Compiled by default
+        # Aeronav FAA files  AeronavFAA  No  Yes  Yes
+        ("AeronavFAA", "r"),
+        # ESRI ArcObjects  ArcObjects  No  Yes  No, needs ESRI ArcObjects
+        # Arc/Info Binary Coverage  AVCBin  No  Yes  Yes
+        # multi-layer
+        # ("AVCBin", "r"),
+        # Arc/Info .E00 (ASCII) Coverage  AVCE00  No  Yes  Yes
+        # multi-layer
+        # ("AVCE00", "r"),
+        # Arc/Info Generate  ARCGEN  No  No  Yes
+        ("ARCGEN", "r"),
+        # Atlas BNA  BNA  Yes  No  Yes
+        ("BNA", "rw"),
+        # AutoCAD DWG  DWG  No  No  No
+        # AutoCAD DXF  DXF  Yes  No  Yes
+        ("DXF", "rw"),
+        # Comma Separated Value (.csv)  CSV  Yes  No  Yes
+        ("CSV", "raw"),
+        # CouchDB / GeoCouch  CouchDB  Yes  Yes  No, needs libcurl
+        # DODS/OPeNDAP  DODS  No  Yes  No, needs libdap
+        # EDIGEO  EDIGEO  No  Yes  Yes
+        # multi-layer? Hard to tell from the OGR docs
+        # ("EDIGEO", "r"),
+        # ElasticSearch  ElasticSearch  Yes (write-only)  -  No, needs libcurl
+        # ESRI FileGDB  FileGDB  Yes  Yes  No, needs FileGDB API library
+        # multi-layer
+        ("FileGDB", "raw"),
+        ("OpenFileGDB", "raw"),
+        # ESRI Personal GeoDatabase  PGeo  No  Yes  No, needs ODBC library
+        # ESRI ArcSDE  SDE  No  Yes  No, needs ESRI SDE
+        # ESRIJSON  ESRIJSON  No  Yes  Yes
+        ("ESRIJSON", "r"),
+        # ESRI Shapefile  ESRI Shapefile  Yes  Yes  Yes
+        ("ESRI Shapefile", "raw"),
+        # FMEObjects Gateway  FMEObjects Gateway  No  Yes  No, needs FME
+        ("FlatGeobuf", "raw"),
+        # GeoJSON  GeoJSON  Yes  Yes  Yes
+        ("GeoJSON", "raw"),
+        # GeoJSONSeq  GeoJSON sequences  Yes  Yes  Yes
+        ("GeoJSONSeq", "raw"),
+        # Géoconcept Export  Geoconcept  Yes  Yes  Yes
+        # multi-layers
+        # ("Geoconcept", "raw"),
+        # Geomedia .mdb  Geomedia  No  No  No, needs ODBC library
+        # GeoPackage  GPKG  Yes  Yes  No, needs libsqlite3
+        ("GPKG", "raw"),
+        # GeoRSS  GeoRSS  Yes  Yes  Yes (read support needs libexpat)
+        # Google Fusion Tables  GFT  Yes  Yes  No, needs libcurl
+        # GML  GML  Yes  Yes  Yes (read support needs Xerces or libexpat)
+        ("GML", "rw"),
+        # GMT  GMT  Yes  Yes  Yes
+        ("GMT", "rw"),
+        # GMT renamed to OGR_GMT for GDAL 2.x
+        ("OGR_GMT", "rw"),
+        # GPSBabel  GPSBabel  Yes  Yes  Yes (needs GPSBabel and GPX driver)
+        # GPX  GPX  Yes  Yes  Yes (read support needs libexpat)
+        ("GPX", "rw"),
+        # GRASS  GRASS  No  Yes  No, needs libgrass
+        # GPSTrackMaker (.gtm, .gtz)  GPSTrackMaker  Yes  Yes  Yes
+        # ("GPSTrackMaker", "rw"),
+        # Hydrographic Transfer Format  HTF  No  Yes  Yes
+        # TODO: Fiona is not ready for multi-layer formats: ("HTF", "r"),
+        # Idrisi Vector (.VCT)  Idrisi  No  Yes  Yes
+        ("Idrisi", "r"),
+        # Informix DataBlade  IDB  Yes  Yes  No, needs Informix DataBlade
+        # INTERLIS  "Interlis 1" and "Interlis 2"  Yes  Yes  No, needs Xerces (INTERLIS model reading needs ili2c.jar)
+        # INGRES  INGRES  Yes  No  No, needs INGRESS
+        # KML  KML  Yes  Yes  Yes (read support needs libexpat)
+        # LIBKML  LIBKML  Yes  Yes  No, needs libkml
+        # Mapinfo File  MapInfo File  Yes  Yes  Yes
+        ("MapInfo File", "raw"),
+        # Microstation DGN  DGN  Yes  No  Yes
+        ("DGN", "raw"),
+        # Access MDB (PGeo and Geomedia capable)  MDB  No  Yes  No, needs JDK/JRE
+        # Memory  Memory  Yes  Yes  Yes
+        # MySQL  MySQL  No  Yes  No, needs MySQL library
+        # NAS - ALKIS  NAS  No  Yes  No, needs Xerces
+        # Oracle Spatial  OCI  Yes  Yes  No, needs OCI library
+        # ODBC  ODBC  No  Yes  No, needs ODBC library
+        # MS SQL Spatial  MSSQLSpatial  Yes  Yes  No, needs ODBC library
+        # Open Document Spreadsheet  ODS  Yes  No  No, needs libexpat
+        # OGDI Vectors (VPF, VMAP, DCW)  OGDI  No  Yes  No, needs OGDI library
+        # OpenAir  OpenAir  No  Yes  Yes
+        # multi-layer
+        # ("OpenAir", "r"),
+        # (Geo)Parquet
+        ("Parquet", "rw"),
+        # PCI Geomatics Database File  PCIDSK  No  No  Yes, using internal PCIDSK SDK (from GDAL 1.7.0)
+        ("PCIDSK", "raw"),
+        # PDS  PDS  No  Yes  Yes
+        ("PDS", "r"),
+        # PDS renamed to OGR_PDS for GDAL 2.x
+        ("OGR_PDS", "r"),
+        # PGDump  PostgreSQL SQL dump  Yes  Yes  Yes
+        # PostgreSQL/PostGIS  PostgreSQL/PostGIS  Yes  Yes  No, needs PostgreSQL client library (libpq)
+        # EPIInfo .REC  REC  No  No  Yes
+        # S-57 (ENC)  S57  No  Yes  Yes
+        # multi-layer
+        ("S57", "r"),
+        # SDTS  SDTS  No  Yes  Yes
+        # multi-layer
+        # ("SDTS", "r"),
+        # SEG-P1 / UKOOA P1/90  SEGUKOOA  No  Yes  Yes
+        # multi-layers
+        # ("SEGUKOOA", "r"),
+        # SEG-Y  SEGY  No  No  Yes
+        ("SEGY", "r"),
+        # Norwegian SOSI Standard  SOSI  No  Yes  No, needs FYBA library
+        # SQLite/SpatiaLite  SQLite  Yes  Yes  No, needs libsqlite3 or libspatialite
+        ("SQLite", "raw"),
+        # SUA  SUA  No  Yes  Yes
+        ("SUA", "r"),
+        # SVG  SVG  No  Yes  No, needs libexpat
+        ("TileDB", "raw"),
+        # TopoJSON  TopoJSON  No  Yes  Yes
+        ("TopoJSON", "r"),
+        # UK .NTF  UK. NTF  No  Yes  Yes
+        # multi-layer
+        # ("UK. NTF", "r"),
+        # U.S. Census TIGER/Line  TIGER  No  Yes  Yes
+        # multi-layer
+        # ("TIGER", "r"),
+        # VFK data  VFK  No  Yes  Yes
+        # multi-layer
+        # ("VFK", "r"),
+        # VRT - Virtual Datasource  VRT  No  Yes  Yes
+        # multi-layer
+        # ("VRT", "r"),
+        # OGC WFS (Web Feature Service)  WFS  Yes  Yes  No, needs libcurl
+        # MS Excel format  XLS  No  No  No, needs libfreexl
+        # Office Open XML spreadsheet  XLSX  Yes  No  No, needs libexpat
+        # X-Plane/Flighgear aeronautical data  XPLANE  No  Yes  Yes
+        # multi-layer
+        # ("XPLANE", "r")
+    ]
+)
+
+# Minimal gdal version for different modes
+driver_mode_mingdal = {
+    "r": {"GPKG": (1, 11, 0), "GeoJSONSeq": (2, 4, 0), "FlatGeobuf": (3, 1, 0)},
+    "w": {
+        "GPKG": (1, 11, 0),
+        "PCIDSK": (2, 0, 0),
+        "GeoJSONSeq": (2, 4, 0),
+        "FlatGeobuf": (3, 1, 3),
+        "OpenFileGDB": (3, 6, 0),
+    },
+    "a": {
+        "GPKG": (1, 11, 0),
+        "PCIDSK": (2, 0, 0),
+        "GeoJSON": (2, 1, 0),
+        "GeoJSONSeq": (3, 6, 0),
+        "MapInfo File": (2, 0, 0),
+        "FlatGeobuf": (3, 5, 1),
+        "OpenFileGDB": (3, 6, 0),
+    },
+}
+
+
+def _driver_supports_mode(driver, mode):
+    """ Returns True if driver supports mode, False otherwise
+
+    Note: this function is not part of Fiona's public API.
+    """
+    if driver not in supported_drivers:
+        return False
+    if mode not in supported_drivers[driver]:
+        return False
+    if driver in driver_mode_mingdal[mode]:
+        if _GDAL_VERSION < driver_mode_mingdal[mode][driver]:
+            return False
+    return True
+
+
+# Remove drivers from the supported_drivers dictionary that the
+# machine's installation of OGR does not support, due to how it was
+# compiled. OGR may not have optional libraries compiled or installed.
+def _filter_supported_drivers():
+    global supported_drivers
+
+    with Env() as gdalenv:
+        ogrdrv_names = gdalenv.drivers().keys()
+        supported_drivers_copy = supported_drivers.copy()
+        for drv in supported_drivers.keys():
+            if drv not in ogrdrv_names:
+                del supported_drivers_copy[drv]
+
+    supported_drivers = supported_drivers_copy
+
+
+_filter_supported_drivers()
+
+
+def vector_driver_extensions():
+    """
+    Returns
+    -------
+    dict:
+        Map of extensions to the driver.
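+
+    For example (the exact mapping depends on the installed GDAL build):
+
+    >>> vector_driver_extensions()["shp"]  # doctest: +SKIP
+    'ESRI Shapefile'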
+    """
+    from fiona.meta import extensions  # prevent circular import
+
+    extension_to_driver = {}
+    for drv, modes in supported_drivers.items():
+        # update extensions based on driver support
+        for extension in extensions(drv) or ():
+            if "w" in modes:
+                extension_to_driver[extension] = extension_to_driver.get(extension, drv)
+    return extension_to_driver
+
+
+def driver_from_extension(path):
+    """
+    Attempt to auto-detect driver based on the extension.
+
+    Parameters
+    ----------
+    path: str or pathlike object
+        The path to the dataset to write with.
+
+    Returns
+    -------
+    str:
+        The name of the driver for the extension.
+    """
+    try:
+        # in case the path is a file handle
+        # or a parsed path
+        path = path.name
+    except AttributeError:
+        pass
+
+    driver_extensions = vector_driver_extensions()
+
+    try:
+        return driver_extensions[os.path.splitext(path)[-1].lstrip(".").lower()]
+    except KeyError:
+        raise ValueError("Unable to detect driver. Please specify driver.")
+
+
+# _driver_converts_to_str contains field type, driver combinations that
+# are silently converted to string. None: the field type is always
+# converted to str. (2, 0, 0): starting from gdal 2.0 the field type is
+# not converted to string.
+_driver_converts_to_str = {
+    'time': {
+        'CSV': None,
+        'PCIDSK': None,
+        'GeoJSON': (2, 0, 0),
+        'GPKG': None,
+        'GMT': None,
+        'OGR_GMT': None
+    },
+    'datetime': {
+        'CSV': None,
+        'PCIDSK': None,
+        'GeoJSON': (2, 0, 0),
+        'GML': (3, 1, 0),
+    },
+    'date': {
+        'CSV': None,
+        'PCIDSK': None,
+        'GeoJSON': (2, 0, 0),
+        'GMT': None,
+        'OGR_GMT': None,
+        'GML': (3, 1, 0),
+    }
+}
+
+
+def _driver_converts_field_type_silently_to_str(driver, field_type):
+    """ Returns True if the driver converts the field_type silently to str, False otherwise
+
+    Note: this function is not part of Fiona's public API.
+    """
+    if field_type in _driver_converts_to_str and driver in _driver_converts_to_str[field_type]:
+        if _driver_converts_to_str[field_type][driver] is None:
+            return True
+        elif _GDAL_VERSION < _driver_converts_to_str[field_type][driver]:
+            return True
+    return False
+
+
+# None: field type is never supported, (2, 0, 0): field type is supported starting with gdal 2.0
+_driver_field_type_unsupported = {
+    "time": {
+        "ESRI Shapefile": None,
+        "GPKG": (2, 0, 0),
+        "GPX": None,
+        "GPSTrackMaker": None,
+        "GML": (3, 1, 0),
+        "DGN": None,
+        "BNA": None,
+        "DXF": None,
+        "PCIDSK": (2, 1, 0),
+        "FileGDB": (3, 5, 0),
+        "FlatGeobuf": None,
+        "OpenFileGDB": None,
+    },
+    'datetime': {
+        'ESRI Shapefile': None,
+        'GPKG': (2, 0, 0),
+        'DGN': None,
+        'BNA': None,
+        'DXF': None,
+        'PCIDSK': (2, 1, 0)
+    },
+    "date": {
+        "GPX": None,
+        "GPSTrackMaker": None,
+        "DGN": None,
+        "BNA": None,
+        "DXF": None,
+        "PCIDSK": (2, 1, 0),
+        "FileGDB": (3, 5, 0),
+        "FlatGeobuf": None,
+        "OpenFileGDB": None,
+    },
+}
+
+
+def _driver_supports_field(driver, field_type):
+    """ Returns True if the driver supports the field_type, False otherwise
+
+    Note: this function is not part of Fiona's public API.
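+
+    For example, shapefiles have no native datetime field type:
+
+    >>> _driver_supports_field("ESRI Shapefile", "datetime")
+    False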
+    """
+    if field_type in _driver_field_type_unsupported and driver in _driver_field_type_unsupported[field_type]:
+        if _driver_field_type_unsupported[field_type][driver] is None:
+            return False
+        elif _GDAL_VERSION < _driver_field_type_unsupported[field_type][driver]:
+            return False
+
+    return True
+
+
+# None: field type never supports timezones, (2, 0, 0): field type supports timezones with GDAL 2.0.0
+_drivers_not_supporting_timezones = {
+    'datetime': {
+        'MapInfo File': None,
+        'GPKG': (3, 1, 0),
+        'GPSTrackMaker': (3, 1, 1),
+        'FileGDB': None,
+        'SQLite': (2, 4, 0)
+    },
+    "time": {
+        "MapInfo File": None,
+        "GPKG": None,
+        "GPSTrackMaker": None,
+        "GeoJSON": None,
+        "GeoJSONSeq": None,
+        "GML": None,
+        "CSV": None,
+        "GMT": None,
+        "OGR_GMT": None,
+        "SQLite": None,
+    },
+}
+
+
+def _driver_supports_timezones(driver, field_type):
+    """ Returns True if the driver supports timezones for field_type, False otherwise
+
+    Note: this function is not part of Fiona's public API.
+    """
+    if field_type in _drivers_not_supporting_timezones and driver in _drivers_not_supporting_timezones[field_type]:
+        if _drivers_not_supporting_timezones[field_type][driver] is None:
+            return False
+        elif _GDAL_VERSION < _drivers_not_supporting_timezones[field_type][driver]:
+            return False
+    return True
+
+
+# None: driver never supports milliseconds, (2, 0, 0): driver supports milliseconds with GDAL 2.0.0
+_drivers_not_supporting_milliseconds = {
+    "GPSTrackMaker": None,
+    "FileGDB": None,
+    "OpenFileGDB": None,
+}
+
+
+def _driver_supports_milliseconds(driver):
+    """ Returns True if the driver supports milliseconds, False otherwise
+
+    Note: this function is not part of Fiona's public API.
+    """
+    # GDAL 2.0 introduced support for milliseconds
+    if _GDAL_VERSION.major < 2:
+        return False
+
+    if driver in _drivers_not_supporting_milliseconds:
+        if _drivers_not_supporting_milliseconds[driver] is None:
+            return False
+        elif _GDAL_VERSION < _drivers_not_supporting_milliseconds[driver]:
+            return False
+
+    return True
diff --git a/.venv/lib/python3.12/site-packages/fiona/enums.py b/.venv/lib/python3.12/site-packages/fiona/enums.py
new file mode 100644
index 00000000..fab2584f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/enums.py
@@ -0,0 +1,31 @@
+"""Enumerations."""
+
+from enum import Enum
+
+
+class WktVersion(Enum):
+    """
+    .. versionadded:: 1.9.0
+
+    Supported CRS WKT string versions.
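+
+    The 2019 revision is accepted as an alias of the stored 2018 value
+    (see ``_missing_`` below):
+
+    >>> WktVersion("WKT2_2019") is WktVersion.WKT2_2019
+    True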
+ """ + + #: WKT Version 2 from 2015 + WKT2_2015 = "WKT2_2015" + #: Alias for latest WKT Version 2 + WKT2 = "WKT2" + #: WKT Version 2 from 2019 + WKT2_2019 = "WKT2_2018" + #: WKT Version 1 GDAL Style + WKT1_GDAL = "WKT1_GDAL" + #: Alias for WKT Version 1 GDAL Style + WKT1 = "WKT1" + #: WKT Version 1 ESRI Style + WKT1_ESRI = "WKT1_ESRI" + + @classmethod + def _missing_(cls, value): + if value == "WKT2_2019": + # WKT2_2019 alias added in GDAL 3.2, use WKT2_2018 for compatibility + return WktVersion.WKT2_2019 + raise ValueError(f"Invalid value for WktVersion: {value}") diff --git a/.venv/lib/python3.12/site-packages/fiona/env.py b/.venv/lib/python3.12/site-packages/fiona/env.py new file mode 100644 index 00000000..47c5e63d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/env.py @@ -0,0 +1,690 @@ +"""Fiona's GDAL/AWS environment""" + +from functools import wraps, total_ordering +from inspect import getfullargspec +import logging +import os +import re +import threading +import warnings + +import attr + +from fiona._env import ( + GDALDataFinder, + GDALEnv, + PROJDataFinder, + calc_gdal_version_num, + get_gdal_config, + get_gdal_release_name, + get_gdal_version_num, + set_gdal_config, + set_proj_data_search_path, +) +from fiona.errors import EnvError, FionaDeprecationWarning, GDALVersionError +from fiona.session import Session, DummySession + + +class ThreadEnv(threading.local): + def __init__(self): + self._env = None # Initialises in each thread + + # When the outermost 'fiona.Env()' executes '__enter__' it + # probes the GDAL environment to see if any of the supplied + # config options already exist, the assumption being that they + # were set with 'osgeo.gdal.SetConfigOption()' or possibly + # 'fiona.env.set_gdal_config()'. The discovered options are + # reinstated when the outermost Fiona environment exits. + # Without this check any environment options that are present in + # the GDAL environment and are also passed to 'fiona.Env()' + # will be unset when 'fiona.Env()' tears down, regardless of + # their value. For example: + # + # from osgeo import gdal import fiona + # + # gdal.SetConfigOption('key', 'value') + # with fiona.Env(key='something'): + # pass + # + # The config option 'key' would be unset when 'Env()' exits. + # A more comprehensive solution would also leverage + # https://trac.osgeo.org/gdal/changeset/37273 but this gets + # Fiona + older versions of GDAL halfway there. One major + # assumption is that environment variables are not set directly + # with 'osgeo.gdal.SetConfigOption()' OR + # 'fiona.env.set_gdal_config()' inside of a 'fiona.Env()'. + self._discovered_options = None + + +local = ThreadEnv() + +log = logging.getLogger(__name__) + + +class Env: + """Abstraction for GDAL and AWS configuration + + The GDAL library is stateful: it has a registry of format drivers, + an error stack, and dozens of configuration options. + + Fiona's approach to working with GDAL is to wrap all the state + up using a Python context manager (see PEP 343, + https://www.python.org/dev/peps/pep-0343/). When the context is + entered GDAL drivers are registered, error handlers are + configured, and configuration options are set. When the context + is exited, drivers are removed from the registry and other + configurations are removed. + + Example: + + with fiona.Env(GDAL_CACHEMAX=512) as env: + # All drivers are registered, GDAL's raster block cache + # size is set to 512MB. + # Commence processing... + ... + # End of processing. 
+
+    # At this point, configuration options are set to their
+    # previous (possibly unset) values.
+
+    A boto3 session or boto3 session constructor arguments
+    `aws_access_key_id`, `aws_secret_access_key`, `aws_session_token`
+    may be passed to Env's constructor. In the latter case, a session
+    will be created as soon as needed. AWS credentials are configured
+    for GDAL as needed.
+    """
+
+    @classmethod
+    def default_options(cls):
+        """Default configuration options
+
+        Parameters
+        ----------
+        None
+
+        Returns
+        -------
+        dict
+
+        """
+        return {
+            "CHECK_WITH_INVERT_PROJ": True,
+            "GTIFF_IMPLICIT_JPEG_OVR": False,
+            "FIONA_ENV": True,
+        }
+
+    def __init__(
+        self,
+        session=None,
+        aws_unsigned=False,
+        profile_name=None,
+        session_class=Session.aws_or_dummy,
+        **options
+    ):
+        """Create a new GDAL/AWS environment.
+        Note: this class is a context manager. GDAL isn't configured
+        until the context is entered via `with fiona.Env():`
+
+        Parameters
+        ----------
+        session : optional
+            A Session object.
+        aws_unsigned : bool, optional
+            Do not sign cloud requests.
+        profile_name : str, optional
+            A shared credentials profile name, as per boto3.
+        session_class : Session, optional
+            A sub-class of Session.
+        **options : optional
+            A mapping of GDAL configuration options, e.g.,
+            `CPL_DEBUG=True, CHECK_WITH_INVERT_PROJ=False`.
+
+        Returns
+        -------
+        Env
+
+        Notes
+        -----
+        We raise EnvError if the GDAL config options
+        AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY are given. AWS
+        credentials are handled exclusively by boto3.
+
+        Examples
+        --------
+        >>> with Env(CPL_DEBUG=True, CPL_CURL_VERBOSE=True):
+        ...     with fiona.open("zip+https://example.com/a.zip") as col:
+        ...         print(col.profile)
+
+        For access to secured cloud resources, a Fiona Session or a
+        foreign session object may be passed to the constructor.
+
+        >>> import boto3
+        >>> from fiona.session import AWSSession
+        >>> boto3_session = boto3.Session(...)
+        >>> with Env(AWSSession(boto3_session)):
+        ...     with fiona.open("zip+s3://example/a.zip") as col:
+        ...         print(col.profile)
+
+        """
+        aws_access_key_id = options.pop("aws_access_key_id", None)
+        # Warn deprecation in 1.9, remove in 2.0.
+        if aws_access_key_id:
+            warnings.warn(
+                "Passing abstract session keyword arguments is deprecated. "
+                "Pass a Fiona AWSSession object instead.",
+                FionaDeprecationWarning,
+            )
+
+        aws_secret_access_key = options.pop("aws_secret_access_key", None)
+        aws_session_token = options.pop("aws_session_token", None)
+        region_name = options.pop("region_name", None)
+
+        if not {"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"}.isdisjoint(options):
+            raise EnvError(
+                "GDAL's AWS config options can not be directly set. "
+                "AWS credentials are handled exclusively by boto3."
+            )
+
+        if session:
+            # Passing a session via keyword argument is the canonical
+            # way to configure access to secured cloud resources.
+            # Warn deprecation in 1.9, remove in 2.0.
+            if not isinstance(session, Session):
+                warnings.warn(
+                    "Passing a boto3 session is deprecated. 
Pass a Fiona AWSSession object instead.",
+                    FionaDeprecationWarning,
+                )
+                session = Session.aws_or_dummy(session=session)
+
+            self.session = session
+
+        elif aws_access_key_id or profile_name or aws_unsigned:
+            self.session = Session.aws_or_dummy(
+                aws_access_key_id=aws_access_key_id,
+                aws_secret_access_key=aws_secret_access_key,
+                aws_session_token=aws_session_token,
+                region_name=region_name,
+                profile_name=profile_name,
+                aws_unsigned=aws_unsigned,
+            )
+
+        elif {"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"}.issubset(os.environ.keys()):
+            self.session = Session.from_environ()
+
+        else:
+            self.session = DummySession()
+
+        self.options = options.copy()
+        self.context_options = {}
+
+    @classmethod
+    def from_defaults(cls, *args, **kwargs):
+        """Create an environment with default config options
+
+        Parameters
+        ----------
+        args : optional
+            Positional arguments for Env()
+        kwargs : optional
+            Keyword arguments for Env()
+
+        Returns
+        -------
+        Env
+
+        Notes
+        -----
+        The items in kwargs will be overlaid on the default values.
+
+        """
+        options = Env.default_options()
+        options.update(**kwargs)
+        return Env(*args, **options)
+
+    def credentialize(self):
+        """Get credentials and configure GDAL
+
+        Note well: this method is a no-op if the GDAL environment
+        already has credentials, unless session is not None.
+
+        Returns
+        -------
+        None
+
+        """
+        cred_opts = self.session.get_credential_options()
+        self.options.update(**cred_opts)
+        setenv(**cred_opts)
+
+    def drivers(self):
+        """Return a mapping of registered drivers."""
+        return local._env.drivers()
+
+    def _dump_open_datasets(self):
+        """Writes descriptions of open datasets to stderr
+
+        For debugging and testing purposes.
+        """
+        return local._env._dump_open_datasets()
+
+    def __enter__(self):
+        if local._env is None:
+            self._has_parent_env = False
+
+            # See note directly above where _discovered_options is globally
+            # defined. This MUST happen before calling 'defenv()'.
+            local._discovered_options = {}
+            # Don't want to reinstate the "FIONA_ENV" option.
+            probe_env = {k for k in self.options.keys() if k != "FIONA_ENV"}
+            for key in probe_env:
+                val = get_gdal_config(key, normalize=False)
+                if val is not None:
+                    local._discovered_options[key] = val
+
+            defenv(**self.options)
+            self.context_options = {}
+        else:
+            self._has_parent_env = True
+            self.context_options = getenv()
+            setenv(**self.options)
+
+        self.credentialize()
+        return self
+
+    def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
+        delenv()
+        if self._has_parent_env:
+            defenv()
+            setenv(**self.context_options)
+        else:
+            # See note directly above where _discovered_options is globally
+            # defined.
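+            # Each discovered option is written back so that settings made
+            # outside this Env (e.g. with osgeo.gdal.SetConfigOption())
+            # survive the teardown.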
+ while local._discovered_options: + key, val = local._discovered_options.popitem() + set_gdal_config(key, val, normalize=False) + + local._discovered_options = None + + +def defenv(**options): + """Create a default environment if necessary.""" + if not local._env: + local._env = GDALEnv() + local._env.update_config_options(**options) + + local._env.start() + + +def getenv(): + """Get a mapping of current options.""" + if not local._env: + raise EnvError("No GDAL environment exists") + else: + return local._env.options.copy() + + +def hasenv(): + return bool(local._env) + + +def setenv(**options): + """Set options in the existing environment.""" + if not local._env: + raise EnvError("No GDAL environment exists") + else: + local._env.update_config_options(**options) + + +def hascreds(): + warnings.warn("Please use Env.session.hascreds() instead", FionaDeprecationWarning) + return local._env is not None and all( + key in local._env.get_config_options() + for key in ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"] + ) + + +def delenv(): + """Delete options in the existing environment.""" + if not local._env: + raise EnvError("No GDAL environment exists") + else: + local._env.clear_config_options() + + local._env.stop() + local._env = None + + +class NullContextManager: + def __init__(self): + pass + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + +def env_ctx_if_needed(): + """Return an Env if one does not exist + + Returns + ------- + Env or a do-nothing context manager + + """ + if local._env: + return NullContextManager() + else: + return Env.from_defaults() + + +def ensure_env(f): + """A decorator that ensures an env exists before a function + calls any GDAL C functions. + + Parameters + ---------- + f : function + A function. + + Returns + ------- + A function wrapper. + + Notes + ----- + If there is already an existing environment, the wrapper does + nothing and immediately calls f with the given arguments. + + """ + + @wraps(f) + def wrapper(*args, **kwargs): + if local._env: + return f(*args, **kwargs) + else: + with Env.from_defaults(): + return f(*args, **kwargs) + + return wrapper + + +def ensure_env_with_credentials(f): + """Ensures a config environment exists and has credentials. + + Parameters + ---------- + f : function + A function. + + Returns + ------- + A function wrapper. + + Notes + ----- + The function wrapper checks the first argument of f and + credentializes the environment if the first argument is a URI with + scheme "s3". + + If there is already an existing environment, the wrapper does + nothing and immediately calls f with the given arguments. + + """ + + @wraps(f) + def wrapper(*args, **kwds): + if local._env: + env_ctor = Env + else: + env_ctor = Env.from_defaults + + fp_arg = kwds.get("fp", None) or args[0] + + if isinstance(fp_arg, str): + session_cls = Session.cls_from_path(fp_arg) + + if local._env and session_cls.hascreds(getenv()): + session_cls = DummySession + + session = session_cls() + + else: + session = DummySession() + + with env_ctor(session=session): + return f(*args, **kwds) + + return wrapper + + +@attr.s(slots=True) +@total_ordering +class GDALVersion: + """Convenience class for obtaining GDAL major and minor version + components and comparing between versions. This is highly + simplistic and assumes a very normal numbering scheme for versions + and ignores everything except the major and minor components. 
+
+    """
+
+    major = attr.ib(default=0, validator=attr.validators.instance_of(int))
+    minor = attr.ib(default=0, validator=attr.validators.instance_of(int))
+
+    def __eq__(self, other):
+        return (self.major, self.minor) == (other.major, other.minor)
+
+    def __lt__(self, other):
+        return (self.major, self.minor) < (other.major, other.minor)
+
+    def __repr__(self):
+        return f"GDALVersion(major={self.major}, minor={self.minor})"
+
+    def __str__(self):
+        return f"{self.major}.{self.minor}"
+
+    @classmethod
+    def parse(cls, input):
+        """
+        Parses input tuple or string to GDALVersion. If input is a GDALVersion
+        instance, it is returned.
+
+        Parameters
+        ----------
+        input: tuple of (major, minor), string, or instance of GDALVersion
+
+        Returns
+        -------
+        GDALVersion instance
+
+        """
+        if isinstance(input, cls):
+            return input
+        if isinstance(input, tuple):
+            return cls(*input)
+        elif isinstance(input, str):
+            # Extract major and minor version components.
+            # alpha, beta, rc suffixes ignored
+            match = re.search(r"^\d+\.\d+", input)
+            if not match:
+                raise ValueError(
+                    "value does not appear to be a valid GDAL version "
+                    f"number: {input}"
+                )
+            major, minor = (int(c) for c in match.group().split("."))
+            return cls(major=major, minor=minor)
+
+        raise TypeError("GDALVersion can only be parsed from a string or tuple")
+
+    @classmethod
+    def runtime(cls):
+        """Return GDALVersion of current GDAL runtime"""
+        return cls.parse(get_gdal_release_name())
+
+    def at_least(self, other):
+        other = self.__class__.parse(other)
+        return self >= other
+
+
+def require_gdal_version(
+    version, param=None, values=None, is_max_version=False, reason=""
+):
+    """A decorator that ensures the called function or parameters are supported
+    by the runtime version of GDAL. Raises GDALVersionError if conditions
+    are not met.
+
+    Examples:
+    \b
+    @require_gdal_version('2.2')
+    def some_func():
+
+    calling `some_func` with a runtime version of GDAL that is < 2.2 raises a
+    GDALVersionError.
+
+    \b
+    @require_gdal_version('2.2', param='foo')
+    def some_func(foo='bar'):
+
+    calling `some_func` with parameter `foo` of any value on GDAL < 2.2 raises
+    a GDALVersionError.
+
+    \b
+    @require_gdal_version('2.2', param='foo', values=('bar',))
+    def some_func(foo=None):
+
+    calling `some_func` with parameter `foo` and value `bar` on GDAL < 2.2
+    raises a GDALVersionError.
+
+
+    Parameters
+    ------------
+    version: tuple, string, or GDALVersion
+    param: string (optional, default: None)
+        If `values` are absent, then all use of this parameter with a value
+        other than the default value requires at least GDAL `version`.
+    values: tuple, list, or set (optional, default: None)
+        Contains values that require at least GDAL `version`. `param`
+        is required for `values`.
+    is_max_version: bool (optional, default: False)
+        If `True`, indicates that the version provided is the maximum version
+        allowed, instead of requiring at least that version.
+    reason: string (optional: default: '')
+        Custom error message presented to the user in addition to the
+        message about the GDAL version. Use this to provide the user
+        with an explanation of what changed, if necessary.
+ + Returns + --------- + wrapped function + + """ + if values is not None: + if param is None: + raise ValueError("require_gdal_version: param must be provided with values") + + if not isinstance(values, (tuple, list, set)): + raise ValueError( + "require_gdal_version: values must be a tuple, list, or set" + ) + + version = GDALVersion.parse(version) + runtime = GDALVersion.runtime() + inequality = ">=" if runtime < version else "<=" + reason = f"\n{reason}" if reason else reason + + def decorator(f): + @wraps(f) + def wrapper(*args, **kwds): + if (runtime < version and not is_max_version) or ( + is_max_version and runtime > version + ): + + if param is None: + raise GDALVersionError( + f"GDAL version must be {inequality} {version}{reason}" + ) + + # normalize args and kwds to dict + argspec = getfullargspec(f) + full_kwds = kwds.copy() + + if argspec.args: + full_kwds.update(dict(zip(argspec.args[: len(args)], args))) + + if argspec.defaults: + defaults = dict( + zip(reversed(argspec.args), reversed(argspec.defaults)) + ) + else: + defaults = {} + + if param in full_kwds: + if values is None: + if param not in defaults or ( + full_kwds[param] != defaults[param] + ): + raise GDALVersionError( + f'usage of parameter "{param}" requires ' + f"GDAL {inequality} {version}{reason}" + ) + + elif full_kwds[param] in values: + raise GDALVersionError( + f'parameter "{param}={full_kwds[param]}" requires ' + f"GDAL {inequality} {version}{reason}" + ) + + return f(*args, **kwds) + + return wrapper + + return decorator + + +# Patch the environment if needed, such as in the installed wheel case. + +if "GDAL_DATA" not in os.environ: + + path = GDALDataFinder().search_wheel() + + if path: + log.debug("GDAL data found in package: path=%r.", path) + set_gdal_config("GDAL_DATA", path) + + # See https://github.com/mapbox/rasterio/issues/1631. + elif GDALDataFinder().find_file("header.dxf"): + log.debug("GDAL data files are available at built-in paths.") + + else: + path = GDALDataFinder().search() + + if path: + set_gdal_config("GDAL_DATA", path) + log.debug("GDAL data found in other locations: path=%r.", path) + +if 'PROJ_DATA' in os.environ: + # PROJ 9.1+ + path = os.environ["PROJ_DATA"] + set_proj_data_search_path(path) + +elif "PROJ_LIB" in os.environ: + # PROJ < 9.1 + path = os.environ["PROJ_LIB"] + set_proj_data_search_path(path) + +elif PROJDataFinder().search_wheel(): + path = PROJDataFinder().search_wheel() + log.debug("PROJ data found in package: path=%r.", path) + set_proj_data_search_path(path) + +# See https://github.com/mapbox/rasterio/issues/1631. +elif PROJDataFinder().has_data(): + log.debug("PROJ data files are available at built-in paths.") + +else: + path = PROJDataFinder().search() + + if path: + log.debug("PROJ data found in other locations: path=%r.", path) + set_proj_data_search_path(path) diff --git a/.venv/lib/python3.12/site-packages/fiona/errors.py b/.venv/lib/python3.12/site-packages/fiona/errors.py new file mode 100644 index 00000000..8f857d0e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/errors.py @@ -0,0 +1,95 @@ +# Errors. 
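The version helpers above can be exercised directly. A minimal sketch, assuming a GDAL 3.x runtime; `some_func` and its `foo` parameter are hypothetical:

    from fiona.env import GDALVersion, require_gdal_version

    # parse() keeps only the major and minor components ("3.8.4" -> 3.8).
    v = GDALVersion.parse("3.8.4")
    assert str(v) == "3.8" and v.at_least("2.2")

    # Hypothetical use of the decorator: calling some_func with a
    # non-default `foo` on a GDAL runtime older than 2.2 raises
    # GDALVersionError at call time.
    @require_gdal_version("2.2", param="foo")
    def some_func(foo=None):
        return foo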
+
+
+class FionaError(Exception):
+    """Base Fiona error"""
+
+
+class FionaValueError(FionaError, ValueError):
+    """Fiona-specific value errors"""
+
+
+class AttributeFilterError(FionaValueError):
+    """Error processing SQL WHERE clause with the dataset."""
+
+
+class DriverError(FionaValueError):
+    """Encapsulates unsupported driver and driver mode errors."""
+
+
+class SchemaError(FionaValueError):
+    """When a schema mapping has no properties or no geometry."""
+
+
+class CRSError(FionaValueError):
+    """When a crs mapping has neither init nor proj items."""
+
+
+class UnsupportedOperation(FionaError):
+    """Raised when reading from a file opened in 'w' mode"""
+
+
+class DataIOError(OSError):
+    """IO errors involving driver registration or availability."""
+
+
+class DriverIOError(OSError):
+    """A format specific driver error."""
+
+
+class DriverSupportError(DriverIOError):
+    """Driver does not support schema"""
+
+
+class DatasetDeleteError(OSError):
+    """Failure to delete a dataset"""
+
+
+class FieldNameEncodeError(UnicodeEncodeError):
+    """Failure to encode a field name."""
+
+
+class UnsupportedGeometryTypeError(KeyError):
+    """When an OGR geometry type isn't supported by Fiona."""
+
+
+class GeometryTypeValidationError(FionaValueError):
+    """Tried to write a geometry type not specified in the schema"""
+
+
+class TransactionError(RuntimeError):
+    """Failure relating to GDAL transactions"""
+
+
+class EnvError(FionaError):
+    """Environment errors"""
+
+
+class GDALVersionError(FionaError):
+    """Raised if the runtime version of GDAL does not meet the required
+    version of GDAL.
+    """
+
+
+class TransformError(FionaError):
+    """Raised if a coordinate transformation fails."""
+
+
+class OpenerRegistrationError(FionaError):
+    """Raised when a Python file opener cannot be registered."""
+
+
+class PathError(FionaError):
+    """Raised when a dataset path is malformed or invalid"""
+
+
+class FionaDeprecationWarning(DeprecationWarning):
+    """A warning about deprecation of Fiona features"""
+
+
+class FeatureWarning(UserWarning):
+    """A warning about serialization of a feature"""
+
+
+class ReduceError(FionaError):
+    """Raised when a reduce operation fails."""
diff --git a/.venv/lib/python3.12/site-packages/fiona/features.py b/.venv/lib/python3.12/site-packages/fiona/features.py
new file mode 100644
index 00000000..4afd1f3b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/features.py
@@ -0,0 +1,316 @@
+"""Operations on GeoJSON feature and geometry objects."""
+
+from collections import UserDict
+from functools import wraps
+import itertools
+from typing import Generator, Iterable, Mapping, Union
+
+from fiona.transform import transform_geom  # type: ignore
+import shapely  # type: ignore
+import shapely.ops  # type: ignore
+from shapely.geometry import mapping, shape  # type: ignore
+from shapely.geometry.base import BaseGeometry, BaseMultipartGeometry  # type: ignore
+
+from .errors import ReduceError
+from ._vendor import snuggs
+
+# Patch snuggs's func_map, extending it with Python builtins, geometry
+# methods and attributes, and functions exported in the shapely module
+# (such as set_precision).
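Once `snuggs.func_map` is patched with the `FuncMapper` defined below, names in pipeline expressions resolve to these helpers and to shapely functions. A minimal sketch (importing `fiona.features` is what triggers the patch):

    from shapely.geometry import Point
    from fiona._vendor import snuggs
    import fiona.features  # noqa: F401  -- installs the patched func_map

    # "g" is bound to a geometry; vertex_count is one of the helpers below.
    print(snuggs.eval("(vertex_count g)", g=Point(0, 0)))  # 1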
+
+
+class FuncMapper(UserDict, Mapping):
+    """Resolves functions from names in pipeline expressions."""
+
+    def __getitem__(self, key):
+        """Get a function by its name."""
+        if key in self.data:
+            return self.data[key]
+        elif key in __builtins__ and not key.startswith("__"):
+            return __builtins__[key]
+        elif key in dir(shapely):
+            return lambda g, *args, **kwargs: getattr(shapely, key)(g, *args, **kwargs)
+        elif key in dir(shapely.ops):
+            return lambda g, *args, **kwargs: getattr(shapely.ops, key)(
+                g, *args, **kwargs
+            )
+        else:
+            return (
+                lambda g, *args, **kwargs: getattr(g, key)(*args, **kwargs)
+                if callable(getattr(g, key))
+                else getattr(g, key)
+            )
+
+
+def collect(geoms: Iterable) -> object:
+    """Turn a sequence of geometries into a single GeometryCollection.
+
+    Parameters
+    ----------
+    geoms : Iterable
+        A sequence of geometry objects.
+
+    Returns
+    -------
+    Geometry
+
+    """
+    return shapely.GeometryCollection(list(geoms))
+
+
+def dump(geom: Union[BaseGeometry, BaseMultipartGeometry]) -> Generator:
+    """Get the individual parts of a geometry object.
+
+    If the given geometry object has a single part, e.g., is an
+    instance of LineString, Point, or Polygon, this function yields a
+    single result, the geometry itself.
+
+    Parameters
+    ----------
+    geom : a shapely geometry object.
+
+    Yields
+    ------
+    A shapely geometry object.
+
+    """
+    if hasattr(geom, "geoms"):
+        parts = geom.geoms
+    else:
+        parts = [geom]
+    for part in parts:
+        yield part
+
+
+def identity(obj: object) -> object:
+    """Get back the given argument.
+
+    To help in making expression lists, where the first item must be a
+    callable object.
+
+    Parameters
+    ----------
+    obj : object
+
+    Returns
+    -------
+    obj
+
+    """
+    return obj
+
+
+def vertex_count(obj: object) -> int:
+    """Count the vertices of a GeoJSON-like geometry object.
+
+    Parameters
+    ----------
+    obj: object
+        A GeoJSON-like mapping or an object that provides
+        __geo_interface__.
+
+    Returns
+    -------
+    int
+
+    """
+    shp = shape(obj)
+    if hasattr(shp, "geoms"):
+        return sum(vertex_count(part) for part in shp.geoms)
+    elif hasattr(shp, "exterior"):
+        return vertex_count(shp.exterior) + sum(
+            vertex_count(ring) for ring in shp.interiors
+        )
+    else:
+        return len(shp.coords)
+
+
+def binary_projectable_property_wrapper(func):
+    """Project func's geometry args before computing a property.
+
+    Parameters
+    ----------
+    func : callable
+        Signature is func(geom1, geom2, *args, **kwargs)
+
+    Returns
+    -------
+    callable
+        Signature is func(geom1, geom2, projected=True, *args, **kwargs)
+
+    """
+
+    @wraps(func)
+    def wrapper(geom1, geom2, *args, projected=True, **kwargs):
+        if projected:
+            geom1 = shape(transform_geom("OGC:CRS84", "EPSG:6933", mapping(geom1)))
+            geom2 = shape(transform_geom("OGC:CRS84", "EPSG:6933", mapping(geom2)))
+
+        return func(geom1, geom2, *args, **kwargs)
+
+    return wrapper
+
+
+def unary_projectable_property_wrapper(func):
+    """Project func's geometry arg before computing a property.
+
+    Parameters
+    ----------
+    func : callable
+        Signature is func(geom1, *args, **kwargs)
+
+    Returns
+    -------
+    callable
+        Signature is func(geom1, projected=True, *args, **kwargs)
+
+    """
+
+    @wraps(func)
+    def wrapper(geom, *args, projected=True, **kwargs):
+        if projected:
+            geom = shape(transform_geom("OGC:CRS84", "EPSG:6933", mapping(geom)))
+
+        return func(geom, *args, **kwargs)
+
+    return wrapper
+
+
+def unary_projectable_constructive_wrapper(func):
+    """Project func's geometry arg before constructing a new geometry.
+
+    Parameters
+    ----------
+    func : callable
+        Signature is func(geom1, *args, **kwargs)
+
+    Returns
+    -------
+    callable
+        Signature is func(geom1, projected=True, *args, **kwargs)
+
+    """
+
+    @wraps(func)
+    def wrapper(geom, *args, projected=True, **kwargs):
+        if projected:
+            geom = shape(transform_geom("OGC:CRS84", "EPSG:6933", mapping(geom)))
+            product = func(geom, *args, **kwargs)
+            return shape(transform_geom("EPSG:6933", "OGC:CRS84", mapping(product)))
+        else:
+            return func(geom, *args, **kwargs)
+
+    return wrapper
+
+
+area = unary_projectable_property_wrapper(shapely.area)
+buffer = unary_projectable_constructive_wrapper(shapely.buffer)
+distance = binary_projectable_property_wrapper(shapely.distance)
+set_precision = unary_projectable_constructive_wrapper(shapely.set_precision)
+simplify = unary_projectable_constructive_wrapper(shapely.simplify)
+length = unary_projectable_property_wrapper(shapely.length)
+
+snuggs.func_map = FuncMapper(
+    area=area,
+    buffer=buffer,
+    collect=collect,
+    distance=distance,
+    dump=dump,
+    identity=identity,
+    length=length,
+    simplify=simplify,
+    set_precision=set_precision,
+    vertex_count=vertex_count,
+    **{
+        k: getattr(itertools, k)
+        for k in dir(itertools)
+        if not k.startswith("_") and callable(getattr(itertools, k))
+    },
+)
+
+
+def map_feature(
+    expression: str, feature: Mapping, dump_parts: bool = False
+) -> Generator:
+    """Map a pipeline expression to a feature.
+
+    Yields one or more values.
+
+    Parameters
+    ----------
+    expression : str
+        A snuggs expression. The outermost parentheses are optional.
+    feature : dict
+        A Fiona feature object.
+    dump_parts : bool, optional (default: False)
+        If True, the parts of the feature's geometry are turned into
+        new features.
+
+    Yields
+    ------
+    object
+
+    """
+    if not (expression.startswith("(") and expression.endswith(")")):
+        expression = f"({expression})"
+
+    try:
+        geom = shape(feature.get("geometry", None))
+        if dump_parts and hasattr(geom, "geoms"):
+            parts = geom.geoms
+        else:
+            parts = [geom]
+    except (AttributeError, KeyError):
+        parts = [None]
+
+    for part in parts:
+        result = snuggs.eval(expression, g=part, f=feature)
+        if isinstance(result, (str, float, int, Mapping)):
+            yield result
+        elif isinstance(result, (BaseGeometry, BaseMultipartGeometry)):
+            yield mapping(result)
+        else:
+            try:
+                for item in result:
+                    if isinstance(item, (BaseGeometry, BaseMultipartGeometry)):
+                        item = mapping(item)
+                    yield item
+            except TypeError:
+                yield result
+
+
+def reduce_features(expression: str, features: Iterable[Mapping]) -> Generator:
+    """Reduce a collection of features to a single value.
+
+    The pipeline is a string that, when evaluated by snuggs, produces
+    a new value. The name of the input feature collection in the
+    context of the pipeline is "c".
+
+    Parameters
+    ----------
+    expression : str
+        Geometry operation pipeline such as "(unary_union c)".
+    features : iterable
+        A sequence of Fiona feature objects.
+
+    Yields
+    ------
+    object
+
+    Raises
+    ------
+    ReduceError
+
+    """
+    if not (expression.startswith("(") and expression.endswith(")")):
+        expression = f"({expression})"
+
+    collection = (shape(feat["geometry"]) for feat in features)
+    result = snuggs.eval(expression, c=collection)
+
+    if isinstance(result, (str, float, int, Mapping)):
+        yield result
+    elif isinstance(result, (BaseGeometry, BaseMultipartGeometry)):
+        yield mapping(result)
+    else:
+        raise ReduceError("Expression failed to reduce to a single value.")
diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/__init__.py b/.venv/lib/python3.12/site-packages/fiona/fio/__init__.py
new file mode 100644
index 00000000..c23008d7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/fio/__init__.py
@@ -0,0 +1,19 @@
+"""Fiona's command line interface"""
+
+from functools import wraps
+
+
+def with_context_env(f):
+    """Pops the Fiona Env from the passed context and executes the
+    wrapped func in the context of that obj.
+
+    Click's pass_context decorator must precede this decorator, or else
+    there will be no context in the wrapper args.
+    """
+    @wraps(f)
+    def wrapper(*args, **kwds):
+        ctx = args[0]
+        env = ctx.obj.pop('env')
+        with env:
+            return f(*args, **kwds)
+    return wrapper
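For context, this is roughly how a subcommand uses that decorator; the command below is a hypothetical sketch, assuming the caller (normally fio's main group) has stored an `Env` under `ctx.obj['env']`:

    import click
    import fiona
    from fiona.fio import with_context_env

    @click.command()
    @click.pass_context
    @with_context_env  # pops ctx.obj["env"] and runs the command inside it
    def mycmd(ctx):
        # GDAL is configured here because the Env context is active.
        click.echo(len(fiona.supported_drivers))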
diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/bounds.py b/.venv/lib/python3.12/site-packages/fiona/fio/bounds.py
new file mode
100644 index 00000000..571346e2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/fio/bounds.py @@ -0,0 +1,89 @@ +"""$ fio bounds""" + +import json + +import click +from cligj import precision_opt, use_rs_opt + +import fiona +from fiona.fio.helpers import obj_gen +from fiona.fio import with_context_env +from fiona.model import ObjectEncoder + + +@click.command(short_help="Print the extent of GeoJSON objects") +@precision_opt +@click.option('--explode/--no-explode', default=False, + help="Explode collections into features (default: no).") +@click.option('--with-id/--without-id', default=False, + help="Print GeoJSON ids and bounding boxes together " + "(default: without).") +@click.option('--with-obj/--without-obj', default=False, + help="Print GeoJSON objects and bounding boxes together " + "(default: without).") +@use_rs_opt +@click.pass_context +@with_context_env +def bounds(ctx, precision, explode, with_id, with_obj, use_rs): + """Print the bounding boxes of GeoJSON objects read from stdin. + + Optionally explode collections and print the bounds of their + features. + + To print identifiers for input objects along with their bounds + as a {id: identifier, bbox: bounds} JSON object, use --with-id. + + To print the input objects themselves along with their bounds + as GeoJSON object, use --with-obj. This has the effect of updating + input objects with {id: identifier, bbox: bounds}. + + """ + stdin = click.get_text_stream('stdin') + source = obj_gen(stdin) + + for i, obj in enumerate(source): + obj_id = obj.get("id", "collection:" + str(i)) + xs = [] + ys = [] + features = obj.get("features") or [obj] + + for j, feat in enumerate(features): + feat_id = feat.get("id", "feature:" + str(i)) + w, s, e, n = fiona.bounds(feat) + + if precision > 0: + w, s, e, n = (round(v, precision) for v in (w, s, e, n)) + if explode: + + if with_id: + rec = {"parent": obj_id, "id": feat_id, "bbox": (w, s, e, n)} + elif with_obj: + feat.update(parent=obj_id, bbox=(w, s, e, n)) + rec = feat + else: + rec = (w, s, e, n) + + if use_rs: + click.echo('\x1e', nl=False) + + click.echo(json.dumps(rec, cls=ObjectEncoder)) + + else: + xs.extend([w, e]) + ys.extend([s, n]) + + if not explode: + w, s, e, n = (min(xs), min(ys), max(xs), max(ys)) + + if with_id: + rec = {"id": obj_id, "bbox": (w, s, e, n)} + elif with_obj: + obj.update(id=obj_id, bbox=(w, s, e, n)) + rec = obj + else: + rec = (w, s, e, n) + + if use_rs: + click.echo("\x1e", nl=False) + + click.echo(json.dumps(rec, cls=ObjectEncoder)) diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/calc.py b/.venv/lib/python3.12/site-packages/fiona/fio/calc.py new file mode 100644 index 00000000..643c3a93 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/fio/calc.py @@ -0,0 +1,63 @@ +import json + +import click +from cligj import use_rs_opt + +from .helpers import obj_gen, eval_feature_expression +from fiona.fio import with_context_env +from fiona.model import ObjectEncoder + + +@click.command(short_help="Calculate GeoJSON property by Python expression") +@click.argument('property_name') +@click.argument('expression') +@click.option('--overwrite', is_flag=True, default=False, + help="Overwrite properties, default: False") +@use_rs_opt +@click.pass_context +@with_context_env +def calc(ctx, property_name, expression, overwrite, use_rs): + """ + Create a new property on GeoJSON features using the specified expression. 
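The core calculation in fio-bounds is `fiona.bounds()`, which also accepts bare GeoJSON-like mappings; a quick sketch with a toy feature:

    import fiona

    feat = {
        "type": "Feature",
        "properties": {},
        "geometry": {"type": "LineString", "coordinates": [[0.0, 1.0], [2.0, 3.0]]},
    }
    print(fiona.bounds(feat))  # (0.0, 1.0, 2.0, 3.0)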
+
+    \b
+    The expression is evaluated in a restricted namespace containing:
+        - sum, pow, min, max and the imported math module
+        - shape (optional, imported from shapely.geometry if available)
+        - bool, int, str, len, float type conversions
+        - f (the feature to be evaluated,
+          allows item access via JavaScript-style dot notation using munch)
+
+    The expression will be evaluated for each feature and its
+    return value will be added to the properties
+    as the specified property_name. Existing properties will not
+    be overwritten by default (an Exception is raised).
+
+    Example
+
+    \b
+    $ fio cat data.shp | fio calc sumAB "f.properties.A + f.properties.B"
+
+    """
+    stdin = click.get_text_stream('stdin')
+    source = obj_gen(stdin)
+
+    for i, obj in enumerate(source):
+        features = obj.get("features") or [obj]
+
+        for j, feat in enumerate(features):
+
+            if not overwrite and property_name in feat["properties"]:
+                raise click.UsageError(
+                    f"{property_name} already exists in properties; "
+                    "rename or use --overwrite"
+                )
+
+            feat["properties"][property_name] = eval_feature_expression(
+                feat, expression
+            )
+
+            if use_rs:
+                click.echo("\x1e", nl=False)
+
+            click.echo(json.dumps(feat, cls=ObjectEncoder))
diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/cat.py b/.venv/lib/python3.12/site-packages/fiona/fio/cat.py
new file mode 100644
index 00000000..ace1548c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/fio/cat.py
@@ -0,0 +1,139 @@
+"""fio-cat"""
+
+import json
+import warnings
+
+import click
+import cligj
+
+import fiona
+from fiona.transform import transform_geom
+from fiona.model import Feature, ObjectEncoder
+from fiona.fio import options, with_context_env
+from fiona.fio.helpers import recursive_round
+from fiona.errors import AttributeFilterError
+
+warnings.simplefilter("default")
+
+
+# Cat command
+@click.command(short_help="Concatenate and print the features of datasets")
+@click.argument("files", nargs=-1, required=True, metavar="INPUTS...")
+@click.option(
+    "--layer",
+    default=None,
+    multiple=True,
+    callback=options.cb_multilayer,
+    help="Input layer(s), specified as 'fileindex:layer'. "
+    "For example, '1:foo,2:bar' will concatenate layer foo "
+    "from file 1 and layer bar from file 2",
+)
+@cligj.precision_opt
+@cligj.indent_opt
+@cligj.compact_opt
+@click.option(
+    "--ignore-errors/--no-ignore-errors",
+    default=False,
+    help="log errors but do not stop serialization.",
+)
+@options.dst_crs_opt
+@cligj.use_rs_opt
+@click.option(
+    "--bbox",
+    default=None,
+    metavar="w,s,e,n",
+    help="filter for features intersecting a bounding box",
+)
+@click.option(
+    "--where",
+    default=None,
+    help="attribute filter using SQL where clause",
+)
+@click.option(
+    "--cut-at-antimeridian",
+    is_flag=True,
+    default=False,
+    help="Optionally cut geometries at the anti-meridian. To be used only for a geographic destination CRS.",
+)
+@options.open_opt
+@click.pass_context
+@with_context_env
+def cat(
+    ctx,
+    files,
+    precision,
+    indent,
+    compact,
+    ignore_errors,
+    dst_crs,
+    use_rs,
+    bbox,
+    where,
+    cut_at_antimeridian,
+    layer,
+    open_options,
+):
+    """
+    Concatenate and print the features of input datasets as a sequence of
+    GeoJSON features.
+
+    When working with a multi-layer dataset the first layer is used by default.
+    Use the '--layer' option to select a different layer.
+
+    """
+    dump_kwds = {"sort_keys": True}
+    if indent:
+        dump_kwds["indent"] = indent
+    if compact:
+        dump_kwds["separators"] = (",", ":")
+
+    # Validate file indexes provided in --layer option
+    # (can't pass the files to option callback)
+    if layer:
+        options.validate_multilayer_file_index(files, layer)
+
+    # first layer is the default
+    for i in range(1, len(files) + 1):
+        if str(i) not in layer.keys():
+            layer[str(i)] = [0]
+
+    try:
+        if bbox:
+            try:
+                bbox = tuple(map(float, bbox.split(",")))
+            except ValueError:
+                bbox = json.loads(bbox)
+
+        for i, path in enumerate(files, 1):
+            for lyr in layer[str(i)]:
+                with fiona.open(path, layer=lyr, **open_options) as src:
+                    for i, feat in src.items(bbox=bbox, where=where):
+                        geom = feat.geometry
+
+                        if dst_crs:
+                            geom = transform_geom(
+                                src.crs,
+                                dst_crs,
+                                geom,
+                                antimeridian_cutting=cut_at_antimeridian,
+                            )
+
+                        if precision >= 0:
+                            geom = recursive_round(geom, precision)
+
+                        feat = Feature(
+                            id=feat.id,
+                            properties=feat.properties,
+                            geometry=geom,
+                            bbox=fiona.bounds(geom),
+                        )
+
+                        if use_rs:
+                            click.echo("\x1e", nl=False)
+
+                        click.echo(json.dumps(feat, cls=ObjectEncoder, **dump_kwds))
+
+    except AttributeFilterError as e:
+        raise click.BadParameter("'where' clause is invalid: " + str(e))
diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/collect.py b/.venv/lib/python3.12/site-packages/fiona/fio/collect.py
new file mode 100644
index 00000000..5d61211d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/fio/collect.py
@@ -0,0 +1,245 @@
+"""fio-collect"""
+
+from functools import partial
+import json
+import logging
+
+import click
+import cligj
+
+from fiona.fio import helpers, options, with_context_env
+from fiona.model import Geometry, ObjectEncoder
+from fiona.transform import transform_geom
+
+
+@click.command(short_help="Collect a sequence of features.")
+@cligj.precision_opt
+@cligj.indent_opt
+@cligj.compact_opt
+@click.option(
+    "--record-buffered/--no-record-buffered",
+    default=False,
+    help="Economical buffering of writes at record, not collection "
+    "(default), level.",
+)
+@click.option(
+    "--ignore-errors/--no-ignore-errors",
+    default=False,
+    help="log errors but do not stop serialization.",
+)
+@options.src_crs_opt
+@click.option(
+    "--with-ld-context/--without-ld-context",
+    default=False,
+    help="add a JSON-LD context to JSON output.",
+)
+@click.option(
+    "--add-ld-context-item",
+    multiple=True,
+    help="map a term to a URI and add it to the output's JSON LD " "context.",
+)
+@click.option(
+    "--parse/--no-parse",
+    default=True,
+    help="load and dump the geojson feature (default is True)",
+)
+@click.pass_context
+@with_context_env
+def collect(
+    ctx,
+    precision,
+    indent,
+    compact,
+    record_buffered,
+    ignore_errors,
+    src_crs,
+    with_ld_context,
+    add_ld_context_item,
+    parse,
+):
+    """Make a GeoJSON feature collection from a sequence of GeoJSON
+    features and print it."""
+    logger = logging.getLogger(__name__)
+    stdin = click.get_text_stream("stdin")
+    sink = click.get_text_stream("stdout")
+
+    dump_kwds = {"sort_keys": True}
+    if indent:
+        dump_kwds["indent"] = indent
+    if compact:
+        dump_kwds["separators"] = (",", ":")
+    item_sep = compact and "," or ", "
+
+    if src_crs:
+        if not parse:
+            raise click.UsageError("Can't specify --src-crs with --no-parse")
+        transformer = partial(
+            transform_geom,
+            src_crs,
+            "EPSG:4326",
+            antimeridian_cutting=True,
+            precision=precision,
+        )
+    else:
+
+        def transformer(x):
+            return x
+
+    first_line = next(stdin)
+
+    # If parsing geojson
+    if parse:
+        # If
input is RS-delimited JSON sequence. + if first_line.startswith("\x1e"): + + def feature_text_gen(): + buffer = first_line.strip("\x1e") + for line in stdin: + if line.startswith("\x1e"): + if buffer: + feat = json.loads(buffer) + feat["geometry"] = transformer( + Geometry.from_dict(**feat["geometry"]) + ) + yield json.dumps(feat, cls=ObjectEncoder, **dump_kwds) + buffer = line.strip("\x1e") + else: + buffer += line + else: + feat = json.loads(buffer) + feat["geometry"] = transformer( + Geometry.from_dict(**feat["geometry"]) + ) + yield json.dumps(feat, cls=ObjectEncoder, **dump_kwds) + + else: + + def feature_text_gen(): + feat = json.loads(first_line) + feat["geometry"] = transformer(Geometry.from_dict(**feat["geometry"])) + yield json.dumps(feat, cls=ObjectEncoder, **dump_kwds) + + for line in stdin: + feat = json.loads(line) + feat["geometry"] = transformer( + Geometry.from_dict(**feat["geometry"]) + ) + yield json.dumps(feat, cls=ObjectEncoder, **dump_kwds) + + # If *not* parsing geojson + else: + # If input is RS-delimited JSON sequence. + if first_line.startswith("\x1e"): + + def feature_text_gen(): + buffer = first_line.strip("\x1e") + for line in stdin: + if line.startswith("\x1e"): + if buffer: + yield buffer + buffer = line.strip("\x1e") + else: + buffer += line + else: + yield buffer + + else: + + def feature_text_gen(): + yield first_line + yield from stdin + + source = feature_text_gen() + + if record_buffered: + # Buffer GeoJSON data at the feature level for smaller + # memory footprint. + indented = bool(indent) + rec_indent = "\n" + " " * (2 * (indent or 0)) + + collection = {"type": "FeatureCollection", "features": []} + if with_ld_context: + collection["@context"] = helpers.make_ld_context(add_ld_context_item) + + head, tail = json.dumps(collection, cls=ObjectEncoder, **dump_kwds).split("[]") + + sink.write(head) + sink.write("[") + + # Try the first record. + try: + i, first = 0, next(source) + if with_ld_context: + first = helpers.id_record(first) + if indented: + sink.write(rec_indent) + sink.write(first.replace("\n", rec_indent)) + except StopIteration: + pass + except Exception as exc: + # Ignoring errors is *not* the default. + if ignore_errors: + logger.error( + "failed to serialize file record %d (%s), " "continuing", i, exc + ) + else: + # Log error and close up the GeoJSON, leaving it + # more or less valid no matter what happens above. + logger.critical( + "failed to serialize file record %d (%s), " "quitting", i, exc + ) + sink.write("]") + sink.write(tail) + if indented: + sink.write("\n") + raise + + # Because trailing commas aren't valid in JSON arrays + # we'll write the item separator before each of the + # remaining features. + for i, rec in enumerate(source, 1): + try: + if with_ld_context: + rec = helpers.id_record(rec) + if indented: + sink.write(rec_indent) + sink.write(item_sep) + sink.write(rec.replace("\n", rec_indent)) + except Exception as exc: + if ignore_errors: + logger.error( + "failed to serialize file record %d (%s), " "continuing", + i, + exc, + ) + else: + logger.critical( + "failed to serialize file record %d (%s), " "quitting", + i, + exc, + ) + sink.write("]") + sink.write(tail) + if indented: + sink.write("\n") + raise + + # Close up the GeoJSON after writing all features. + sink.write("]") + sink.write(tail) + if indented: + sink.write("\n") + + else: + # Buffer GeoJSON data at the collection level. The default. 
+ collection = {"type": "FeatureCollection", "features": []} + if with_ld_context: + collection["@context"] = helpers.make_ld_context(add_ld_context_item) + + head, tail = json.dumps(collection, cls=ObjectEncoder, **dump_kwds).split("[]") + sink.write(head) + sink.write("[") + sink.write(",".join(source)) + sink.write("]") + sink.write(tail) + sink.write("\n") diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/distrib.py b/.venv/lib/python3.12/site-packages/fiona/fio/distrib.py new file mode 100644 index 00000000..46267b5c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/fio/distrib.py @@ -0,0 +1,35 @@ +"""$ fio distrib""" + +import json + +import click +import cligj + +from fiona.fio import helpers, with_context_env +from fiona.model import ObjectEncoder + + +@click.command() +@cligj.use_rs_opt +@click.pass_context +@with_context_env +def distrib(ctx, use_rs): + """Distribute features from a collection. + + Print the features of GeoJSON objects read from stdin. + + """ + stdin = click.get_text_stream('stdin') + source = helpers.obj_gen(stdin) + + for i, obj in enumerate(source): + obj_id = obj.get("id", "collection:" + str(i)) + features = obj.get("features") or [obj] + for j, feat in enumerate(features): + if obj.get("type") == "FeatureCollection": + feat["parent"] = obj_id + feat_id = feat.get("id", "feature:" + str(i)) + feat["id"] = feat_id + if use_rs: + click.echo("\x1e", nl=False) + click.echo(json.dumps(feat, cls=ObjectEncoder)) diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/dump.py b/.venv/lib/python3.12/site-packages/fiona/fio/dump.py new file mode 100644 index 00000000..6b38a4e0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/fio/dump.py @@ -0,0 +1,198 @@ +"""fio-dump""" + +from functools import partial +import json +import logging + +import click +import cligj + +import fiona +from fiona.fio import helpers, options, with_context_env +from fiona.model import Feature, ObjectEncoder +from fiona.transform import transform_geom + + +@click.command(short_help="Dump a dataset to GeoJSON.") +@click.argument('input', required=True) +@click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer, + help="Print information about a specific layer. The first " + "layer is used by default. 
Layers use zero-based "
+                   "numbering when accessed by index.")
+@click.option('--encoding', help="Specify encoding of the input file.")
+@cligj.precision_opt
+@cligj.indent_opt
+@cligj.compact_opt
+@click.option('--record-buffered/--no-record-buffered', default=False,
+              help="Economical buffering of writes at record, not collection "
+                   "(default), level.")
+@click.option('--ignore-errors/--no-ignore-errors', default=False,
+              help="log errors but do not stop serialization.")
+@click.option('--with-ld-context/--without-ld-context', default=False,
+              help="add a JSON-LD context to JSON output.")
+@click.option('--add-ld-context-item', multiple=True,
+              help="map a term to a URI and add it to the output's JSON LD "
+                   "context.")
+@options.open_opt
+@click.pass_context
+@with_context_env
+def dump(
+    ctx,
+    input,
+    encoding,
+    precision,
+    indent,
+    compact,
+    record_buffered,
+    ignore_errors,
+    with_ld_context,
+    add_ld_context_item,
+    layer,
+    open_options,
+):
+
+    """Dump a dataset either as a GeoJSON feature collection (the default)
+    or a sequence of GeoJSON features."""
+
+    logger = logging.getLogger(__name__)
+    sink = click.get_text_stream('stdout')
+
+    dump_kwds = {'sort_keys': True}
+    if indent:
+        dump_kwds['indent'] = indent
+    if compact:
+        dump_kwds['separators'] = (',', ':')
+    item_sep = compact and ',' or ', '
+
+    if encoding:
+        open_options["encoding"] = encoding
+    if layer:
+        open_options["layer"] = layer
+
+    def transformer(crs, feat):
+        tg = partial(
+            transform_geom,
+            crs,
+            "EPSG:4326",
+            antimeridian_cutting=True,
+            precision=precision,
+        )
+        return Feature(
+            id=feat.id, properties=feat.properties, geometry=tg(feat.geometry)
+        )
+
+    with fiona.open(input, **open_options) as source:
+        meta = source.meta
+        meta["fields"] = dict(source.schema["properties"].items())
+
+        if record_buffered:
+            # Buffer GeoJSON data at the feature level for smaller
+            # memory footprint.
+            indented = bool(indent)
+            rec_indent = "\n" + " " * (2 * (indent or 0))
+
+            collection = {
+                "type": "FeatureCollection",
+                "fiona:schema": meta["schema"],
+                "fiona:crs": meta["crs"].to_string(),
+                "features": [],
+            }
+            if with_ld_context:
+                collection["@context"] = helpers.make_ld_context(add_ld_context_item)
+
+            head, tail = json.dumps(collection, **dump_kwds).split("[]")
+
+            sink.write(head)
+            sink.write("[")
+
+            itr = iter(source)
+
+            # Try the first record.
+            try:
+                i, first = 0, next(itr)
+                first = transformer(source.crs, first)
+                if with_ld_context:
+                    first = helpers.id_record(first)
+                if indented:
+                    sink.write(rec_indent)
+                sink.write(
+                    json.dumps(first, cls=ObjectEncoder, **dump_kwds).replace(
+                        "\n", rec_indent
+                    )
+                )
+            except StopIteration:
+                pass
+            except Exception as exc:
+                # Ignoring errors is *not* the default.
+                if ignore_errors:
+                    logger.error(
+                        "failed to serialize file record %d (%s), " "continuing", i, exc
+                    )
+                else:
+                    # Log error and close up the GeoJSON, leaving it
+                    # more or less valid no matter what happens above.
+                    logger.critical(
+                        "failed to serialize file record %d (%s), " "quitting", i, exc
+                    )
+                    sink.write("]")
+                    sink.write(tail)
+                    if indented:
+                        sink.write("\n")
+                    raise
+
+            # Because trailing commas aren't valid in JSON arrays
+            # we'll write the item separator before each of the
+            # remaining features.
+            for i, rec in enumerate(itr, 1):
+                rec = transformer(source.crs, rec)
+                try:
+                    if with_ld_context:
+                        rec = helpers.id_record(rec)
+                    if indented:
+                        sink.write(rec_indent)
+                    sink.write(item_sep)
+                    sink.write(
+                        json.dumps(rec, cls=ObjectEncoder, **dump_kwds).replace(
+                            "\n", rec_indent
+                        )
+                    )
+                except Exception as exc:
+                    if ignore_errors:
+                        logger.error(
+                            "failed to serialize file record %d (%s), "
+                            "continuing",
+                            i, exc)
+                    else:
+                        logger.critical(
+                            "failed to serialize file record %d (%s), "
+                            "quitting",
+                            i, exc)
+                        sink.write("]")
+                        sink.write(tail)
+                        if indented:
+                            sink.write("\n")
+                        raise
+
+            # Close up the GeoJSON after writing all features.
+            sink.write("]")
+            sink.write(tail)
+            if indented:
+                sink.write("\n")
+
+        else:
+            # Buffer GeoJSON data at the collection level. The default.
+            collection = {
+                "type": "FeatureCollection",
+                "fiona:schema": meta["schema"],
+                "fiona:crs": meta["crs"].to_string(),
+            }
+            if with_ld_context:
+                collection["@context"] = helpers.make_ld_context(add_ld_context_item)
+                collection["features"] = [
+                    helpers.id_record(transformer(source.crs, rec)) for rec in source
+                ]
+            else:
+                collection["features"] = [
+                    transformer(source.crs, rec) for rec in source
+                ]
+            json.dump(collection, sink, cls=ObjectEncoder, **dump_kwds)
diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/env.py b/.venv/lib/python3.12/site-packages/fiona/fio/env.py
new file mode 100644
index 00000000..95cc1b0d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/fio/env.py
@@ -0,0 +1,38 @@
+"""$ fio env"""
+
+import json
+import os
+
+import click
+
+import fiona
+from fiona._env import GDALDataFinder, PROJDataFinder
+
+
+@click.command(short_help="Print information about the fio environment.")
+@click.option('--formats', 'key', flag_value='formats', default=True,
+              help="Enumerate the available formats.")
+@click.option('--credentials', 'key', flag_value='credentials', default=False,
+              help="Print credentials.")
+@click.option('--gdal-data', 'key', flag_value='gdal_data', default=False,
+              help="Print GDAL data path.")
+@click.option('--proj-data', 'key', flag_value='proj_data', default=False,
+              help="Print PROJ data path.")
+@click.pass_context
+def env(ctx, key):
+    """Print information about the Fiona environment: available
+    formats, etc.
+ """ + stdout = click.get_text_stream('stdout') + with ctx.obj['env'] as env: + if key == 'formats': + for k, v in sorted(fiona.supported_drivers.items()): + modes = ', '.join("'" + m + "'" for m in v) + stdout.write(f"{k} (modes {modes})\n") + stdout.write('\n') + elif key == 'credentials': + click.echo(json.dumps(env.session.credentials)) + elif key == 'gdal_data': + click.echo(os.environ.get('GDAL_DATA') or GDALDataFinder().search()) + elif key == 'proj_data': + click.echo(os.environ.get('PROJ_DATA', os.environ.get('PROJ_LIB')) or PROJDataFinder().search()) diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/features.py b/.venv/lib/python3.12/site-packages/fiona/fio/features.py new file mode 100644 index 00000000..201faf82 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/fio/features.py @@ -0,0 +1,267 @@ +"""Fiona CLI commands.""" + +from collections import defaultdict +from copy import copy +import itertools +import json +import logging +import warnings + +import click +from cligj import use_rs_opt # type: ignore + +from fiona.features import map_feature, reduce_features +from fiona.fio import with_context_env +from fiona.fio.helpers import obj_gen, eval_feature_expression # type: ignore + +log = logging.getLogger(__name__) + + +@click.command( + "map", + short_help="Map a pipeline expression over GeoJSON features.", +) +@click.argument("pipeline") +@click.option( + "--raw", + "-r", + is_flag=True, + default=False, + help="Print raw result, do not wrap in a GeoJSON Feature.", +) +@click.option( + "--no-input", + "-n", + is_flag=True, + default=False, + help="Do not read input from stream.", +) +@click.option( + "--dump-parts", + is_flag=True, + default=False, + help="Dump parts of geometries to create new inputs before evaluating pipeline.", +) +@use_rs_opt +def map_cmd(pipeline, raw, no_input, dump_parts, use_rs): + """Map a pipeline expression over GeoJSON features. + + Given a sequence of GeoJSON features (RS-delimited or not) on stdin + this prints copies with geometries that are transformed using a + provided transformation pipeline. In "raw" output mode, this + command prints pipeline results without wrapping them in a feature + object. + + The pipeline is a string that, when evaluated by fio-map, produces + a new geometry object. The pipeline consists of expressions in the + form of parenthesized lists that may contain other expressions. + The first item in a list is the name of a function or method, or an + expression that evaluates to a function. The second item is the + function's first argument or the object to which the method is + bound. The remaining list items are the positional and keyword + arguments for the named function or method. The names of the input + feature and its geometry in the context of these expressions are + "f" and "g". + + For example, this pipeline expression + + '(simplify (buffer g 100.0) 5.0)' + + buffers input geometries and then simplifies them so that no + vertices are closer than 5 units. Keyword arguments for the shapely + methods are supported. A keyword argument is preceded by ':' and + followed immediately by its value. For example: + + '(simplify g 5.0 :preserve_topology true)' + + and + + '(buffer g 100.0 :resolution 8 :join_style 1)' + + Numerical and string arguments may be replaced by expressions. The + buffer distance could be a function of a geometry's area. 
+ + '(buffer g (/ (area g) 100.0))' + + """ + if no_input: + features = [None] + else: + stdin = click.get_text_stream("stdin") + features = obj_gen(stdin) + + for feat in features: + for i, value in enumerate(map_feature(pipeline, feat, dump_parts=dump_parts)): + if use_rs: + click.echo("\x1e", nl=False) + if raw: + click.echo(json.dumps(value)) + else: + new_feat = copy(feat) + new_feat["id"] = f"{feat.get('id', '0')}:{i}" + new_feat["geometry"] = value + click.echo(json.dumps(new_feat)) + + +@click.command( + "filter", + short_help="Evaluate pipeline expressions to filter GeoJSON features.", +) +@click.argument("pipeline") +@use_rs_opt +@click.option( + "--snuggs-only", + "-s", + is_flag=True, + default=False, + help="Strictly require snuggs style expressions and skip check for type of expression.", +) +@click.pass_context +@with_context_env +def filter_cmd(ctx, pipeline, use_rs, snuggs_only): + """Evaluate pipeline expressions to filter GeoJSON features. + + The pipeline is a string that, when evaluated, gives a new value for + each input feature. If the value evaluates to True, the feature + passes through the filter. Otherwise the feature does not pass. + + The pipeline consists of expressions in the form of parenthesized + lists that may contain other expressions. The first item in a list + is the name of a function or method, or an expression that evaluates + to a function. The second item is the function's first argument or + the object to which the method is bound. The remaining list items + are the positional and keyword arguments for the named function or + method. The names of the input feature and its geometry in the + context of these expressions are "f" and "g". + + For example, this pipeline expression + + '(< (distance g (Point 4 43)) 1)' + + lets through all features that are less than one unit from the given + point and filters out all other features. + + *New in version 1.10*: these parenthesized list expressions. + + The older style Python expressions like + + 'f.properties.area > 1000.0' + + are deprecated and will not be supported in version 2.0. + + """ + stdin = click.get_text_stream("stdin") + features = obj_gen(stdin) + + if not snuggs_only: + try: + from pyparsing.exceptions import ParseException + from fiona._vendor.snuggs import ExpressionError, expr + + if not pipeline.startswith("("): + test_string = f"({pipeline})" + expr.parseString(test_string) + except ExpressionError: + # It's a snuggs expression. + log.info("Detected a snuggs expression.") + pass + except ParseException: + # It's likely an old-style Python expression. + log.info("Detected a legacy Python expression.") + warnings.warn( + "This style of filter expression is deprecated. 
" + "Version 2.0 will only support the new parenthesized list expressions.", + FutureWarning, + ) + for i, obj in enumerate(features): + feats = obj.get("features") or [obj] + for j, feat in enumerate(feats): + if not eval_feature_expression(feat, pipeline): + continue + if use_rs: + click.echo("\x1e", nl=False) + click.echo(json.dumps(feat)) + return + + for feat in features: + for value in map_feature(pipeline, feat): + if value: + if use_rs: + click.echo("\x1e", nl=False) + click.echo(json.dumps(feat)) + + +@click.command("reduce", short_help="Reduce a stream of GeoJSON features to one value.") +@click.argument("pipeline") +@click.option( + "--raw", + "-r", + is_flag=True, + default=False, + help="Print raw result, do not wrap in a GeoJSON Feature.", +) +@use_rs_opt +@click.option( + "--zip-properties", + is_flag=True, + default=False, + help="Zip the items of input feature properties together for output.", +) +def reduce_cmd(pipeline, raw, use_rs, zip_properties): + """Reduce a stream of GeoJSON features to one value. + + Given a sequence of GeoJSON features (RS-delimited or not) on stdin + this prints a single value using a provided transformation pipeline. + + The pipeline is a string that, when evaluated, produces + a new geometry object. The pipeline consists of expressions in the + form of parenthesized lists that may contain other expressions. + The first item in a list is the name of a function or method, or an + expression that evaluates to a function. The second item is the + function's first argument or the object to which the method is + bound. The remaining list items are the positional and keyword + arguments for the named function or method. The set of geometries + of the input features in the context of these expressions is named + "c". + + For example, the pipeline expression + + '(unary_union c)' + + dissolves the geometries of input features. + + To keep the properties of input features while reducing them to a + single feature, use the --zip-properties flag. The properties of the + input features will surface in the output feature as lists + containing the input values. + + """ + stdin = click.get_text_stream("stdin") + features = (feat for feat in obj_gen(stdin)) + + if zip_properties: + prop_features, geom_features = itertools.tee(features) + properties = defaultdict(list) + for feat in prop_features: + for key, val in feat["properties"].items(): + properties[key].append(val) + else: + geom_features = features + properties = {} + + for result in reduce_features(pipeline, geom_features): + if use_rs: + click.echo("\x1e", nl=False) + if raw: + click.echo(json.dumps(result)) + else: + click.echo( + json.dumps( + { + "type": "Feature", + "properties": properties, + "geometry": result, + "id": "0", + } + ) + ) diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/helpers.py b/.venv/lib/python3.12/site-packages/fiona/fio/helpers.py new file mode 100644 index 00000000..98e781cb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/fio/helpers.py @@ -0,0 +1,134 @@ +"""Helper objects needed by multiple CLI commands. 
+ +""" + +from functools import partial +import json +import math +import warnings + +from fiona.model import Geometry, to_dict +from fiona._vendor.munch import munchify + + +warnings.simplefilter("default") + + +def obj_gen(lines, object_hook=None): + """Return a generator of JSON objects loaded from ``lines``.""" + first_line = next(lines) + if first_line.startswith("\x1e"): + + def gen(): + buffer = first_line.strip("\x1e") + for line in lines: + if line.startswith("\x1e"): + if buffer: + yield json.loads(buffer, object_hook=object_hook) + buffer = line.strip("\x1e") + else: + buffer += line + else: + yield json.loads(buffer, object_hook=object_hook) + + else: + + def gen(): + yield json.loads(first_line, object_hook=object_hook) + for line in lines: + yield json.loads(line, object_hook=object_hook) + + return gen() + + +def nullable(val, cast): + if val is None: + return None + else: + return cast(val) + + +def eval_feature_expression(feature, expression): + safe_dict = {"f": munchify(to_dict(feature))} + safe_dict.update( + { + "sum": sum, + "pow": pow, + "min": min, + "max": max, + "math": math, + "bool": bool, + "int": partial(nullable, int), + "str": partial(nullable, str), + "float": partial(nullable, float), + "len": partial(nullable, len), + } + ) + try: + from shapely.geometry import shape + + safe_dict["shape"] = shape + except ImportError: + pass + return eval(expression, {"__builtins__": None}, safe_dict) + + +def make_ld_context(context_items): + """Returns a JSON-LD Context object. + + See https://json-ld.org/spec/latest/json-ld/.""" + ctx = { + "@context": { + "geojson": "http://ld.geojson.org/vocab#", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "bbox": {"@container": "@list", "@id": "geojson:bbox"}, + "coordinates": "geojson:coordinates", + "datetime": "http://www.w3.org/2006/time#inXSDDateTime", + "description": "http://purl.org/dc/terms/description", + "features": {"@container": "@set", "@id": "geojson:features"}, + "geometry": "geojson:geometry", + "id": "@id", + "properties": "geojson:properties", + "start": "http://www.w3.org/2006/time#hasBeginning", + "stop": "http://www.w3.org/2006/time#hasEnding", + "title": "http://purl.org/dc/terms/title", + "type": "@type", + "when": "geojson:when", + } + } + for item in context_items or []: + t, uri = item.split("=") + ctx[t.strip()] = uri.strip() + return ctx + + +def id_record(rec): + """Converts a record's id to a blank node id and returns the record.""" + rec["id"] = f"_:f{rec['id']}" + return rec + + +def recursive_round(obj, precision): + """Recursively round coordinates.""" + if precision < 0: + return obj + if getattr(obj, "geometries", None): + return Geometry( + geometries=[recursive_round(part, precision) for part in obj.geometries] + ) + elif getattr(obj, "coordinates", None): + return Geometry( + coordinates=[recursive_round(part, precision) for part in obj.coordinates] + ) + if isinstance(obj, (int, float)): + return round(obj, precision) + else: + return [recursive_round(part, precision) for part in obj] diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/info.py b/.venv/lib/python3.12/site-packages/fiona/fio/info.py new file mode 100644 index 00000000..907dbcac --- /dev/null +++ 
b/.venv/lib/python3.12/site-packages/fiona/fio/info.py @@ -0,0 +1,78 @@ +"""$ fio info""" + + +import logging +import json + +import click +from cligj import indent_opt + +import fiona +import fiona.crs +from fiona.errors import DriverError +from fiona.fio import options, with_context_env + +logger = logging.getLogger(__name__) + + +@click.command() +# One or more files. +@click.argument('input', required=True) +@click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer, + help="Print information about a specific layer. The first " + "layer is used by default. Layers use zero-based " + "numbering when accessed by index.") +@indent_opt +# Options to pick out a single metadata item and print it as +# a string. +@click.option('--count', 'meta_member', flag_value='count', + help="Print the count of features.") +@click.option('-f', '--format', '--driver', 'meta_member', flag_value='driver', + help="Print the format driver.") +@click.option('--crs', 'meta_member', flag_value='crs', + help="Print the CRS as a PROJ.4 string.") +@click.option('--bounds', 'meta_member', flag_value='bounds', + help="Print the boundary coordinates " + "(left, bottom, right, top).") +@click.option('--name', 'meta_member', flag_value='name', + help="Print the datasource's name.") +@options.open_opt +@click.pass_context +@with_context_env +def info(ctx, input, indent, meta_member, layer, open_options): + """ + Print information about a dataset. + + When working with a multi-layer dataset the first layer is used by default. + Use the '--layer' option to select a different layer. + + """ + with fiona.open(input, layer=layer, **open_options) as src: + info = src.meta + info.update(name=src.name) + + try: + info.update(bounds=src.bounds) + except DriverError: + info.update(bounds=None) + logger.debug( + "Setting 'bounds' to None - driver was not able to calculate bounds" + ) + + try: + info.update(count=len(src)) + except TypeError: + info.update(count=None) + logger.debug( + "Setting 'count' to None/null - layer does not support counting" + ) + + info["crs"] = src.crs.to_string() + + if meta_member: + if isinstance(info[meta_member], (list, tuple)): + click.echo(" ".join(map(str, info[meta_member]))) + else: + click.echo(info[meta_member]) + else: + click.echo(json.dumps(info, indent=indent)) diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/insp.py b/.venv/lib/python3.12/site-packages/fiona/fio/insp.py new file mode 100644 index 00000000..ea4fba19 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/fio/insp.py @@ -0,0 +1,43 @@ +"""$ fio insp""" + + +import code +import sys + +import click + +import fiona +from fiona.fio import options, with_context_env + + +@click.command(short_help="Open a dataset and start an interpreter.") +@click.argument("src_path", required=True) +@click.option( + "--ipython", "interpreter", flag_value="ipython", help="Use IPython as interpreter." +) +@options.open_opt +@click.pass_context +@with_context_env +def insp(ctx, src_path, interpreter, open_options): + """Open a collection within an interactive interpreter.""" + banner = ( + "Fiona %s Interactive Inspector (Python %s)\n" + 'Type "src.schema", "next(src)", or "help(src)" ' + "for more information." 
+ % (fiona.__version__, ".".join(map(str, sys.version_info[:3]))) + ) + + with fiona.open(src_path, **open_options) as src: + scope = locals() + if not interpreter: + code.interact(banner, local=scope) + elif interpreter == "ipython": + import IPython + + IPython.InteractiveShell.banner1 = banner + IPython.start_ipython(argv=[], user_ns=scope) + else: + raise click.ClickException( + f"Interpreter {interpreter} is unsupported or missing " + "dependencies" + ) diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/load.py b/.venv/lib/python3.12/site-packages/fiona/fio/load.py new file mode 100644 index 00000000..a4ba4eff --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/fio/load.py @@ -0,0 +1,114 @@ +"""$ fio load""" + +from functools import partial + +import click +import cligj + +import fiona +from fiona.fio import options, with_context_env +from fiona.model import Feature, Geometry +from fiona.transform import transform_geom + + +@click.command(short_help="Load GeoJSON to a dataset in another format.") +@click.argument("output", required=True) +@click.option("-f", "--format", "--driver", "driver", help="Output format driver name.") +@options.src_crs_opt +@click.option( + "--dst-crs", + "--dst_crs", + help="Destination CRS. Defaults to --src-crs when not given.", +) +@cligj.features_in_arg +@click.option( + "--layer", + metavar="INDEX|NAME", + callback=options.cb_layer, + help="Load features into specified layer. Layers use " + "zero-based numbering when accessed by index.", +) +@options.creation_opt +@options.open_opt +@click.option("--append", is_flag=True, help="Open destination layer in append mode.") +@click.pass_context +@with_context_env +def load( + ctx, + output, + driver, + src_crs, + dst_crs, + features, + layer, + creation_options, + open_options, + append, +): + """Load features from JSON to a file in another format. + + The input is a GeoJSON feature collection or optionally a sequence of + GeoJSON feature objects. + + """ + dst_crs = dst_crs or src_crs + + if src_crs and dst_crs and src_crs != dst_crs: + transformer = partial( + transform_geom, src_crs, dst_crs, antimeridian_cutting=True + ) + else: + + def transformer(x): + return Geometry.from_dict(**x) + + def feature_gen(): + """Convert stream of JSON to features. + + Yields + ------ + Feature + + """ + try: + for feat in features: + feat["geometry"] = transformer(Geometry.from_dict(**feat["geometry"])) + yield Feature.from_dict(**feat) + except TypeError: + raise click.ClickException("Invalid input.") + + source = feature_gen() + + # Use schema of first feature as a template. + # TODO: schema specified on command line? + try: + first = next(source) + except TypeError: + raise click.ClickException("Invalid input.") + + # TODO: this inference of a property's type from its value needs some + # work. It works reliably only for the basic JSON serializable types. + # The fio-load command does require JSON input but that may change + # someday. 
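+    # Illustration of the inference below (hypothetical values, not part
+    # of the fiona source): a first feature with properties
+    # {"name": "Main St", "lanes": 2, "width": 3.5, "ref": None}
+    # produces {"name": "str", "lanes": "int", "width": "float",
+    # "ref": "str"}, because a None value falls back to "" and
+    # type("").__name__ == "str".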
+ schema = {"geometry": first.geometry.type} + schema["properties"] = { + k: type(v if v is not None else "").__name__ + for k, v in first.properties.items() + } + + if append: + opener = fiona.open(output, "a", layer=layer, **open_options) + else: + opener = fiona.open( + output, + "w", + driver=driver, + crs=dst_crs, + schema=schema, + layer=layer, + **creation_options + ) + + with opener as dst: + dst.write(first) + dst.writerecords(source) diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/ls.py b/.venv/lib/python3.12/site-packages/fiona/fio/ls.py new file mode 100644 index 00000000..14af7b75 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/fio/ls.py @@ -0,0 +1,24 @@ +"""$ fiona ls""" + + +import json + +import click +from cligj import indent_opt + +import fiona +from fiona.fio import options, with_context_env + + +@click.command() +@click.argument('input', required=True) +@indent_opt +@options.open_opt +@click.pass_context +@with_context_env +def ls(ctx, input, indent, open_options): + """ + List layers in a datasource. + """ + result = fiona.listlayers(input, **open_options) + click.echo(json.dumps(result, indent=indent)) diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/main.py b/.venv/lib/python3.12/site-packages/fiona/fio/main.py new file mode 100644 index 00000000..1190a5cc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/fio/main.py @@ -0,0 +1,113 @@ +""" +Main click group for the CLI. Needs to be isolated for entry-point loading. +""" + + +import itertools +import logging +import sys + +import click +from click_plugins import with_plugins +from cligj import verbose_opt, quiet_opt + +if sys.version_info < (3, 10): + from importlib_metadata import entry_points +else: + from importlib.metadata import entry_points + +import fiona +from fiona import __version__ as fio_version +from fiona.session import AWSSession, DummySession +from fiona.fio.bounds import bounds +from fiona.fio.calc import calc +from fiona.fio.cat import cat +from fiona.fio.collect import collect +from fiona.fio.distrib import distrib +from fiona.fio.dump import dump +from fiona.fio.env import env +from fiona.fio.info import info +from fiona.fio.insp import insp +from fiona.fio.load import load +from fiona.fio.ls import ls +from fiona.fio.rm import rm + +# The "calc" extras require pyparsing and shapely. +try: + import pyparsing + import shapely + from fiona.fio.features import filter_cmd, map_cmd, reduce_cmd + + supports_calc = True +except ImportError: + supports_calc = False + + +def configure_logging(verbosity): + log_level = max(10, 30 - 10 * verbosity) + logging.basicConfig(stream=sys.stderr, level=log_level) + + +@with_plugins( + itertools.chain( + entry_points(group="fiona.fio_plugins"), + ) +) +@click.group() +@verbose_opt +@quiet_opt +@click.option( + "--aws-profile", + help="Select a profile from the AWS credentials file") +@click.option( + "--aws-no-sign-requests", + is_flag=True, + help="Make requests anonymously") +@click.option( + "--aws-requester-pays", + is_flag=True, + help="Requester pays data transfer costs") +@click.version_option(fio_version) +@click.version_option(fiona.__gdal_version__, '--gdal-version', + prog_name='GDAL') +@click.version_option(sys.version, '--python-version', prog_name='Python') +@click.pass_context +def main_group( + ctx, verbose, quiet, aws_profile, aws_no_sign_requests, + aws_requester_pays): + """Fiona command line interface. 
+ """ + verbosity = verbose - quiet + configure_logging(verbosity) + ctx.obj = {} + ctx.obj["verbosity"] = verbosity + ctx.obj["aws_profile"] = aws_profile + envopts = {"CPL_DEBUG": (verbosity > 2)} + if aws_profile or aws_no_sign_requests: + session = AWSSession( + profile_name=aws_profile, + aws_unsigned=aws_no_sign_requests, + requester_pays=aws_requester_pays, + ) + else: + session = DummySession() + ctx.obj["env"] = fiona.Env(session=session, **envopts) + + +main_group.add_command(bounds) +main_group.add_command(calc) +main_group.add_command(cat) +main_group.add_command(collect) +main_group.add_command(distrib) +main_group.add_command(dump) +main_group.add_command(env) +main_group.add_command(info) +main_group.add_command(insp) +main_group.add_command(load) +main_group.add_command(ls) +main_group.add_command(rm) + +if supports_calc: + main_group.add_command(map_cmd) + main_group.add_command(filter_cmd) + main_group.add_command(reduce_cmd) diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/options.py b/.venv/lib/python3.12/site-packages/fiona/fio/options.py new file mode 100644 index 00000000..9753b2f2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/fio/options.py @@ -0,0 +1,96 @@ +"""Common commandline options for `fio`""" + + +from collections import defaultdict + +import click + + +src_crs_opt = click.option('--src-crs', '--src_crs', help="Source CRS.") +dst_crs_opt = click.option('--dst-crs', '--dst_crs', help="Destination CRS.") + + +def cb_layer(ctx, param, value): + """Let --layer be a name or index.""" + if value is None or not value.isdigit(): + return value + else: + return int(value) + + +def cb_multilayer(ctx, param, value): + """ + Transform layer options from strings ("1:a,1:b", "2:a,2:c,2:z") to + { + '1': ['a', 'b'], + '2': ['a', 'c', 'z'] + } + """ + out = defaultdict(list) + for raw in value: + for v in raw.split(','): + ds, name = v.split(':') + out[ds].append(name) + return out + + +def cb_key_val(ctx, param, value): + """ + click callback to validate `--opt KEY1=VAL1 --opt KEY2=VAL2` and collect + in a dictionary like the one below, which is what the CLI function receives. + If no value or `None` is received then an empty dictionary is returned. + + { + 'KEY1': 'VAL1', + 'KEY2': 'VAL2' + } + + Note: `==VAL` breaks this as `str.split('=', 1)` is used. + + """ + if not value: + return {} + else: + out = {} + for pair in value: + if "=" not in pair: + raise click.BadParameter( + f"Invalid syntax for KEY=VAL arg: {pair}" + ) + else: + k, v = pair.split("=", 1) + k = k.lower() + v = v.lower() + out[k] = None if v.lower() in ["none", "null", "nil", "nada"] else v + return out + + +def validate_multilayer_file_index(files, layerdict): + """ + Ensure file indexes provided in the --layer option are valid + """ + for key in layerdict.keys(): + if key not in [str(k) for k in range(1, len(files) + 1)]: + layer = key + ":" + layerdict[key][0] + raise click.BadParameter(f"Layer {layer} does not exist") + + +creation_opt = click.option( + "--co", + "--profile", + "creation_options", + metavar="NAME=VALUE", + multiple=True, + callback=cb_key_val, + help="Driver specific creation options. See the documentation for the selected output driver for more information.", +) + + +open_opt = click.option( + "--oo", + "open_options", + metavar="NAME=VALUE", + multiple=True, + callback=cb_key_val, + help="Driver specific open options. 
See the documentation for the selected output driver for more information.", +) diff --git a/.venv/lib/python3.12/site-packages/fiona/fio/rm.py b/.venv/lib/python3.12/site-packages/fiona/fio/rm.py new file mode 100644 index 00000000..df1b815e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/fio/rm.py @@ -0,0 +1,30 @@ +import click +import logging + +import fiona +from fiona.fio import with_context_env + + +logger = logging.getLogger(__name__) + + +@click.command(help="Remove a datasource or an individual layer.") +@click.argument("input", required=True) +@click.option("--layer", type=str, default=None, required=False, help="Name of layer to remove.") +@click.option("--yes", is_flag=True) +@click.pass_context +@with_context_env +def rm(ctx, input, layer, yes): + if layer is None: + kind = "datasource" + else: + kind = "layer" + + if not yes: + click.confirm(f"The {kind} will be removed. Are you sure?", abort=True) + + try: + fiona.remove(input, layer=layer) + except Exception: + logger.exception(f"Failed to remove {kind}.") + raise click.Abort() diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal.pxi b/.venv/lib/python3.12/site-packages/fiona/gdal.pxi new file mode 100644 index 00000000..9403bb2b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal.pxi @@ -0,0 +1,805 @@ +# GDAL API definitions. + +from libc.stdio cimport FILE + +cdef extern from "gdal_version.h": + int GDAL_COMPUTE_VERSION(int maj, int min, int rev) + + +cdef extern from "cpl_conv.h": + void * CPLMalloc (size_t) + void CPLFree (void *ptr) + void CPLSetThreadLocalConfigOption (char *key, char *val) + const char *CPLGetConfigOption (char *, char *) + void CPLSetConfigOption(const char* key, const char* val) + int CPLCheckForFile(char *, char **) + const char *CPLFindFile(const char *pszClass, const char *pszBasename) + + +cdef extern from "cpl_port.h": + ctypedef char **CSLConstList + + +cdef extern from "cpl_string.h": + char ** CSLAddNameValue(char **list, const char *name, const char *value) + char ** CSLSetNameValue(char **list, const char *name, const char *value) + void CSLDestroy(char **list) + char ** CSLAddString(char **list, const char *string) + int CSLCount(CSLConstList papszStrList) + char **CSLDuplicate(CSLConstList papszStrList) + int CSLFindName(CSLConstList papszStrList, const char *pszName) + int CSLFindString(CSLConstList papszStrList, const char *pszString) + int CSLFetchBoolean(CSLConstList papszStrList, const char *pszName, int default) + const char *CSLFetchNameValue(CSLConstList papszStrList, const char *pszName) + char **CSLMerge(char **first, CSLConstList second) + + +cdef extern from "cpl_error.h" nogil: + ctypedef enum CPLErr: + CE_None + CE_Debug + CE_Warning + CE_Failure + CE_Fatal + + ctypedef int CPLErrorNum + ctypedef void (*CPLErrorHandler)(CPLErr, int, const char*) + + void CPLError(CPLErr eErrClass, CPLErrorNum err_no, const char *template, ...) 
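+    # Usage sketch (not part of the upstream header): callers typically
+    # bracket GDAL calls with a handler push/pop and then inspect the
+    # error state, e.g.
+    #
+    #     CPLPushErrorHandler(CPLQuietErrorHandler)
+    #     ...call into GDAL/OGR...
+    #     err = CPLGetLastErrorNo(); msg = CPLGetLastErrorMsg()
+    #     CPLPopErrorHandler()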
+ void CPLErrorReset() + int CPLGetLastErrorNo() + const char* CPLGetLastErrorMsg() + CPLErr CPLGetLastErrorType() + void CPLPushErrorHandler(CPLErrorHandler handler) + void CPLPushErrorHandlerEx(CPLErrorHandler handler, void *userdata) + void CPLPopErrorHandler() + void CPLQuietErrorHandler(CPLErr eErrClass, CPLErrorNum nError, const char *pszErrorMsg) + + +cdef extern from "cpl_vsi.h" nogil: + ctypedef unsigned long long vsi_l_offset + ctypedef FILE VSILFILE + ctypedef struct VSIStatBufL: + long st_size + long st_mode + int st_mtime + ctypedef enum VSIRangeStatus: + VSI_RANGE_STATUS_UNKNOWN, + VSI_RANGE_STATUS_DATA, + VSI_RANGE_STATUS_HOLE, + + # GDAL Plugin System (GDAL 3.0+) + # Filesystem functions + ctypedef int (*VSIFilesystemPluginStatCallback)(void*, const char*, VSIStatBufL*, int) # Optional + ctypedef int (*VSIFilesystemPluginUnlinkCallback)(void*, const char*) # Optional + ctypedef int (*VSIFilesystemPluginRenameCallback)(void*, const char*, const char*) # Optional + ctypedef int (*VSIFilesystemPluginMkdirCallback)(void*, const char*, long) # Optional + ctypedef int (*VSIFilesystemPluginRmdirCallback)(void*, const char*) # Optional + ctypedef char** (*VSIFilesystemPluginReadDirCallback)(void*, const char*, int) # Optional + ctypedef char** (*VSIFilesystemPluginSiblingFilesCallback)(void*, const char*) # Optional (GDAL 3.2+) + ctypedef void* (*VSIFilesystemPluginOpenCallback)(void*, const char*, const char*) + # File functions + ctypedef vsi_l_offset (*VSIFilesystemPluginTellCallback)(void*) + ctypedef int (*VSIFilesystemPluginSeekCallback)(void*, vsi_l_offset, int) + ctypedef size_t (*VSIFilesystemPluginReadCallback)(void*, void*, size_t, size_t) + ctypedef int (*VSIFilesystemPluginReadMultiRangeCallback)(void*, int, void**, const vsi_l_offset*, const size_t*) # Optional + ctypedef VSIRangeStatus (*VSIFilesystemPluginGetRangeStatusCallback)(void*, vsi_l_offset, vsi_l_offset) # Optional + ctypedef int (*VSIFilesystemPluginEofCallback)(void*) # Mandatory? 
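+    # Note (a reading of the VSI plugin API, not upstream text): callbacks
+    # marked "Optional" above, and the write-side callbacks below, may be
+    # left NULL in VSIFilesystemPluginCallbacksStruct; GDAL then treats
+    # the corresponding operation as unsupported for that filesystem.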
+ ctypedef size_t (*VSIFilesystemPluginWriteCallback)(void*, const void*, size_t, size_t) + ctypedef int (*VSIFilesystemPluginFlushCallback)(void*) # Optional + ctypedef int (*VSIFilesystemPluginTruncateCallback)(void*, vsi_l_offset) + ctypedef int (*VSIFilesystemPluginCloseCallback)(void*) # Optional + # Plugin function container struct + ctypedef struct VSIFilesystemPluginCallbacksStruct: + void *pUserData + VSIFilesystemPluginStatCallback stat + VSIFilesystemPluginUnlinkCallback unlink + VSIFilesystemPluginRenameCallback rename + VSIFilesystemPluginMkdirCallback mkdir + VSIFilesystemPluginRmdirCallback rmdir + VSIFilesystemPluginReadDirCallback read_dir + VSIFilesystemPluginOpenCallback open + VSIFilesystemPluginTellCallback tell + VSIFilesystemPluginSeekCallback seek + VSIFilesystemPluginReadCallback read + VSIFilesystemPluginReadMultiRangeCallback read_multi_range + VSIFilesystemPluginGetRangeStatusCallback get_range_status + VSIFilesystemPluginEofCallback eof + VSIFilesystemPluginWriteCallback write + VSIFilesystemPluginFlushCallback flush + VSIFilesystemPluginTruncateCallback truncate + VSIFilesystemPluginCloseCallback close + size_t nBufferSize + size_t nCacheSize + VSIFilesystemPluginSiblingFilesCallback sibling_files + + int VSIInstallPluginHandler(const char*, const VSIFilesystemPluginCallbacksStruct*) + VSIFilesystemPluginCallbacksStruct* VSIAllocFilesystemPluginCallbacksStruct() + void VSIFreeFilesystemPluginCallbacksStruct(VSIFilesystemPluginCallbacksStruct*) + char** VSIGetFileSystemsPrefixes() + + unsigned char *VSIGetMemFileBuffer(const char *path, + vsi_l_offset *data_len, + int take_ownership) + VSILFILE *VSIFileFromMemBuffer(const char *path, void *data, + vsi_l_offset data_len, int take_ownership) + VSILFILE* VSIFOpenL(const char *path, const char *mode) + int VSIFCloseL(VSILFILE *fp) + int VSIUnlink(const char *path) + int VSIMkdir(const char *path, long mode) + char** VSIReadDir(const char *path) + int VSIRmdir(const char *path) + int VSIRmdirRecursive(const char *path) + int VSIFFlushL(VSILFILE *fp) + size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) + vsi_l_offset VSIFTellL(VSILFILE *fp) + int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) + size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) + int VSIMkdir(const char *path, long mode) + int VSIRmdir(const char *path) + int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) + int VSI_ISDIR(int mode) + + +IF (CTE_GDAL_MAJOR_VERSION, CTE_GDAL_MINOR_VERSION) >= (3, 9): + cdef extern from "cpl_vsi.h" nogil: + int VSIRemovePluginHandler(const char*) + + +cdef extern from "ogr_core.h" nogil: + ctypedef int OGRErr + char *OGRGeometryTypeToName(int type) + + ctypedef enum OGRwkbGeometryType: + wkbUnknown + wkbPoint + wkbLineString + wkbPolygon + wkbMultiPoint + wkbMultiLineString + wkbMultiPolygon + wkbGeometryCollection + wkbCircularString + wkbCompoundCurve + wkbCurvePolygon + wkbMultiCurve + wkbMultiSurface + wkbCurve + wkbSurface + wkbPolyhedralSurface + wkbTIN + wkbTriangle + wkbNone + wkbLinearRing + wkbCircularStringZ + wkbCompoundCurveZ + wkbCurvePolygonZ + wkbMultiCurveZ + wkbMultiSurfaceZ + wkbCurveZ + wkbSurfaceZ + wkbPolyhedralSurfaceZ + wkbTINZ + wkbTriangleZ + wkbPointM + wkbLineStringM + wkbPolygonM + wkbMultiPointM + wkbMultiLineStringM + wkbMultiPolygonM + wkbGeometryCollectionM + wkbCircularStringM + wkbCompoundCurveM + 
wkbCurvePolygonM + wkbMultiCurveM + wkbMultiSurfaceM + wkbCurveM + wkbSurfaceM + wkbPolyhedralSurfaceM + wkbTINM + wkbTriangleM + wkbPointZM + wkbLineStringZM + wkbPolygonZM + wkbMultiPointZM + wkbMultiLineStringZM + wkbMultiPolygonZM + wkbGeometryCollectionZM + wkbCircularStringZM + wkbCompoundCurveZM + wkbCurvePolygonZM + wkbMultiCurveZM + wkbMultiSurfaceZM + wkbCurveZM + wkbSurfaceZM + wkbPolyhedralSurfaceZM + wkbTINZM + wkbTriangleZM + wkbPoint25D + wkbLineString25D + wkbPolygon25D + wkbMultiPoint25D + wkbMultiLineString25D + wkbMultiPolygon25D + wkbGeometryCollection25D + + ctypedef enum OGRFieldType: + OFTInteger + OFTIntegerList + OFTReal + OFTRealList + OFTString + OFTStringList + OFTWideString + OFTWideStringList + OFTBinary + OFTDate + OFTTime + OFTDateTime + OFTInteger64 + OFTInteger64List + OFTMaxType + + ctypedef int OGRFieldSubType + cdef int OFSTNone = 0 + cdef int OFSTBoolean = 1 + cdef int OFSTInt16 = 2 + cdef int OFSTFloat32 = 3 + cdef int OFSTJSON = 4 + cdef int OFSTUUID = 5 + cdef int OFSTMaxSubType = 5 + + ctypedef struct OGREnvelope: + double MinX + double MaxX + double MinY + double MaxY + + char * OGRGeometryTypeToName(int) + char * ODsCCreateLayer = "CreateLayer" + char * ODsCDeleteLayer = "DeleteLayer" + char * ODsCTransactions = "Transactions" + + +cdef extern from "ogr_srs_api.h" nogil: + ctypedef void * OGRCoordinateTransformationH + ctypedef void * OGRSpatialReferenceH + + OGRCoordinateTransformationH OCTNewCoordinateTransformation( + OGRSpatialReferenceH source, + OGRSpatialReferenceH dest) + void OCTDestroyCoordinateTransformation( + OGRCoordinateTransformationH source) + int OCTTransform(OGRCoordinateTransformationH ct, int nCount, double *x, + double *y, double *z) + int OSRAutoIdentifyEPSG(OGRSpatialReferenceH srs) + void OSRCleanup() + OGRSpatialReferenceH OSRClone(OGRSpatialReferenceH srs) + int OSRExportToProj4(OGRSpatialReferenceH srs, char **params) + int OSRExportToWkt(OGRSpatialReferenceH srs, char **params) + const char *OSRGetAuthorityName(OGRSpatialReferenceH srs, const char *key) + const char *OSRGetAuthorityCode(OGRSpatialReferenceH srs, const char *key) + int OSRImportFromEPSG(OGRSpatialReferenceH srs, int code) + int OSRImportFromProj4(OGRSpatialReferenceH srs, const char *proj) + int OSRImportFromWkt(OGRSpatialReferenceH srs, char **wkt) + int OSRIsGeographic(OGRSpatialReferenceH srs) + int OSRIsProjected(OGRSpatialReferenceH srs) + int OSRIsSame(OGRSpatialReferenceH srs1, OGRSpatialReferenceH srs2) + OGRSpatialReferenceH OSRNewSpatialReference(const char *wkt) + void OSRRelease(OGRSpatialReferenceH srs) + int OSRSetFromUserInput(OGRSpatialReferenceH srs, const char *input) + double OSRGetLinearUnits(OGRSpatialReferenceH srs, char **ppszName) + double OSRGetAngularUnits(OGRSpatialReferenceH srs, char **ppszName) + int OSREPSGTreatsAsLatLong(OGRSpatialReferenceH srs) + int OSREPSGTreatsAsNorthingEasting(OGRSpatialReferenceH srs) + OGRSpatialReferenceH *OSRFindMatches(OGRSpatialReferenceH srs, char **options, int *entries, int **matchConfidence) + void OSRFreeSRSArray(OGRSpatialReferenceH *srs) + ctypedef enum OSRAxisMappingStrategy: + OAMS_TRADITIONAL_GIS_ORDER + + const char* OSRGetName(OGRSpatialReferenceH hSRS) + void OSRSetAxisMappingStrategy(OGRSpatialReferenceH hSRS, OSRAxisMappingStrategy) + void OSRSetPROJSearchPaths(const char *const *papszPaths) + char ** OSRGetPROJSearchPaths() + OGRErr OSRExportToWktEx(OGRSpatialReferenceH, char ** ppszResult, + const char* const* papszOptions) + OGRErr OSRExportToPROJJSON(OGRSpatialReferenceH 
hSRS, + char ** ppszReturn, + const char* const* papszOptions) + void OSRGetPROJVersion (int *pnMajor, int *pnMinor, int *pnPatch) + +cdef extern from "gdal.h" nogil: + + ctypedef void * GDALMajorObjectH + ctypedef void * GDALDatasetH + ctypedef void * GDALRasterBandH + ctypedef void * GDALDriverH + ctypedef void * GDALColorTableH + ctypedef void * GDALRasterAttributeTableH + ctypedef void * GDALAsyncReaderH + + ctypedef long long GSpacing + ctypedef unsigned long long GIntBig + + ctypedef enum GDALDataType: + GDT_Unknown + GDT_Byte + GDT_UInt16 + GDT_Int16 + GDT_UInt32 + GDT_Int32 + GDT_Float32 + GDT_Float64 + GDT_CInt16 + GDT_CInt32 + GDT_CFloat32 + GDT_CFloat64 + GDT_TypeCount + + ctypedef enum GDALAccess: + GA_ReadOnly + GA_Update + + ctypedef enum GDALRWFlag: + GF_Read + GF_Write + + ctypedef enum GDALRIOResampleAlg: + GRIORA_NearestNeighbour + GRIORA_Bilinear + GRIORA_Cubic, + GRIORA_CubicSpline + GRIORA_Lanczos + GRIORA_Average + GRIORA_Mode + GRIORA_Gauss + + ctypedef enum GDALColorInterp: + GCI_Undefined + GCI_GrayIndex + GCI_PaletteIndex + GCI_RedBand + GCI_GreenBand + GCI_BlueBand + GCI_AlphaBand + GCI_HueBand + GCI_SaturationBand + GCI_LightnessBand + GCI_CyanBand + GCI_YCbCr_YBand + GCI_YCbCr_CbBand + GCI_YCbCr_CrBand + GCI_Max + + ctypedef struct GDALColorEntry: + short c1 + short c2 + short c3 + short c4 + + ctypedef struct GDAL_GCP: + char *pszId + char *pszInfo + double dfGCPPixel + double dfGCPLine + double dfGCPX + double dfGCPY + double dfGCPZ + + void GDALAllRegister() + void GDALDestroyDriverManager() + int GDALGetDriverCount() + GDALDriverH GDALGetDriver(int i) + const char *GDALGetDriverShortName(GDALDriverH driver) + const char *GDALGetDriverLongName(GDALDriverH driver) + const char* GDALGetDescription(GDALMajorObjectH obj) + void GDALSetDescription(GDALMajorObjectH obj, const char *text) + GDALDriverH GDALGetDriverByName(const char *name) + GDALDatasetH GDALOpen(const char *filename, GDALAccess access) # except -1 + GDALDatasetH GDALOpenShared(const char *filename, GDALAccess access) # except -1 + void GDALFlushCache(GDALDatasetH hds) + void GDALClose(GDALDatasetH hds) + GDALDriverH GDALGetDatasetDriver(GDALDatasetH hds) + int GDALGetGeoTransform(GDALDatasetH hds, double *transform) + const char *GDALGetProjectionRef(GDALDatasetH hds) + int GDALGetRasterXSize(GDALDatasetH hds) + int GDALGetRasterYSize(GDALDatasetH hds) + int GDALGetRasterCount(GDALDatasetH hds) + GDALRasterBandH GDALGetRasterBand(GDALDatasetH hds, int num) + GDALRasterBandH GDALGetOverview(GDALRasterBandH hband, int num) + int GDALGetRasterBandXSize(GDALRasterBandH hband) + int GDALGetRasterBandYSize(GDALRasterBandH hband) + const char *GDALGetRasterUnitType(GDALRasterBandH hband) + CPLErr GDALSetRasterUnitType(GDALRasterBandH hband, const char *val) + int GDALSetGeoTransform(GDALDatasetH hds, double *transform) + int GDALSetProjection(GDALDatasetH hds, const char *wkt) + void GDALGetBlockSize(GDALRasterBandH , int *xsize, int *ysize) + int GDALGetRasterDataType(GDALRasterBandH band) + double GDALGetRasterNoDataValue(GDALRasterBandH band, int *success) + int GDALSetRasterNoDataValue(GDALRasterBandH band, double value) + int GDALDatasetRasterIO(GDALRasterBandH band, int, int xoff, int yoff, + int xsize, int ysize, void *buffer, int width, + int height, int, int count, int *bmap, int poff, + int loff, int boff) + int GDALRasterIO(GDALRasterBandH band, int, int xoff, int yoff, int xsize, + int ysize, void *buffer, int width, int height, int, + int poff, int loff) + int GDALFillRaster(GDALRasterBandH 
band, double rvalue, double ivalue) + GDALDatasetH GDALCreate(GDALDriverH driver, const char *path, int width, + int height, int nbands, GDALDataType dtype, + const char **options) + GDALDatasetH GDALCreateCopy(GDALDriverH driver, const char *path, + GDALDatasetH hds, int strict, char **options, + void *progress_func, void *progress_data) + char** GDALGetMetadata(GDALMajorObjectH obj, const char *pszDomain) + int GDALSetMetadata(GDALMajorObjectH obj, char **papszMD, + const char *pszDomain) + const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszDomain) + int GDALSetMetadataItem(GDALMajorObjectH obj, const char *pszName, + const char *pszValue, const char *pszDomain) + const GDALColorEntry *GDALGetColorEntry(GDALColorTableH table, int) + void GDALSetColorEntry(GDALColorTableH table, int i, + const GDALColorEntry *poEntry) + int GDALSetRasterColorTable(GDALRasterBandH band, GDALColorTableH table) + GDALColorTableH GDALGetRasterColorTable(GDALRasterBandH band) + GDALColorTableH GDALCreateColorTable(int) + void GDALDestroyColorTable(GDALColorTableH table) + int GDALGetColorEntryCount(GDALColorTableH table) + int GDALGetRasterColorInterpretation(GDALRasterBandH band) + int GDALSetRasterColorInterpretation(GDALRasterBandH band, GDALColorInterp) + int GDALGetMaskFlags(GDALRasterBandH band) + int GDALCreateDatasetMaskBand(GDALDatasetH hds, int flags) + void *GDALGetMaskBand(GDALRasterBandH band) + int GDALCreateMaskBand(GDALDatasetH hds, int flags) + int GDALGetOverviewCount(GDALRasterBandH band) + int GDALBuildOverviews(GDALDatasetH hds, const char *resampling, + int nOverviews, int *overviews, int nBands, + int *bands, void *progress_func, + void *progress_data) + int GDALCheckVersion(int nVersionMajor, int nVersionMinor, + const char *pszCallingComponentName) + const char* GDALVersionInfo(const char *pszRequest) + CPLErr GDALSetGCPs(GDALDatasetH hDS, int nGCPCount, const GDAL_GCP *pasGCPList, + const char *pszGCPProjection) + const GDAL_GCP *GDALGetGCPs(GDALDatasetH hDS) + int GDALGetGCPCount(GDALDatasetH hDS) + const char *GDALGetGCPProjection(GDALDatasetH hDS) + int GDALGetCacheMax() + void GDALSetCacheMax(int nBytes) + GIntBig GDALGetCacheMax64() + void GDALSetCacheMax64(GIntBig nBytes) + CPLErr GDALDeleteDataset(GDALDriverH, const char *) + char** GDALGetFileList(GDALDatasetH hDS) + CPLErr GDALCopyDatasetFiles (GDALDriverH hDriver, const char * pszNewName, const char * pszOldName) + + void * GDALOpenEx(const char * pszFilename, + unsigned int nOpenFlags, + const char *const *papszAllowedDrivers, + const char *const *papszOpenOptions, + const char *const *papszSiblingFiles + ) + int GDAL_OF_UPDATE + int GDAL_OF_READONLY + int GDAL_OF_VECTOR + int GDAL_OF_VERBOSE_ERROR + int GDALDatasetGetLayerCount(void * hds) + void * GDALDatasetGetLayer(void * hDS, int iLayer) + void * GDALDatasetGetLayerByName(void * hDS, char * pszName) + void GDALClose(void * hDS) + void * GDALCreate(void * hDriver, + const char * pszFilename, + int nXSize, + int nYSize, + int nBands, + GDALDataType eBandType, + char ** papszOptions) + void * GDALDatasetCreateLayer(void * hDS, + const char * pszName, + void * hSpatialRef, + int eType, + char ** papszOptions) + int GDALDatasetDeleteLayer(void * hDS, int iLayer) + void GDALFlushCache(void * hDS) + char * GDALGetDriverShortName(void * hDriver) + OGRErr GDALDatasetStartTransaction (void * hDataset, int bForce) + OGRErr GDALDatasetCommitTransaction (void * hDataset) + OGRErr GDALDatasetRollbackTransaction (void * hDataset) + int 
GDALDatasetTestCapability (void * hDataset, char *) + + +cdef extern from "ogr_api.h" nogil: + + ctypedef void * OGRLayerH + ctypedef void * OGRDataSourceH + ctypedef void * OGRSFDriverH + ctypedef void * OGRFieldDefnH + ctypedef void * OGRFeatureDefnH + ctypedef void * OGRFeatureH + ctypedef void * OGRGeometryH + + ctypedef struct OGREnvelope: + double MinX + double MaxX + double MinY + double MaxY + + void OGRRegisterAll() + void OGRCleanupAll() + int OGRGetDriverCount() + + char *OGR_Dr_GetName(OGRSFDriverH driver) + OGRDataSourceH OGR_Dr_CreateDataSource(OGRSFDriverH driver, + const char *path, char **options) + int OGR_Dr_DeleteDataSource(OGRSFDriverH driver, const char *path) + int OGR_DS_DeleteLayer(OGRDataSourceH datasource, int n) + OGRLayerH OGR_DS_CreateLayer(OGRDataSourceH datasource, const char *name, + OGRSpatialReferenceH crs, int geomType, + char **options) + OGRLayerH OGR_DS_ExecuteSQL(OGRDataSourceH, const char *name, + OGRGeometryH filter, const char *dialext) + void OGR_DS_Destroy(OGRDataSourceH datasource) + OGRSFDriverH OGR_DS_GetDriver(OGRLayerH layer_defn) + OGRLayerH OGR_DS_GetLayerByName(OGRDataSourceH datasource, + const char *name) + int OGR_DS_GetLayerCount(OGRDataSourceH datasource) + OGRLayerH OGR_DS_GetLayer(OGRDataSourceH datasource, int n) + void OGR_DS_ReleaseResultSet(OGRDataSourceH datasource, OGRLayerH results) + int OGR_DS_SyncToDisk(OGRDataSourceH datasource) + OGRFeatureH OGR_F_Create(OGRFeatureDefnH featuredefn) + void OGR_F_Destroy(OGRFeatureH feature) + long OGR_F_GetFID(OGRFeatureH feature) + int OGR_F_IsFieldSet(OGRFeatureH feature, int n) + int OGR_F_GetFieldAsDateTime(OGRFeatureH feature, int n, int *y, int *m, + int *d, int *h, int *m, int *s, int *z) + double OGR_F_GetFieldAsDouble(OGRFeatureH feature, int n) + int OGR_F_GetFieldAsInteger(OGRFeatureH feature, int n) + const char *OGR_F_GetFieldAsString(OGRFeatureH feature, int n) + char **OGR_F_GetFieldAsStringList( OGRFeatureH feature, int n) + int OGR_F_GetFieldCount(OGRFeatureH feature) + OGRFieldDefnH OGR_F_GetFieldDefnRef(OGRFeatureH feature, int n) + int OGR_F_GetFieldIndex(OGRFeatureH feature, const char *name) + OGRGeometryH OGR_F_GetGeometryRef(OGRFeatureH feature) + void OGR_F_SetFieldDateTime(OGRFeatureH feature, int n, int y, int m, + int d, int hh, int mm, int ss, int tz) + void OGR_F_SetFieldDouble(OGRFeatureH feature, int n, double value) + void OGR_F_SetFieldInteger(OGRFeatureH feature, int n, int value) + void OGR_F_SetFieldString(OGRFeatureH feature, int n, const char *value) + void OGR_F_SetFieldStringList(OGRFeatureH feature, int n, const char **value) + int OGR_F_SetGeometryDirectly(OGRFeatureH feature, OGRGeometryH geometry) + OGRFeatureDefnH OGR_FD_Create(const char *name) + int OGR_FD_GetFieldCount(OGRFeatureDefnH featuredefn) + OGRFieldDefnH OGR_FD_GetFieldDefn(OGRFeatureDefnH featuredefn, int n) + int OGR_FD_GetGeomType(OGRFeatureDefnH featuredefn) + const char *OGR_FD_GetName(OGRFeatureDefnH featuredefn) + OGRFieldDefnH OGR_Fld_Create(const char *name, int fieldtype) + void OGR_Fld_Destroy(OGRFieldDefnH) + char *OGR_Fld_GetNameRef(OGRFieldDefnH) + int OGR_Fld_GetPrecision(OGRFieldDefnH) + int OGR_Fld_GetType(OGRFieldDefnH) + int OGR_Fld_GetWidth(OGRFieldDefnH) + void OGR_Fld_Set(OGRFieldDefnH, const char *name, int fieldtype, int width, + int precision, int justification) + void OGR_Fld_SetPrecision(OGRFieldDefnH, int n) + void OGR_Fld_SetWidth(OGRFieldDefnH, int n) + OGRErr OGR_G_AddGeometryDirectly(OGRGeometryH geometry, OGRGeometryH part) + OGRErr 
OGR_G_RemoveGeometry(OGRGeometryH geometry, int i, int delete) + void OGR_G_AddPoint(OGRGeometryH geometry, double x, double y, double z) + void OGR_G_AddPoint_2D(OGRGeometryH geometry, double x, double y) + void OGR_G_CloseRings(OGRGeometryH geometry) + OGRGeometryH OGR_G_CreateGeometry(int wkbtypecode) + OGRGeometryH OGR_G_CreateGeometryFromJson(const char *json) + void OGR_G_DestroyGeometry(OGRGeometryH geometry) + char *OGR_G_ExportToJson(OGRGeometryH geometry) + OGRErr OGR_G_ExportToWkb(OGRGeometryH geometry, int endianness, char *buffer) + int OGR_G_GetCoordinateDimension(OGRGeometryH geometry) + int OGR_G_GetGeometryCount(OGRGeometryH geometry) + const char *OGR_G_GetGeometryName(OGRGeometryH geometry) + int OGR_G_GetGeometryType(OGRGeometryH geometry) + OGRGeometryH OGR_G_GetGeometryRef(OGRGeometryH geometry, int n) + int OGR_G_GetPointCount(OGRGeometryH geometry) + double OGR_G_GetX(OGRGeometryH geometry, int n) + double OGR_G_GetY(OGRGeometryH geometry, int n) + double OGR_G_GetZ(OGRGeometryH geometry, int n) + OGRErr OGR_G_ImportFromWkb(OGRGeometryH geometry, unsigned char *bytes, + int nbytes) + int OGR_G_WkbSize(OGRGeometryH geometry) + OGRErr OGR_L_CreateFeature(OGRLayerH layer, OGRFeatureH feature) + int OGR_L_CreateField(OGRLayerH layer, OGRFieldDefnH, int flexible) + OGRErr OGR_L_GetExtent(OGRLayerH layer, void *extent, int force) + OGRFeatureH OGR_L_GetFeature(OGRLayerH layer, int n) + int OGR_L_GetFeatureCount(OGRLayerH layer, int m) + OGRFeatureDefnH OGR_L_GetLayerDefn(OGRLayerH layer) + const char *OGR_L_GetName(OGRLayerH layer) + OGRFeatureH OGR_L_GetNextFeature(OGRLayerH layer) + OGRGeometryH OGR_L_GetSpatialFilter(OGRLayerH layer) + OGRSpatialReferenceH OGR_L_GetSpatialRef(OGRLayerH layer) + void OGR_L_ResetReading(OGRLayerH layer) + void OGR_L_SetSpatialFilter(OGRLayerH layer, OGRGeometryH geometry) + void OGR_L_SetSpatialFilterRect(OGRLayerH layer, double minx, double miny, + double maxx, double maxy) + int OGR_L_TestCapability(OGRLayerH layer, const char *name) + OGRSFDriverH OGRGetDriverByName(const char *) + OGRSFDriverH OGRGetDriver(int i) + OGRDataSourceH OGROpen(const char *path, int mode, void *x) + OGRDataSourceH OGROpenShared(const char *path, int mode, void *x) + int OGRReleaseDataSource(OGRDataSourceH datasource) + const char * OGR_Dr_GetName (void *driver) + int OGR_Dr_TestCapability (void *driver, const char *) + void * OGR_F_Create (void *featuredefn) + void OGR_F_Destroy (void *feature) + long OGR_F_GetFID (void *feature) + int OGR_F_IsFieldSet (void *feature, int n) + int OGR_F_GetFieldAsDateTimeEx (void *feature, int n, int *y, int *m, int *d, int *h, int *m, float *s, int *z) + double OGR_F_GetFieldAsDouble (void *feature, int n) + int OGR_F_GetFieldAsInteger (void *feature, int n) + char * OGR_F_GetFieldAsString (void *feature, int n) + unsigned char * OGR_F_GetFieldAsBinary(void *feature, int n, int *s) + int OGR_F_GetFieldCount (void *feature) + void * OGR_F_GetFieldDefnRef (void *feature, int n) + int OGR_F_GetFieldIndex (void *feature, char *name) + void * OGR_F_GetGeometryRef (void *feature) + void * OGR_F_StealGeometry (void *feature) + void OGR_F_SetFieldDateTimeEx (void *feature, int n, int y, int m, int d, int hh, int mm, float ss, int tz) + void OGR_F_SetFieldDouble (void *feature, int n, double value) + void OGR_F_SetFieldInteger (void *feature, int n, int value) + void OGR_F_SetFieldString (void *feature, int n, char *value) + void OGR_F_SetFieldBinary (void *feature, int n, int l, unsigned char *value) + void OGR_F_SetFieldNull 
(void *feature, int n) # new in GDAL 2.2 + int OGR_F_SetGeometryDirectly (void *feature, void *geometry) + void * OGR_FD_Create (char *name) + int OGR_FD_GetFieldCount (void *featuredefn) + void * OGR_FD_GetFieldDefn (void *featuredefn, int n) + int OGR_FD_GetGeomType (void *featuredefn) + char * OGR_FD_GetName (void *featuredefn) + OGRFieldSubType OGR_Fld_GetSubType(void *fielddefn) + void OGR_Fld_SetSubType(void *fielddefn, OGRFieldSubType subtype) + void * OGR_G_ForceToMultiPolygon (void *geometry) + void * OGR_G_ForceToPolygon (void *geometry) + void * OGR_G_Clone(void *geometry) + void * OGR_G_GetLinearGeometry (void *hGeom, double dfMaxAngleStepSizeDegrees, char **papszOptions) + OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) + OGRErr OGR_L_SetAttributeFilter(void *layer, const char*) + OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) + long long OGR_F_GetFieldAsInteger64 (void *feature, int n) + void OGR_F_SetFieldInteger64 (void *feature, int n, long long value) + int OGR_F_IsFieldNull(void *feature, int n) + OGRwkbGeometryType OGR_GT_GetLinear(OGRwkbGeometryType eType) + + +cdef extern from "gdalwarper.h" nogil: + + ctypedef enum GDALResampleAlg: + GRA_NearestNeighbour + GRA_Bilinear + GRA_Cubic + GRA_CubicSpline + GRA_Lanczos + GRA_Average + GRA_Mode + + ctypedef int (*GDALMaskFunc)( + void *pMaskFuncArg, int nBandCount, int eType, int nXOff, int nYOff, + int nXSize, int nYSize, unsigned char **papabyImageData, + int bMaskIsFloat, void *pMask) + + ctypedef int (*GDALTransformerFunc)( + void *pTransformerArg, int bDstToSrc, int nPointCount, double *x, + double *y, double *z, int *panSuccess) + + ctypedef struct GDALWarpOptions: + char **papszWarpOptions + double dfWarpMemoryLimit + GDALResampleAlg eResampleAlg + GDALDataType eWorkingDataType + GDALDatasetH hSrcDS + GDALDatasetH hDstDS + # 0 for all bands + int nBandCount + # List of source band indexes + int *panSrcBands + # List of destination band indexes + int *panDstBands + # The source band so use as an alpha (transparency) value, 0=disabled + int nSrcAlphaBand + # The dest. band so use as an alpha (transparency) value, 0=disabled + int nDstAlphaBand + # The "nodata" value real component for each input band, if NULL there isn't one */ + double *padfSrcNoDataReal + # The "nodata" value imaginary component - may be NULL even if real component is provided. */ + double *padfSrcNoDataImag + # The "nodata" value real component for each output band, if NULL there isn't one */ + double *padfDstNoDataReal + # The "nodata" value imaginary component - may be NULL even if real component is provided. */ + double *padfDstNoDataImag + # GDALProgressFunc() compatible progress reporting function, or NULL if there isn't one. */ + void *pfnProgress + # Callback argument to be passed to pfnProgress. 
*/ + void *pProgressArg + # Type of spatial point transformer function */ + GDALTransformerFunc pfnTransformer + # Handle to image transformer setup structure */ + void *pTransformerArg + GDALMaskFunc *papfnSrcPerBandValidityMaskFunc + void **papSrcPerBandValidityMaskFuncArg + GDALMaskFunc pfnSrcValidityMaskFunc + void *pSrcValidityMaskFuncArg + GDALMaskFunc pfnSrcDensityMaskFunc + void *pSrcDensityMaskFuncArg + GDALMaskFunc pfnDstDensityMaskFunc + void *pDstDensityMaskFuncArg + GDALMaskFunc pfnDstValidityMaskFunc + void *pDstValidityMaskFuncArg + int (*pfnPreWarpChunkProcessor)(void *pKern, void *pArg) + void *pPreWarpProcessorArg + int (*pfnPostWarpChunkProcessor)(void *pKern, void *pArg) + void *pPostWarpProcessorArg + # Optional OGRPolygonH for a masking cutline. */ + OGRGeometryH hCutline + # Optional blending distance to apply across cutline in pixels, default is 0 + double dfCutlineBlendDist + + GDALWarpOptions *GDALCreateWarpOptions() + void GDALDestroyWarpOptions(GDALWarpOptions *options) + + GDALDatasetH GDALAutoCreateWarpedVRT( + GDALDatasetH hSrcDS, const char *pszSrcWKT, const char *pszDstWKT, + GDALResampleAlg eResampleAlg, double dfMaxError, + const GDALWarpOptions *psOptionsIn) + + GDALDatasetH GDALCreateWarpedVRT( + GDALDatasetH hSrcDS, int nPixels, int nLines, + double *padfGeoTransform, const GDALWarpOptions *psOptionsIn) + + +cdef extern from "gdal_alg.h" nogil: + + int GDALPolygonize(GDALRasterBandH band, GDALRasterBandH mask_band, + OGRLayerH layer, int fidx, char **options, + void *progress_func, void *progress_data) + int GDALFPolygonize(GDALRasterBandH band, GDALRasterBandH mask_band, + OGRLayerH layer, int fidx, char **options, + void *progress_func, void *progress_data) + int GDALSieveFilter(GDALRasterBandH src_band, GDALRasterBandH mask_band, + GDALRasterBandH dst_band, int size, int connectivity, + char **options, void *progress_func, + void *progress_data) + int GDALRasterizeGeometries(GDALDatasetH hds, int band_count, + int *dst_bands, int geom_count, + OGRGeometryH *geometries, + GDALTransformerFunc transform_func, + void *transform, double *pixel_values, + char **options, void *progress_func, + void *progress_data) + void *GDALCreateGenImgProjTransformer(GDALDatasetH src_hds, + const char *pszSrcWKT, GDALDatasetH dst_hds, + const char *pszDstWKT, + int bGCPUseOK, double dfGCPErrorThreshold, + int nOrder) + void *GDALCreateGenImgProjTransformer2(GDALDatasetH src_hds, GDALDatasetH dst_hds, char **options) + void *GDALCreateGenImgProjTransformer3( + const char *pszSrcWKT, const double *padfSrcGeoTransform, + const char *pszDstWKT, const double *padfDstGeoTransform) + void GDALSetGenImgProjTransformerDstGeoTransform(void *hTransformArg, double *padfGeoTransform) + int GDALGenImgProjTransform(void *pTransformArg, int bDstToSrc, + int nPointCount, double *x, double *y, + double *z, int *panSuccess) + void GDALDestroyGenImgProjTransformer(void *) + void *GDALCreateApproxTransformer(GDALTransformerFunc pfnRawTransformer, + void *pRawTransformerArg, + double dfMaxError) + int GDALApproxTransform(void *pTransformArg, int bDstToSrc, int npoints, + double *x, double *y, double *z, int *panSuccess) + void GDALDestroyApproxTransformer(void *) + void GDALApproxTransformerOwnsSubtransformer(void *, int) + int GDALFillNodata(GDALRasterBandH dst_band, GDALRasterBandH mask_band, + double max_search_distance, int deprecated, + int smoothing_iterations, char **options, + void *progress_func, void *progress_data) + int GDALChecksumImage(GDALRasterBandH band, int xoff, int yoff, 
int width, + int height) + int GDALSuggestedWarpOutput2( + GDALDatasetH hSrcDS, GDALTransformerFunc pfnRawTransformer, + void * pTransformArg, double * padfGeoTransformOut, int * pnPixels, + int * pnLines, double * padfExtent, int nOptions) diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/GDALLogoBW.svg b/.venv/lib/python3.12/site-packages/fiona/gdal_data/GDALLogoBW.svg new file mode 100644 index 00000000..4ac8f6a6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/GDALLogoBW.svg @@ -0,0 +1,138 @@ +[138 added lines of SVG markup; tag content lost in this capture] diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/GDALLogoColor.svg b/.venv/lib/python3.12/site-packages/fiona/gdal_data/GDALLogoColor.svg new file mode 100644 index 00000000..da311ad8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/GDALLogoColor.svg @@ -0,0 +1,126 @@ +[126 added lines of SVG markup; tag content lost in this capture] diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/GDALLogoGS.svg b/.venv/lib/python3.12/site-packages/fiona/gdal_data/GDALLogoGS.svg new file mode 100644 index 00000000..de00b72a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/GDALLogoGS.svg @@ -0,0 +1,126 @@ +[126 added lines of SVG markup; tag content lost in this capture] diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/LICENSE.TXT b/.venv/lib/python3.12/site-packages/fiona/gdal_data/LICENSE.TXT new file mode 100644 index 00000000..cd24b533 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/LICENSE.TXT @@ -0,0 +1,467 @@ + +GDAL/OGR Licensing +================== + +This file attempts to include all licenses that apply within the GDAL/OGR +source tree, in particular any that are supposed to be exposed to the end user +for credit requirements for instance. The contents of this file can be +displayed from GDAL commandline utilities using the --license commandline +switch. + + +GDAL/OGR General +---------------- + +In general GDAL/OGR is licensed under an MIT style license with the +following terms: + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + +port/cpl_float.cpp +------------------ + +Copyright (c) 2002, Industrial Light & Magic, a division of Lucas +Digital Ltd. LLC + +All rights reserved.
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: +* Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. +* Neither the name of Industrial Light & Magic nor the names of +its contributors may be used to endorse or promote products derived +from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +frmts/hdf4/hdf-eos/* +-------------------- + + Copyright (C) 1996 Hughes and Applied Research Corporation + + Permission to use, modify, and distribute this software and its documentation + for any purpose without fee is hereby granted, provided that the above + copyright notice appear in all copies and that both that copyright notice and + this permission notice appear in supporting documentation. + + +frmts/pcraster/libcsf +--------------------- + +Copyright (c) 1997-2003, Utrecht University +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + +* Neither the name of Utrecht University nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +frmts/grib/degrib/* +------------------- + +The degrib and g2clib source code are modified versions of code produced +by NOAA NWS and are in the public domain subject to the following +restrictions: + +http://www.weather.gov/im/softa.htm + +DISCLAIMER The United States Government makes no warranty, expressed or +implied, as to the usefulness of the software and documentation for any +purpose. The U.S. Government, its instrumentalities, officers, employees, +and agents assumes no responsibility (1) for the use of the software and +documentation listed below, or (2) to provide technical support to users. + +http://www.weather.gov/disclaimer.php + + The information on government servers are in the public domain, unless +specifically annotated otherwise, and may be used freely by the public so +long as you do not 1) claim it is your own (e.g. by claiming copyright for +NWS information -- see below), 2) use it in a manner that implies an +endorsement or affiliation with NOAA/NWS, or 3) modify it in content and +then present it as official government material. You also cannot present +information of your own in a way that makes it appear to be official +government information.. + + The user assumes the entire risk related to its use of this data. NWS is +providing this data "as is," and NWS disclaims any and all warranties, +whether express or implied, including (without limitation) any implied +warranties of merchantability or fitness for a particular purpose. In no +event will NWS be liable to you or to any third party for any direct, +indirect, incidental, consequential, special or exemplary damages or lost +profit resulting from any use or misuse of this data. + + As required by 17 U.S.C. 403, third parties producing copyrighted works +consisting predominantly of the material appearing in NWS Web pages must +provide notice with such work(s) identifying the NWS material incorporated +and stating that such material is not subject to copyright protection. + +port/cpl_minizip* +----------------- + +This is version 2005-Feb-10 of the Info-ZIP copyright and license. +The definitive version of this document should be available at +ftp://ftp.info-zip.org/pub/infozip/license.html indefinitely. + + +Copyright (c) 1990-2005 Info-ZIP. All rights reserved. + +For the purposes of this copyright and license, "Info-ZIP" is defined as +the following set of individuals: + + Mark Adler, John Bush, Karl Davis, Harald Denker, Jean-Michel Dubois, + Jean-loup Gailly, Hunter Goatley, Ed Gordon, Ian Gorman, Chris Herborth, + Dirk Haase, Greg Hartwig, Robert Heath, Jonathan Hudson, Paul Kienitz, + David Kirschbaum, Johnny Lee, Onno van der Linden, Igor Mandrichenko, + Steve P. Miller, Sergio Monesi, Keith Owens, George Petrov, Greg Roelofs, + Kai Uwe Rommel, Steve Salisbury, Dave Smith, Steven M. Schweda, + Christian Spieler, Cosmin Truta, Antoine Verheijen, Paul von Behren, + Rich Wales, Mike White + +This software is provided "as is," without warranty of any kind, express +or implied. In no event shall Info-ZIP or its contributors be held liable +for any direct, indirect, incidental, special or consequential damages +arising out of the use of or inability to use this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + + 1. Redistributions of source code must retain the above copyright notice, + definition, disclaimer, and this list of conditions. + + 2. 
Redistributions in binary form (compiled executables) must reproduce + the above copyright notice, definition, disclaimer, and this list of + conditions in documentation and/or other materials provided with the + distribution. The sole exception to this condition is redistribution + of a standard UnZipSFX binary (including SFXWiz) as part of a + self-extracting archive; that is permitted without inclusion of this + license, as long as the normal SFX banner has not been removed from + the binary or disabled. + + 3. Altered versions--including, but not limited to, ports to new operating + systems, existing ports with new graphical interfaces, and dynamic, + shared, or static library versions--must be plainly marked as such + and must not be misrepresented as being the original source. Such + altered versions also must not be misrepresented as being Info-ZIP + releases--including, but not limited to, labeling of the altered + versions with the names "Info-ZIP" (or any variation thereof, including, + but not limited to, different capitalizations), "Pocket UnZip," "WiZ" + or "MacZip" without the explicit permission of Info-ZIP. Such altered + versions are further prohibited from misrepresentative use of the + Zip-Bugs or Info-ZIP e-mail addresses or of the Info-ZIP URL(s). + + 4. Info-ZIP retains the right to use the names "Info-ZIP," "Zip," "UnZip," + "UnZipSFX," "WiZ," "Pocket UnZip," "Pocket Zip," and "MacZip" for its + own source and binary releases. + + +ogr/ogrsf_frmts/dxf/intronurbs.cpp +---------------------------------- + +This code is derived from the code associated with the book "An Introduction +to NURBS" by David F. Rogers. More information on the book and the code is +available at: + + http://www.nar-associates.com/nurbs/ + + +Copyright (c) 2009, David F. Rogers +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the David F. Rogers nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + +alg/thinplatespline.cpp +----------------------- + +IEEE754 log() code derived from: +@(#)e_log.c 1.3 95/01/18 + +Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved. + +Developed at SunSoft, a Sun Microsystems, Inc. business. 
+Permission to use, copy, modify, and distribute this +software is freely granted, provided that this notice +is preserved. + + +alg/libqhull +------------ + +Only applies when GDAL is compiled with internal qhull support + + + Qhull, Copyright (c) 1993-2012 + + C.B. Barber + Arlington, MA + + and + + The National Science and Technology Research Center for + Computation and Visualization of Geometric Structures + (The Geometry Center) + University of Minnesota + + email: qhull@qhull.org + +This software includes Qhull from C.B. Barber and The Geometry Center. +Qhull is copyrighted as noted above. Qhull is free software and may +be obtained via http from www.qhull.org. It may be freely copied, modified, +and redistributed under the following conditions: + +1. All copyright notices must remain intact in all files. + +2. A copy of this text file must be distributed along with any copies + of Qhull that you redistribute; this includes copies that you have + modified, or copies of programs or other software products that + include Qhull. + +3. If you modify Qhull, you must include a notice giving the + name of the person performing the modification, the date of + modification, and the reason for such modification. + +4. When distributing modified versions of Qhull, or other software + products that include Qhull, you must provide notice that the original + source code may be obtained as noted above. + +5. There is no warranty or other guarantee of fitness for Qhull, it is + provided solely "as is". Bug reports or fixes may be sent to + qhull_bug@qhull.org; the authors may or may not act on them as + they desire. + +frmts/pdf/pdfdataset.cpp (method PDFiumRenderPageBitmap()) +---------------------------------------------------------- + +Copyright 2014 PDFium Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +frmts/mrf/* (apply when MRF driver included in build) +------------------------------------------------------ + +Copyright (c) 2002-2012, California Institute of Technology. +All rights reserved. Based on Government Sponsored Research under contracts NAS7-1407 and/or NAS7-03001. 
+ +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + 3. Neither the name of the California Institute of Technology (Caltech), its operating division the Jet Propulsion Laboratory (JPL), + the National Aeronautics and Space Administration (NASA), nor the names of its contributors may be used to + endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +IN NO EVENT SHALL THE CALIFORNIA INSTITUTE OF TECHNOLOGY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +~~~~~~~~ + +Copyright 2014-2015 Esri +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +cmake/modules/3.* (backported CMake find modules) +------------------------------------------------- + +CMake - Cross Platform Makefile Generator +Copyright 2000-2022 Kitware, Inc. and Contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +* Neither the name of Kitware, Inc. nor the names of Contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +ogr/ogrsf_frmts/flatgeobuf +-------------------------- + +FlatGeobuf +++++++++++ + +ISC License + +Copyright (c) 2018, Bjorn Harrtell + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +Flatbush +++++++++ + +ISC License + +Copyright (c) 2018, Vladimir Agafonkin + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. + +ogr/ogrsf_frmts/flatgeobuf/flatbuffers +-------------------------------------- + +Copyright 2021 Google Inc. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/MM_m_idofic.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/MM_m_idofic.csv new file mode 100644 index 00000000..38295148 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/MM_m_idofic.csv @@ -0,0 +1,321 @@ +PSIDGEODES;ID_GEODES;NOTA_CAT;NOTA_SPA;NOTA_ENG +ESRI:102022;Albers_Equal_Area-Africa-WGS84;;; +ESRI:102025;Albers_Equal_Area-Asia_North-WGS84;;; +EPSG:5070;Albers_Equal_Area-N_America-NAD83;https://epsg.io/5070;https://epsg.io/5070;https://epsg.io/5070 +Azimuthal_Equidistant;Azimuthal_Equidistant-0-90-WGS84;;; +EPSG:4088;Cilindrical_Equidistant-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +Cylindrical_Equal_Area;Cylindrical_Equal_Area-15-0-WGS84;;; +EPSG:22171;Gauss-Kruger_Faja1-PosGAR98;;; +EPSG:22172;Gauss-Kruger_Faja2-PosGAR98;;; +EPSG:22173;Gauss-Kruger_Faja3-PosGAR98;;; +EPSG:22174;Gauss-Kruger_Faja4-PosGAR98;;; +EPSG:22175;Gauss-Kruger_Faja5-PosGAR98;;; +EPSG:22176;Gauss-Kruger_Faja6-PosGAR98;;; +EPSG:22177;Gauss-Kruger_Faja7-PosGAR98;;; +EPSG:3763;Gauss-Kruger_Portugal-ETRS89;;; +PT-TM06/ETRS89;Gauss-Kruger_Portugal-ETRS89;;; +EPSG:20791;Gauss-Kruger_Portugal-Lisboa1937;;; +EPSG:2932;Gauss-Kruger_Qatar-QND;;; +EPSG:3116;Gauss-Kruger_Zona2-MAGNA;;; +SR-ORG:9111;Geostationary-WGS84;;; +Goode_Homolosine;Goode_Homolosine-WGS84;;; +ESRI:102017;LambertAzimEqualA-0-90-WGS84-Ellipsoide;https://epsg.io/102017;https://epsg.io/102017;https://epsg.io/102017 +EPSG:9821;LambertAzimEqualA-0-90-WGS84-Esfera;https://epsg.io/9821-method;https://epsg.io/9821-method;https://epsg.io/9821-method +Lambert_Azimuthal_Equal_Area;LambertAzimEqualA-0-90-WGS84-Esfera;;; +Lambert_Azimuthal_Equal_Area-0-90-WGS84;LambertAzimEqualA-0-90-WGS84-Esfera;;; +EPSG:3035;Lambert_Azimuthal_Equal_Area-1052-ETRS89;;; +urn:ogc:def:crs:EPSG::3035;Lambert_Azimuthal_Equal_Area-1052-ETRS89;;; +ETRS-LAEA;Lambert_Azimuthal_Equal_Area-1052-ETRS89;;; +SR-ORG:7297;Lambert_Azimuthal_Equal_Area-9-48-ETRS89;;; +ETRS-LCC;Lambert_Conformal_Conic-Europa-ETRS89;;; +EPSG:3034;Lambert_Conformal_Conic-Europa-ETRS89;;; +EPSG:2154;Lambert_Conformal_Conic-França-ETRS89;;; +EPSG:2062;Lambert_Conformal_Conic-Madrid1870;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26191;Lambert_Conformal_Conic-Maroc_N-Merchich;;; +EPSG:27561;Lambert_Conformal_Conic-ZoneI-NTF;;; +EPSG:27562;Lambert_Conformal_Conic-ZoneII-NTF;;; +EPSG:27563;Lambert_Conformal_Conic-ZoneIII-NTF;;; +Lambert_Conformal_Conic;Lambert_Conformal_Conic-ZoneIII-NTF;;; +EPSG:27573;Lambert_Conformal_Conic-ZoneIII_ext-NTF;;; +EPSG:27572;Lambert_Conformal_Conic-ZoneII_ext-NTF;;; +AUTO2:LCC,1,14.5,38,35,41;Lambert_Conformal_Conic_ICC_Mediterrani;;; +AUTO2:MERCATOR,1,0,0.0;Mercator-Equator-ED50-UB/ICC;;; +Mercator;Mercator-Equator-ED50-UB/ICC;;; +Mercator-ED50;Mercator-Equator-ED50-UB/ICC;;; +Mercator-ED50-UB/ICC;Mercator-Equator-ED50-UB/ICC;;; +Mercator-Ecuador-ED50-UB/ICC;Mercator-Equator-ED50-UB/ICC;;; +EPSG:3395;Mercator-Equator-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +AUTO2:MERCATOR,1,0,40.60;Mercator-IHM-485-60k-ED50-UB/ICC;;; +AUTO2:MERCATOR,1,0,41.42;Mercator-IHM-489-50k-ED50-UB/ICC;;; +AUTO2:MERCATOR_WGS84,1,0,41.42;Mercator-IHM-489-50k-WGS84;;; +EPSG:3785;Mercator-Popular-Visualisation-Sphere;;; 
+EPSG:3857;Mercator-Popular-Visualisation-Sphere;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +urn:ogc:def:crs:EPSG::3857;Mercator-Popular-Visualisation-Sphere;;; +EPSG:900913;Mercator-Popular-Visualisation-Sphere;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +ESRI:102100;Mercator-Popular-Visualisation-Sphere;;; +EPSG:21782;ObliqueMercator-Rosenmund1903;;; +SR-ORG:6842;Sinusoidal-V5-MODIS;https://spatialreference.org/ref/sr-org/modis-sinusoidal/;https://spatialreference.org/ref/sr-org/modis-sinusoidal/;https://spatialreference.org/ref/sr-org/modis-sinusoidal/ +SR-ORG:6974;Sinusoidal-V5-MODIS;https://spatialreference.org/ref/sr-org/modis-sinusoidal-3/;https://spatialreference.org/ref/sr-org/modis-sinusoidal-3/;https://spatialreference.org/ref/sr-org/modis-sinusoidal-3/ +SR-ORG:6965;Sinusoidal-V5-MODIS;https://spatialreference.org/ref/sr-org/modis-sinusoidal-2/;https://spatialreference.org/ref/sr-org/modis-sinusoidal-2/;https://spatialreference.org/ref/sr-org/modis-sinusoidal-2/ +Sinusoidal;Sinusoidal-WGS84;;; +EPSG:3909;TransverseMercator-BalkansMGI1901;;; +EPSG:2393;TransverseMercator-Finland-KKJ;;; +EPSG:29903;TransverseMercator-Ireland1965;;; +EPSG:2039;TransverseMercator-Israel1989;;; +EPSG:3003;TransverseMercator-Monte_Mario-Italy_Z1;;; +EPSG:3021;TransverseMercator-Sweden-RT90;;; +EPSG:26710;UTM-10N-NAD27-CW;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32610;UTM-10N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32611;UTM-11N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32612;UTM-12N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32613;UTM-13N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32614;UTM-14N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32615;UTM-15N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32616;UTM-16N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32617;UTM-17N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32710;UTM-10S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32711;UTM-11S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32712;UTM-12S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32713;UTM-13S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32714;UTM-14S-WGS84;Sense paràmetres TOWGS84 a 
https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32715;UTM-15S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32716;UTM-16S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26901;UTM-1N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26902;UTM-2N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26903;UTM-3N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26904;UTM-4N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26905;UTM-5N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26906;UTM-6N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26906;UTM-7N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26908;UTM-8N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26909;UTM-9N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26910;UTM-10N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26911;UTM-11N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26912;UTM-12N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26913;UTM-13N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26914;UTM-14N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26915;UTM-15N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26916;UTM-16N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26917;UTM-17N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26918;UTM-18N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26919;UTM-19N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26920;UTM-20N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26921;UTM-21N-NAD83;Sense paràmetres TOWGS84 
a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26922;UTM-22N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26923;UTM-23N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26711;UTM-11N-NAD27-CW;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:4486;UTM-13N-ITRF92;;; +EPSG:26713;UTM-13N-NAD27-MX;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:4487;UTM-14N-ITRF92;;; +EPSG:26714;UTM-14N-NAD27-MX;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:4488;UTM-15N-ITRF92;;; +EPSG:26715;UTM-15N-NAD27-MX;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26915;UTM-15N-NAD83;;; +EPSG:26716;UTM-16N-NAD27-BC;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:24877;UTM-17S-PSA56-P;;; +EPSG:29187;UTM-17S-SAD69-PE;;; +EPSG:32717;UTM-17S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32618;UTM-18N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:29188;UTM-18S-SAD69-CH;;; +EPSG:32718;UTM-18S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:29169;UTM-19N-SAD69-BR;;; +EPSG:32619;UTM-19N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:24879;UTM-19S-PSAD56-BC;;; +EPSG:24879-1201;UTM-19S-PSAD56-BC;Transformació per defecte segons https://epsg.io/24879;Transformación por defecto según https://epsg.io/24879;Default transformation according to https://epsg.io/24879 +EPSG:24879-1203;UTM-19S-PSAD56-CN;;; +EPSG:24879-1209;UTM-19S-PSAD56-V;;; +EPSG:29189;UTM-19S-SAD69-CH;;; +EPSG:32719;UTM-19S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:29170;UTM-20N-SAD69-BR;;; +EPSG:32620;UTM-20N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:29190;UTM-20S-SAD69-BR;;; +EPSG:32720;UTM-20S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:29171;UTM-21N-SAD69-BR;;; +EPSG:32621;UTM-21N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:29191;UTM-21S-SAD69-BR;;; +EPSG:32721;UTM-21S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:29172;UTM-22N-SAD69-BR;;; +EPSG:32622;UTM-22N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:32623;UTM-23N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:32624;UTM-24N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:32625;UTM-25N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:32626;UTM-26N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:29192;UTM-27S-SAD69-BR;;; +EPSG:32722;UTM-22S-WGS84;Sense paràmetres TOWGS84 a 
https://epsg.io/;; +EPSG:32723;UTM-23S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:32724;UTM-24S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:32725;UTM-25S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:32726;UTM-26S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:32727;UTM-27S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:32728;UTM-28S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:32729;UTM-29S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:29193;UTM-23S-SAD69-BR;;; +EPSG:29194;UTM-24S-SAD69-BR;;; +EPSG:29195;UTM-25S-SAD69-BR;;; +ETRS-TM26;UTM-26N-ETRS89;;; +EPSG:3038;UTM-26N-ETRS89;;; +EPSG:3039;UTM-27N-ETRS89;;; +ETRS-TM27;UTM-27N-ETRS89;;; +EPSG:32627;UTM-27N-WGS84;Ordre d'eixos preferit: est-nord (XY). Sense paràmetres TOWGS84 a https://epsg.io/;Orden de ejes preferido: est-norte (XY). Sin parámetros TOWGS84 en https://epsg.io/;Preferred axis order: east-north (XY). No TOWGS84 parameters at https://epsg.io/ +EPSG:3040;UTM-28N-ETRS89;Ordre d'eixos preferit: nord-est (YX);Orden de ejes preferido: norte-est (YX). Sin parámetros TOWGS84 en https://epsg.io/;Preferred axis order: north-east (YX). No TOWGS84 parameters at https://epsg.io/ +ETRS-TM28;UTM-28N-ETRS89;;; +EPSG:4083;UTM-28N-REGCAN95;;; +EPSG:32628;UTM-28N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:23029;UTM-29N-S/IGN;;; +EPSG:23029-0000;UTM-29N-ED50-ABDF;;; +EPSG:23029-1145;UTM-29N-ED50-PS;;; +EPSG:25829;UTM-29N-ETRS89;Ordre d'eixos preferit: est-nord (XY);Orden de ejes preferido: est-norte (XY);Preferred axis order: east-north (XY) +EPSG:3041;UTM-29N-ETRS89;Ordre d'eixos preferit: nord-est (YX);Orden de ejes preferido: norte-est (YX);Preferred axis order: north-east (YX) +urn:ogc:def:crs:EPSG::25829;UTM-29N-ETRS89;;; +ETRS-TM29;UTM-29N-ETRS89;;; +EPSG:23029-1633;UTM-29N-S/IGN;;; +EPSG:32629;UTM-29N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +urn:ogc:def:crs:EPSG::23029;UTM-29N-S/IGN;;; +EPSG:25830;UTM-30N-ETRS89;Ordre d'eixos preferit: est-nord (XY);Orden de ejes preferido: est-norte (XY);Preferred axis order: east-north (XY) +EPSG:3042;UTM-30N-ETRS89;Ordre d'eixos preferit: nord-est (YX);Orden de ejes preferido: norte-est (YX);Preferred axis order: north-east (YX) +urn:ogc:def:crs:EPSG::25830;UTM-30N-ETRS89;;; +ETRS-TM30;UTM-30N-ETRS89;;; +EPSG:23030;UTM-30N-S/IGN;;; +EPSG:23030-0000;UTM-30N-ABDF;Transformació per defecte segons https://epsg.io/23030-to-4326;Transformación por defecto según https://epsg.io/23030-to-4326;Default transformation according to https://epsg.io/23030-to-4326 +EPSG:23030-15933;UTM-30N-IP;;; +EPSG:23030-1631;UTM-30N-Balearic;;; +EPSG:23030-1635;UTM-30N-NW_IP;;; +EPSG:23030-1145;UTM-30N-PS;;; +EPSG:23030-1633;UTM-30N-S/IGN;;; +urn:ogc:def:crs:EPSG::23030;UTM-30N-S/IGN;;; +UTM-30N;UTM-30N-S/IGN;;; +EPSG:32630;UTM-30N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:25831;UTM-31N-ETRS89;Ordre d'eixos preferit: est-nord (XY);Orden de ejes preferido: est-norte (XY);Preferred axis order: east-north (XY) +EPSG:3043;UTM-31N-ETRS89;Ordre d'eixos preferit: nord-est (YX);Orden de ejes preferido: norte-est (YX);Preferred axis order: north-east (YX) +urn:ogc:def:crs:EPSG::25831;UTM-31N-ETRS89;;; +ETRS-TM31;UTM-31N-ETRS89;;; +UTM-31N;UTM-31N-UB/ICC;;; 
+UTM-31N-ED50;UTM-31N-UB/ICC;;; +EPSG:23031;UTM-31N-UB/ICC;Excepcionalment no es fa correspondre a UTM-31N-ABDF (=https://epsg.io/23031) sinó a UTM-31N-UB/ICC per compatibilitat descendent;Excepcionalmente no se hace corresponder a UTM-31N-ABDF (=https://epsg.io/23031) sino a UTM-31N-UB/ICC por compatibilidad descendente;Exceptionally it does not correspond to UTM-31N-ABDF (=https://epsg.io/23031) but to UTM-31N-UB/ICC for backwards compatibility +EPSG:23031-0000;UTM-31N-ABDF;Transformació per defecte segons https://epsg.io/23031-to-4326;Transformación por defecto según https://epsg.io/23031-to-4326;Default transformation according to https://epsg.io/23031-to-4326 +urn:ogc:def:crs:EPSG::23031;UTM-31N-UB/ICC;;; +EPSG:32631;UTM-31N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:25832;UTM-32N-ETRS89;;; +ETRS-TM32;UTM-32N-ETRS89;;; +EPSG:32632;UTM-32N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:25833;UTM-33N-ETRS89;;; +ETRS-TM33;UTM-33N-ETRS89;;; +EPSG:32633;UTM-33N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:22033;UTM-33S-Camacupa1980;;; +EPSG:32730;UTM-30S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32731;UTM-31S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32732;UTM-32S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32733;UTM-33S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32734;UTM-34S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32735;UTM-35S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:25834;UTM-34N-ETRS89;;; +ETRS-TM34;UTM-34N-ETRS89;;; +EPSG:2100;UTM-34N-GGRS87;;; +EPSG:32634;UTM-34N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:25835;UTM-35N-ETRS89;;; +ETRS-TM35;UTM-35N-ETRS89;;; +EPSG:32635;UTM-35N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +ETRS-TM36;UTM-36N-ETRS89;;; +EPSG:25836;UTM-36N-ETRS89;;; +EPSG:32636;UTM-36N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:2736;UTM-36S-Tete-MZ;;; +EPSG:32736;UTM-36S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32737;UTM-37S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32738;UTM-38S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32739;UTM-39S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No 
TOWGS84 parameters at https://epsg.io/ +EPSG:32740;UTM-40S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32741;UTM-41S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32742;UTM-42S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32743;UTM-43S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32744;UTM-44S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32745;UTM-45S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32746;UTM-46S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32747;UTM-47S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32748;UTM-48S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32749;UTM-49S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32750;UTM-50S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32751;UTM-51S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32752;UTM-52S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32753;UTM-53S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32754;UTM-54S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32755;UTM-55S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32756;UTM-56S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32757;UTM-57S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32758;UTM-58S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32759;UTM-59S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32760;UTM-60S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +ETRS-TM37;UTM-37N-ETRS89;;; +EPSG:25837;UTM-37N-ETRS89;;; +EPSG:32637;UTM-37N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32638;UTM-38N-WGS84;Sense paràmetres 
TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32639;UTM-39N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +ETRS-TM38;UTM-38N-ETRS89;;; +EPSG:25838;UTM-38N-ETRS89;;; +ETRS-TM39;UTM-39N-ETRS89;;; +EPSG:32640;UTM-40N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32641;UTM-41N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32642;UTM-42N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32643;UTM-43N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32644;UTM-44N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32645;UTM-45N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32646;UTM-46N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32647;UTM-47N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32648;UTM-48N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32649;UTM-49N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32650;UTM-50N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32651;UTM-51N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32652;UTM-52N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32653;UTM-53N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32654;UTM-54N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32655;UTM-55N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32656;UTM-56N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32657;UTM-57N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32658;UTM-58N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32659;UTM-59N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32660;UTM-60N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en 
https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32759;UTM-59S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32601;UTM-1N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32602;UTM-2N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32603;UTM-3N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32604;UTM-4N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32605;UTM-5N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32606;UTM-6N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32607;UTM-7N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32608;UTM-8N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32609;UTM-9N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32701;UTM-1S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32702;UTM-2S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32703;UTM-3S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32704;UTM-4S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32705;UTM-5S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32706;UTM-6S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32707;UTM-7S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32708;UTM-8S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32709;UTM-9S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:4218;lat/long-Bogota;;; +EPSG:4149;lat/long-CH1903;;; +EPSG:4230-1145;lat/long-ED50-PS;;; +EPSG:4230-1633;lat/long-ED50-S/IGN;;; +EPSG:4230-0000;lat/long-ED50-ABDF;Transformació per defecte segons https://epsg.io/4230-to-4326;Transformación por defecto según https://epsg.io/4230-to-4326;Default transformation according to https://epsg.io/4230-to-4326 +EPSG:4230;lat/long-ED50-UB/ICC;;; +lat/long-ED50;lat/long-ED50-UB/ICC;;; +EPSG:4258;lat/long-ETRS89;;; +EPSG:4686;lat/long-MAGNA;;; +EPSG:4903;lat/long-Madrid1870;Sense paràmetres TOWGS84 a 
https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:4261;lat/long-Merchich;;; +EPSG:4267;lat/long-NAD27-BC;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:4269;lat/long-NAD83-AA;;; +EPSG:4275;lat/long-NTF;;; +EPSG:4190;lat/long-PosGAR98;;; +EPSG:4081;lat/long-REGCAN95;;; +EPSG:5527;lat/long-SAD69-CH;;; +EPSG:4124;lat/long-Sweden-RT90;;; +EPSG:4127;lat/long-Tete-MZ;;; +EPSG:4326;lat/long-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +urn:ogc:def:crs:EPSG::4326;lat/long-WGS84;;; +CRS:84;lat/long-WGS84;;; +Equirectangular;lat/long-WGS84;;; +lat/long;lat/long-WGS84;;; +urn:ogc:def:crs:OGC:1.3:CRS84;lat/long-WGS84;;; +EPSG:9377;Transverse-Mercator_Colombia_ONacional;;; +MAGNA-SIRGAS / Origen-Nacional;Transverse-Mercator_Colombia_ONacional;https://origen.igac.gov.co/herramientas.html;https://origen.igac.gov.co/herramientas.html;https://origen.igac.gov.co/herramientas.html diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/cubewerx_extra.wkt b/.venv/lib/python3.12/site-packages/fiona/gdal_data/cubewerx_extra.wkt new file mode 100644 index 00000000..f29a5ca5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/cubewerx_extra.wkt @@ -0,0 +1,48 @@ +# +# This file derived from the public_coordsys.txt file distributed with +# CubeSTOR by CubeWerx (http://www.cubewerx.com) +# +# OGC-defined "AUTO" codes +# http://www.digitalearth.gov/wmt/auto.html +# +# Hmm, not really much point to including these as they require extra +# substitutions. See the importFromWMSAUTO() if you need these. +# +#42001,PROJCS["WGS 84 / Auto UTM%s",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["central_meridian","%.16g"],PARAMETER["latitude_of_origin",0],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing","%.16g"],UNIT["Meter",1],AUTHORITY["EPSG","42001"]] +#42002,PROJCS["WGS 84 / Auto Tr. 
Mercator",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["central_meridian","%.16g"],PARAMETER["latitude_of_origin",0],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing","%.16g"],UNIT["Meter",1],AUTHORITY["EPSG","42002"]] +#42003,PROJCS["WGS 84 / Auto Orthographic",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Orthographic"],PARAMETER["central_meridian","%.16g"],PARAMETER["latitude_of_origin","%.16g"],UNIT["Meter",1],AUTHORITY["EPSG","42003"]] +#42004,PROJCS["WGS 84 / Auto Equirectangular",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Equirectangular"],PARAMETER["central_meridian",0],PARAMETER["latitude_of_origin",0],PARAMETER["standard_parallel_1","%.16g"],UNIT["Meter",1],AUTHORITY["EPSG","42004"]] +# +# OGC-defined extended codes (41000--41999) +# see http://www.digitalearth.gov/wmt/auto.html +# +41001,PROJCS["WGS84 / Simple Mercator",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["Meter",1],AUTHORITY["EPSG","41001"]] +# +# CubeWerx-defined extended codes (42100--42199) +# +42101,PROJCS["WGS 84 / LCC Canada",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["central_meridian",-95.0],PARAMETER["latitude_of_origin",0],PARAMETER["standard_parallel_1",49.0],PARAMETER["standard_parallel_2",77.0],PARAMETER["false_easting",0.0],PARAMETER["false_northing",-8000000.0],UNIT["Meter",1],AUTHORITY["EPSG","42101"]] +#EPSG:42102,"PROJCS[\"NAD83 / BC Albers\",GEOGCS[\"NAD83\",DATUM[\"North_American_Datum_1983\",SPHEROID[\"GRS_1980\",6378137,298.257222101]],PRIMEM[\"Greenwich\",0],UNIT[\"Decimal_Degree\",0.0174532925199433]],PROJECTION[\"Albers_conic_equal_area\"],PARAMETER[\"central_meridian\",-126.0],PARAMETER[\"latitude_of_origin\",45],PARAMETER[\"standard_parallel_1\",50.0],PARAMETER[\"standard_parallel_2\",58.5],PARAMETER[\"false_easting\",1000000.0],PARAMETER[\"false_northing\",0],UNIT[\"Meter\",1]]" +42103,PROJCS["WGS 84 / LCC USA",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS_1978",6378135,298.26]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["central_meridian",-100.0],PARAMETER["latitude_of_origin",0],PARAMETER["standard_parallel_1",33.0],PARAMETER["standard_parallel_2",45.0],PARAMETER["false_easting",0.0],PARAMETER["false_northing",0.0],UNIT["Meter",1],AUTHORITY["EPSG","42103"]] +42104,PROJCS["NAD83 / MTM zone 8 Quebec",GEOGCS["GRS80",DATUM["GRS_1980",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-73.5],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",304800],PARAMETER["false_northing",0],UNIT["Meter",1],AUTHORITY["EPSG","42104"]] +42105,PROJCS["WGS84 / Merc 
NorthAm",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-96],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["Meter",1],AUTHORITY["EPSG","42105"]] +42106,PROJCS["WGS84 / Lambert Azim Mozambique",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["Sphere_radius_6370997_m",6370997,0]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Lambert_Azimuthal_equal_area"],PARAMETER["latitude_of_origin",5],PARAMETER["central_meridian",20],PARAMETER["standard_parallel_1",5],PARAMETER["standard_parallel_2",5],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["Meter",1],AUTHORITY["EPSG","42106"]] +# +# CubeWerx-customer definitions (42300--42399) +# +42301,PROJCS["NAD27 / Polar Stereographic / CM=-98",GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.978698213901]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]],PROJECTION["Stereographic"],PARAMETER["latitude_of_origin",90],PARAMETER["central_meridian",-98.0],PARAMETER["standard_parallel_1",90],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["Meter",1],AUTHORITY["EPSG","42301"]] +42302,PROJCS["JapanOrtho.09 09",GEOGCS["Lon/Lat.Tokyo Datum",DATUM["Tokyo Datum",SPHEROID["anon",6377397.155,299.15281310608]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["Central_Meridian",139.833333333333],PARAMETER["False_Easting",0],PARAMETER["False_Northing",0],PARAMETER["Latitude_of_Origin",36],PARAMETER["Scale_Factor",0.9999],UNIT["Meter",1],AUTHORITY["EPSG","42302"]] +42303,PROJCS["NAD83 / Albers NorthAm",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Albers_conic_equal_area"],PARAMETER["central_meridian",-96.0],PARAMETER["latitude_of_origin",23],PARAMETER["standard_parallel_1",29.5],PARAMETER["standard_parallel_2",45.5],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["Meter",1],AUTHORITY["EPSG","42303"]] +42304,PROJCS["NAD83 / NRCan LCC Canada",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["central_meridian",-95.0],PARAMETER["latitude_of_origin",49.0],PARAMETER["standard_parallel_1",49.0],PARAMETER["standard_parallel_2",77.0],PARAMETER["false_easting",0.0],PARAMETER["false_northing",0.0],UNIT["Meter",1],AUTHORITY["EPSG","42304"]] +42305,PROJCS["France_II",GEOGCS["GCS_NTF_Paris",DATUM["Nouvelle_Triangulation_Francaise",SPHEROID["Clarke_1880_IGN",6378249.2,293.46602]],PRIMEM["Paris",2.337229166666667],UNIT["degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",2200000],PARAMETER["Central_Meridian",0],PARAMETER["Standard_Parallel_1",45.898918964419],PARAMETER["Standard_Parallel_2",47.696014502038],PARAMETER["Latitude_Of_Origin",46.8],UNIT["Meter",1],AUTHORITY["EPSG","42305"]] 
+42306,PROJCS["NAD83/QC_LCC",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["central_meridian",-68.5],PARAMETER["latitude_of_origin",44],PARAMETER["standard_parallel_1",46],PARAMETER["standard_parallel_2",60],PARAMETER["false_easting",0.0],PARAMETER["false_northing",0.0],UNIT["Meter",1],AUTHORITY["EPSG","42306"]] +42307,PROJCS["NAD83 / Texas Central - feet",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",31.8833333333333],PARAMETER["standard_parallel_2",30.1166666666667],PARAMETER["latitude_of_origin",29.6666666666667],PARAMETER["central_meridian",-100.333333333333],PARAMETER["false_easting",2296583.33333333333333],PARAMETER["false_northing",9842500],UNIT["US_Foot",0.30480060960121924],AUTHORITY["EPSG","42307"]] +42308,PROJCS["NAD27 / California Albers",GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.978698213901]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]],PROJECTION["Albers_conic_equal_area"],PARAMETER["central_meridian",-120.0],PARAMETER["latitude_of_origin",0],PARAMETER["standard_parallel_1",34],PARAMETER["standard_parallel_2",40.5],PARAMETER["false_easting",0],PARAMETER["false_northing",-4000000],UNIT["Meter",1],AUTHORITY["EPSG","42308"]] +42309,PROJCS["NAD 83 / LCC Canada AVHRR-2",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["central_meridian",-95.0],PARAMETER["latitude_of_origin",0],PARAMETER["standard_parallel_1",49.0],PARAMETER["standard_parallel_2",77.0],PARAMETER["false_easting",0.0],PARAMETER["false_northing",0.0],UNIT["Meter",1],AUTHORITY["EPSG","42309"]] +42310,PROJCS["WGS84+GRS80 / Mercator",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["GRS 1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["Meter",1],AUTHORITY["EPSG","42310"]] +42311,PROJCS["NAD83 / LCC Statcan",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["central_meridian",-91.866667],PARAMETER["latitude_of_origin",63.390675],PARAMETER["standard_parallel_1",49],PARAMETER["standard_parallel_2",77],PARAMETER["false_easting",6200000],PARAMETER["false_northing",3000000],UNIT["Meter",1],AUTHORITY["EPSG","42311"]] +# +# BC-Forestry/NFIS code +# +100001,GEOGCS["NAD83 / NFIS Seconds",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Decimal_Second",4.84813681109536e-06],AUTHORITY["EPSG","100001"]] +100002,PROJCS["NAD83 / Austin",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 
1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",31.8833333333333],PARAMETER["standard_parallel_2",30.1166666666667],PARAMETER["latitude_of_origin",29.6666666666667],PARAMETER["central_meridian",-100.333333333333],PARAMETER["false_easting",2296583.333333],PARAMETER["false_northing",9842500.0000000],UNIT["Meter",1],AUTHORITY["EPSG","100002"]] +900913,PROJCS["Google Maps Global Mercator",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Mercator_2SP"],PARAMETER["standard_parallel_1",0],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["Meter",1],EXTENSION["PROJ4","+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs"]] diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/default.rsc b/.venv/lib/python3.12/site-packages/fiona/gdal_data/default.rsc new file mode 100644 index 00000000..2fb03e82 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/gdal_data/default.rsc differ diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/ecw_cs.wkt b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ecw_cs.wkt new file mode 100644 index 00000000..cd4f1908 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ecw_cs.wkt @@ -0,0 +1,1453 @@ +AB_10TM,PROJCS["AB_10TM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-114.9999999999725],PARAMETER["scale_factor",0.9992],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +ACRESLC,PROJCS["ACRESLC",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-18.00000000235031],PARAMETER["standard_parallel_2",-35.99999999897103],PARAMETER["latitude_of_origin",-26.99999999779589],PARAMETER["central_meridian",131.999999998137],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +AEAFRICA,LOCAL_CS["AEAFRICA - (unsupported)"] +AERUSS,LOCAL_CS["AERUSS - (unsupported)"] +ALALASK2,PROJCS["ALALASK2",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",65],PARAMETER["standard_parallel_2",55],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",-153],PARAMETER["false_easting",0],PARAMETER["false_northing",-4943910.68]] +ALALASK3,PROJCS["ALALASK3",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",65],PARAMETER["standard_parallel_2",55],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",-153],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +ALALASKA,PROJCS["ALALASKA",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",65],PARAMETER["standard_parallel_2",55],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",-150],PARAMETER["false_easting",0],PARAMETER["false_northing",-4943910.68]] +ALAUS,PROJCS["ALAUS",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",-10],PARAMETER["standard_parallel_2",-39.99999999999994],PARAMETER["latitude_of_center",-29.99999999999995],PARAMETER["longitude_of_center",135],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] 
+ALBC,PROJCS["ALBC",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",58.5],PARAMETER["standard_parallel_2",50],PARAMETER["latitude_of_center",45],PARAMETER["longitude_of_center",-126],PARAMETER["false_easting",1000000],PARAMETER["false_northing",0]] +ALBERING,PROJCS["ALBERING",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",60],PARAMETER["standard_parallel_2",70],PARAMETER["latitude_of_center",60],PARAMETER["longitude_of_center",170],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +ALCAM,PROJCS["ALCAM",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",30],PARAMETER["standard_parallel_2",10],PARAMETER["latitude_of_center",20],PARAMETER["longitude_of_center",-69.99999999999994],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +ALCANADA,PROJCS["ALCANADA",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",66],PARAMETER["standard_parallel_2",41],PARAMETER["latitude_of_center",55],PARAMETER["longitude_of_center",-89.99999999999994],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +ALCHI,PROJCS["ALCHI",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",45],PARAMETER["standard_parallel_2",20],PARAMETER["latitude_of_center",35],PARAMETER["longitude_of_center",110],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +ALCOLOMB,PROJCS["ALCOLOMB",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",1],PARAMETER["standard_parallel_2",5],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",-72.99999999999994],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +ALDLGAL,PROJCS["ALDLGAL",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",55.00000000000679],PARAMETER["standard_parallel_2",64.99999999998198],PARAMETER["latitude_of_center",49.99999999999055],PARAMETER["longitude_of_center",-153.9999999999846],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +ALDLGHAW,PROJCS["ALDLGHAW",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",8.00000000002599],PARAMETER["standard_parallel_2",18.00000000000118],PARAMETER["latitude_of_center",3.000000000009746],PARAMETER["longitude_of_center",-156.9999999999944],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +ALDLGUSA,PROJCS["ALDLGUSA",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",29.5],PARAMETER["standard_parallel_2",45.5],PARAMETER["latitude_of_center",23],PARAMETER["longitude_of_center",-95.99999999999996],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +ALEUR,PROJCS["ALEUR",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",70],PARAMETER["standard_parallel_2",35],PARAMETER["latitude_of_center",50],PARAMETER["longitude_of_center",12],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +ALEURO,PROJCS["ALEURO",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",40],PARAMETER["standard_parallel_2",60],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",12],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +ALFAR,PROJCS["ALFAR",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",30],PARAMETER["standard_parallel_2",10],PARAMETER["latitude_of_center",20],PARAMETER["longitude_of_center",80],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] 
+ALFGDL,PROJCS["ALFGDL",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",24.00000000122388],PARAMETER["standard_parallel_2",31.50000000124825],PARAMETER["latitude_of_center",24.00000000122388],PARAMETER["longitude_of_center",-84.00000000141881],PARAMETER["false_easting",400000],PARAMETER["false_northing",0]] +ALFLA_GRS80,PROJCS["ALFLA_GRS80",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",24.00000000122388],PARAMETER["standard_parallel_2",31.50000000124825],PARAMETER["latitude_of_center",24.00000000122388],PARAMETER["longitude_of_center",-84.00000000141881],PARAMETER["false_easting",400000],PARAMETER["false_northing",0]] +ALFLA_N27,PROJCS["ALFLA_N27",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",24.00000000122388],PARAMETER["standard_parallel_2",31.50000000124825],PARAMETER["latitude_of_center",24.00000000122388],PARAMETER["longitude_of_center",-84.00000000141881],PARAMETER["false_easting",400000],PARAMETER["false_northing",0]] +ALGMEXIC,PROJCS["ALGMEXIC",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",28],PARAMETER["standard_parallel_2",22],PARAMETER["latitude_of_center",25],PARAMETER["longitude_of_center",-89.99999999999994],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +ALGULFFT,PROJCS["ALGULFFT",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",31],PARAMETER["standard_parallel_2",27],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",-89.99999999999994],PARAMETER["false_easting",3500000],PARAMETER["false_northing",-7624216.25],UNIT["unnamed",0.3048006096]] +ALGULFMT,PROJCS["ALGULFMT",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",45.5],PARAMETER["standard_parallel_2",29.5],PARAMETER["latitude_of_center",23],PARAMETER["longitude_of_center",-89.99999999999994],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +ALMALIN,PROJCS["ALMALIN",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",30],PARAMETER["standard_parallel_2",0.008333299999997507],PARAMETER["latitude_of_center",15],PARAMETER["longitude_of_center",120],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +ALMEA2,PROJCS["ALMEA2",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",25.0000003],PARAMETER["standard_parallel_2",-24.99999999999997],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",20],PARAMETER["false_easting",5000000],PARAMETER["false_northing",5000000]] +ALMENA,PROJCS["ALMENA",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",35],PARAMETER["standard_parallel_2",1],PARAMETER["latitude_of_center",18],PARAMETER["longitude_of_center",20],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +ALNEVADA,PROJCS["ALNEVADA",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",36.00000000000237],PARAMETER["standard_parallel_2",41.0000000000186],PARAMETER["latitude_of_center",38.50000000001049],PARAMETER["longitude_of_center",-116.999999999979],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +ALNSEA,PROJCS["ALNSEA",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",53],PARAMETER["standard_parallel_2",61],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",0],PARAMETER["false_easting",1000000],PARAMETER["false_northing",0]] 
+ALRUSS,PROJCS["ALRUSS",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",38],PARAMETER["standard_parallel_2",62],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",96],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +ALSAF,PROJCS["ALSAF",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",-0.9999999999999829],PARAMETER["standard_parallel_2",-31],PARAMETER["latitude_of_center",-15.99999999999996],PARAMETER["longitude_of_center",20],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +ALSAM,PROJCS["ALSAM",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",-0.9999999999999829],PARAMETER["standard_parallel_2",-54.99999999999998],PARAMETER["latitude_of_center",-27.99999999999998],PARAMETER["longitude_of_center",-69.99999999999994],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +ALTEALE,PROJCS["ALTEALE",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",34],PARAMETER["standard_parallel_2",40.49999999999996],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",-120],PARAMETER["false_easting",0],PARAMETER["false_northing",-4000000]] +ALTX_TCMS_AEA,PROJCS["ALTX_TCMS_AEA",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",27.49999999997475],PARAMETER["standard_parallel_2",34.99999999999911],PARAMETER["latitude_of_center",18.00000000000118],PARAMETER["longitude_of_center",-99.9999999999811],PARAMETER["false_easting",1500000],PARAMETER["false_northing",6000000]] +ALUSA_FT,PROJCS["ALUSA_FT",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",29.5],PARAMETER["standard_parallel_2",45.5],PARAMETER["latitude_of_center",23],PARAMETER["longitude_of_center",-95.99999999999996],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["unnamed",0.3048006096]] +ALVENEZ,PROJCS["ALVENEZ",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",10],PARAMETER["standard_parallel_2",4],PARAMETER["latitude_of_center",7],PARAMETER["longitude_of_center",-65.99999999999996],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +ALWAUST,PROJCS["ALWAUST",PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",-17.4752127514901],PARAMETER["standard_parallel_2",-31.51267873219527],PARAMETER["latitude_of_center",-29.99999999999995],PARAMETER["longitude_of_center",120.8940947726037],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +BCNAMER,LOCAL_CS["BCNAMER - (unsupported)"] +BCSAMER,LOCAL_CS["BCSAMER - (unsupported)"] +BCSPHERE,LOCAL_CS["BCSPHERE - (unsupported)"] +BONNEPOR,LOCAL_CS["BONNEPOR - (unsupported)"] +BORNEOMT,LOCAL_CS["BORNEOMT - (unsupported)"] +CAISRAEL,PROJCS["CAISRAEL",PROJECTION["Cassini_Soldner"],PARAMETER["latitude_of_origin",31.7340969],PARAMETER["central_meridian",35.2120806],PARAMETER["false_easting",170251.555],PARAMETER["false_northing",1126867.91]] +CAISRMOD,PROJCS["CAISRMOD",PROJECTION["Cassini_Soldner"],PARAMETER["latitude_of_origin",31.7340969],PARAMETER["central_meridian",35.2120806],PARAMETER["false_easting",1170251.55],PARAMETER["false_northing",1126867.91]] +CAPAL,PROJCS["CAPAL",PROJECTION["Cassini_Soldner"],PARAMETER["latitude_of_origin",31.7340969],PARAMETER["central_meridian",35.2120806],PARAMETER["false_easting",170251.555],PARAMETER["false_northing",126867.91]] 
+CAQATAR,PROJCS["CAQATAR",PROJECTION["Cassini_Soldner"],PARAMETER["latitude_of_origin",25.3823611],PARAMETER["central_meridian",50.7613889],PARAMETER["false_easting",100000],PARAMETER["false_northing",100000]] +CAQATMOD,PROJCS["CAQATMOD",PROJECTION["Cassini_Soldner"],PARAMETER["latitude_of_origin",25.3823611],PARAMETER["central_meridian",50.7613889],PARAMETER["false_easting",100000],PARAMETER["false_northing",1100000]] +CASNGPOR,PROJCS["CASNGPOR",PROJECTION["Cassini_Soldner"],PARAMETER["latitude_of_origin",1.2876466],PARAMETER["central_meridian",103.8530022],PARAMETER["false_easting",30000],PARAMETER["false_northing",30000]] +CATOBAGO,PROJCS["CATOBAGO",PROJECTION["Cassini_Soldner"],PARAMETER["latitude_of_origin",11.2521786],PARAMETER["central_meridian",-60.6860088],PARAMETER["false_easting",187500],PARAMETER["false_northing",180000],UNIT["unnamed",0.201166195]] +CATRINID,PROJCS["CATRINID",PROJECTION["Cassini_Soldner"],PARAMETER["latitude_of_origin",10.4416666],PARAMETER["central_meridian",-61.33333329999998],PARAMETER["false_easting",430000],PARAMETER["false_northing",325000],UNIT["unnamed",0.201166195]] +CAVANUA,PROJCS["CAVANUA",PROJECTION["Cassini_Soldner"],PARAMETER["latitude_of_origin",-16.24999999999996],PARAMETER["central_meridian",179.3333333],PARAMETER["false_easting",12513.32],PARAMETER["false_northing",16628.88],UNIT["unnamed",0.201166195]] +CAVITI,PROJCS["CAVITI",PROJECTION["Cassini_Soldner"],PARAMETER["latitude_of_origin",-17.99999999999998],PARAMETER["central_meridian",178],PARAMETER["false_easting",5440],PARAMETER["false_northing",7040],UNIT["unnamed",0.201166195]] +CE42BUL,LOCAL_CS["CE42BUL - (unsupported)"] +CEAUST,LOCAL_CS["CEAUST - (unsupported)"] +CEBLACK,LOCAL_CS["CEBLACK - (unsupported)"] +CECARP1,LOCAL_CS["CECARP1 - (unsupported)"] +CECASP,LOCAL_CS["CECASP - (unsupported)"] +CECASPAN,LOCAL_CS["CECASPAN - (unsupported)"] +CECISWMC,LOCAL_CS["CECISWMC - (unsupported)"] +CEEUR1,LOCAL_CS["CEEUR1 - (unsupported)"] +CEEUROPE,LOCAL_CS["CEEUROPE - (unsupported)"] +CERUSS,LOCAL_CS["CERUSS - (unsupported)"] +CERUSS1,LOCAL_CS["CERUSS1 - (unsupported)"] +CERUSS2,LOCAL_CS["CERUSS2 - (unsupported)"] +CEYUGO,LOCAL_CS["CEYUGO - (unsupported)"] +DUTCHNEW,LOCAL_CS["DUTCHNEW - (unsupported)"] +DUTCHOLD,LOCAL_CS["DUTCHOLD - (unsupported)"] +EGSA87,PROJCS["EGSA87",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",23.99999882666041],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +FLSPHERE,LOCAL_CS["FLSPHERE - (unsupported)"] +GALCC,PROJCS["GALCC",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-18.00000000235031],PARAMETER["standard_parallel_2",-35.99999999897103],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",134.0000000015812],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +HGRS87,PROJCS["HGRS87",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",23.99999882666041],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +IDTM,PROJCS["IDTM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.99999999999996],PARAMETER["central_meridian",-114],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",100000]] 
+JAPAN19_01,PROJCS["JAPAN19_01",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",33],PARAMETER["central_meridian",129.5000000000002],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_02,PROJCS["JAPAN19_02",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",33],PARAMETER["central_meridian",131],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_03,PROJCS["JAPAN19_03",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",36],PARAMETER["central_meridian",132.1666666666665],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_04,PROJCS["JAPAN19_04",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",33],PARAMETER["central_meridian",133.5],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_05,PROJCS["JAPAN19_05",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",35.99999999897103],PARAMETER["central_meridian",134.3333333329101],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_06,PROJCS["JAPAN19_06",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",35.99999999897103],PARAMETER["central_meridian",136],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_07,PROJCS["JAPAN19_07",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",36],PARAMETER["central_meridian",137.1666666666667],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_08,PROJCS["JAPAN19_08",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",36],PARAMETER["central_meridian",138.5000000000002],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_09,PROJCS["JAPAN19_09",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",35.99999999897103],PARAMETER["central_meridian",139.8333333333004],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_10,PROJCS["JAPAN19_10",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40],PARAMETER["central_meridian",140.8333333333334],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_11,PROJCS["JAPAN19_11",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",44],PARAMETER["central_meridian",140.25],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_12,PROJCS["JAPAN19_12",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",44],PARAMETER["central_meridian",142.2499999999997],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_13,PROJCS["JAPAN19_13",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",44],PARAMETER["central_meridian",144.25],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_14,PROJCS["JAPAN19_14",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",26],PARAMETER["central_meridian",142],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] 
+JAPAN19_15,PROJCS["JAPAN19_15",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",26],PARAMETER["central_meridian",127.5],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_16,PROJCS["JAPAN19_16",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",26],PARAMETER["central_meridian",124],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_17,PROJCS["JAPAN19_17",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",26],PARAMETER["central_meridian",131],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_18,PROJCS["JAPAN19_18",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",20],PARAMETER["central_meridian",136],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +JAPAN19_19,PROJCS["JAPAN19_19",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",26],PARAMETER["central_meridian",154],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +KOREA_25,PROJCS["KOREA_25",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",38.00000000241528],PARAMETER["central_meridian",125.00289027778],PARAMETER["scale_factor",1],PARAMETER["false_easting",200000],PARAMETER["false_northing",500000]] +KOREA_27,PROJCS["KOREA_27",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",38.00000000241528],PARAMETER["central_meridian",127.0028902777799],PARAMETER["scale_factor",1],PARAMETER["false_easting",200000],PARAMETER["false_northing",500000]] +KOREA_29,PROJCS["KOREA_29",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",38.00000000241528],PARAMETER["central_meridian",129.00289027778],PARAMETER["scale_factor",1],PARAMETER["false_easting",200000],PARAMETER["false_northing",500000]] +KOREA_31,PROJCS["KOREA_31",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",38.00000000241528],PARAMETER["central_meridian",131.00289027778],PARAMETER["scale_factor",1],PARAMETER["false_easting",200000],PARAMETER["false_northing",500000]] +KOREA_JJ,PROJCS["KOREA_JJ",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",38.00000000241528],PARAMETER["central_meridian",127.0028902777799],PARAMETER["scale_factor",1],PARAMETER["false_easting",200000],PARAMETER["false_northing",550000]] +L2AFRICA,PROJCS["L2AFRICA",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",20.00000000006498],PARAMETER["standard_parallel_2",-10.00000000003249],PARAMETER["latitude_of_origin",-25.00000000008122],PARAMETER["central_meridian",20.00000000006498],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +L2ALASKA,PROJCS["L2ALASKA",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",60.00000000019492],PARAMETER["standard_parallel_2",70.00000000022742],PARAMETER["latitude_of_origin",65.00000000021116],PARAMETER["central_meridian",-150.0000000004873],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +L2ALS10F,PROJCS["L2ALS10F",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",51.83333333617553],PARAMETER["standard_parallel_2",53.8333333338902],PARAMETER["latitude_of_origin",51.00000002766766],PARAMETER["central_meridian",-176.0000000280737],PARAMETER["false_easting",3000000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] 
+L2ALS10M,PROJCS["L2ALS10M",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",51.83333333617553],PARAMETER["standard_parallel_2",53.8333333338902],PARAMETER["latitude_of_origin",51.00000002766766],PARAMETER["central_meridian",-176.0000000280737],PARAMETER["false_easting",1000000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2ALSK10F83,PROJCS["L2ALSK10F83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",51.82355506655649],PARAMETER["standard_parallel_2",53.82317782885884],PARAMETER["latitude_of_origin",50.9903789776676],PARAMETER["central_meridian",-175.9667980405784],PARAMETER["false_easting",3280833.333],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2ANT1,PROJCS["L2ANT1",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-82.50000000599761],PARAMETER["standard_parallel_2",-81.49999997849238],PARAMETER["latitude_of_origin",-83.49999997620704],PARAMETER["central_meridian",-105.0000000232594],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +L2ANTDRI,PROJCS["L2ANTDRI",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-79.33333486606217],PARAMETER["standard_parallel_2",-76.6666632368084],PARAMETER["latitude_of_origin",-79.99999990858666],PARAMETER["central_meridian",159.9999998171733],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +L2ARKNF83,PROJCS["L2ARKNF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36.23333329670535],PARAMETER["standard_parallel_2",34.93333331251476],PARAMETER["latitude_of_origin",34.33333328455248],PARAMETER["central_meridian",-92.00000000946621],PARAMETER["false_easting",1312333.333],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2ARKSF83,PROJCS["L2ARKSF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",34.76666670810897],PARAMETER["standard_parallel_2",33.30000000492102],PARAMETER["latitude_of_origin",32.66666672483252],PARAMETER["central_meridian",-92.00000000946621],PARAMETER["false_easting",1312333.333],PARAMETER["false_northing",1312333.333],UNIT["US Foot",0.30480061]] +L2AUST,PROJCS["L2AUST",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-30.00000000009746],PARAMETER["standard_parallel_2",-20.00000000006498],PARAMETER["latitude_of_origin",-25.00000000008122],PARAMETER["central_meridian",135.0000000004386],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +L2CAL1F83,PROJCS["L2CAL1F83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.66666668590062],PARAMETER["standard_parallel_2",40.0000000115891],PARAMETER["latitude_of_origin",39.33333330748703],PARAMETER["central_meridian",-121.9999999751862],PARAMETER["false_easting",6561666.665],PARAMETER["false_northing",1640416.666],UNIT["US Foot",0.30480061]] +L2CAL1M,PROJCS["L2CAL1M",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.0000000115891],PARAMETER["standard_parallel_2",41.66666668590062],PARAMETER["latitude_of_origin",39.33333330748703],PARAMETER["central_meridian",-121.9999999751862],PARAMETER["false_easting",2000000],PARAMETER["false_northing",500000],UNIT["unnamed",1]] 
+L2CAL2F83,PROJCS["L2CAL2F83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.83333329259175],PARAMETER["standard_parallel_2",38.33333327998181],PARAMETER["latitude_of_origin",37.66666669047129],PARAMETER["central_meridian",-121.9999999751862],PARAMETER["false_easting",6561666.665],PARAMETER["false_northing",1640416.666],UNIT["US Foot",0.30480061]] +L2CAL2M,PROJCS["L2CAL2M",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.33333333727759],PARAMETER["standard_parallel_2",39.83333334988753],PARAMETER["latitude_of_origin",37.66666669047129],PARAMETER["central_meridian",-121.9999999751862],PARAMETER["false_easting",2000000],PARAMETER["false_northing",500000],UNIT["unnamed",1]] +L2CAL3F83,PROJCS["L2CAL3F83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.43333332283937],PARAMETER["standard_parallel_2",37.06666671980479],PARAMETER["latitude_of_origin",36.50000000126449],PARAMETER["central_meridian",-120.500000019872],PARAMETER["false_easting",6561666.665],PARAMETER["false_northing",1640416.666],UNIT["US Foot",0.30480061]] +L2CAL3M,PROJCS["L2CAL3M",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.066666662509],PARAMETER["standard_parallel_2",38.43333332283937],PARAMETER["latitude_of_origin",36.50000000126449],PARAMETER["central_meridian",-120.500000019872],PARAMETER["false_easting",2000000],PARAMETER["false_northing",500000],UNIT["unnamed",1]] +L2CAL4F83,PROJCS["L2CAL4F83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.24999997892158],PARAMETER["standard_parallel_2",36.00000001615977],PARAMETER["latitude_of_origin",35.3333333120577],PARAMETER["central_meridian",-119.0000000072621],PARAMETER["false_easting",6561666.665],PARAMETER["false_northing",1640416.666],UNIT["US Foot",0.30480061]] +L2CAL4M,PROJCS["L2CAL4M",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36.00000001615977],PARAMETER["standard_parallel_2",37.24999997892158],PARAMETER["latitude_of_origin",35.3333333120577],PARAMETER["central_meridian",-119.0000000072621],PARAMETER["false_easting",2000000],PARAMETER["false_northing",500000],UNIT["unnamed",1]] +L2CAL5F83,PROJCS["L2CAL5F83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35.46666672163305],PARAMETER["standard_parallel_2",34.03333332786711],PARAMETER["latitude_of_origin",33.49999997604461],PARAMETER["central_meridian",-117.9999999797569],PARAMETER["false_easting",6561666.665],PARAMETER["false_northing",1640416.666],UNIT["US Foot",0.30480061]] +L2CAL5M,PROJCS["L2CAL5M",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35.46666672163305],PARAMETER["standard_parallel_2",34.03333332786711],PARAMETER["latitude_of_origin",33.49999997604461],PARAMETER["central_meridian",-117.9999999797569],PARAMETER["false_easting",2000000],PARAMETER["false_northing",500000]] +L2CAL6F83,PROJCS["L2CAL6F83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",33.88333332087654],PARAMETER["standard_parallel_2",32.78333330780953],PARAMETER["latitude_of_origin",32.16666668243202],PARAMETER["central_meridian",-116.2499999745946],PARAMETER["false_easting",6561666.665],PARAMETER["false_northing",1640416.666],UNIT["US Foot",0.30480061]] 
+L2CAL6M,PROJCS["L2CAL6M",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",32.78333330780953],PARAMETER["standard_parallel_2",33.88333332087654],PARAMETER["latitude_of_origin",32.16666668243202],PARAMETER["central_meridian",-116.2499999745946],PARAMETER["false_easting",2000000],PARAMETER["false_northing",500000],UNIT["unnamed",1]] +L2CAMER,PROJCS["L2CAMER",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",10.00000000003249],PARAMETER["standard_parallel_2",30.00000000009746],PARAMETER["latitude_of_origin",20.00000000006498],PARAMETER["central_meridian",-90.00000000029239],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +L2CAN2K,PROJCS["L2CAN2K",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",76.99999999795831],PARAMETER["standard_parallel_2",49.0000000013051],PARAMETER["latitude_of_origin",63.00000000249651],PARAMETER["central_meridian",-91.99999999800704],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +L2CANADA,PROJCS["L2CANADA",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",50.00000000016244],PARAMETER["standard_parallel_2",60.00000000019492],PARAMETER["latitude_of_origin",55.00000000017868],PARAMETER["central_meridian",-100.0000000003249],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +L2COLCF83,PROJCS["L2COLCF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.45000000001032],PARAMETER["standard_parallel_2",39.75000000001454],PARAMETER["latitude_of_origin",37.83333333332256],PARAMETER["central_meridian",-105.499999999999],PARAMETER["false_easting",3000000],PARAMETER["false_northing",999999.9998],UNIT["US Foot",0.30480061]] +L2COLCM,PROJCS["L2COLCM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.44999997755038],PARAMETER["standard_parallel_2",39.75000001903675],PARAMETER["latitude_of_origin",37.83333335217286],PARAMETER["central_meridian",-105.5000000083642],PARAMETER["false_easting",914401.8289],PARAMETER["false_northing",304800.6096],UNIT["unnamed",1]] +L2COLNF83,PROJCS["L2COLNF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.71666666664291],PARAMETER["standard_parallel_2",40.78333333333213],PARAMETER["latitude_of_origin",39.33333333332743],PARAMETER["central_meridian",-105.499999999999],PARAMETER["false_easting",3000000],PARAMETER["false_northing",999999.9998],UNIT["US Foot",0.30480061]] +L2COLNM,PROJCS["L2COLNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.71666665231896],PARAMETER["standard_parallel_2",40.78333335596397],PARAMETER["latitude_of_origin",39.33333330748703],PARAMETER["central_meridian",-105.5000000083642],PARAMETER["false_easting",914401.8289],PARAMETER["false_northing",304800.6096],UNIT["unnamed",1]] +L2COLSF83,PROJCS["L2COLSF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.43333332283937],PARAMETER["standard_parallel_2",37.23333332421058],PARAMETER["latitude_of_origin",36.66666666296607],PARAMETER["central_meridian",-105.5000000083642],PARAMETER["false_easting",3000000],PARAMETER["false_northing",999999.9998],UNIT["US Foot",0.30480061]] 
+L2COLSM,PROJCS["L2COLSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.23333332421058],PARAMETER["standard_parallel_2",38.43333332283937],PARAMETER["latitude_of_origin",36.66666666296607],PARAMETER["central_meridian",-105.5000000083642],PARAMETER["false_easting",914401.8289],PARAMETER["false_northing",304800.6096],UNIT["unnamed",1]] +L2CONNF83,PROJCS["L2CONNF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.86666671431998],PARAMETER["standard_parallel_2",41.20000001021791],PARAMETER["latitude_of_origin",40.83333332009697],PARAMETER["central_meridian",-72.75000000997663],PARAMETER["false_easting",1000000.001],PARAMETER["false_northing",499999.9999],UNIT["US Foot",0.30480061]] +L2EUROPE,PROJCS["L2EUROPE",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.00000000012995],PARAMETER["standard_parallel_2",60.00000000019492],PARAMETER["latitude_of_origin",50.00000000016244],PARAMETER["central_meridian",20.00000000006498],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +L2FLANF83,PROJCS["L2FLANF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.75000000067286],PARAMETER["standard_parallel_2",29.58333331146606],PARAMETER["latitude_of_origin",28.99999999551055],PARAMETER["central_meridian",-84.50000000371226],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2FLANM,PROJCS["L2FLANM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",29.58333331146606],PARAMETER["standard_parallel_2",30.75000000067286],PARAMETER["latitude_of_origin",28.99999999551055],PARAMETER["central_meridian",-84.50000000371226],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2IOWNF83,LOCAL_CS["L2IOWNF83 - (unsupported)"] +L2IOWNM,PROJCS["L2IOWNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.06666668544356],PARAMETER["standard_parallel_2",43.26666668407236],PARAMETER["latitude_of_origin",41.50000002419905],PARAMETER["central_meridian",-93.50000002207615],PARAMETER["false_easting",1500000],PARAMETER["false_northing",1000000],UNIT["unnamed",1]] +L2IOWSF83,LOCAL_CS["L2IOWSF83 - (unsupported)"] +L2IOWSM,PROJCS["L2IOWSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.6166666942624],PARAMETER["standard_parallel_2",41.78333332617341],PARAMETER["latitude_of_origin",40.0000000115891],PARAMETER["central_meridian",-93.50000002207615],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2KANNF83,PROJCS["L2KANNF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.78333332845875],PARAMETER["standard_parallel_2",38.71666668210951],PARAMETER["latitude_of_origin",38.3260985419027],PARAMETER["central_meridian",-98.4814182215737],PARAMETER["false_easting",1312333.333],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2KANSF27,PROJCS["L2KANSF27",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.26666669092835],PARAMETER["standard_parallel_2",38.56666667511895],PARAMETER["latitude_of_origin",36.66666666296607],PARAMETER["central_meridian",-98.49999998771493],PARAMETER["false_easting",2000000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] 
+L2KANSF83,PROJCS["L2KANSF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.25963976464153],PARAMETER["standard_parallel_2",38.55939452289581],PARAMETER["latitude_of_origin",36.65975295313957],PARAMETER["central_meridian",-98.4814182215737],PARAMETER["false_easting",1312333.333],PARAMETER["false_northing",1312333.333],UNIT["US Foot",0.30480061]] +L2KANSM,PROJCS["L2KANSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.26666669092835],PARAMETER["standard_parallel_2",38.56666667511895],PARAMETER["latitude_of_origin",36.66666666296607],PARAMETER["central_meridian",-98.49999998771493],PARAMETER["false_easting",400000],PARAMETER["false_northing",400000],UNIT["unnamed",1]] +L2KYF83,PROJCS["L2KYF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.6666666666395],PARAMETER["standard_parallel_2",37.08333333332012],PARAMETER["latitude_of_origin",36.33333333331768],PARAMETER["central_meridian",-85.7499999999921],PARAMETER["false_easting",4921250],PARAMETER["false_northing",3280833.333],UNIT["US Foot",0.30480061]] +L2KYNFT83,PROJCS["L2KYNFT83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.96666666669451],PARAMETER["standard_parallel_2",38.96666666664047],PARAMETER["latitude_of_origin",37.50000000000723],PARAMETER["central_meridian",-84.24999999998722],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2KYSFT83,PROJCS["L2KYSFT83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36.73333333331897],PARAMETER["standard_parallel_2",37.93333333332288],PARAMETER["latitude_of_origin",36.33333333331768],PARAMETER["central_meridian",-85.7499999999921],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",1640416.666],UNIT["US Foot",0.30480061]] +L2KYSM,PROJCS["L2KYSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36.73333333910585],PARAMETER["standard_parallel_2",37.93333333773465],PARAMETER["latitude_of_origin",36.33333333956292],PARAMETER["central_meridian",-85.75000002376986],PARAMETER["false_easting",500000],PARAMETER["false_northing",500000],UNIT["unnamed",1]] +L2LANFT83,PROJCS["L2LANFT83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",32.66666672483252],PARAMETER["standard_parallel_2",31.16666671222257],PARAMETER["latitude_of_origin",30.49424625135023],PARAMETER["central_meridian",-92.49999999457094],PARAMETER["false_easting",3280833.333],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2LAOFT83,PROJCS["L2LAOFT83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",26.16666668928801],PARAMETER["standard_parallel_2",27.83333330630376],PARAMETER["latitude_of_origin",25.4951894888338],PARAMETER["central_meridian",-91.33333330536414],PARAMETER["false_easting",3280833.333],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2LASFT83,PROJCS["L2LASFT83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.69999997924407],PARAMETER["standard_parallel_2",29.30000000949169],PARAMETER["latitude_of_origin",28.49999999894667],PARAMETER["central_meridian",-91.33333330536414],PARAMETER["false_easting",3280833.333],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] 
+L2MARYF83,PROJCS["L2MARYF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.30000002785559],PARAMETER["standard_parallel_2",39.4500000050556],PARAMETER["latitude_of_origin",37.66666666669354],PARAMETER["central_meridian",-76.99999999795831],PARAMETER["false_easting",1312333.333],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2MARYM,PROJCS["L2MARYM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.30000002785559],PARAMETER["standard_parallel_2",39.4500000050556],PARAMETER["latitude_of_origin",37.66666669047129],PARAMETER["central_meridian",-76.99999999795831],PARAMETER["false_easting",400000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2MASIF27,PROJCS["L2MASIF27",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.2833333],PARAMETER["standard_parallel_2",41.4833333],PARAMETER["latitude_of_origin",41],PARAMETER["central_meridian",-70.49999999999996],PARAMETER["false_easting",800000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2MASIF83,PROJCS["L2MASIF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.48333331219226],PARAMETER["standard_parallel_2",41.28333328377291],PARAMETER["latitude_of_origin",40.99226545263474],PARAMETER["central_meridian",-70.5000000197096],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2MASMF83,PROJCS["L2MASMF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.71666670732941],PARAMETER["standard_parallel_2",42.68333331082107],PARAMETER["latitude_of_origin",41.0000000000186],PARAMETER["central_meridian",-71.49999998991905],PARAMETER["false_easting",656166.6665],PARAMETER["false_northing",2460625],UNIT["US Foot",0.30480061]] +L2MASMM,PROJCS["L2MASMM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.71666665003362],PARAMETER["standard_parallel_2",42.68333331082107],PARAMETER["latitude_of_origin",40.99999998179855],PARAMETER["central_meridian",-71.49999998991905],PARAMETER["false_easting",200000],PARAMETER["false_northing",750000],UNIT["unnamed",1]] +L2MICCF83,PROJCS["L2MICCF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44.18333332343101],PARAMETER["standard_parallel_2",45.69999999075196],PARAMETER["latitude_of_origin",43.30849844728642],PARAMETER["central_meridian",-84.33333328471491],PARAMETER["false_easting",19685000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2MICCM,PROJCS["L2MICCM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44.18333332343101],PARAMETER["standard_parallel_2",45.69999999075196],PARAMETER["latitude_of_origin",43.31666664820536],PARAMETER["central_meridian",-84.36666665143269],PARAMETER["false_easting",6000000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2MICNF83,PROJCS["L2MICNF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.48333330762161],PARAMETER["standard_parallel_2",47.08333330579333],PARAMETER["latitude_of_origin",44.77488176554888],PARAMETER["central_meridian",-86.99999998653165],PARAMETER["false_easting",26246666.66],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] 
+L2MICNM,PROJCS["L2MICNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.48333330762161],PARAMETER["standard_parallel_2",47.08333330579333],PARAMETER["latitude_of_origin",44.7833333513933],PARAMETER["central_meridian",-86.99999998653165],PARAMETER["false_easting",8000000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2MICSF83,PROJCS["L2MICSF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.09999999486556],PARAMETER["standard_parallel_2",43.66666668361529],PARAMETER["latitude_of_origin",41.49217112888638],PARAMETER["central_meridian",-84.33333328471491],PARAMETER["false_easting",13123333.33],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2MICSM,PROJCS["L2MICSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.09999999486556],PARAMETER["standard_parallel_2",43.66666668361529],PARAMETER["latitude_of_origin",41.50000002419905],PARAMETER["central_meridian",-84.36666665143269],PARAMETER["false_easting",4000000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2MINCF83,PROJCS["L2MINCF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.61666671719695],PARAMETER["standard_parallel_2",47.04999999637133],PARAMETER["latitude_of_origin",44.99151086264789],PARAMETER["central_meridian",-94.24999999973323],PARAMETER["false_easting",2624666.666],PARAMETER["false_northing",328083.3333],UNIT["US Foot",0.30480061]] +L2MINCM,PROJCS["L2MINCM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.61666665990117],PARAMETER["standard_parallel_2",47.04999999637133],PARAMETER["latitude_of_origin",44.99999997722788],PARAMETER["central_meridian",-94.24999999973323],PARAMETER["false_easting",800000],PARAMETER["false_northing",100000],UNIT["unnamed",1]] +L2MINNF83,PROJCS["L2MINNF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",47.03333328436455],PARAMETER["standard_parallel_2",48.63333328253628],PARAMETER["latitude_of_origin",46.49122789140281],PARAMETER["central_meridian",-93.10000002253321],PARAMETER["false_easting",2624666.666],PARAMETER["false_northing",328083.3333],UNIT["US Foot",0.30480061]] +L2MINNM,PROJCS["L2MINNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",47.03333334166033],PARAMETER["standard_parallel_2",48.63333333983206],PARAMETER["latitude_of_origin",46.49999998983782],PARAMETER["central_meridian",-93.10000002253321],PARAMETER["false_easting",800000],PARAMETER["false_northing",100000],UNIT["unnamed",1]] +L2MINSF83,PROJCS["L2MINSF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",43.78333332388808],PARAMETER["standard_parallel_2",45.21666666666077],PARAMETER["latitude_of_origin",43.0000000000251],PARAMETER["central_meridian",-94.00000000718087],PARAMETER["false_easting",2624666.666],PARAMETER["false_northing",328083.3333],UNIT["US Foot",0.30480061]] +L2MINSM,PROJCS["L2MINSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",43.78333332388808],PARAMETER["standard_parallel_2",45.21666666035824],PARAMETER["latitude_of_origin",42.99999997951321],PARAMETER["central_meridian",-94.00000000718087],PARAMETER["false_easting",800000],PARAMETER["false_northing",100000],UNIT["unnamed",1]] 
+L2MON2,PROJCS["L2MON2",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.99999508186673],PARAMETER["standard_parallel_2",49.99999851047217],PARAMETER["latitude_of_origin",45.99999679616945],PARAMETER["central_meridian",104],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +L2MTF83,PROJCS["L2MTF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",48.99999997265721],PARAMETER["standard_parallel_2",44.99999997722788],PARAMETER["latitude_of_origin",44.24165234827042],PARAMETER["central_meridian",-109.5000000037935],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2NCAFT83,PROJCS["L2NCAFT83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",34.33333328455248],PARAMETER["standard_parallel_2",36.16666667786134],PARAMETER["latitude_of_origin",33.7500051825129],PARAMETER["central_meridian",-78.99999999567299],PARAMETER["false_easting",2000000.002],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2NCAM,PROJCS["L2NCAM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",34.33333334184825],PARAMETER["standard_parallel_2",36.16666667786134],PARAMETER["latitude_of_origin",33.75000002589275],PARAMETER["central_meridian",-78.99999999567299],PARAMETER["false_easting",609601.22],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2NDNFT83,PROJCS["L2NDNFT83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",48.73333332539385],PARAMETER["standard_parallel_2",47.43333328390748],PARAMETER["latitude_of_origin",47.00000719421077],PARAMETER["central_meridian",-100.4999999854296],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2NDSFT83,PROJCS["L2NDSFT83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",46.18333332114568],PARAMETER["standard_parallel_2",47.48333330533627],PARAMETER["latitude_of_origin",45.66667699457027],PARAMETER["central_meridian",-100.4999999854296],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2NDSM,PROJCS["L2NDSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",46.18333332114568],PARAMETER["standard_parallel_2",47.48333330533627],PARAMETER["latitude_of_origin",45.66666668132996],PARAMETER["central_meridian",-100.4999999854296],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2NEBF83,PROJCS["L2NEBF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.99999997951321],PARAMETER["standard_parallel_2",40.0000000115891],PARAMETER["latitude_of_origin",39.83000612667543],PARAMETER["central_meridian",-100.0000000003249],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2NEWYF83,PROJCS["L2NEWYF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.66666671569118],PARAMETER["standard_parallel_2",41.03333329122055],PARAMETER["latitude_of_origin",40.16667618439008],PARAMETER["central_meridian",-73.99999997273844],PARAMETER["false_easting",984249.9998],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2NEWYLIF,PROJCS["L2NEWYLIF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.6666667],PARAMETER["standard_parallel_2",41.0333333],PARAMETER["latitude_of_origin",40.5],PARAMETER["central_meridian",-73.99999999999993],PARAMETER["false_easting",2000000],PARAMETER["false_northing",100000],UNIT["US 
Foot",0.30480061]] +L2NEWYM,PROJCS["L2NEWYM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.6666666583954],PARAMETER["standard_parallel_2",41.03333334851633],PARAMETER["latitude_of_origin",40.16666667329068],PARAMETER["central_meridian",-73.99999997273844],PARAMETER["false_easting",300000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2NOAMER,PROJCS["L2NOAMER",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35.00000000011371],PARAMETER["standard_parallel_2",55.00000000017868],PARAMETER["latitude_of_origin",45.00000000014619],PARAMETER["central_meridian",-100.0000000003249],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +L2NSW1,PROJCS["L2NSW1",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-30],PARAMETER["standard_parallel_2",-36],PARAMETER["latitude_of_origin",-36],PARAMETER["central_meridian",147],PARAMETER["false_easting",700000],PARAMETER["false_northing",8200000]] +L2NSW2,PROJCS["L2NSW2",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-32.66666666666664],PARAMETER["standard_parallel_2",-35.33333333333334],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",147],PARAMETER["false_easting",1000000],PARAMETER["false_northing",10000000]] +L2OHINF83,PROJCS["L2OHINF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.43333332055404],PARAMETER["standard_parallel_2",41.69999999532262],PARAMETER["latitude_of_origin",39.66667310531327],PARAMETER["central_meridian",-82.50000000599761],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2OHINM,PROJCS["L2OHINM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.43333332055404],PARAMETER["standard_parallel_2",41.69999999532262],PARAMETER["latitude_of_origin",39.66666668818596],PARAMETER["central_meridian",-82.50000000599761],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2OHISF83,PROJCS["L2OHISF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.73333327952474],PARAMETER["standard_parallel_2",40.03333332101111],PARAMETER["latitude_of_origin",38.00000585804395],PARAMETER["central_meridian",-82.50000000599761],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2OHISM,PROJCS["L2OHISM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.73333333682052],PARAMETER["standard_parallel_2",40.03333332101111],PARAMETER["latitude_of_origin",38.00000001387444],PARAMETER["central_meridian",-82.50000000599761],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2OKLNF83,PROJCS["L2OKLNF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36.76666670582364],PARAMETER["standard_parallel_2",35.56666670719483],PARAMETER["latitude_of_origin",35.00000537445783],PARAMETER["central_meridian",-98.00000000261021],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2OKLSF83,PROJCS["L2OKLSF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35.23333332649591],PARAMETER["standard_parallel_2",33.93333328500954],PARAMETER["latitude_of_origin",33.33333509051219],PARAMETER["central_meridian",-98.00000000261021],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] 
+L2ORENF83,PROJCS["L2ORENF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44.33333327312581],PARAMETER["standard_parallel_2",46.0000000047331],PARAMETER["latitude_of_origin",43.66667671037671],PARAMETER["central_meridian",-120.500000019872],PARAMETER["false_easting",8202083.332],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2ORENM,PROJCS["L2ORENM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44.33333333042158],PARAMETER["standard_parallel_2",46.0000000047331],PARAMETER["latitude_of_origin",43.66666668361529],PARAMETER["central_meridian",-120.500000019872],PARAMETER["false_easting",2500000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2ORESF83,PROJCS["L2ORESF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.33333327541114],PARAMETER["standard_parallel_2",44.00000000701844],PARAMETER["latitude_of_origin",41.66667636888736],PARAMETER["central_meridian",-120.500000019872],PARAMETER["false_easting",4921249.999],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2ORESM,PROJCS["L2ORESM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.33333333270692],PARAMETER["standard_parallel_2",44.00000000701844],PARAMETER["latitude_of_origin",41.66666668590062],PARAMETER["central_meridian",-120.500000019872],PARAMETER["false_easting",1500000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2PANFT83,PROJCS["L2PANFT83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.88333328422998],PARAMETER["standard_parallel_2",41.94999998787499],PARAMETER["latitude_of_origin",40.16667280393909],PARAMETER["central_meridian",-77.74999997561541],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2PANM,PROJCS["L2PANM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.88333334152576],PARAMETER["standard_parallel_2",41.94999998787499],PARAMETER["latitude_of_origin",40.16666667329068],PARAMETER["central_meridian",-77.74999997561541],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2PASFT83,PROJCS["L2PASFT83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.93333327815354],PARAMETER["standard_parallel_2",40.80000001067497],PARAMETER["latitude_of_origin",39.33333938083966],PARAMETER["central_meridian",-77.74999997561541],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +L2PASM,PROJCS["L2PASM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.93333333544932],PARAMETER["standard_parallel_2",40.96666667237655],PARAMETER["latitude_of_origin",39.33333330748703],PARAMETER["central_meridian",-77.74999997561541],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +L2PRVF83,PROJCS["L2PRVF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",18.43333328839693],PARAMETER["standard_parallel_2",18.033333288854],PARAMETER["latitude_of_origin",17.83333572415316],PARAMETER["central_meridian",-66.4333332908447],PARAMETER["false_easting",656166.6665],PARAMETER["false_northing",656166.6665],UNIT["US Foot",0.30480061]] 
+L2PRVIM,PROJCS["L2PRVIM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",18.03333334614978],PARAMETER["standard_parallel_2",18.43333334569271],PARAMETER["latitude_of_origin",17.83333331773042],PARAMETER["central_meridian",-66.43333334814049],PARAMETER["false_easting",200000],PARAMETER["false_northing",200000],UNIT["unnamed",1]]
+L2SAUST,PROJCS["L2SAUST",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-27.99999999999998],PARAMETER["standard_parallel_2",-36],PARAMETER["latitude_of_origin",-31.99999999999997],PARAMETER["central_meridian",134.9999999999997],PARAMETER["false_easting",1000000],PARAMETER["false_northing",2000000]]
+L2SCFT83,PROJCS["L2SCFT83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",32.50000499056798],PARAMETER["standard_parallel_2",34.83333836898157],PARAMETER["latitude_of_origin",31.83333490601491],PARAMETER["central_meridian",-80.99999999338766],PARAMETER["false_easting",1999996],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+L2SCM,PROJCS["L2SCM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",32.50000000583516],PARAMETER["standard_parallel_2",34.83333332695297],PARAMETER["latitude_of_origin",31.83333335902886],PARAMETER["central_meridian",-80.99999999338766],PARAMETER["false_easting",609600],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+L2SDNFT83,PROJCS["L2SDNFT83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44.41666671856815],PARAMETER["standard_parallel_2",45.68333327874517],PARAMETER["latitude_of_origin",43.83334004892307],PARAMETER["central_meridian",-100.0000000003249],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+L2SDNM,PROJCS["L2SDNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44.41666666127237],PARAMETER["standard_parallel_2",45.68333333604095],PARAMETER["latitude_of_origin",43.83333334531687],PARAMETER["central_meridian",-100.0000000003249],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+L2SDSFT83,PROJCS["L2SDSFT83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.83333331781164],PARAMETER["standard_parallel_2",44.40000000656137],PARAMETER["latitude_of_origin",42.33333952065111],PARAMETER["central_meridian",-100.333333323728],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+L2SDSM,PROJCS["L2SDSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.83333331781164],PARAMETER["standard_parallel_2",44.40000000656137],PARAMETER["latitude_of_origin",42.33333333270692],PARAMETER["central_meridian",-100.333333323728],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+L2SOAMER,PROJCS["L2SOAMER",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",0],PARAMETER["standard_parallel_2",-30.00000000009746],PARAMETER["latitude_of_origin",-15.00000000004873],PARAMETER["central_meridian",-60.00000000019492],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+L2TENNF27,PROJCS["L2TENNF27",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35.24999998120691],PARAMETER["standard_parallel_2",36.41666672770949],PARAMETER["latitude_of_origin",34.66666666525141],PARAMETER["central_meridian",-86.00000001632222],PARAMETER["false_easting",2000000],PARAMETER["false_northing",100000],UNIT["US Foot",0.30480061]]
+L2TENNF83,PROJCS["L2TENNF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35.24999998120691],PARAMETER["standard_parallel_2",36.41666672770949],PARAMETER["latitude_of_origin",34.33333826928529],PARAMETER["central_meridian",-86.00000001632222],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+L2TENNM,PROJCS["L2TENNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35.24999998120691],PARAMETER["standard_parallel_2",36.41666667041371],PARAMETER["latitude_of_origin",34.33333334184825],PARAMETER["central_meridian",-86.00000001632222],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+L2TXCF83,PROJCS["L2TXCF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",31.88333332316187],PARAMETER["standard_parallel_2",30.11666672058434],PARAMETER["latitude_of_origin",29.66666664231684],PARAMETER["central_meridian",-100.333333323728],PARAMETER["false_easting",2296583.333],PARAMETER["false_northing",9842499.998],UNIT["US Foot",0.30480061]]
+L2TXNCF83,PROJCS["L2TXNCF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",33.96666670902311],PARAMETER["standard_parallel_2",32.13333331571423],PARAMETER["latitude_of_origin",31.66666698380619],PARAMETER["central_meridian",-98.49999998771493],PARAMETER["false_easting",1968500],PARAMETER["false_northing",6561666.665],UNIT["US Foot",0.30480061]]
+L2TXNF27,PROJCS["L2TXNF27",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",34.6500000105404],PARAMETER["standard_parallel_2",36.18333333257235],PARAMETER["latitude_of_origin",34.00000001844511],PARAMETER["central_meridian",-101.5000000129348],PARAMETER["false_easting",2000000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+L2TXNF83,PROJCS["L2TXNF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",34.6500000105404],PARAMETER["standard_parallel_2",36.18333327527657],PARAMETER["latitude_of_origin",34.00000001844511],PARAMETER["central_meridian",-101.5000000129348],PARAMETER["false_easting",656166.6665],PARAMETER["false_northing",3280833.333],UNIT["US Foot",0.30480061]]
+L2TXNM,PROJCS["L2TXNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",34.6500000105404],PARAMETER["standard_parallel_2",36.18333333257235],PARAMETER["latitude_of_origin",34.00000001844511],PARAMETER["central_meridian",-101.5000000129348],PARAMETER["false_easting",200000],PARAMETER["false_northing",1000000],UNIT["unnamed",1]]
+L2TXSCF83,PROJCS["L2TXSCF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.28333332499014],PARAMETER["standard_parallel_2",28.38333331283726],PARAMETER["latitude_of_origin",27.83333330630376],PARAMETER["central_meridian",-98.99999997281965],PARAMETER["false_easting",1968500],PARAMETER["false_northing",13123333.33],UNIT["US Foot",0.30480061]]
+L2TXSF83,PROJCS["L2TXSF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",27.83333330630376],PARAMETER["standard_parallel_2",26.16666668928801],PARAMETER["latitude_of_origin",25.66666670418329],PARAMETER["central_meridian",-98.49999998771493],PARAMETER["false_easting",984249.9998],PARAMETER["false_northing",16404166.66],UNIT["US Foot",0.30480061]]
+L2TX_SHACK_FT,PROJCS["L2TX_SHACK_FT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",27.41600000001344],PARAMETER["standard_parallel_2",34.91599999998051],PARAMETER["latitude_of_origin",31.15999999997518],PARAMETER["central_meridian",-99.9999999999811],PARAMETER["false_easting",3000000],PARAMETER["false_northing",3000000],UNIT["US Foot",0.30480061]]
+L2TX_TCMS_LC,PROJCS["L2TX_TCMS_LC",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",27.49999999997475],PARAMETER["standard_parallel_2",34.99999999999911],PARAMETER["latitude_of_origin",18.00000000000118],PARAMETER["central_meridian",-99.9999999999811],PARAMETER["false_easting",1500000],PARAMETER["false_northing",5000000]]
+L2TX_TSMS,PROJCS["L2TX_TSMS",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",27.41600000001344],PARAMETER["standard_parallel_2",34.91599999998051],PARAMETER["latitude_of_origin",31.15999999997518],PARAMETER["central_meridian",-99.9999999999811],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+L2USA48,PROJCS["L2USA48",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",33.00000000239903],PARAMETER["standard_parallel_2",45.00000000014619],PARAMETER["latitude_of_origin",23.00000000236655],PARAMETER["central_meridian",-95.99999999916595],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+L2UTHCF83,PROJCS["L2UTHCF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.01666669609066],PARAMETER["standard_parallel_2",40.6500000036844],PARAMETER["latitude_of_origin",38.33333299350291],PARAMETER["central_meridian",-111.5000000015082],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",6561666.665],UNIT["US Foot",0.30480061]]
+L2UTHCM,PROJCS["L2UTHCM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.01666663879488],PARAMETER["standard_parallel_2",40.6500000036844],PARAMETER["latitude_of_origin",38.33333333727759],PARAMETER["central_meridian",-111.5000000015082],PARAMETER["false_easting",500000],PARAMETER["false_northing",2000000],UNIT["unnamed",1]]
+L2UTHNF83,PROJCS["L2UTHNF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.71666667982419],PARAMETER["standard_parallel_2",41.78333332617341],PARAMETER["latitude_of_origin",40.33333327769648],PARAMETER["central_meridian",-111.5000000015082],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",3280833.333],UNIT["US Foot",0.30480061]]
+L2UTHNM,PROJCS["L2UTHNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.71666667982419],PARAMETER["standard_parallel_2",41.78333332617341],PARAMETER["latitude_of_origin",40.33333333499225],PARAMETER["central_meridian",-111.5000000015082],PARAMETER["false_easting",500000],PARAMETER["false_northing",1000000],UNIT["unnamed",1]]
+L2UTHSF83,PROJCS["L2UTHSF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.21666672679535],PARAMETER["standard_parallel_2",38.34999999198859],PARAMETER["latitude_of_origin",36.66666672026184],PARAMETER["central_meridian",-111.5000000015082],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",9842499.998],UNIT["US Foot",0.30480061]]
+L2UTHSM,PROJCS["L2UTHSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.21666666949957],PARAMETER["standard_parallel_2",38.34999999198859],PARAMETER["latitude_of_origin",36.66666666296607],PARAMETER["central_meridian",-111.5000000015082],PARAMETER["false_easting",500000],PARAMETER["false_northing",3000000],UNIT["unnamed",1]]
+L2VIRNF83,PROJCS["L2VIRNF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.03333332329644],PARAMETER["standard_parallel_2",39.20000001250324],PARAMETER["latitude_of_origin",37.66666669047129],PARAMETER["central_meridian",-78.50000001056827],PARAMETER["false_easting",11482916.66],PARAMETER["false_northing",6561666.665],UNIT["US Foot",0.30480061]]
+L2VIRNM,PROJCS["L2VIRNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.03333332329644],PARAMETER["standard_parallel_2",39.20000001250324],PARAMETER["latitude_of_origin",37.66666669047129],PARAMETER["central_meridian",-78.50000001056827],PARAMETER["false_easting",3500000],PARAMETER["false_northing",2000000],UNIT["unnamed",1]]
+L2VIRSF83,PROJCS["L2VIRSF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36.76666670582364],PARAMETER["standard_parallel_2",37.96666670445244],PARAMETER["latitude_of_origin",36.33333328226714],PARAMETER["central_meridian",-78.50000001056827],PARAMETER["false_easting",11482916.66],PARAMETER["false_northing",3280833.333],UNIT["US Foot",0.30480061]]
+L2VIRSM,PROJCS["L2VIRSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36.76666664852786],PARAMETER["standard_parallel_2",37.96666664715666],PARAMETER["latitude_of_origin",36.33333333956292],PARAMETER["central_meridian",-78.50000001056827],PARAMETER["false_easting",3500000],PARAMETER["false_northing",1000000],UNIT["unnamed",1]]
+L2WA_WGS84,PROJCS["L2WA_WGS84",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-83.49999997620704],PARAMETER["standard_parallel_2",-81.49999997849238],PARAMETER["latitude_of_origin",-82.50000000599761],PARAMETER["central_meridian",-105.0000000232594],PARAMETER["false_easting",343122.675],PARAMETER["false_northing",203866.49]]
+L2WISCF83,PROJCS["L2WISCF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44.2499999995708],PARAMETER["standard_parallel_2",45.50000001962838],PARAMETER["latitude_of_origin",43.83333328802108],PARAMETER["central_meridian",-90.00000001175154],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+L2WISCM,PROJCS["L2WISCM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44.2499999995708],PARAMETER["standard_parallel_2",45.50000001962838],PARAMETER["latitude_of_origin",43.83333334531687],PARAMETER["central_meridian",-90.00000001175154],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+L2WISNF83,PROJCS["L2WISNF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.56666669576817],PARAMETER["standard_parallel_2",46.76666669439697],PARAMETER["latitude_of_origin",45.16666669622524],PARAMETER["central_meridian",-90.00000001175154],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+L2WISNM,PROJCS["L2WISNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.56666663847238],PARAMETER["standard_parallel_2",46.76666669439697],PARAMETER["latitude_of_origin",45.16666663892946],PARAMETER["central_meridian",-90.00000001175154],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+L2WISSF83,PROJCS["L2WISSF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.73333327495407],PARAMETER["standard_parallel_2",44.06666668315822],PARAMETER["latitude_of_origin",42.00000000930377],PARAMETER["central_meridian",-90.00000001175154],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+L2WISSM,PROJCS["L2WISSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.73333333224986],PARAMETER["standard_parallel_2",44.06666668315822],PARAMETER["latitude_of_origin",42.00000000930377],PARAMETER["central_meridian",-90.00000001175154],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+L2WSHNF83,PROJCS["L2WSHNF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",47.50000001734305],PARAMETER["standard_parallel_2",48.73333332539385],PARAMETER["latitude_of_origin",46.99999997494255],PARAMETER["central_meridian",-120.8333332859794],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+L2WSHNM,PROJCS["L2WSHNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",47.50000001734305],PARAMETER["standard_parallel_2",48.73333332539385],PARAMETER["latitude_of_origin",46.99999997494255],PARAMETER["central_meridian",-120.8333333432752],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+L2WSHSF83,PROJCS["L2WSHSF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.83333328573575],PARAMETER["standard_parallel_2",47.33333329834569],PARAMETER["latitude_of_origin",45.33333301415213],PARAMETER["central_meridian",-120.500000019872],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+L2WSHSM,PROJCS["L2WSHSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.83333334303153],PARAMETER["standard_parallel_2",47.33333335564147],PARAMETER["latitude_of_origin",45.3333333579268],PARAMETER["central_meridian",-120.500000019872],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+L2WVANF83,PROJCS["L2WVANF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.99999998408389],PARAMETER["standard_parallel_2",40.25000000414146],PARAMETER["latitude_of_origin",38.49999999897916],PARAMETER["central_meridian",-79.4999999807777],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+L2WVANM,PROJCS["L2WVANM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.99999998408389],PARAMETER["standard_parallel_2",40.25000000414146],PARAMETER["latitude_of_origin",38.49999999897916],PARAMETER["central_meridian",-79.4999999807777],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+L2WVASF83,PROJCS["L2WVASF83",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.48333331676294],PARAMETER["standard_parallel_2",38.88333328651532],PARAMETER["latitude_of_origin",36.99999998636922],PARAMETER["central_meridian",-80.99999999338766],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+L2WVASM,PROJCS["L2WVASM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.48333331676294],PARAMETER["standard_parallel_2",38.8833333438111],PARAMETER["latitude_of_origin",36.99999998636922],PARAMETER["central_meridian",-80.99999999338766],PARAMETER["false_easting",600000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+L2_MEX_INEGI,PROJCS["L2_MEX_INEGI",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",17.49999999999956],PARAMETER["standard_parallel_2",29.49999999998125],PARAMETER["latitude_of_origin",23.50000000001905],PARAMETER["central_meridian",-101.9999999999876],PARAMETER["false_easting",2500000],PARAMETER["false_northing",0]]
+L2_PLSA,PROJCS["L2_PLSA",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-27.99999999999998],PARAMETER["standard_parallel_2",-36],PARAMETER["latitude_of_origin",-31.99999999999997],PARAMETER["central_meridian",134.9999999999997],PARAMETER["false_easting",1000000],PARAMETER["false_northing",2000000]]
+LABORDE,LOCAL_CS["LABORDE - (unsupported)"]
+LAMCAN,PROJCS["LAMCAN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",49],PARAMETER["standard_parallel_2",77],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-91.9999935923389],PARAMETER["false_easting",500000],PARAMETER["false_northing",500000]]
+LAMSAFRI,PROJCS["LAMSAFRI",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-3.999999995429332],PARAMETER["standard_parallel_2",-31.00000010781677],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",24.99999982819388],PARAMETER["false_easting",500000],PARAMETER["false_northing",500000]]
+LE00N16E,PROJCS["LE00N16E",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",16],PARAMETER["false_easting",5000000],PARAMETER["false_northing",5000000]]
+LE13S127,PROJCS["LE13S127",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",-12.99999999999995],PARAMETER["longitude_of_center",127],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LE20S60W,PROJCS["LE20S60W",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",-19.99999999999994],PARAMETER["longitude_of_center",-59.99999999999994],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LE35S135,PROJCS["LE35S135",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",-34.99999999999997],PARAMETER["longitude_of_center",135],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LEAFRICA,PROJCS["LEAFRICA",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",20],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LEAMERIC,PROJCS["LEAMERIC",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",-89.99999999999994],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LEFRAN,PROJCS["LEFRAN",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",47],PARAMETER["longitude_of_center",2],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LEGLOBE,PROJCS["LEGLOBE",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",-39.5],PARAMETER["longitude_of_center",-55.99999999999996],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LELUSAK0,PROJCS["LELUSAK0",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",28.3333333],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LELUSAKA,PROJCS["LELUSAKA",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",-15.43333329999995],PARAMETER["longitude_of_center",28.3333333],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LEMONG,PROJCS["LEMONG",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",47],PARAMETER["longitude_of_center",105],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LENAFRIC,PROJCS["LENAFRIC",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",1.25],PARAMETER["longitude_of_center",20],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LENSEA,PROJCS["LENSEA",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",60],PARAMETER["longitude_of_center",1],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LERUSSIA,PROJCS["LERUSSIA",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",60.99999999905226],PARAMETER["longitude_of_center",124.0000000015487],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LESAMER,PROJCS["LESAMER",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",-21.99999999999996],PARAMETER["longitude_of_center",-55.99999999999996],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LESEASIA,PROJCS["LESEASIA",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",20.00000000006498],PARAMETER["longitude_of_center",105.0000000003411],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LESOAMER,PROJCS["LESOAMER",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",-60.00000000019492],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LESUR554,PROJCS["LESUR554",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",5],PARAMETER["longitude_of_center",-54],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LEUSA0,PROJCS["LEUSA0",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",44.99999980534054],PARAMETER["longitude_of_center",-100.0000027505223],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LEWEURO,PROJCS["LEWEURO",PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",50],PARAMETER["longitude_of_center",2],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM1ADEN,PROJCS["LM1ADEN",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",15],PARAMETER["central_meridian",45],PARAMETER["scale_factor",0.999365678],PARAMETER["false_easting",1500000],PARAMETER["false_northing",1000000]]
+LM1AFNDX,PROJCS["LM1AFNDX",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",10],PARAMETER["central_meridian",30],PARAMETER["scale_factor",0.99],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM1ALGND,PROJCS["LM1ALGND",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",36],PARAMETER["central_meridian",2.7],PARAMETER["scale_factor",0.999625544],PARAMETER["false_easting",500000],PARAMETER["false_northing",300000]]
+LM1ALGSD,PROJCS["LM1ALGSD",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",33.3],PARAMETER["central_meridian",2.7],PARAMETER["scale_factor",0.999625769],PARAMETER["false_easting",500000],PARAMETER["false_northing",300000]]
+LM1BANG,PROJCS["LM1BANG",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",26],PARAMETER["central_meridian",90],PARAMETER["scale_factor",0.998786408],PARAMETER["false_easting",2743185.69],PARAMETER["false_northing",914395.23]]
+LM1BLSEA,PROJCS["LM1BLSEA",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",45],PARAMETER["central_meridian",35],PARAMETER["scale_factor",1],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM1BURMA,PROJCS["LM1BURMA",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",19],PARAMETER["central_meridian",100],PARAMETER["scale_factor",0.9987864],PARAMETER["false_easting",914398.8],PARAMETER["false_northing",2743196.4]]
+LM1CARIB,PROJCS["LM1CARIB",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",22.35],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.999936],PARAMETER["false_easting",500000],PARAMETER["false_northing",280296]]
+LM1CAUC,PROJCS["LM1CAUC",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",39.5],PARAMETER["central_meridian",45],PARAMETER["scale_factor",0.998461538],PARAMETER["false_easting",2155500],PARAMETER["false_northing",675000]]
+LM1COLC,PROJCS["LM1COLC",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",7],PARAMETER["central_meridian",-73.49999999999997],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM1CORSE,PROJCS["LM1CORSE",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",42.165],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.99994471],PARAMETER["false_easting",600000],PARAMETER["false_northing",200000]]
+LM1FRA1D,PROJCS["LM1FRA1D",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",49.5],PARAMETER["central_meridian",2.337229166666664],PARAMETER["scale_factor",0.999877341],PARAMETER["false_easting",600000],PARAMETER["false_northing",1200000]]
+LM1FRA1G,PROJCS["LM1FRA1G",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",49.5],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.999877341],PARAMETER["false_easting",600000],PARAMETER["false_northing",1200000]]
+LM1FRA2D,PROJCS["LM1FRA2D",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",46.8],PARAMETER["central_meridian",2.337229166666664],PARAMETER["scale_factor",0.99987742],PARAMETER["false_easting",600000],PARAMETER["false_northing",2200000]]
+LM1FRA2G,PROJCS["LM1FRA2G",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",46.8],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.99987742],PARAMETER["false_easting",600000],PARAMETER["false_northing",2200000]]
+LM1FRA3D,PROJCS["LM1FRA3D",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",44.1],PARAMETER["central_meridian",2.337229166666664],PARAMETER["scale_factor",0.999877499],PARAMETER["false_easting",600000],PARAMETER["false_northing",3200000]]
+LM1FRA3G,PROJCS["LM1FRA3G",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",44.1],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.999877499],PARAMETER["false_easting",600000],PARAMETER["false_northing",3200000]]
+LM1FRA4D,PROJCS["LM1FRA4D",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",42.165],PARAMETER["central_meridian",2.337229166666664],PARAMETER["scale_factor",0.99994471],PARAMETER["false_easting",234.36],PARAMETER["false_northing",4185861.37]]
+LM1FRA4G,PROJCS["LM1FRA4G",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",42.165],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.99994471],PARAMETER["false_easting",234.36],PARAMETER["false_northing",4185861.37]]
+LM1FRAND,PROJCS["LM1FRAND",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",49.5],PARAMETER["central_meridian",7.7372083],PARAMETER["scale_factor",0.99950908],PARAMETER["false_easting",500000],PARAMETER["false_northing",300000]]
+LM1FRE1D,PROJCS["LM1FRE1D",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",49.5],PARAMETER["central_meridian",2.337229166666664],PARAMETER["scale_factor",0.999877341],PARAMETER["false_easting",600000],PARAMETER["false_northing",200000]]
+LM1FRE1G,PROJCS["LM1FRE1G",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",49.5],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.999877341],PARAMETER["false_easting",600000],PARAMETER["false_northing",200000]]
+LM1FRE2D,PROJCS["LM1FRE2D",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",46.8],PARAMETER["central_meridian",2.337229166666664],PARAMETER["scale_factor",0.99987742],PARAMETER["false_easting",600000],PARAMETER["false_northing",200000]]
+LM1FRE2G,PROJCS["LM1FRE2G",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",46.8],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.99987742],PARAMETER["false_easting",600000],PARAMETER["false_northing",200000]]
+LM1FRE3D,PROJCS["LM1FRE3D",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",44.1],PARAMETER["central_meridian",2.337229166666664],PARAMETER["scale_factor",0.999877499],PARAMETER["false_easting",600000],PARAMETER["false_northing",200000]]
+LM1FRE3G,PROJCS["LM1FRE3G",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",44.1],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.999877499],PARAMETER["false_easting",600000],PARAMETER["false_northing",200000]]
+LM1FRE4D,PROJCS["LM1FRE4D",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",42.165],PARAMETER["central_meridian",2.337229166666664],PARAMETER["scale_factor",0.99994471],PARAMETER["false_easting",234.36],PARAMETER["false_northing",185861.37]]
+LM1FRE4G,PROJCS["LM1FRE4G",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",42.165],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.99994471],PARAMETER["false_easting",234.36],PARAMETER["false_northing",185861.37]]
+LM1GREN1,PROJCS["LM1GREN1",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",67.5],PARAMETER["central_meridian",-51.99999999999996],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",250000]]
+LM1GRNOR,PROJCS["LM1GRNOR",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",78.75],PARAMETER["central_meridian",-58.99999999999997],PARAMETER["scale_factor",0.997],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+LM1GRSUD,PROJCS["LM1GRSUD",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",66.5],PARAMETER["central_meridian",-58.99999999999997],PARAMETER["scale_factor",0.997],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+LM1IND1,PROJCS["LM1IND1",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",32.5],PARAMETER["central_meridian",68],PARAMETER["scale_factor",0.998786408],PARAMETER["false_easting",2743196.4],PARAMETER["false_northing",914398.8]]
+LM1IND4A,PROJCS["LM1IND4A",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",12],PARAMETER["central_meridian",80],PARAMETER["scale_factor",0.9987864],PARAMETER["false_easting",3000000],PARAMETER["false_northing",1000000]]
+LM1IRAN,PROJCS["LM1IRAN",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",32.5],PARAMETER["central_meridian",45],PARAMETER["scale_factor",0.998786408],PARAMETER["false_easting",1500000],PARAMETER["false_northing",1166200]]
+LM1IRAQ,PROJCS["LM1IRAQ",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",32.5],PARAMETER["central_meridian",45],PARAMETER["scale_factor",0.998786408],PARAMETER["false_easting",1500000],PARAMETER["false_northing",1166200]]
+LM1JAFT,PROJCS["LM1JAFT",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",18],PARAMETER["central_meridian",-76.99999999999994],PARAMETER["scale_factor",1],PARAMETER["false_easting",550000],PARAMETER["false_northing",400000],UNIT["unnamed",0.304799472]]
+LM1JAMTR,PROJCS["LM1JAMTR",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",18],PARAMETER["central_meridian",-76.99999999999994],PARAMETER["scale_factor",1],PARAMETER["false_easting",250000],PARAMETER["false_northing",150000]]
+LM1KANG,PROJCS["LM1KANG",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",110],PARAMETER["scale_factor",0.997],PARAMETER["false_easting",3900000],PARAMETER["false_northing",900000]]
+LM1LEVD,PROJCS["LM1LEVD",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",34.65],PARAMETER["central_meridian",37.35],PARAMETER["scale_factor",0.9996256],PARAMETER["false_easting",300000],PARAMETER["false_northing",300000]]
+LM1LEVG,PROJCS["LM1LEVG",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",34.65],PARAMETER["central_meridian",37.35],PARAMETER["scale_factor",0.9996256],PARAMETER["false_easting",300000],PARAMETER["false_northing",300000]]
+LM1LIBS,PROJCS["LM1LIBS",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",23],PARAMETER["central_meridian",18],PARAMETER["scale_factor",0.99907],PARAMETER["false_easting",800000],PARAMETER["false_northing",600000]]
+LM1LIBYA,PROJCS["LM1LIBYA",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",31],PARAMETER["central_meridian",18],PARAMETER["scale_factor",0.99938949],PARAMETER["false_easting",1000000],PARAMETER["false_northing",550000]]
+LM1MORND,PROJCS["LM1MORND",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",33.3],PARAMETER["central_meridian",-5.399999999999953],PARAMETER["scale_factor",0.999625769],PARAMETER["false_easting",500000],PARAMETER["false_northing",300000]]
+LM1MORSD,PROJCS["LM1MORSD",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",29.7],PARAMETER["central_meridian",-5.399999999999953],PARAMETER["scale_factor",0.999615596],PARAMETER["false_easting",500000],PARAMETER["false_northing",300000]]
+LM1NEP1,PROJCS["LM1NEP1",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",32.5],PARAMETER["central_meridian",68],PARAMETER["scale_factor",0.998786408],PARAMETER["false_easting",3000000],PARAMETER["false_northing",1000000],UNIT["unnamed",0.9143988]]
+LM1NEP2A,PROJCS["LM1NEP2A",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",26],PARAMETER["central_meridian",74],PARAMETER["scale_factor",1],PARAMETER["false_easting",3000000],PARAMETER["false_northing",1000000],UNIT["unnamed",0.9143988]]
+LM1NEP2B,PROJCS["LM1NEP2B",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",26],PARAMETER["central_meridian",90],PARAMETER["scale_factor",1],PARAMETER["false_easting",3000000],PARAMETER["false_northing",1000000],UNIT["unnamed",0.9143988]]
+LM1NPG,PROJCS["LM1NPG",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",-7.999999999999978],PARAMETER["central_meridian",150],PARAMETER["scale_factor",0.9997],PARAMETER["false_easting",300000],PARAMETER["false_northing",100000]]
+LM1PA2B,PROJCS["LM1PA2B",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",26],PARAMETER["central_meridian",90],PARAMETER["scale_factor",0.998786408],PARAMETER["false_easting",2743196.4],PARAMETER["false_northing",914398.8]]
+LM1PA2BY,PROJCS["LM1PA2BY",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",26],PARAMETER["central_meridian",90],PARAMETER["scale_factor",0.998786408],PARAMETER["false_easting",3000000],PARAMETER["false_northing",1000000],UNIT["unnamed",0.9143988]]
+LM1PAK1,PROJCS["LM1PAK1",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",32.5],PARAMETER["central_meridian",68],PARAMETER["scale_factor",0.998786408],PARAMETER["false_easting",2743196.4],PARAMETER["false_northing",914398.8]]
+LM1PAK1Y,PROJCS["LM1PAK1Y",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",32.5],PARAMETER["central_meridian",68],PARAMETER["scale_factor",0.998786408],PARAMETER["false_easting",3000000],PARAMETER["false_northing",1000000],UNIT["unnamed",0.9143988]]
+LM1PAK2,PROJCS["LM1PAK2",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",26],PARAMETER["central_meridian",74],PARAMETER["scale_factor",0.9987864077],PARAMETER["false_easting",2743196.4],PARAMETER["false_northing",914398.8]]
+LM1PAK2Y,PROJCS["LM1PAK2Y",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",26],PARAMETER["central_meridian",74],PARAMETER["scale_factor",0.998786408],PARAMETER["false_easting",3000000],PARAMETER["false_northing",1000000],UNIT["unnamed",0.9143988]]
+LM1PB1D,PROJCS["LM1PB1D",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",49.5],PARAMETER["central_meridian",2.3372083],PARAMETER["scale_factor",0.999877341],PARAMETER["false_easting",600000],PARAMETER["false_northing",1200000]]
+LM1PB1G,PROJCS["LM1PB1G",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",49.5],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.999877341],PARAMETER["false_easting",600000],PARAMETER["false_northing",1200000]]
+LM1POL,PROJCS["LM1POL",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",52],PARAMETER["central_meridian",19],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",500000]]
+LM1ROM,PROJCS["LM1ROM",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",44.7916667],PARAMETER["central_meridian",9.000000000000002],PARAMETER["scale_factor",1],PARAMETER["false_easting",2000000],PARAMETER["false_northing",2000000]]
+LM1SHAB,PROJCS["LM1SHAB",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",15.4],PARAMETER["central_meridian",47.0355556],PARAMETER["scale_factor",1],PARAMETER["false_easting",1704346.3],PARAMETER["false_northing",8718549.7]]
+LM1SPAIN,PROJCS["LM1SPAIN",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",40],PARAMETER["central_meridian",-3.687373899999992],PARAMETER["scale_factor",0.9988085293],PARAMETER["false_easting",600000],PARAMETER["false_northing",600000]]
+LM1SPANM,PROJCS["LM1SPANM",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",40],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.998808529],PARAMETER["false_easting",600000],PARAMETER["false_northing",600000]]
+LM1SYRSD,PROJCS["LM1SYRSD",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",33.3],PARAMETER["central_meridian",36],PARAMETER["scale_factor",0.999625769],PARAMETER["false_easting",500000],PARAMETER["false_northing",300000]]
+LM1SYRSG,PROJCS["LM1SYRSG",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",33.3],PARAMETER["central_meridian",36],PARAMETER["scale_factor",0.999625769],PARAMETER["false_easting",500000],PARAMETER["false_northing",300000]]
+LM1TUNND,PROJCS["LM1TUNND",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",36],PARAMETER["central_meridian",9.899999999999995],PARAMETER["scale_factor",0.999625544],PARAMETER["false_easting",500000],PARAMETER["false_northing",300000]]
+LM1TUNSD,PROJCS["LM1TUNSD",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",33.3],PARAMETER["central_meridian",9.899999999999995],PARAMETER["scale_factor",0.999625769],PARAMETER["false_easting",500000],PARAMETER["false_northing",300000]]
+LM1TURK,PROJCS["LM1TURK",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",40],PARAMETER["central_meridian",27.4],PARAMETER["scale_factor",1],PARAMETER["false_easting",2000000],PARAMETER["false_northing",2000000]]
+LM1USSR,PROJCS["LM1USSR",PROJECTION["Lambert_Conformal_Conic_1SP"],PARAMETER["latitude_of_origin",44],PARAMETER["central_meridian",38],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2AF113,PROJCS["LM2AF113",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",4],PARAMETER["standard_parallel_2",21],PARAMETER["latitude_of_origin",12.5482083],PARAMETER["central_meridian",9.000000000000002],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2AF114,PROJCS["LM2AF114",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",4],PARAMETER["standard_parallel_2",21],PARAMETER["latitude_of_origin",12.5482083],PARAMETER["central_meridian",27],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2AF72,PROJCS["LM2AF72",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",32],PARAMETER["standard_parallel_2",64],PARAMETER["latitude_of_origin",48.8942353],PARAMETER["central_meridian",-8.999999999999959],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2AF92,PROJCS["LM2AF92",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",4],PARAMETER["standard_parallel_2",21],PARAMETER["latitude_of_origin",12.5482083],PARAMETER["central_meridian",-8.999999999999959],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2AF93,PROJCS["LM2AF93",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",4],PARAMETER["standard_parallel_2",21],PARAMETER["latitude_of_origin",12.5482083],PARAMETER["central_meridian",9.000000000000002],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2AF94,PROJCS["LM2AF94",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",4],PARAMETER["standard_parallel_2",21],PARAMETER["latitude_of_origin",12.5482083],PARAMETER["central_meridian",27],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2AF95,PROJCS["LM2AF95",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",4],PARAMETER["standard_parallel_2",21],PARAMETER["latitude_of_origin",12.5482083],PARAMETER["central_meridian",45],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2AFE,PROJCS["LM2AFE",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40],PARAMETER["standard_parallel_2",-10],PARAMETER["latitude_of_origin",15.5397257],PARAMETER["central_meridian",100],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2AFSH,PROJCS["LM2AFSH",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-10],PARAMETER["standard_parallel_2",-29.99999999999995],PARAMETER["latitude_of_origin",-20.10980229999997],PARAMETER["central_meridian",30],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2ALG,PROJCS["LM2ALG",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",22],PARAMETER["standard_parallel_2",34],PARAMETER["latitude_of_origin",28.0571556],PARAMETER["central_meridian",0],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2ANT1,PROJCS["LM2ANT1",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-82.50000000599761],PARAMETER["standard_parallel_2",-81.49999997849238],PARAMETER["latitude_of_origin",-83.49999997620704],PARAMETER["central_meridian",-105.0000000232594],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2ARAB,PROJCS["LM2ARAB",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",33],PARAMETER["standard_parallel_2",17],PARAMETER["latitude_of_origin",25.0895279],PARAMETER["central_meridian",47],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2ARAB2,LOCAL_CS["LM2ARAB2 - (unsupported)"]
+LM2AREA1,PROJCS["LM2AREA1",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40],PARAMETER["standard_parallel_2",24],PARAMETER["latitude_of_origin",32.1197536],PARAMETER["central_meridian",117],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+LM2AREA2,PROJCS["LM2AREA2",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",24],PARAMETER["standard_parallel_2",4],PARAMETER["latitude_of_origin",14.0752451],PARAMETER["central_meridian",110],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+LM2AREA3,PROJCS["LM2AREA3",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",4],PARAMETER["standard_parallel_2",-15.99999999999996],PARAMETER["latitude_of_origin",-6.031738599999985],PARAMETER["central_meridian",115],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+LM2ARKNF,PROJCS["LM2ARKNF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36.2333333],PARAMETER["standard_parallel_2",34.9333333],PARAMETER["latitude_of_origin",35.5842285],PARAMETER["central_meridian",-91.99999999999997],PARAMETER["false_easting",2000000],PARAMETER["false_northing",455289.01],UNIT["US Foot",0.30480061]]
+LM2ARKNM,PROJCS["LM2ARKNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36.2333333],PARAMETER["standard_parallel_2",34.9333333],PARAMETER["latitude_of_origin",35.5842283],PARAMETER["central_meridian",-91.99999999999997],PARAMETER["false_easting",400000],PARAMETER["false_northing",138776.13]]
+LM2ARKSF,PROJCS["LM2ARKSF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",34.7666667],PARAMETER["standard_parallel_2",33.3],PARAMETER["latitude_of_origin",34.0344096],PARAMETER["central_meridian",-91.99999999999997],PARAMETER["false_easting",2000000],PARAMETER["false_northing",497685.06],UNIT["US Foot",0.30480061]]
+LM2ARKSM,PROJCS["LM2ARKSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",34.7666667],PARAMETER["standard_parallel_2",33.3],PARAMETER["latitude_of_origin",34.0344094],PARAMETER["central_meridian",-91.99999999999997],PARAMETER["false_easting",400000],PARAMETER["false_northing",551699.26]]
+LM2ASEAN,PROJCS["LM2ASEAN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",8],PARAMETER["standard_parallel_2",22],PARAMETER["latitude_of_origin",15.0393768],PARAMETER["central_meridian",110],PARAMETER["false_easting",5000000],PARAMETER["false_northing",5000000]]
+LM2ASIA,PROJCS["LM2ASIA",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",65],PARAMETER["standard_parallel_2",37],PARAMETER["latitude_of_origin",51.7530074],PARAMETER["central_meridian",100],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2AUST,PROJCS["LM2AUST",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-10],PARAMETER["standard_parallel_2",-39.99999999999994],PARAMETER["latitude_of_origin",-25.32172549999997],PARAMETER["central_meridian",140],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2AZERB,PROJCS["LM2AZERB",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39],PARAMETER["standard_parallel_2",41],PARAMETER["latitude_of_origin",40.0024798],PARAMETER["central_meridian",48],PARAMETER["false_easting",5000000],PARAMETER["false_northing",5000000]]
+LM2BAREN,PROJCS["LM2BAREN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",70],PARAMETER["standard_parallel_2",80],PARAMETER["latitude_of_origin",75.2834933],PARAMETER["central_meridian",20],PARAMETER["false_easting",2000000],PARAMETER["false_northing",1000000]]
+LM2BELG,PROJCS["LM2BELG",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",51.1666667],PARAMETER["standard_parallel_2",49.8333333],PARAMETER["latitude_of_origin",50.5015857],PARAMETER["central_meridian",4.3569397],PARAMETER["false_easting",150000],PARAMETER["false_northing",132159.2]]
+LM2BELG72,PROJCS["LM2BELG72",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",51.16666723333334],PARAMETER["standard_parallel_2",49.8333339],PARAMETER["latitude_of_origin",90],PARAMETER["central_meridian",4.367486666666665],PARAMETER["false_easting",150000.013],PARAMETER["false_northing",5400088.438]]
+LM2BKSEA,PROJCS["LM2BKSEA",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45],PARAMETER["standard_parallel_2",41],PARAMETER["latitude_of_origin",43.0110159],PARAMETER["central_meridian",35],PARAMETER["false_easting",2000000],PARAMETER["false_northing",1000000]]
+LM2BLACK,PROJCS["LM2BLACK",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37],PARAMETER["standard_parallel_2",65],PARAMETER["latitude_of_origin",51.7530393],PARAMETER["central_meridian",39],PARAMETER["false_easting",5000000],PARAMETER["false_northing",5000000]]
+LM2BLCKS,PROJCS["LM2BLCKS",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42],PARAMETER["standard_parallel_2",44],PARAMETER["latitude_of_origin",43.0027521],PARAMETER["central_meridian",36],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+LM2BLKSE,PROJCS["LM2BLKSE",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.3333333],PARAMETER["standard_parallel_2",46.6666667],PARAMETER["latitude_of_origin",44.020285],PARAMETER["central_meridian",35],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2BNOR,PROJCS["LM2BNOR",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",69],PARAMETER["standard_parallel_2",60],PARAMETER["latitude_of_origin",64.6256029],PARAMETER["central_meridian",11.5],PARAMETER["false_easting",0],PARAMETER["false_northing",13960.37]]
+LM2BOF,PROJCS["LM2BOF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36],PARAMETER["standard_parallel_2",52.8],PARAMETER["latitude_of_origin",44.6069094],PARAMETER["central_meridian",4.499999999999997],PARAMETER["false_easting",200000],PARAMETER["false_northing",0]]
+LM2BURMA,PROJCS["LM2BURMA",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",11.5],PARAMETER["standard_parallel_2",24],PARAMETER["latitude_of_origin",17.7874284],PARAMETER["central_meridian",96],PARAMETER["false_easting",2000000],PARAMETER["false_northing",3004117.66]]
+LM2CAL1F,PROJCS["LM2CAL1F",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.6666667],PARAMETER["standard_parallel_2",40],PARAMETER["latitude_of_origin",40.8351064],PARAMETER["central_meridian",-122],PARAMETER["false_easting",2000000],PARAMETER["false_northing",547077.92],UNIT["US Foot",0.30480061]]
+LM2CAL2F,PROJCS["LM2CAL2F",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.8333333],PARAMETER["standard_parallel_2",38.3333333],PARAMETER["latitude_of_origin",39.0846842],PARAMETER["central_meridian",-122],PARAMETER["false_easting",2000000],PARAMETER["false_northing",516417.19],UNIT["US Foot",0.30480061]]
+LM2CAL3F,PROJCS["LM2CAL3F",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.4333333],PARAMETER["standard_parallel_2",37.0666667],PARAMETER["latitude_of_origin",37.7510696],PARAMETER["central_meridian",-120.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",455516.16],UNIT["US Foot",0.30480061]]
+LM2CAL4F,PROJCS["LM2CAL4F",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.25],PARAMETER["standard_parallel_2",36],PARAMETER["latitude_of_origin",36.6258595],PARAMETER["central_meridian",-119],PARAMETER["false_easting",2000000],PARAMETER["false_northing",470526.84],UNIT["US Foot",0.30480061]]
+LM2CAL4M,PROJCS["LM2CAL4M",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.25],PARAMETER["standard_parallel_2",36],PARAMETER["latitude_of_origin",36.6258593],PARAMETER["central_meridian",-119],PARAMETER["false_easting",2000000],PARAMETER["false_northing",643420.49]]
+LM2CAL5F,PROJCS["LM2CAL5F",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35.4666667],PARAMETER["standard_parallel_2",34.0333333],PARAMETER["latitude_of_origin",34.7510555],PARAMETER["central_meridian",-118],PARAMETER["false_easting",2000000],PARAMETER["false_northing",455278.16],UNIT["US Foot",0.30480061]]
+LM2CAL5M,PROJCS["LM2CAL5M",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35.4666667],PARAMETER["standard_parallel_2",34.0333333],PARAMETER["latitude_of_origin",34.7510553],PARAMETER["central_meridian",-118],PARAMETER["false_easting",2000000],PARAMETER["false_northing",638773.03]]
+LM2CAL6F,PROJCS["LM2CAL6F",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",33.8833333],PARAMETER["standard_parallel_2",32.7833333],PARAMETER["latitude_of_origin",33.3339231],PARAMETER["central_meridian",-116.25],PARAMETER["false_easting",2000000],PARAMETER["false_northing",424696.28],UNIT["US Foot",0.30480061]]
+LM2CAL7F,PROJCS["LM2CAL7F",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",34.4166667],PARAMETER["standard_parallel_2",33.8666667],PARAMETER["latitude_of_origin",34.1418186],PARAMETER["central_meridian",-118.3333333],PARAMETER["false_easting",4186692.58],PARAMETER["false_northing",4164014.63],UNIT["US Foot",0.30480061]]
+LM2CAMER,PROJCS["LM2CAMER",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",25],PARAMETER["standard_parallel_2",5],PARAMETER["latitude_of_origin",15.0808559],PARAMETER["central_meridian",-89.99999999999994],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2CAN,PROJCS["LM2CAN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",77],PARAMETER["standard_parallel_2",49],PARAMETER["latitude_of_origin",64.2621819],PARAMETER["central_meridian",-99.99999999999996],PARAMETER["false_easting",2500000],PARAMETER["false_northing",2500000]]
+LM2CAN60,PROJCS["LM2CAN60",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",77],PARAMETER["standard_parallel_2",49],PARAMETER["latitude_of_origin",64.2621819],PARAMETER["central_meridian",-59.99999999999994],PARAMETER["false_easting",2500000],PARAMETER["false_northing",2500000]]
+LM2CAN78,PROJCS["LM2CAN78",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",64.26218189999996],PARAMETER["standard_parallel_2",77],PARAMETER["latitude_of_origin",49.00000000000002],PARAMETER["central_meridian",-77.99999999999997],PARAMETER["false_easting",0],PARAMETER["false_northing",8250000]]
+LM2CBRAZ,PROJCS["LM2CBRAZ",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-4.999999999999972],PARAMETER["standard_parallel_2",-18.99999999999996],PARAMETER["latitude_of_origin",-12.03125459999998],PARAMETER["central_meridian",-54.99999999999998],PARAMETER["false_easting",0],PARAMETER["false_northing",-3431.9]]
+LM2CEGYP,PROJCS["LM2CEGYP",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",29.25],PARAMETER["standard_parallel_2",31.75],PARAMETER["latitude_of_origin",30.5027312],PARAMETER["central_meridian",29.5],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2CFRAN,PROJCS["LM2CFRAN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45],PARAMETER["standard_parallel_2",49],PARAMETER["latitude_of_origin",47.012648],PARAMETER["central_meridian",0],PARAMETER["false_easting",0],PARAMETER["false_northing",1405.23]]
+LM2CHBON,PROJCS["LM2CHBON",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38],PARAMETER["standard_parallel_2",41],PARAMETER["latitude_of_origin",39.5054838],PARAMETER["central_meridian",121],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2CHECS,PROJCS["LM2CHECS",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",27],PARAMETER["standard_parallel_2",31],PARAMETER["latitude_of_origin",29.0065858],PARAMETER["central_meridian",123.5],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2CHEOF,PROJCS["LM2CHEOF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",25],PARAMETER["standard_parallel_2",37],PARAMETER["latitude_of_origin",31.0645115],PARAMETER["central_meridian",122.5],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2CHIN,PROJCS["LM2CHIN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35],PARAMETER["standard_parallel_2",20],PARAMETER["latitude_of_origin",27.5876688],PARAMETER["central_meridian",105],PARAMETER["false_easting",2500000],PARAMETER["false_northing",2509632.22]]
+LM2CHINA,PROJCS["LM2CHINA",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35],PARAMETER["standard_parallel_2",-4.999999999999972],PARAMETER["latitude_of_origin",15.3356381],PARAMETER["central_meridian",125],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2CHRUS,PROJCS["LM2CHRUS",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",74],PARAMETER["standard_parallel_2",42],PARAMETER["latitude_of_origin",59.3395467],PARAMETER["central_meridian",130],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2CHYS,PROJCS["LM2CHYS",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",33],PARAMETER["standard_parallel_2",36],PARAMETER["latitude_of_origin",34.5045819],PARAMETER["central_meridian",122],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2CM693,PROJCS["LM2CM693",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",77],PARAMETER["standard_parallel_2",49],PARAMETER["latitude_of_origin",64.2621819],PARAMETER["central_meridian",-69.49999999220437],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2COLCF,PROJCS["LM2COLCF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.75],PARAMETER["standard_parallel_2",38.45],PARAMETER["latitude_of_origin",39.1010152],PARAMETER["central_meridian",-105.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",461675.32],UNIT["US Foot",0.30480061]]
+LM2COLNF,PROJCS["LM2COLNF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.7166667],PARAMETER["standard_parallel_2",40.7833333],PARAMETER["latitude_of_origin",40.2507116],PARAMETER["central_meridian",-105.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",334169.85],UNIT["US Foot",0.30480061]]
+LM2COLSF,PROJCS["LM2COLSF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.4333333],PARAMETER["standard_parallel_2",37.2333333],PARAMETER["latitude_of_origin",37.8341604],PARAMETER["central_meridian",-105.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",425097.72],UNIT["US Foot",0.30480061]]
+LM2COLUM,PROJCS["LM2COLUM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",6.6666667],PARAMETER["standard_parallel_2",1.3333333],PARAMETER["latitude_of_origin",4.001486399999998],PARAMETER["central_meridian",-72.99999999999994],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2COMAN,PROJCS["LM2COMAN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",19],PARAMETER["standard_parallel_2",25],PARAMETER["latitude_of_origin",22.0108377],PARAMETER["central_meridian",56],PARAMETER["false_easting",0],PARAMETER["false_northing",1198.34]]
+LM2CONNF,PROJCS["LM2CONNF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.8666667],PARAMETER["standard_parallel_2",41.2],PARAMETER["latitude_of_origin",41.533624],PARAMETER["central_meridian",-72.74999999999994],PARAMETER["false_easting",600000],PARAMETER["false_northing",255156.68],UNIT["US Foot",0.30480061]]
+LM2CONNM,PROJCS["LM2CONNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.8666667],PARAMETER["standard_parallel_2",41.2],PARAMETER["latitude_of_origin",41.5336239],PARAMETER["central_meridian",-72.74999999999994],PARAMETER["false_easting",304800.61],PARAMETER["false_northing",230173.41]]
+LM2CSPN,PROJCS["LM2CSPN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38],PARAMETER["standard_parallel_2",42.5],PARAMETER["latitude_of_origin",40.2626746],PARAMETER["central_meridian",-2.999999999999949],PARAMETER["false_easting",0],PARAMETER["false_northing",29145.17]]
+LM2EE,PROJCS["LM2EE",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36],PARAMETER["standard_parallel_2",43],PARAMETER["latitude_of_origin",39.5299114],PARAMETER["central_meridian",66],PARAMETER["false_easting",2000000],PARAMETER["false_northing",1502329.69]]
+LM2EGYPT,PROJCS["LM2EGYPT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",23.6666667],PARAMETER["standard_parallel_2",30.6666667],PARAMETER["latitude_of_origin",27.1853739],PARAMETER["central_meridian",31],PARAMETER["false_easting",620681.47],PARAMETER["false_northing",559230.78]]
+LM2EUNDX,PROJCS["LM2EUNDX",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",52],PARAMETER["standard_parallel_2",36],PARAMETER["latitude_of_origin",44.1848032],PARAMETER["central_meridian",12],PARAMETER["false_easting",3000000],PARAMETER["false_northing",2000000]]
+LM2EURO,PROJCS["LM2EURO",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37],PARAMETER["standard_parallel_2",65],PARAMETER["latitude_of_origin",51.7530393],PARAMETER["central_meridian",28],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2FKLDS,PROJCS["LM2FKLDS",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-34.99999999999997],PARAMETER["standard_parallel_2",-54.99999999999998],PARAMETER["latitude_of_origin",-45.30145409999996],PARAMETER["central_meridian",-49.99999999999994],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2FLANF,PROJCS["LM2FLANF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.75],PARAMETER["standard_parallel_2",29.5833333],PARAMETER["latitude_of_origin",30.1672537],PARAMETER["central_meridian",-84.49999999999996],PARAMETER["false_easting",2000000],PARAMETER["false_northing",424481.59],UNIT["US Foot",0.30480061]]
+LM2FRANC,PROJCS["LM2FRANC",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.89891888888889],PARAMETER["standard_parallel_2",47.69601444444444],PARAMETER["latitude_of_origin",46.80000000000000],PARAMETER["central_meridian",2.337229169999754],PARAMETER["false_easting",600000],PARAMETER["false_northing",2200000]]
+LM2GULF,PROJCS["LM2GULF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",31],PARAMETER["standard_parallel_2",27],PARAMETER["latitude_of_origin",29.0065873],PARAMETER["central_meridian",-89.99999999999994],PARAMETER["false_easting",3500000],PARAMETER["false_northing",2551152.36],UNIT["US Foot",0.30480061]]
+LM2H6,PROJCS["LM2H6",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",25.3333333],PARAMETER["standard_parallel_2",30.6666667],PARAMETER["latitude_of_origin",28.0112409],PARAMETER["central_meridian",47],PARAMETER["false_easting",0],PARAMETER["false_northing",1244.39]]
+LM2IND76,PROJCS["LM2IND76",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",32],PARAMETER["standard_parallel_2",64],PARAMETER["latitude_of_origin",48.8939963],PARAMETER["central_meridian",63],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2IND77,PROJCS["LM2IND77",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",32],PARAMETER["standard_parallel_2",64],PARAMETER["latitude_of_origin",48.8939963],PARAMETER["central_meridian",81],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2IND78,PROJCS["LM2IND78",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",32],PARAMETER["standard_parallel_2",64],PARAMETER["latitude_of_origin",48.8939963],PARAMETER["central_meridian",98.99999999999997],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2IND96,PROJCS["LM2IND96",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",4],PARAMETER["standard_parallel_2",21],PARAMETER["latitude_of_origin",12.548179],PARAMETER["central_meridian",63],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2IND97,PROJCS["LM2IND97",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",4],PARAMETER["standard_parallel_2",21],PARAMETER["latitude_of_origin",12.548179],PARAMETER["central_meridian",81],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2IND98,PROJCS["LM2IND98",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",4],PARAMETER["standard_parallel_2",21],PARAMETER["latitude_of_origin",12.548179],PARAMETER["central_meridian",98.99999999999997],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2INDIA,PROJCS["LM2INDIA",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",25],PARAMETER["standard_parallel_2",1],PARAMETER["latitude_of_origin",13.1008489],PARAMETER["central_meridian",60],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2IOWNF,PROJCS["LM2IOWNF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",43.2666667],PARAMETER["standard_parallel_2",42.0666667],PARAMETER["latitude_of_origin",42.6676461],PARAMETER["central_meridian",-93.49999999999997],PARAMETER["false_easting",2000000],PARAMETER["false_northing",425511.73],UNIT["US Foot",0.30480061]]
+LM2IOWSF,PROJCS["LM2IOWSF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.7833333],PARAMETER["standard_parallel_2",40.6166667],PARAMETER["latitude_of_origin",41.2008799],PARAMETER["central_meridian",-93.49999999999997],PARAMETER["false_easting",2000000],PARAMETER["false_northing",437511.38],UNIT["US Foot",0.30480061]]
+LM2IRAN,PROJCS["LM2IRAN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",26],PARAMETER["standard_parallel_2",37],PARAMETER["latitude_of_origin",31.5552453],PARAMETER["central_meridian",54],PARAMETER["false_easting",2000000],PARAMETER["false_northing",2000000]]
+LM2JEBCO,PROJCS["LM2JEBCO",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",60],PARAMETER["standard_parallel_2",62.1666667],PARAMETER["latitude_of_origin",61.0895556],PARAMETER["central_meridian",97],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2JUNGB,PROJCS["LM2JUNGB",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44],PARAMETER["standard_parallel_2",48],PARAMETER["latitude_of_origin",46.0122162],PARAMETER["central_meridian",86.99999999999997],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2KALIM,PROJCS["LM2KALIM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",1.3333333],PARAMETER["standard_parallel_2",6.6666667],PARAMETER["latitude_of_origin",4.0014861],PARAMETER["central_meridian",117],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000164.14]]
+LM2KANNF,PROJCS["LM2KANNF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.7833333],PARAMETER["standard_parallel_2",38.7166667],PARAMETER["latitude_of_origin",39.2506871],PARAMETER["central_meridian",-97.99999999999997],PARAMETER["false_easting",2000000],PARAMETER["false_northing",334102.73],UNIT["US Foot",0.30480061]]
+LM2KANNM,PROJCS["LM2KANNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.7833333],PARAMETER["standard_parallel_2",38.7166667],PARAMETER["latitude_of_origin",39.2506869],PARAMETER["central_meridian",-97.99999999999997],PARAMETER["false_easting",400000],PARAMETER["false_northing",101836.74]]
+LM2KYNFT,PROJCS["LM2KYNFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.9666667],PARAMETER["standard_parallel_2",38.9666667],PARAMETER["latitude_of_origin",38.4672541],PARAMETER["central_meridian",-84.24999999999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",352230.83],UNIT["US Foot",0.30480061]]
+LM2KYNM,PROJCS["LM2KYNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.9666667],PARAMETER["standard_parallel_2",37.9666667],PARAMETER["latitude_of_origin",38.467254],PARAMETER["central_meridian",-84.24999999999994],PARAMETER["false_easting",500000],PARAMETER["false_northing",107362.48]]
+LM2KYSFT,PROJCS["LM2KYSFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36.7333333],PARAMETER["standard_parallel_2",37.9333333],PARAMETER["latitude_of_origin",37.3341458],PARAMETER["central_meridian",-85.74999999999996],PARAMETER["false_easting",2000000],PARAMETER["false_northing",364374.61],UNIT["US Foot",0.30480061]]
+LM2LANDS,PROJCS["LM2LANDS",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37],PARAMETER["standard_parallel_2",65],PARAMETER["latitude_of_origin",51.7530393],PARAMETER["central_meridian",30],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2LANFT,PROJCS["LM2LANFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",31.1666667],PARAMETER["standard_parallel_2",32.6666667],PARAMETER["latitude_of_origin",31.9177058],PARAMETER["central_meridian",-92.49999999999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",455060.71],UNIT["US Foot",0.30480061]]
+LM2LANM,PROJCS["LM2LANM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",32.6666667],PARAMETER["standard_parallel_2",31.1666667],PARAMETER["latitude_of_origin",31.9177056],PARAMETER["central_meridian",-92.49999999999994],PARAMETER["false_easting",1000000],PARAMETER["false_northing",157187.89]]
+LM2LAOFT,PROJCS["LM2LAOFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",26.1666667],PARAMETER["standard_parallel_2",27.8333333],PARAMETER["latitude_of_origin",27.0010515],PARAMETER["central_meridian",-91.33333329999992],PARAMETER["false_easting",2000000],PARAMETER["false_northing",485012.86],UNIT["US Foot",0.30480061]]
+LM2LASFT,PROJCS["LM2LASFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",29.3],PARAMETER["standard_parallel_2",30.7],PARAMETER["latitude_of_origin",30.0008397],PARAMETER["central_meridian",-91.33333329999992],PARAMETER["false_easting",2000000],PARAMETER["false_northing",485164],UNIT["US Foot",0.30480061]]
+LM2LASM,PROJCS["LM2LASM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.7],PARAMETER["standard_parallel_2",29.3],PARAMETER["latitude_of_origin",30.0008395],PARAMETER["central_meridian",-91.33333329999992],PARAMETER["false_easting",1000000],PARAMETER["false_northing",166359.47]]
+LM2MARYF,PROJCS["LM2MARYF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.3],PARAMETER["standard_parallel_2",39.45],PARAMETER["latitude_of_origin",38.8757881],PARAMETER["central_meridian",-76.99999999999994],PARAMETER["false_easting",800000],PARAMETER["false_northing",379638.15],UNIT["US Foot",0.30480061]]
+LM2MASIM,PROJCS["LM2MASIM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.4833333],PARAMETER["standard_parallel_2",41.2833333],PARAMETER["latitude_of_origin",41.3833594],PARAMETER["central_meridian",-70.49999999999996],PARAMETER["false_easting",500000],PARAMETER["false_northing",42575.23]]
+LM2MASMF,PROJCS["LM2MASMF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.7166667],PARAMETER["standard_parallel_2",42.6833333],PARAMETER["latitude_of_origin",42.2006254],PARAMETER["central_meridian",-71.49999999999996],PARAMETER["false_easting",600000],PARAMETER["false_northing",437502.72],UNIT["US Foot",0.30480061]]
+LM2ME,PROJCS["LM2ME",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36],PARAMETER["standard_parallel_2",20],PARAMETER["latitude_of_origin",28.102018],PARAMETER["central_meridian",50],PARAMETER["false_easting",3000000],PARAMETER["false_northing",2011195.53]]
+LM2ME1,PROJCS["LM2ME1",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35],PARAMETER["standard_parallel_2",15],PARAMETER["latitude_of_origin",25.1405776],PARAMETER["central_meridian",50],PARAMETER["false_easting",3000000],PARAMETER["false_northing",2011195.53]]
+LM2MEDIT,PROJCS["LM2MEDIT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.5],PARAMETER["standard_parallel_2",32.5],PARAMETER["latitude_of_origin",37.5569977],PARAMETER["central_meridian",15],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2MICCF,PROJCS["LM2MICCF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44.1833333],PARAMETER["standard_parallel_2",45.7],PARAMETER["latitude_of_origin",44.943359],PARAMETER["central_meridian",-84.33333329999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",593030.52],UNIT["US Foot",0.30480061]]
+LM2MICNF,PROJCS["LM2MICNF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.4833333],PARAMETER["standard_parallel_2",47.0833333],PARAMETER["latitude_of_origin",46.2853059],PARAMETER["central_meridian",-86.99999999999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",547682.99],UNIT["US Foot",0.30480061]]
+LM2MICSF,PROJCS["LM2MICSF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.1],PARAMETER["standard_parallel_2",43.6666667],PARAMETER["latitude_of_origin",42.8850154],PARAMETER["central_meridian",-84.33333329999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",504729.43],UNIT["US Foot",0.30480061]]
+LM2MINCF,PROJCS["LM2MINCF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.6166667],PARAMETER["standard_parallel_2",47.05],PARAMETER["latitude_of_origin",46.334919],PARAMETER["central_meridian",-94.24999999999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",486777.48],UNIT["US Foot",0.30480061]]
+LM2MINNF,PROJCS["LM2MINNF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",47.0333333],PARAMETER["standard_parallel_2",48.6333333],PARAMETER["latitude_of_origin",47.8354144],PARAMETER["central_meridian",-93.09999999999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",487078.53],UNIT["US Foot",0.30480061]]
+LM2MINSF,PROJCS["LM2MINSF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",43.7833333],PARAMETER["standard_parallel_2",45.2166667],PARAMETER["latitude_of_origin",44.5014886],PARAMETER["central_meridian",-93.99999999999993],PARAMETER["false_easting",2000000],PARAMETER["false_northing",547343.48],UNIT["US Foot",0.30480061]]
+LM2MOCFT,PROJCS["LM2MOCFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",46.45],PARAMETER["standard_parallel_2",47.8833333],PARAMETER["latitude_of_origin",47.1682986],PARAMETER["central_meridian",-109.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",486866.43],UNIT["US Foot",0.30480061]]
+LM2MON,PROJCS["LM2MON",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",48],PARAMETER["standard_parallel_2",44],PARAMETER["latitude_of_origin",46.0122162],PARAMETER["central_meridian",104],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2MONFT,PROJCS["LM2MONFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",47.85],PARAMETER["standard_parallel_2",48.7166667],PARAMETER["latitude_of_origin",48.2839534],PARAMETER["central_meridian",-109.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",468377.04],UNIT["US Foot",0.30480061]]
+LM2MOSFT,PROJCS["LM2MOSFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44.8666667],PARAMETER["standard_parallel_2",46.4],PARAMETER["latitude_of_origin",45.6351048],PARAMETER["central_meridian",-109.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",596169.89],UNIT["US Foot",0.30480061]]
+LM2MTCFT,PROJCS["LM2MTCFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",47.8833333],PARAMETER["standard_parallel_2",46.45],PARAMETER["latitude_of_origin",47.1682986],PARAMETER["central_meridian",-109.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",486866.43],UNIT["US Foot",0.30480061]]
+LM2MTM,PROJCS["LM2MTM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",49],PARAMETER["standard_parallel_2",45],PARAMETER["latitude_of_origin",47.0126454],PARAMETER["central_meridian",-109.5],PARAMETER["false_easting",600000],PARAMETER["false_northing",306982.36]]
+LM2MTNFT,PROJCS["LM2MTNFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",48.7166667],PARAMETER["standard_parallel_2",47.85],PARAMETER["latitude_of_origin",48.2839534],PARAMETER["central_meridian",-109.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",468377.04],UNIT["US Foot",0.30480061]]
+LM2MTSFT,PROJCS["LM2MTSFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",46.4],PARAMETER["standard_parallel_2",44.8666667],PARAMETER["latitude_of_origin",45.6351048],PARAMETER["central_meridian",-109.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",596169.89],UNIT["US Foot",0.30480061]]
+LM2NBRUN,PROJCS["LM2NBRUN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45],PARAMETER["standard_parallel_2",33],PARAMETER["latitude_of_origin",39.0867598],PARAMETER["central_meridian",-66.5],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2NBSEA,PROJCS["LM2NBSEA",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.3333333],PARAMETER["standard_parallel_2",46.6666667],PARAMETER["latitude_of_origin",44.0202838],PARAMETER["central_meridian",38],PARAMETER["false_easting",5000000],PARAMETER["false_northing",5000000]]
+LM2NCAFT,PROJCS["LM2NCAFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",34.3333333],PARAMETER["standard_parallel_2",36.1666667],PARAMETER["latitude_of_origin",35.2517589],PARAMETER["central_meridian",-78.99999999999997],PARAMETER["false_easting",2000000],PARAMETER["false_northing",546538.78],UNIT["US Foot",0.30480061]]
+LM2NDNFT,PROJCS["LM2NDNFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",47.4333333],PARAMETER["standard_parallel_2",48.7333333],PARAMETER["latitude_of_origin",48.084719],PARAMETER["central_meridian",-100.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",395667.3],UNIT["US Foot",0.30480061]]
+LM2NDNM,PROJCS["LM2NDNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",48.7333333],PARAMETER["standard_parallel_2",47.4333333],PARAMETER["latitude_of_origin",48.0847188],PARAMETER["central_meridian",-100.5],PARAMETER["false_easting",600000],PARAMETER["false_northing",120599.98]]
+LM2NDSFT,PROJCS["LM2NDSFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",46.1833333],PARAMETER["standard_parallel_2",47.4833333],PARAMETER["latitude_of_origin",46.8346604],PARAMETER["central_meridian",-100.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",425949.37],UNIT["US Foot",0.30480061]]
+LM2NEBM,PROJCS["LM2NEBM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",43],PARAMETER["standard_parallel_2",40],PARAMETER["latitude_of_origin",41.5058803],PARAMETER["central_meridian",-99.99999999999996],PARAMETER["false_easting",500000],PARAMETER["false_northing",185694.92]]
+LM2NEBNF,PROJCS["LM2NEBNF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",41.85],PARAMETER["standard_parallel_2",42.8166667],PARAMETER["latitude_of_origin",42.3339616],PARAMETER["central_meridian",-99.99999999999996],PARAMETER["false_easting",2000000],PARAMETER["false_northing",364631.59],UNIT["US Foot",0.30480061]]
+LM2NEBSF,PROJCS["LM2NEBSF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.2833333],PARAMETER["standard_parallel_2",41.7166667],PARAMETER["latitude_of_origin",41.001319],PARAMETER["central_meridian",-99.49999999999993],PARAMETER["false_easting",2000000],PARAMETER["false_northing",486220.86],UNIT["US Foot",0.30480061]]
+LM2NEPAL,PROJCS["LM2NEPAL",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30],PARAMETER["standard_parallel_2",27],PARAMETER["latitude_of_origin",28.5036278],PARAMETER["central_meridian",84],PARAMETER["false_easting",2000000],PARAMETER["false_northing",10000000]]
+LM2NFA,PROJCS["LM2NFA",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",43],PARAMETER["standard_parallel_2",49],PARAMETER["latitude_of_origin",46.0275217],PARAMETER["central_meridian",-45.99999999999996],PARAMETER["false_easting",500000],PARAMETER["false_northing",500000]]
+LM2NFB,PROJCS["LM2NFB",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",43],PARAMETER["standard_parallel_2",49],PARAMETER["latitude_of_origin",46.0275217],PARAMETER["central_meridian",-51],PARAMETER["false_easting",500000],PARAMETER["false_northing",500000]]
+LM2NHEM,PROJCS["LM2NHEM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",60],PARAMETER["standard_parallel_2",30],PARAMETER["latitude_of_origin",45.6982614],PARAMETER["central_meridian",20],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2NSEA,PROJCS["LM2NSEA",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",58.8333333],PARAMETER["standard_parallel_2",54.1666667],PARAMETER["latitude_of_origin",56.52417129999998],PARAMETER["central_meridian",0],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2NSW1,PROJCS["LM2NSW1",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-30],PARAMETER["standard_parallel_2",-36],PARAMETER["latitude_of_origin",-36],PARAMETER["central_meridian",147],PARAMETER["false_easting",700000],PARAMETER["false_northing",8200000]]
+LM2NSW2,PROJCS["LM2NSW2",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-32.66666666666664],PARAMETER["standard_parallel_2",-35.33333333333334],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",147],PARAMETER["false_easting",1000000],PARAMETER["false_northing",10000000]]
+LM2NZN,PROJCS["LM2NZN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-33.33333329999995],PARAMETER["standard_parallel_2",-38.66666669999996],PARAMETER["latitude_of_origin",-36.01531539999996],PARAMETER["central_meridian",175],PARAMETER["false_easting",0],PARAMETER["false_northing",-1697.5]]
+LM2NZS,PROJCS["LM2NZS",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-41.3333333],PARAMETER["standard_parallel_2",-46.66666669999994],PARAMETER["latitude_of_origin",-44.02028839999996],PARAMETER["central_meridian",171],PARAMETER["false_easting",0],PARAMETER["false_northing",-2251.83]]
+LM2OHINF,PROJCS["LM2OHINF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.4333333],PARAMETER["standard_parallel_2",41.7],PARAMETER["latitude_of_origin",41.0676991],PARAMETER["central_meridian",-82.49999999999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",510419.83],UNIT["US Foot",0.30480061]]
+LM2OHISF,PROJCS["LM2OHISF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.7333333],PARAMETER["standard_parallel_2",40.0333333],PARAMETER["latitude_of_origin",39.3843587],PARAMETER["central_meridian",-82.49999999999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",504195.18],UNIT["US Foot",0.30480061]]
+LM2OKLNF,PROJCS["LM2OKLNF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35.5666667],PARAMETER["standard_parallel_2",36.7666667],PARAMETER["latitude_of_origin",36.1674458],PARAMETER["central_meridian",-97.99999999999997],PARAMETER["false_easting",2000000],PARAMETER["false_northing",424960.05],UNIT["US Foot",0.30480061]]
+LM2OKLNM,PROJCS["LM2OKLNM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36.7666667],PARAMETER["standard_parallel_2",35.5666667],PARAMETER["latitude_of_origin",36.1674456],PARAMETER["central_meridian",-97.99999999999997],PARAMETER["false_easting",600000],PARAMETER["false_northing",129531.44]]
+LM2OKLSF,PROJCS["LM2OKLSF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",33.9333333],PARAMETER["standard_parallel_2",35.2333333],PARAMETER["latitude_of_origin",34.5841963],PARAMETER["central_meridian",-97.99999999999997],PARAMETER["false_easting",2000000],PARAMETER["false_northing",455201.85],UNIT["US Foot",0.30480061]]
+LM2OKLSM,PROJCS["LM2OKLSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",35.2333333],PARAMETER["standard_parallel_2",33.9333333],PARAMETER["latitude_of_origin",34.5841961],PARAMETER["central_meridian",-97.99999999999997],PARAMETER["false_easting",600000],PARAMETER["false_northing",138749.82]]
+LM2ONH25,PROJCS["LM2ONH25",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",25.3333333],PARAMETER["standard_parallel_2",30.6666667],PARAMETER["latitude_of_origin",28.0112409],PARAMETER["central_meridian",-78.99999999999997],PARAMETER["false_easting",2000000],PARAMETER["false_northing",2889214.55]]
+LM2ORENF,PROJCS["LM2ORENF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44.3333333],PARAMETER["standard_parallel_2",46],PARAMETER["latitude_of_origin",45.1687263],PARAMETER["central_meridian",-120.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",547601.51],UNIT["US Foot",0.30480061]]
+LM2ORESF,PROJCS["LM2ORESF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.3333333],PARAMETER["standard_parallel_2",44],PARAMETER["latitude_of_origin",43.1685891],PARAMETER["central_meridian",-120.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",547357.21],UNIT["US Foot",0.30480061]]
+LM2OSTER,PROJCS["LM2OSTER",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",46],PARAMETER["standard_parallel_2",49],PARAMETER["latitude_of_origin",47.5072345],PARAMETER["central_meridian",14],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2PAK,PROJCS["LM2PAK",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",27.49999998290061],PARAMETER["standard_parallel_2",26.00000002758644],PARAMETER["latitude_of_origin",28.99999999551055],PARAMETER["central_meridian",63.00000001395566],PARAMETER["false_easting",500000],PARAMETER["false_northing",1000000]]
+LM2PANFT,PROJCS["LM2PANFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.8833333],PARAMETER["standard_parallel_2",41.95],PARAMETER["latitude_of_origin",41.4174077],PARAMETER["central_meridian",-77.74999999999997],PARAMETER["false_easting",2000000],PARAMETER["false_northing",455699.08],UNIT["US Foot",0.30480061]]
+LM2PASFT,PROJCS["LM2PASFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.9333333],PARAMETER["standard_parallel_2",40.8],PARAMETER["latitude_of_origin",40.3671383],PARAMETER["central_meridian",-77.74999999999997],PARAMETER["false_easting",2000000],PARAMETER["false_northing",376593.83],UNIT["US Foot",0.30480061]]
+LM2PRMB,PROJCS["LM2PRMB",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",24],PARAMETER["standard_parallel_2",18],PARAMETER["latitude_of_origin",21.0102961],PARAMETER["central_meridian",114],PARAMETER["false_easting",500000],PARAMETER["false_northing",501138.4]]
+LM2PRV1F,PROJCS["LM2PRV1F",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",18.4333333],PARAMETER["standard_parallel_2",18.0333333],PARAMETER["latitude_of_origin",18.2333726],PARAMETER["central_meridian",-66.43333329999994],PARAMETER["false_easting",500000],PARAMETER["false_northing",145256.89],UNIT["US Foot",0.30480061]]
+LM2PRV2F,PROJCS["LM2PRV2F",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",18.4333333],PARAMETER["standard_parallel_2",18.0333333],PARAMETER["latitude_of_origin",18.2333726],PARAMETER["central_meridian",-66.43333329999994],PARAMETER["false_easting",500000],PARAMETER["false_northing",245256.89],UNIT["US Foot",0.30480061]]
+LM2RUSS,PROJCS["LM2RUSS",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",66],PARAMETER["standard_parallel_2",44],PARAMETER["latitude_of_origin",55.5285841],PARAMETER["central_meridian",96],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2RUSS1,PROJCS["LM2RUSS1",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",54.6666667],PARAMETER["standard_parallel_2",49.3333333],PARAMETER["latitude_of_origin",52.0268006],PARAMETER["central_meridian",116],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2RUSS2,PROJCS["LM2RUSS2",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",54.6666667],PARAMETER["standard_parallel_2",49.3333333],PARAMETER["latitude_of_origin",52.0268006],PARAMETER["central_meridian",96],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2RUSS3,PROJCS["LM2RUSS3",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",46.6666667],PARAMETER["standard_parallel_2",41.3333333],PARAMETER["latitude_of_origin",44.0202838],PARAMETER["central_meridian",107],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2RUSS4,PROJCS["LM2RUSS4",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",46.6666667],PARAMETER["standard_parallel_2",41.3333333],PARAMETER["latitude_of_origin",44.0202838],PARAMETER["central_meridian",123],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2RUSS5,PROJCS["LM2RUSS5",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",46.6666667],PARAMETER["standard_parallel_2",41.3333333],PARAMETER["latitude_of_origin",44.0202838],PARAMETER["central_meridian",91],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2RUSS6,PROJCS["LM2RUSS6",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",64],PARAMETER["standard_parallel_2",32],PARAMETER["latitude_of_origin",48.8940765],PARAMETER["central_meridian",56],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+LM2SCHIN,PROJCS["LM2SCHIN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",18],PARAMETER["standard_parallel_2",24],PARAMETER["latitude_of_origin",21.0102961],PARAMETER["central_meridian",114],PARAMETER["false_easting",500000],PARAMETER["false_northing",501138.4]]
+LM2SCHNS,PROJCS["LM2SCHNS",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",10],PARAMETER["standard_parallel_2",18],PARAMETER["latitude_of_origin",14.0119194],PARAMETER["central_meridian",115],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2SCNFT,PROJCS["LM2SCNFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",33.7666667],PARAMETER["standard_parallel_2",34.9666667],PARAMETER["latitude_of_origin",34.3673961],PARAMETER["central_meridian",-81],PARAMETER["false_easting",2000000],PARAMETER["false_northing",497599.34],UNIT["US Foot",0.30480061]]
+LM2SCSFT,PROJCS["LM2SCSFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",32.3333333],PARAMETER["standard_parallel_2",33.6666667],PARAMETER["latitude_of_origin",33.0008557],PARAMETER["central_meridian",-81],PARAMETER["false_easting",2000000],PARAMETER["false_northing",424761.1],UNIT["US Foot",0.30480061]]
+LM2SDNFT,PROJCS["LM2SDNFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44.4166667],PARAMETER["standard_parallel_2",45.6833333],PARAMETER["latitude_of_origin",45.0511848],PARAMETER["central_meridian",-99.99999999999996],PARAMETER["false_easting",2000000],PARAMETER["false_northing",443993.06],UNIT["US Foot",0.30480061]]
+LM2SDSFT,PROJCS["LM2SDSFT",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.8333333],PARAMETER["standard_parallel_2",44.4],PARAMETER["latitude_of_origin",43.6183918],PARAMETER["central_meridian",-100.3333333],PARAMETER["false_easting",2000000],PARAMETER["false_northing",468361.68],UNIT["US Foot",0.30480061]]
+LM2SEYCH,PROJCS["LM2SEYCH",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-1.999999999999966],PARAMETER["standard_parallel_2",-10.99999999999998],PARAMETER["latitude_of_origin",-6.50690739999996],PARAMETER["central_meridian",51],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+LM2SHAW,PROJCS["LM2SHAW",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",7],PARAMETER["standard_parallel_2",10],PARAMETER["latitude_of_origin",8.501003999999996],PARAMETER["central_meridian",106],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+LM2SYRIA,PROJCS["LM2SYRIA",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",17],PARAMETER["standard_parallel_2",33],PARAMETER["latitude_of_origin",25.0895049],PARAMETER["central_meridian",48],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2TAIW,PROJCS["LM2TAIW",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",22.25],PARAMETER["standard_parallel_2",24.75],PARAMETER["latitude_of_origin",23.5020212],PARAMETER["central_meridian",120.5],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2TARIM,PROJCS["LM2TARIM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36.5],PARAMETER["standard_parallel_2",41.6666667],PARAMETER["latitude_of_origin",39.0993748],PARAMETER["central_meridian",84],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2TIBET,PROJCS["LM2TIBET",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",31.99999999781213],PARAMETER["standard_parallel_2",24.99999982819388],PARAMETER["latitude_of_origin",45.00000000014619],PARAMETER["central_meridian",84.999999988817],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2TIMAN,PROJCS["LM2TIMAN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",65.6666667],PARAMETER["standard_parallel_2",68.3333333],PARAMETER["latitude_of_origin",67.0122484],PARAMETER["central_meridian",56.5],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+LM2TURK,PROJCS["LM2TURK",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.6666667],PARAMETER["standard_parallel_2",43.3333333],PARAMETER["latitude_of_origin",42.0047273],PARAMETER["central_meridian",28.9809583],PARAMETER["false_easting",0],PARAMETER["false_northing",524.95]]
+LM2TURKG,PROJCS["LM2TURKG",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40],PARAMETER["standard_parallel_2",38],PARAMETER["latitude_of_origin",39.0023944],PARAMETER["central_meridian",35],PARAMETER["false_easting",1000000],PARAMETER["false_northing",0]]
+LM2TURKY,PROJCS["LM2TURKY",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42],PARAMETER["standard_parallel_2",36],PARAMETER["latitude_of_origin",39.0215807],PARAMETER["central_meridian",35],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1003455.28]]
+LM2TXCF,PROJCS["LM2TXCF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.1166667],PARAMETER["standard_parallel_2",31.8833333],PARAMETER["latitude_of_origin",31.0013911],PARAMETER["central_meridian",-100.3333333],PARAMETER["false_easting",2000000],PARAMETER["false_northing",485417.56],UNIT["US Foot",0.30480061]]
+LM2TXCM,PROJCS["LM2TXCM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",31.8833333],PARAMETER["standard_parallel_2",30.1166667],PARAMETER["latitude_of_origin",31.0013908],PARAMETER["central_meridian",-100.3333333],PARAMETER["false_easting",700000],PARAMETER["false_northing",3147960.78]]
+LM2TXNCF,PROJCS["LM2TXNCF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",32.1333333],PARAMETER["standard_parallel_2",33.9666667],PARAMETER["latitude_of_origin",33.0516209],PARAMETER["central_meridian",-97.49999999999996],PARAMETER["false_easting",2000000],PARAMETER["false_northing",503845.05],UNIT["US Foot",0.30480061]]
+LM2TXNCM,PROJCS["LM2TXNCM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",33.9666667],PARAMETER["standard_parallel_2",32.1333333],PARAMETER["latitude_of_origin",33.0516206],PARAMETER["central_meridian",-98.49999999999994],PARAMETER["false_easting",600000],PARAMETER["false_northing",2153577.14]]
+LM2TXSCF,PROJCS["LM2TXSCF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",28.3833333],PARAMETER["standard_parallel_2",30.2833333],PARAMETER["latitude_of_origin",29.3348392],PARAMETER["central_meridian",-98.99999999999997],PARAMETER["false_easting",2000000],PARAMETER["false_northing",545930.94],UNIT["US Foot",0.30480061]]
+LM2TXSCM,PROJCS["LM2TXSCM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.2833333],PARAMETER["standard_parallel_2",28.3833333],PARAMETER["latitude_of_origin",29.3348388],PARAMETER["central_meridian",-98.99999999999997],PARAMETER["false_easting",600000],PARAMETER["false_northing",4166406.43]]
+LM2TXSF,PROJCS["LM2TXSF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",26.1666667],PARAMETER["standard_parallel_2",27.8333333],PARAMETER["latitude_of_origin",27.0010515],PARAMETER["central_meridian",-98.49999999999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",485012.86],UNIT["US Foot",0.30480061]]
+LM2TXSM,PROJCS["LM2TXSM",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",27.8333333],PARAMETER["standard_parallel_2",26.1666667],PARAMETER["latitude_of_origin",27.0010513],PARAMETER["central_meridian",-98.49999999999994],PARAMETER["false_easting",300000],PARAMETER["false_northing",5147838.39]]
+LM2UKN,PROJCS["LM2UKN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",52],PARAMETER["standard_parallel_2",57],PARAMETER["latitude_of_origin",54.5257722],PARAMETER["central_meridian",0],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2USSR,PROJCS["LM2USSR",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",74],PARAMETER["standard_parallel_2",42],PARAMETER["latitude_of_origin",59.3395467],PARAMETER["central_meridian",105],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+LM2USSR1,PROJCS["LM2USSR1",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.5],PARAMETER["standard_parallel_2",51.5],PARAMETER["latitude_of_origin",48.5300074],PARAMETER["central_meridian",52],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+LM2USSR2,PROJCS["LM2USSR2",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36],PARAMETER["standard_parallel_2",44],PARAMETER["latitude_of_origin",40.039788],PARAMETER["central_meridian",63],PARAMETER["false_easting",2000000],PARAMETER["false_northing",2000000]]
+LM2UTHCF,PROJCS["LM2UTHCF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39.0166667],PARAMETER["standard_parallel_2",40.65],PARAMETER["latitude_of_origin",39.8349778],PARAMETER["central_meridian",-111.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",546937.88],UNIT["US Foot",0.30480061]]
+LM2UTHNF,PROJCS["LM2UTHNF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",40.7166667],PARAMETER["standard_parallel_2",41.7833333],PARAMETER["latitude_of_origin",41.2507368],PARAMETER["central_meridian",-111.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",334237.62],UNIT["US Foot",0.30480061]]
+LM2UTHSF,PROJCS["LM2UTHSF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.2166667],PARAMETER["standard_parallel_2",38.35],PARAMETER["latitude_of_origin",37.7840698],PARAMETER["central_meridian",-111.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",406857.45],UNIT["US Foot",0.30480061]]
+LM2VEN,PROJCS["LM2VEN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",7],PARAMETER["standard_parallel_2",13],PARAMETER["latitude_of_origin",10.0047415],PARAMETER["central_meridian",-65.99999999999996],PARAMETER["false_easting",1111539.44],PARAMETER["false_northing",536590.41]]
+LM2VENCN,PROJCS["LM2VENCN",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",3],PARAMETER["standard_parallel_2",9.000000000000002],PARAMETER["latitude_of_origin",6.002827699999997],PARAMETER["central_meridian",-65.99999999999996],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1664090.82]]
+LM2VENPC,PROJCS["LM2VENPC",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",12],PARAMETER["standard_parallel_2",6],PARAMETER["latitude_of_origin",9.0042597],PARAMETER["central_meridian",-69.99999999999994],PARAMETER["false_easting",1444072.44],PARAMETER["false_northing",1440169.11]]
+LM2VIET,PROJCS["LM2VIET",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",9.000000000000002],PARAMETER["standard_parallel_2",7],PARAMETER["latitude_of_origin",8.000419599999997],PARAMETER["central_meridian",108],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2VIRNF,PROJCS["LM2VIRNF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",38.0333333],PARAMETER["standard_parallel_2",39.2],PARAMETER["latitude_of_origin",38.6174705],PARAMETER["central_meridian",-78.49999999999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",346244.54],UNIT["US Foot",0.30480061]]
+LM2VIRSF,PROJCS["LM2VIRSF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",36.7666667],PARAMETER["standard_parallel_2",37.9666667],PARAMETER["latitude_of_origin",37.3674801],PARAMETER["central_meridian",-78.49999999999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",376513.28],UNIT["US Foot",0.30480061]]
+LM2WAUST,PROJCS["LM2WAUST",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-32.65859432245691],PARAMETER["standard_parallel_2",-35.35149595957179],PARAMETER["latitude_of_origin",-25.32172549999997],PARAMETER["central_meridian",120.8940947726037],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+LM2WISCF,PROJCS["LM2WISCF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44.25],PARAMETER["standard_parallel_2",45.5],PARAMETER["latitude_of_origin",44.8761469],PARAMETER["central_meridian",-89.99999999999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",380166.49],UNIT["US Foot",0.30480061]]
+LM2WISNF,PROJCS["LM2WISNF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.5666667],PARAMETER["standard_parallel_2",46.7666667],PARAMETER["latitude_of_origin",46.1677717],PARAMETER["central_meridian",-89.99999999999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",365046.6],UNIT["US Foot",0.30480061]]
+LM2WISSF,PROJCS["LM2WISSF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.7333333],PARAMETER["standard_parallel_2",44.0666667],PARAMETER["latitude_of_origin",43.4012402],PARAMETER["central_meridian",-89.99999999999994],PARAMETER["false_easting",2000000],PARAMETER["false_northing",510702.31],UNIT["US Foot",0.30480061]]
+LM2WSHNF,PROJCS["LM2WSHNF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",47.5],PARAMETER["standard_parallel_2",48.7333333],PARAMETER["latitude_of_origin",48.1179153],PARAMETER["central_meridian",-120.8333333],PARAMETER["false_easting",2000000],PARAMETER["false_northing",407781.44],UNIT["US Foot",0.30480061]]
+LM2WSHSF,PROJCS["LM2WSHSF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.8333333],PARAMETER["standard_parallel_2",47.3333333],PARAMETER["latitude_of_origin",46.585085],PARAMETER["central_meridian",-120.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",456465.91],UNIT["US Foot",0.30480061]]
+LM2WVANF,PROJCS["LM2WVANF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",39],PARAMETER["standard_parallel_2",40.25],PARAMETER["latitude_of_origin",39.6259561],PARAMETER["central_meridian",-79.5],PARAMETER["false_easting",2000000],PARAMETER["false_northing",410097.76],UNIT["US Foot",0.30480061]]
+LM2WVASF,PROJCS["LM2WVASF",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",37.4833333],PARAMETER["standard_parallel_2",38.8833333],PARAMETER["latitude_of_origin",38.1844732],PARAMETER["central_meridian",-81],PARAMETER["false_easting",2000000],PARAMETER["false_northing",431297.77],UNIT["US Foot",0.30480061]]
+LM2_WA_WGS84,PROJCS["LM2_WA_WGS84",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",-83.49999997620704],PARAMETER["standard_parallel_2",-81.49999997849238],PARAMETER["latitude_of_origin",-82.50000000599761],PARAMETER["central_meridian",-105.0000000232594],PARAMETER["false_easting",343122.675],PARAMETER["false_northing",203866.49]]
+LMFRAN93,PROJCS["LMFRAN93",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",44],PARAMETER["standard_parallel_2",49],PARAMETER["latitude_of_origin",46.5],PARAMETER["central_meridian",3],PARAMETER["false_easting",700000],PARAMETER["false_northing",6600000],UNIT["unnamed",1]]
+LOCAL,LOCAL_CS["LOCAL - (unsupported)"]
+LOE7330,PROJCS["LOE7330",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.99999999900352],PARAMETER["standard_parallel_2",48.99999999557553],PARAMETER["latitude_of_origin",47.50000000000002],PARAMETER["central_meridian",13],PARAMETER["false_easting",300000],PARAMETER["false_northing",200000]]
+LOE7332,PROJCS["LOE7332",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.99999999900352],PARAMETER["standard_parallel_2",48.99999999557553],PARAMETER["latitude_of_origin",47.50000000000002],PARAMETER["central_meridian",13.33333333333331],PARAMETER["false_easting",400000],PARAMETER["false_northing",400000]]
+LOE8032,PROJCS["LOE8032",PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",45.99999999900352],PARAMETER["standard_parallel_2",48.99999999557553],PARAMETER["latitude_of_origin",47.99999999999998],PARAMETER["central_meridian",13.33333333333331],PARAMETER["false_easting",400000],PARAMETER["false_northing",400000]]
+MALAYA,LOCAL_CS["MALAYA - (unsupported)"]
+MALRSOE,LOCAL_CS["MALRSOE - (unsupported)"]
+MALRSOW,LOCAL_CS["MALRSOW - (unsupported)"]
+MGA48,PROJCS["MGA48",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",105],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+MGA49,PROJCS["MGA49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",111],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+MGA50,PROJCS["MGA50",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+MGA51,PROJCS["MGA51",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",123],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+MGA52,PROJCS["MGA52",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",129],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+MGA53,PROJCS["MGA53",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",135],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+MGA54,PROJCS["MGA54",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",141],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+MGA55,PROJCS["MGA55",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",147],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+MGA56,PROJCS["MGA56",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",153],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+MGA57,PROJCS["MGA57",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",159],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+MGA58,PROJCS["MGA58",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",165],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+MICH_GEOREF,LOCAL_CS["MICH_GEOREF - (unsupported)"]
+MPCALIF,LOCAL_CS["MPCALIF - (unsupported)"]
+MR1630N,PROJCS["MR1630N",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",16.5],PARAMETER["central_meridian",39.6666667],PARAMETER["scale_factor",0.9590787188081463],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+MR21N,PROJCS["MR21N",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",21],PARAMETER["central_meridian",40],PARAMETER["scale_factor",0.933982001373389],PARAMETER["false_easting",100000],PARAMETER["false_northing",800000]]
+MR36N,PROJCS["MR36N",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",36],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.8099581558643186],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+MR38N,PROJCS["MR38N",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",38],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.7890166629883195],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+MR43N,PROJCS["MR43N",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",43],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.7324998104788255],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+MR65N,PROJCS["MR65N",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",65],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.4237899569845271],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+MR7S,PROJCS["MR7S",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",-6.999999999999995],PARAMETER["central_meridian",115],PARAMETER["scale_factor",0.9925953501989099],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+MRAFRICA,PROJCS["MRAFRICA",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+MRANS,PROJCS["MRANS",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+MRBLACKS,PROJCS["MRBLACKS",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",34],PARAMETER["scale_factor",1],PARAMETER["false_easting",2000000],PARAMETER["false_northing",0]]
+MRCAMER,PROJCS["MRCAMER",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+MRCARIB,PROJCS["MRCARIB",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",19],PARAMETER["central_meridian",-79.99999999999994],PARAMETER["scale_factor",0.9458579352767946],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+MRCONGO,PROJCS["MRCONGO",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",-4.999999999999972],PARAMETER["central_meridian",11],PARAMETER["scale_factor",0.9962204409159013],PARAMETER["false_easting",200000],PARAMETER["false_northing",1051440.8]]
+MREV,PROJCS["MREV",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",90],PARAMETER["scale_factor",1],PARAMETER["false_easting",20000000],PARAMETER["false_northing",0]]
+MRGOM,PROJCS["MRGOM",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",25],PARAMETER["central_meridian",-89.99999999999994],PARAMETER["scale_factor",0.9068561129815975],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+MRINDO,PROJCS["MRINDO",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0.5386389000000007],PARAMETER["central_meridian",101.4418306],PARAMETER["scale_factor",0.9999561056335834],PARAMETER["false_easting",400000],PARAMETER["false_northing",100000]]
+MRINDON,PROJCS["MRINDON",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0.539165300000001],PARAMETER["central_meridian",101.4417703],PARAMETER["scale_factor",0.9999560198000614],PARAMETER["false_easting",400000],PARAMETER["false_northing",100000]]
+MRLCC,PROJCS["MRLCC",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",365392607.481532],PARAMETER["central_meridian",109.9999999888982],PARAMETER["scale_factor",0.997],PARAMETER["false_easting",3900000],PARAMETER["false_northing",900000]]
+MRMALAY,PROJCS["MRMALAY",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",4.85],PARAMETER["central_meridian",109],PARAMETER["scale_factor",0.9964432276572127],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+MRNAM,PROJCS["MRNAM",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",15],PARAMETER["central_meridian",108],PARAMETER["scale_factor",0.9661424762736215],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+MRNEIEZ,PROJCS["MRNEIEZ",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",110],PARAMETER["scale_factor",0.997],PARAMETER["false_easting",3900000],PARAMETER["false_northing",900000]]
+MRNEWFND,PROJCS["MRNEWFND",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",46],PARAMETER["central_meridian",-45.5],PARAMETER["scale_factor",0.6958780751155514],PARAMETER["false_easting",500000],PARAMETER["false_northing",1000000]]
+MRNSEA,PROJCS["MRNSEA",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",57.8129472],PARAMETER["central_meridian",-1.999999999999966],PARAMETER["scale_factor",0.5339721600128644],PARAMETER["false_easting",2000000],PARAMETER["false_northing",0]]
+MRNWL10D,PROJCS["MRNWL10D",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["scale_factor",1],PARAMETER["false_easting",20000000],PARAMETER["false_northing",10000000]]
+MRVENZ,PROJCS["MRVENZ",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-64.99999999999997],PARAMETER["scale_factor",1],PARAMETER["false_easting",3500000],PARAMETER["false_northing",0]]
+MRVIET,PROJCS["MRVIET",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",18],PARAMETER["central_meridian",106],PARAMETER["scale_factor",0.9513606030407835],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]]
+MRWORLD,PROJCS["MRWORLD",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["scale_factor",1],PARAMETER["false_easting",20000000],PARAMETER["false_northing",0]]
+MRWORLD1,PROJCS["MRWORLD1",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["scale_factor",1],PARAMETER["false_easting",10000000],PARAMETER["false_northing",0]]
+MRWORLD2,PROJCS["MRWORLD2",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+MRWSOUTH,PROJCS["MRWSOUTH",PROJECTION["Mercator_1SP"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["scale_factor",1],PARAMETER["false_easting",20000000],PARAMETER["false_northing",20000000]] +MSAFRICA,LOCAL_CS["MSAFRICA - (unsupported)"] +MW180E,LOCAL_CS["MW180E - (unsupported)"] +MW90EAST,LOCAL_CS["MW90EAST - (unsupported)"] +MW90WEST,LOCAL_CS["MW90WEST - (unsupported)"] +MWSPHERE,LOCAL_CS["MWSPHERE - (unsupported)"] +NTM51,PROJCS["NTM51",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",123],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +NTM52,PROJCS["NTM52",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",129],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +NTM53,PROJCS["NTM53",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",135],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +NTM54,PROJCS["NTM54",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",141],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +NTM55,PROJCS["NTM55",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",147],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +NTM56,PROJCS["NTM56",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",153],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +NUTM01,PROJCS["NUTM01",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-177],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM01_FT,PROJCS["NUTM01_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-177.0000000000003],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM02,PROJCS["NUTM02",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-171],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM02_FT,PROJCS["NUTM02_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-171.0000000000003],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM03,PROJCS["NUTM03",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-165],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM03_FT,PROJCS["NUTM03_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-165.0000000000003],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] 
+NUTM04,PROJCS["NUTM04",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-159],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM04_FT,PROJCS["NUTM04_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-159.0000000000003],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM05,PROJCS["NUTM05",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-153],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM05_FT,PROJCS["NUTM05_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-153.0000000000003],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM06,PROJCS["NUTM06",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-147],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM06_FT,PROJCS["NUTM06_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-147.0000000000003],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM07,PROJCS["NUTM07",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-141],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM07_FT,PROJCS["NUTM07_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-141.0000000000003],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM08,PROJCS["NUTM08",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-135],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM08_FT,PROJCS["NUTM08_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-135.0000000000003],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM09,PROJCS["NUTM09",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-129],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM09_FT,PROJCS["NUTM09_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-129.0000000000003],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM10,PROJCS["NUTM10",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-123],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] 
+NUTM10_FT,PROJCS["NUTM10_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-123.0000000000002],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM11,PROJCS["NUTM11",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM11_FT,PROJCS["NUTM11_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117.0000000000002],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM12,PROJCS["NUTM12",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-111],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM12_FT,PROJCS["NUTM12_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-111.0000000000002],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM13,PROJCS["NUTM13",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-105],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM13_FT,PROJCS["NUTM13_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-105.0000000000002],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM14,PROJCS["NUTM14",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-99],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM14_FT,PROJCS["NUTM14_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-99.0000000000002],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM15,PROJCS["NUTM15",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM15_FT,PROJCS["NUTM15_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-92.99999999999996],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM16,PROJCS["NUTM16",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-87],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM16_FT,PROJCS["NUTM16_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-86.99999999999994],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] 
+NUTM17,PROJCS["NUTM17",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM17_FT,PROJCS["NUTM17_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM18,PROJCS["NUTM18",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-75],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM18_FT,PROJCS["NUTM18_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-75.00000000000016],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM19,PROJCS["NUTM19",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-69],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM19_FT,PROJCS["NUTM19_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-69.00000000000013],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM20,PROJCS["NUTM20",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-63],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM20_FT,PROJCS["NUTM20_FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-63.00000000000013],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["unnamed",0.30480060966]] +NUTM21,PROJCS["NUTM21",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-57],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM22,PROJCS["NUTM22",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-51],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM23,PROJCS["NUTM23",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-45],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM24,PROJCS["NUTM24",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-39],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM25,PROJCS["NUTM25",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-33],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM26,PROJCS["NUTM26",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-27],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] 
+NUTM27,PROJCS["NUTM27",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-21],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM28,PROJCS["NUTM28",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-15],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM29,PROJCS["NUTM29",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM30,PROJCS["NUTM30",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM31,PROJCS["NUTM31",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM32,PROJCS["NUTM32",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM33,PROJCS["NUTM33",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM34,PROJCS["NUTM34",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",21],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM35,PROJCS["NUTM35",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",27],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM36,PROJCS["NUTM36",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",33],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM37,PROJCS["NUTM37",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",39],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM38,PROJCS["NUTM38",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",45],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM39,PROJCS["NUTM39",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",51],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM40,PROJCS["NUTM40",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",57],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] 
+NUTM41,PROJCS["NUTM41",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",63],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM42,PROJCS["NUTM42",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",69],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM43,PROJCS["NUTM43",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",75],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM44,PROJCS["NUTM44",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM45,PROJCS["NUTM45",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",87],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM46,PROJCS["NUTM46",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM47,PROJCS["NUTM47",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",99],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM48,PROJCS["NUTM48",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",105],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM49,PROJCS["NUTM49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",111],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM50,PROJCS["NUTM50",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM51,PROJCS["NUTM51",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",123],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM52,PROJCS["NUTM52",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",129],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM53,PROJCS["NUTM53",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",135],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM54,PROJCS["NUTM54",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",141],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] 
+NUTM55,PROJCS["NUTM55",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",147],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM56,PROJCS["NUTM56",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",153],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM57,PROJCS["NUTM57",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",159],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM58,PROJCS["NUTM58",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",165],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM59,PROJCS["NUTM59",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",171],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NUTM60,PROJCS["NUTM60",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",177],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]] +NZAMUR49,PROJCS["NZAMUR49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-42.68911658055557],PARAMETER["central_meridian",173.0101333888891],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZBLUF49,PROJCS["NZBLUF49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-46.60000961111109],PARAMETER["central_meridian",168.342872],PARAMETER["scale_factor",1],PARAMETER["false_easting",300002.66],PARAMETER["false_northing",699999.58],UNIT["unnamed",1]] +NZBULL49,PROJCS["NZBULL49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-41.81080286111109],PARAMETER["central_meridian",171.5812600611113],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZBYPL49,PROJCS["NZBYPL49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-37.76124981111111],PARAMETER["central_meridian",176.4661972499998],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZCOLL49,PROJCS["NZCOLL49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-40.7147590611111],PARAMETER["central_meridian",172.6720465],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZGAWL49,PROJCS["NZGAWL49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-43.74871156111108],PARAMETER["central_meridian",171.3607484694444],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZGREY49,PROJCS["NZGREY49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-42.33369428055552],PARAMETER["central_meridian",171.5497713111112],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] 
+NZHAWK49,PROJCS["NZHAWK49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-39.65092931111111],PARAMETER["central_meridian",176.6736805305557],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZHOKI49,PROJCS["NZHOKI49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-42.88632236111108],PARAMETER["central_meridian",170.9799934999998],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZJACK49,PROJCS["NZJACK49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-43.97780288888887],PARAMETER["central_meridian",168.606267],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZKARA49,PROJCS["NZKARA49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-41.28991153055556],PARAMETER["central_meridian",172.1090281888886],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZLIND49,PROJCS["NZLIND49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-44.73526796944446],PARAMETER["central_meridian",169.4677550805554],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZMARL49,PROJCS["NZMARL49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-41.54448666944445],PARAMETER["central_meridian",173.8020741111113],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZMG,LOCAL_CS["NZMG - (unsupported)"] +NZMTED49,PROJCS["NZMTED49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-36.87986528055556],PARAMETER["central_meridian",174.764339361111],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZMTNI49,PROJCS["NZMTNI49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-45.13290258055557],PARAMETER["central_meridian",168.3986411888889],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZMTPL49,PROJCS["NZMTPL49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-43.59063758055553],PARAMETER["central_meridian",172.7271935805556],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZMTYO49,PROJCS["NZMTYO49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-45.56372616944442],PARAMETER["central_meridian",167.7388617805554],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZNELS49,PROJCS["NZNELS49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-41.27454471944445],PARAMETER["central_meridian",173.2993168111111],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZNTAI49,PROJCS["NZNTAI49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-45.8615133611111],PARAMETER["central_meridian",170.2825891111109],PARAMETER["scale_factor",0.99996],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] 
+NZOBSE49,PROJCS["NZOBSE49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-45.81619661111111],PARAMETER["central_meridian",170.6285951694446],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZOKAR49,PROJCS["NZOKAR49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-43.11012813888885],PARAMETER["central_meridian",170.2609258305558],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZPOVE49,PROJCS["NZPOVE49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-38.62470278055553],PARAMETER["central_meridian",177.8856362805553],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZTARA49,PROJCS["NZTARA49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-39.13575831111109],PARAMETER["central_meridian",174.2280117500001],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZTIMA49,PROJCS["NZTIMA49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-44.40222036111108],PARAMETER["central_meridian",171.0572508305555],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZTM,PROJCS["NZTM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",173],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1600000],PARAMETER["false_northing",10000000]] +NZTUHI49,PROJCS["NZTUHI49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-39.51247038888886],PARAMETER["central_meridian",175.6400368111111],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZWAIR49,PROJCS["NZWAIR49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-40.9255326388889],PARAMETER["central_meridian",175.6473496694445],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZWANG49,PROJCS["NZWANG49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-40.24194713888888],PARAMETER["central_meridian",175.4880996111111],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +NZWELL49,PROJCS["NZWELL49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-41.30131963888891],PARAMETER["central_meridian",174.7766231111108],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["unnamed",1]] +OG45N45E,LOCAL_CS["OG45N45E - (unsupported)"] +OG55N80E,LOCAL_CS["OG55N80E - (unsupported)"] +OGEQU90W,LOCAL_CS["OGEQU90W - (unsupported)"] +OGNPOLE,LOCAL_CS["OGNPOLE - (unsupported)"] +OMALSK1F,LOCAL_CS["OMALSK1F - (unsupported)"] +OMALSK1M,LOCAL_CS["OMALSK1M - (unsupported)"] +OSASIA,LOCAL_CS["OSASIA - (unsupported)"] +OSNAMER,LOCAL_CS["OSNAMER - (unsupported)"] +OSSYRIA,LOCAL_CS["OSSYRIA - (unsupported)"] +PCALASKA,LOCAL_CS["PCALASKA - (unsupported)"] +PCALBERT,LOCAL_CS["PCALBERT - (unsupported)"] +PCG94,PROJCS["PCG94",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",115.8166666666661],PARAMETER["scale_factor",0.99999906],PARAMETER["false_easting",50000],PARAMETER["false_northing",3800000]] +PCNSLOPE,LOCAL_CS["PCNSLOPE - (unsupported)"] +PCNWT,LOCAL_CS["PCNWT - 
(unsupported)"] +PCTRUSFT,LOCAL_CS["PCTRUSFT - (unsupported)"] +PCWORLD,LOCAL_CS["PCWORLD - (unsupported)"] +PSCANADA,LOCAL_CS["PSCANADA - (unsupported)"] +PSFALK,LOCAL_CS["PSFALK - (unsupported)"] +PSGREEN,LOCAL_CS["PSGREEN - (unsupported)"] +PSN150W,LOCAL_CS["PSN150W - (unsupported)"] +PSNORTH,LOCAL_CS["PSNORTH - (unsupported)"] +PSNORWAY,LOCAL_CS["PSNORWAY - (unsupported)"] +PSNTH000,LOCAL_CS["PSNTH000 - (unsupported)"] +PSNTH045,LOCAL_CS["PSNTH045 - (unsupported)"] +PSNTH180,LOCAL_CS["PSNTH180 - (unsupported)"] +PSSOUTH,LOCAL_CS["PSSOUTH - (unsupported)"] +PSSTH000,LOCAL_CS["PSSTH000 - (unsupported)"] +PS_WGS84,LOCAL_CS["PS_WGS84 - (unsupported)"] +PUW1992,PROJCS["PUW1992",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",19],PARAMETER["scale_factor",0.9993],PARAMETER["false_easting",500000],PARAMETER["false_northing",-5300000]] +PUWG1992,PROJCS["PUWG1992",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",19],PARAMETER["scale_factor",0.9993],PARAMETER["false_easting",500000],PARAMETER["false_northing",-5300000]] +QC_MTM05,PROJCS["QC_MTM05",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-64.49999999999996],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",304800],PARAMETER["false_northing",0]] +QC_MTM06,PROJCS["QC_MTM06",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-67.49999999999996],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",304800],PARAMETER["false_northing",0]] +QC_MTM07,PROJCS["QC_MTM07",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-70.49999999999996],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",304800],PARAMETER["false_northing",0]] +QC_MTM08,PROJCS["QC_MTM08",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-73.49999999999997],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",304800],PARAMETER["false_northing",0]] +QC_MTM09,PROJCS["QC_MTM09",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-76.5],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",304800],PARAMETER["false_northing",0]] +QC_MTM10,PROJCS["QC_MTM10",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-79.5],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",304800],PARAMETER["false_northing",0]] +RO90E,LOCAL_CS["RO90E - (unsupported)"] +RO90W,LOCAL_CS["RO90W - (unsupported)"] +ROBINSON,LOCAL_CS["ROBINSON - (unsupported)"] +ROS4270,LOCAL_CS["ROS4270 - (unsupported)"] +RPBRA,LOCAL_CS["RPBRA - (unsupported)"] +RPMON,LOCAL_CS["RPMON - (unsupported)"] +RPNAM,LOCAL_CS["RPNAM - (unsupported)"] +RPSIB,LOCAL_CS["RPSIB - (unsupported)"] +RPUSSR,LOCAL_CS["RPUSSR - (unsupported)"] +S34JFRX,LOCAL_CS["S34JFRX - (unsupported)"] +S34SRX,LOCAL_CS["S34SRX - (unsupported)"] +S45BRX,LOCAL_CS["S45BRX - (unsupported)"] +SNSPHERE,LOCAL_CS["SNSPHERE - (unsupported)"] +SNWORLD,LOCAL_CS["SNWORLD - (unsupported)"] +STME24,PROJCS["STME24",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",24],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",5000000],PARAMETER["false_northing",10000000]] +STMLO11,LOCAL_CS["STMLO11 - (unsupported)"] +STMLO13,LOCAL_CS["STMLO13 - (unsupported)"] +STMLO15,LOCAL_CS["STMLO15 - (unsupported)"] +STMLO17,LOCAL_CS["STMLO17 
- (unsupported)"] +STMLO19,LOCAL_CS["STMLO19 - (unsupported)"] +STMLO21,LOCAL_CS["STMLO21 - (unsupported)"] +STMLO23,LOCAL_CS["STMLO23 - (unsupported)"] +STMLO25,LOCAL_CS["STMLO25 - (unsupported)"] +STMLO25F,LOCAL_CS["STMLO25F - (unsupported)"] +STMLO27,LOCAL_CS["STMLO27 - (unsupported)"] +STMLO27F,LOCAL_CS["STMLO27F - (unsupported)"] +STMLO29,LOCAL_CS["STMLO29 - (unsupported)"] +STMLO31,LOCAL_CS["STMLO31 - (unsupported)"] +STMLO33,LOCAL_CS["STMLO33 - (unsupported)"] +STMLO35,LOCAL_CS["STMLO35 - (unsupported)"] +STMLO37,LOCAL_CS["STMLO37 - (unsupported)"] +STMLO39,LOCAL_CS["STMLO39 - (unsupported)"] +STMLO41,LOCAL_CS["STMLO41 - (unsupported)"] +STMLO43,LOCAL_CS["STMLO43 - (unsupported)"] +STMLO9,LOCAL_CS["STMLO9 - (unsupported)"] +SUTM01,PROJCS["SUTM01",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-177],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM02,PROJCS["SUTM02",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-171],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM03,PROJCS["SUTM03",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-165],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM04,PROJCS["SUTM04",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-159],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM05,PROJCS["SUTM05",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-153],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM06,PROJCS["SUTM06",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-147],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM07,PROJCS["SUTM07",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-141],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM08,PROJCS["SUTM08",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-135],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM09,PROJCS["SUTM09",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-129],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM10,PROJCS["SUTM10",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-123],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM11,PROJCS["SUTM11",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] 
+SUTM12,PROJCS["SUTM12",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-111],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM13,PROJCS["SUTM13",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-105],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM14,PROJCS["SUTM14",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-99],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM15,PROJCS["SUTM15",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM16,PROJCS["SUTM16",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-87],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM17,PROJCS["SUTM17",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM18,PROJCS["SUTM18",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-75],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM19,PROJCS["SUTM19",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-69],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM20,PROJCS["SUTM20",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-63],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM21,PROJCS["SUTM21",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-57],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM22,PROJCS["SUTM22",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-51],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM23,PROJCS["SUTM23",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-45],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM24,PROJCS["SUTM24",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-39],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM25,PROJCS["SUTM25",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-33],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] 
+SUTM26,PROJCS["SUTM26",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-27],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM27,PROJCS["SUTM27",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-21],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM28,PROJCS["SUTM28",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-15],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM29,PROJCS["SUTM29",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM30,PROJCS["SUTM30",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM31,PROJCS["SUTM31",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM32,PROJCS["SUTM32",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM33,PROJCS["SUTM33",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM34,PROJCS["SUTM34",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",21],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM35,PROJCS["SUTM35",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",27],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM36,PROJCS["SUTM36",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",33],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM37,PROJCS["SUTM37",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",39],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM38,PROJCS["SUTM38",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",45],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM39,PROJCS["SUTM39",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",51],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] 
+SUTM40,PROJCS["SUTM40",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",57],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM41,PROJCS["SUTM41",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",63],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM42,PROJCS["SUTM42",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",69],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM43,PROJCS["SUTM43",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",75],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM44,PROJCS["SUTM44",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM45,PROJCS["SUTM45",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",87],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM46,PROJCS["SUTM46",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM47,PROJCS["SUTM47",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",99],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM48,PROJCS["SUTM48",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",105],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM49,PROJCS["SUTM49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",111],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM50,PROJCS["SUTM50",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM51,PROJCS["SUTM51",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",123],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM52,PROJCS["SUTM52",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",129],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM53,PROJCS["SUTM53",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",135],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] 
+SUTM54,PROJCS["SUTM54",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",141],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM55,PROJCS["SUTM55",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",147],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM56,PROJCS["SUTM56",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",153],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM57,PROJCS["SUTM57",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",159],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM58,PROJCS["SUTM58",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",165],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM59,PROJCS["SUTM59",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",171],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SUTM60,PROJCS["SUTM60",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",177],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]] +SWISSNEW,LOCAL_CS["SWISSNEW - (unsupported)"] +SWISSOLD,LOCAL_CS["SWISSOLD - (unsupported)"] +TAIWAN,PROJCS["TAIWAN",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",120.9995190069077],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",250000],PARAMETER["false_northing",0]] +TM103_30,PROJCS["TM103_30",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",103.5],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TM16E,PROJCS["TM16E",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",16],PARAMETER["scale_factor",0.95],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TM36,PROJCS["TM36",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",36],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TM36E,PROJCS["TM36E",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",36],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TM42E,PROJCS["TM42E",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",42],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TM54E,PROJCS["TM54E",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",54],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] 
+TM54WCM,PROJCS["TM54WCM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-54],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TM6W,PROJCS["TM6W",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-5.999999999999955],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMAFRICA,PROJCS["TMAFRICA",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",0.99],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +TMAFT15,PROJCS["TMAFT15",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-92.99999999999996],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.5],PARAMETER["false_northing",0],UNIT["unnamed",0.304800641]] +TMAFT16,PROJCS["TMAFT16",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-86.99999999999994],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.5],PARAMETER["false_northing",0],UNIT["unnamed",0.304800641]] +TMAFT17,PROJCS["TMAFT17",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.5],PARAMETER["false_northing",0],UNIT["unnamed",0.304800641]] +TMALABEF,PROJCS["TMALABEF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.5],PARAMETER["central_meridian",-85.83333329999994],PARAMETER["scale_factor",0.99996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMALABEF83,PROJCS["TMALABEF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.50000000812049],PARAMETER["central_meridian",-85.83333329732486],PARAMETER["scale_factor",0.99996],PARAMETER["false_easting",656166.6665],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMALABEM,PROJCS["TMALABEM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.50000000812049],PARAMETER["central_meridian",-85.83333335462063],PARAMETER["scale_factor",0.99996],PARAMETER["false_easting",200000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +TMALABWF,PROJCS["TMALABWF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30],PARAMETER["central_meridian",-87.49999999999996],PARAMETER["scale_factor",0.999933333],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMALABWF83,PROJCS["TMALABWF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.00000002301578],PARAMETER["central_meridian",-87.49999997163637],PARAMETER["scale_factor",0.999933333],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMALABWM,PROJCS["TMALABWM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30],PARAMETER["central_meridian",-87.49999999999996],PARAMETER["scale_factor",0.9999333333],PARAMETER["false_easting",600000],PARAMETER["false_northing",0]] +TMALSK2F,PROJCS["TMALSK2F",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-142],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] 
+TMALSK2F83,PROJCS["TMALSK2F83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",53.99999999559177],PARAMETER["central_meridian",-142.0000000096286],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMALSK2M,PROJCS["TMALSK2M",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-142],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMALSK3F,PROJCS["TMALSK3F",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-146],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMALSK3F83,PROJCS["TMALSK3F83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",53.99999999559177],PARAMETER["central_meridian",-146.000000005058],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMALSK3M,PROJCS["TMALSK3M",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-146],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMALSK4F,PROJCS["TMALSK4F",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-150],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMALSK4F83,PROJCS["TMALSK4F83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",53.99999999559177],PARAMETER["central_meridian",-150.0000000004873],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMALSK4M,PROJCS["TMALSK4M",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-150],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMALSK5F,PROJCS["TMALSK5F",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-154],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMALSK5F83,PROJCS["TMALSK5F83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",53.99999999559177],PARAMETER["central_meridian",-153.9999999959166],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMALSK5M,PROJCS["TMALSK5M",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-154],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMALSK6F,PROJCS["TMALSK6F",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-158],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMALSK6F83,PROJCS["TMALSK6F83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",53.99999999559177],PARAMETER["central_meridian",-157.999999991346],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] 
+TMALSK6M,PROJCS["TMALSK6M",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-158],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMALSK7F,PROJCS["TMALSK7F",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-162],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMALSK7F83,PROJCS["TMALSK7F83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",53.99999999559177],PARAMETER["central_meridian",-161.9999999867753],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMALSK7M,PROJCS["TMALSK7M",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-162],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMALSK8F,PROJCS["TMALSK8F",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-166],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMALSK8F83,PROJCS["TMALSK8F83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",53.99999999559177],PARAMETER["central_meridian",-165.9999999822046],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMALSK8M,PROJCS["TMALSK8M",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-166],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMALSK9F,PROJCS["TMALSK9F",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-170],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMALSK9F83,PROJCS["TMALSK9F83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",53.99999999559177],PARAMETER["central_meridian",-169.999999977634],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMALSK9M,PROJCS["TMALSK9M",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",54],PARAMETER["central_meridian",-170],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMAMG48,PROJCS["TMAMG48",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",105],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMAMG49,PROJCS["TMAMG49",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",111],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMAMG50,PROJCS["TMAMG50",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMAMG51,PROJCS["TMAMG51",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",123],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMAMG52,PROJCS["TMAMG52",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",129],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMAMG53,PROJCS["TMAMG53",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",135],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMAMG54,PROJCS["TMAMG54",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",141],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMAMG55,PROJCS["TMAMG55",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",147],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMAMG56,PROJCS["TMAMG56",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",153],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMAMG57,PROJCS["TMAMG57",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",159],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMAMG58,PROJCS["TMAMG58",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",165],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMARG1,PROJCS["TMARG1",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-89.99999999999994],PARAMETER["central_meridian",-71.99999999999997],PARAMETER["scale_factor",1],PARAMETER["false_easting",1500000],PARAMETER["false_northing",0]]
+TMARG2,PROJCS["TMARG2",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-89.99999999999994],PARAMETER["central_meridian",-68.99999999999997],PARAMETER["scale_factor",1],PARAMETER["false_easting",2500000],PARAMETER["false_northing",0]]
+TMARG3,PROJCS["TMARG3",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-89.99999999999994],PARAMETER["central_meridian",-65.99999999999996],PARAMETER["scale_factor",1],PARAMETER["false_easting",3500000],PARAMETER["false_northing",0]]
+TMARG4,PROJCS["TMARG4",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-89.99999999999994],PARAMETER["central_meridian",-62.99999999999995],PARAMETER["scale_factor",1],PARAMETER["false_easting",4500000],PARAMETER["false_northing",0]]
+TMARG5,PROJCS["TMARG5",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-89.99999999999994],PARAMETER["central_meridian",-59.99999999999994],PARAMETER["scale_factor",1],PARAMETER["false_easting",5500000],PARAMETER["false_northing",0]]
+TMARG54,PROJCS["TMARG54",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-71.99999999999997],PARAMETER["scale_factor",1],PARAMETER["false_easting",7500000],PARAMETER["false_northing",10002288.2999]]
+TMARG57,PROJCS["TMARG57",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-68.99999999999997],PARAMETER["scale_factor",1],PARAMETER["false_easting",6500000],PARAMETER["false_northing",10002288.2999]]
+TMARG6,PROJCS["TMARG6",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-89.99999999999994],PARAMETER["central_meridian",-56.99999999999994],PARAMETER["scale_factor",1],PARAMETER["false_easting",6500000],PARAMETER["false_northing",0]]
+TMARG60,PROJCS["TMARG60",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-65.99999999999996],PARAMETER["scale_factor",1],PARAMETER["false_easting",5500000],PARAMETER["false_northing",10002288.2999]]
+TMARG63,PROJCS["TMARG63",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-62.99999999999995],PARAMETER["scale_factor",1],PARAMETER["false_easting",4500000],PARAMETER["false_northing",10002288.2999]]
+TMARG66,PROJCS["TMARG66",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-59.99999999999994],PARAMETER["scale_factor",1],PARAMETER["false_easting",3500000],PARAMETER["false_northing",10002288.2999]]
+TMARG69,PROJCS["TMARG69",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-56.99999999999994],PARAMETER["scale_factor",1],PARAMETER["false_easting",2500000],PARAMETER["false_northing",10002288.2999]]
+TMARG7,PROJCS["TMARG7",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-89.99999999999994],PARAMETER["central_meridian",-54],PARAMETER["scale_factor",1],PARAMETER["false_easting",7500000],PARAMETER["false_northing",0]]
+TMARG72,PROJCS["TMARG72",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-54],PARAMETER["scale_factor",1],PARAMETER["false_easting",1500000],PARAMETER["false_northing",10002288.2999]]
+TMARG8,PROJCS["TMARG8",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-68.99999999999997],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",2500000],PARAMETER["false_northing",10000000]]
+TMARG9,PROJCS["TMARG9",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-56.99999999999994],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",2500000],PARAMETER["false_northing",10000000]]
+TMARIZCF,PROJCS["TMARIZCF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",31],PARAMETER["central_meridian",-111.9166667],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMARIZCF83,PROJCS["TMARIZCF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",31],PARAMETER["central_meridian",-111.9166667],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",700000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMARIZEF,PROJCS["TMARIZEF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",31],PARAMETER["central_meridian",-110.1666667],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMARIZEF83,PROJCS["TMARIZEF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",31],PARAMETER["central_meridian",-110.1666667],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",700000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMARIZWF,PROJCS["TMARIZWF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",31],PARAMETER["central_meridian",-113.75],PARAMETER["scale_factor",0.999933333],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMARIZWF83,PROJCS["TMARIZWF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",31],PARAMETER["central_meridian",-113.75],PARAMETER["scale_factor",0.999933333],PARAMETER["false_easting",700000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMARUBA,PROJCS["TMARUBA",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",12.5200894],PARAMETER["central_meridian",-69.99294669999993],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",10000],PARAMETER["false_northing",15000]]
+TMAUSC,PROJCS["TMAUSC",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",13.3333333],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+TMAUSE,PROJCS["TMAUSE",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",16.3333333],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+TMAUSW,PROJCS["TMAUSW",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",10.3333333],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+TMAUSYD1,PROJCS["TMAUSYD1",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-34],PARAMETER["central_meridian",116],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",0.914391796]]
+TMAUSYD2,PROJCS["TMAUSYD2",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-34],PARAMETER["central_meridian",121],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",0.914391796]]
+TMAUSYD3,PROJCS["TMAUSYD3",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-34],PARAMETER["central_meridian",126],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",0.914391796]]
+TMAUSYD4,PROJCS["TMAUSYD4",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-34],PARAMETER["central_meridian",131],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",0.914391796]]
+TMAUSYD5,PROJCS["TMAUSYD5",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-34],PARAMETER["central_meridian",136],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",0.914391796]]
+TMAUSYD6,PROJCS["TMAUSYD6",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-34],PARAMETER["central_meridian",141],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",0.914391796]]
+TMAUSYD7,PROJCS["TMAUSYD7",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-34],PARAMETER["central_meridian",146],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",0.914391796]]
+TMAUSYD8,PROJCS["TMAUSYD8",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-34],PARAMETER["central_meridian",151],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",0.914391796]]
+TMBAHR,PROJCS["TMBAHR",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",51],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMBOAG1R,PROJCS["TMBOAG1R",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-3.452333299999991],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1500000],PARAMETER["false_northing",0]]
+TMBOAG2R,PROJCS["TMBOAG2R",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",2.547666699999998],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",2520000],PARAMETER["false_northing",0]]
+TMBOAGA1,PROJCS["TMBOAGA1",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",9.000000000000002],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1500000],PARAMETER["false_northing",0]]
+TMBOAGA2,PROJCS["TMBOAGA2",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",2520000],PARAMETER["false_northing",0]]
+TMBOGEQ,PROJCS["TMBOGEQ",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-74.08091659999998],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+TMBOH,PROJCS["TMBOH",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",117],PARAMETER["scale_factor",1],PARAMETER["false_easting",20500000],PARAMETER["false_northing",0]]
+TMBOL1,PROJCS["TMBOL1",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-65.99999999999996],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMBONAIR,PROJCS["TMBONAIR",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",12.1797141],PARAMETER["central_meridian",-68.25184439999994],PARAMETER["scale_factor",1],PARAMETER["false_easting",23000],PARAMETER["false_northing",20980.49]]
+TMBUCHAN,PROJCS["TMBUCHAN",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMBURMA,PROJCS["TMBURMA",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",96],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMCM116,PROJCS["TMCM116",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",116],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMCM126,PROJCS["TMCM126",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",126],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMCM133E,PROJCS["TMCM133E",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",133],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMCM157E,PROJCS["TMCM157E",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",157],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMCOLB,PROJCS["TMCOLB",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",4.5990472],PARAMETER["central_meridian",-74.08091669999996],PARAMETER["scale_factor",1],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+TMCOLE,PROJCS["TMCOLE",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",4.5990472],PARAMETER["central_meridian",-68.08091669999996],PARAMETER["scale_factor",1],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+TMCOLEC,PROJCS["TMCOLEC",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",4.5990472],PARAMETER["central_meridian",-71.08091669999996],PARAMETER["scale_factor",1],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+TMCOLW,PROJCS["TMCOLW",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",4.5990472],PARAMETER["central_meridian",-77.08091669999996],PARAMETER["scale_factor",1],PARAMETER["false_easting",1000000],PARAMETER["false_northing",1000000]]
+TMCONGO,PROJCS["TMCONGO",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",11],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]]
+TMCORONA,PROJCS["TMCORONA",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",33.76446202777643],PARAMETER["central_meridian",-117.4745428888658],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+TMDELWRF,PROJCS["TMDELWRF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",38],PARAMETER["central_meridian",-75.41666669999995],PARAMETER["scale_factor",0.999995],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMDELWRF83,PROJCS["TMDELWRF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",38.00000001387444],PARAMETER["central_meridian",-75.41666671179337],PARAMETER["scale_factor",0.999995],PARAMETER["false_easting",656166.6665],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMDELWRM,PROJCS["TMDELWRM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",38.00000001387444],PARAMETER["central_meridian",-75.41666665449759],PARAMETER["scale_factor",0.999995],PARAMETER["false_easting",200000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMEG24P,PROJCS["TMEG24P",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",34.5],PARAMETER["scale_factor",1],PARAMETER["false_easting",200000],PARAMETER["false_northing",0]]
+TMEGEPTU,PROJCS["TMEGEPTU",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",11],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMEGMFBP,PROJCS["TMEGMFBP",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",34.5],PARAMETER["scale_factor",1],PARAMETER["false_easting",200000],PARAMETER["false_northing",0]]
+TMEGSA87,PROJCS["TMEGSA87",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",23.99999882666041],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMEGYPTB,PROJCS["TMEGYPTB",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30],PARAMETER["central_meridian",35],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",1100000]]
+TMEGYPTG,PROJCS["TMEGYPTG",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30],PARAMETER["central_meridian",35],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",1100000]]
+TMEGYPTP,PROJCS["TMEGYPTP",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30],PARAMETER["central_meridian",27],PARAMETER["scale_factor",1],PARAMETER["false_easting",700000],PARAMETER["false_northing",200000]]
+TMEGYPTR,PROJCS["TMEGYPTR",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30],PARAMETER["central_meridian",31],PARAMETER["scale_factor",1],PARAMETER["false_easting",615000],PARAMETER["false_northing",810000]]
+TMEGYPTS,PROJCS["TMEGYPTS",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30],PARAMETER["central_meridian",27],PARAMETER["scale_factor",1],PARAMETER["false_easting",700000],PARAMETER["false_northing",1200000]]
+TMEGYPTW,PROJCS["TMEGYPTW",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",28],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMETHIOP,PROJCS["TMETHIOP",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",40],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMFIN0,PROJCS["TMFIN0",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",17.99999625520633],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMFIN1,PROJCS["TMFIN1",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",20.99999754093336],PARAMETER["scale_factor",1],PARAMETER["false_easting",1500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMFIN2,PROJCS["TMFIN2",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",23.99999882666041],PARAMETER["scale_factor",1],PARAMETER["false_easting",2500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMFIN3,PROJCS["TMFIN3",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",27.00000011238744],PARAMETER["scale_factor",1],PARAMETER["false_easting",3500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMFIN4,PROJCS["TMFIN4",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",29.99999566853653],PARAMETER["scale_factor",1],PARAMETER["false_easting",4500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMFIN5,PROJCS["TMFIN5",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",32.99999695426357],PARAMETER["scale_factor",1],PARAMETER["false_easting",5500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMFLRAEF,PROJCS["TMFLRAEF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",24.3333333],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.999941177],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMFLRAEF83,PROJCS["TMFLRAEF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",24.33333329597914],PARAMETER["central_meridian",-80.99999999338766],PARAMETER["scale_factor",0.999941177],PARAMETER["false_easting",656166.6665],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMFLRAEM,PROJCS["TMFLRAEM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",24.33333335327492],PARAMETER["central_meridian",-80.99999999338766],PARAMETER["scale_factor",0.99994118],PARAMETER["false_easting",200000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMFLRAWF,PROJCS["TMFLRAWF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",24.3333333],PARAMETER["central_meridian",-81.99999999999997],PARAMETER["scale_factor",0.999941177],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMFLRAWF83,PROJCS["TMFLRAWF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",24.3333333],PARAMETER["central_meridian",-82.00000002089288],PARAMETER["scale_factor",0.999941177],PARAMETER["false_easting",656166.6665],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMFLRAWM,PROJCS["TMFLRAWM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",24.33333335327492],PARAMETER["central_meridian",-82.00000002089288],PARAMETER["scale_factor",0.99994118],PARAMETER["false_easting",200000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMGCWEG2,PROJCS["TMGCWEG2",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-90.64999999999993],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",1640416.67],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMGEOREF,PROJCS["TMGEOREF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30],PARAMETER["central_meridian",-82.16666669999994],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMGEOREF83,PROJCS["TMGEOREF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.00000002301578],PARAMETER["central_meridian",-82.16666668259445],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",656166.6665],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMGEOREM,PROJCS["TMGEOREM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.00000002301578],PARAMETER["central_meridian",-82.16666668259445],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",200000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMGEORWF,PROJCS["TMGEORWF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30],PARAMETER["central_meridian",-84.16666669999995],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMGEORWF83,PROJCS["TMGEORWF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.00000002301578],PARAMETER["central_meridian",-84.16666668030912],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",2296583.333],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMGEORWM,PROJCS["TMGEORWM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.00000002301578],PARAMETER["central_meridian",-84.16666668030912],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",700000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMGER1,PROJCS["TMGER1",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",3],PARAMETER["scale_factor",1],PARAMETER["false_easting",1500000],PARAMETER["false_northing",0]]
+TMGER2,PROJCS["TMGER2",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",6],PARAMETER["scale_factor",1],PARAMETER["false_easting",2500000],PARAMETER["false_northing",0]]
+TMGER3,PROJCS["TMGER3",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",9.000000000000002],PARAMETER["scale_factor",1],PARAMETER["false_easting",3500000],PARAMETER["false_northing",0]]
+TMGER4,PROJCS["TMGER4",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",12],PARAMETER["scale_factor",1],PARAMETER["false_easting",4500000],PARAMETER["false_northing",0]]
+TMGER5,PROJCS["TMGER5",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",1],PARAMETER["false_easting",5500000],PARAMETER["false_northing",0]]
+TMGHANA,PROJCS["TMGHANA",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",4.666666599999998],PARAMETER["central_meridian",-0.9999999999999829],PARAMETER["scale_factor",0.99975],PARAMETER["false_easting",274319.51],PARAMETER["false_northing",0]]
+TMGHANAF,PROJCS["TMGHANAF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",4.6666667],PARAMETER["central_meridian",-0.9999999999999829],PARAMETER["scale_factor",0.99975],PARAMETER["false_easting",900000],PARAMETER["false_northing",0],UNIT["unnamed",0.304799472]]
+TMGHANAY,PROJCS["TMGHANAY",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",4.6666667],PARAMETER["central_meridian",-0.9999999999999829],PARAMETER["scale_factor",0.99975],PARAMETER["false_easting",300000],PARAMETER["false_northing",0],UNIT["unnamed",0.91439841462]]
+TMGK20E,PROJCS["TMGK20E",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",120],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN05,PROJCS["TMGKN05",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",27],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN06,PROJCS["TMGKN06",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",33],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN07,PROJCS["TMGKN07",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",39],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN10,PROJCS["TMGKN10",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",57],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN11,PROJCS["TMGKN11",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",63],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN12,PROJCS["TMGKN12",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",69],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN13,PROJCS["TMGKN13",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",75],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN14,PROJCS["TMGKN14",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",81],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN15,PROJCS["TMGKN15",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",86.99999999999997],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN16,PROJCS["TMGKN16",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",93],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN17,PROJCS["TMGKN17",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",98.99999999999997],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN18,PROJCS["TMGKN18",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",105],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN19,PROJCS["TMGKN19",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",111],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN20,PROJCS["TMGKN20",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",117],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN20W,PROJCS["TMGKN20W",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",117],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN21,PROJCS["TMGKN21",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",123],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN21W,PROJCS["TMGKN21W",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",123],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN22,PROJCS["TMGKN22",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",129],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN23,PROJCS["TMGKN23",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",135],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN8,PROJCS["TMGKN8",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",45],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMGKN9,PROJCS["TMGKN9",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",51],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMHAWI1F,PROJCS["TMHAWI1F",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",18.8333333],PARAMETER["central_meridian",-155.5],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMHAWI1F83,PROJCS["TMHAWI1F83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",18.83333328793987],PARAMETER["central_meridian",-155.5000000085266],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMHAWI1M,PROJCS["TMHAWI1M",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",18.83333334523564],PARAMETER["central_meridian",-155.5000000085266],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMHAWI2F,PROJCS["TMHAWI2F",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",20.3333333],PARAMETER["central_meridian",-156.6666666],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMHAWI2F83,PROJCS["TMHAWI2F83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",20.33333330054981],PARAMETER["central_meridian",-156.6666665831418],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMHAWI2M,PROJCS["TMHAWI2M",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",20.33333335784559],PARAMETER["central_meridian",-156.6666666404376],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMHAWI3F,PROJCS["TMHAWI3F",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",21.1666666],PARAMETER["central_meridian",-158],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMHAWI3F83,PROJCS["TMHAWI3F83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",21.16666660905768],PARAMETER["central_meridian",-157.999999991346],PARAMETER["scale_factor",1],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMHAWI3M,PROJCS["TMHAWI3M",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",21.16666666635346],PARAMETER["central_meridian",-157.999999991346],PARAMETER["scale_factor",0.99999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMHAWI4F,PROJCS["TMHAWI4F",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",21.8333333],PARAMETER["central_meridian",-159.5],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMHAWI4F83,PROJCS["TMHAWI4F83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",21.83333331315976],PARAMETER["central_meridian",-159.5000000039559],PARAMETER["scale_factor",1],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMHAWI4M,PROJCS["TMHAWI4M",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",21.83333331315976],PARAMETER["central_meridian",-159.5000000039559],PARAMETER["scale_factor",0.99999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMHAWI5F,PROJCS["TMHAWI5F",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",21.6666667],PARAMETER["central_meridian",-160.1666667],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMHAWI5F83,PROJCS["TMHAWI5F83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",21.66666670875396],PARAMETER["central_meridian",-160.166666708058],PARAMETER["scale_factor",1],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMHAWI5M,PROJCS["TMHAWI5M",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",21.66666665145818],PARAMETER["central_meridian",-160.1666666507622],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMHK80,PROJCS["TMHK80",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",22.31213277122482],PARAMETER["central_meridian",114.1785550046161],PARAMETER["scale_factor",1],PARAMETER["false_easting",836694.05],PARAMETER["false_northing",819069.8]]
+TMHNT170,PROJCS["TMHNT170",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-44],PARAMETER["central_meridian",170],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",500000],UNIT["unnamed",0.914398415]]
+TMI,PROJCS["TMI",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",31.68438845381803],PARAMETER["central_meridian",35.20449790765302],PARAMETER["scale_factor",1.0000067],PARAMETER["false_easting",219529.584],PARAMETER["false_northing",626907.39]]
+TMIDACFT,PROJCS["TMIDACFT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.6666667],PARAMETER["central_meridian",-114],PARAMETER["scale_factor",0.999947368],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMIDACFT83,PROJCS["TMIDACFT83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.66666668590062],PARAMETER["central_meridian",-113.9999999843275],PARAMETER["scale_factor",0.999947368],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMIDACM,PROJCS["TMIDACM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.66666668590062],PARAMETER["central_meridian",-113.9999999843275],PARAMETER["scale_factor",0.99994737],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMIDAEFT,PROJCS["TMIDAEFT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.6666667],PARAMETER["central_meridian",-112.1666667],PARAMETER["scale_factor",0.999947368],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMIDAEFT83,PROJCS["TMIDAEFT83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.66666668590062],PARAMETER["central_meridian",-112.1666667056102],PARAMETER["scale_factor",0.999947368],PARAMETER["false_easting",656166.6665],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMIDAEM,PROJCS["TMIDAEM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.66666668590062],PARAMETER["central_meridian",-112.1666666483144],PARAMETER["scale_factor",0.99994737],PARAMETER["false_easting",200000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMIDAWFT,PROJCS["TMIDAWFT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.6666667],PARAMETER["central_meridian",-115.75],PARAMETER["scale_factor",0.999933333],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMIDAWFT83,PROJCS["TMIDAWFT83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.66666668590062],PARAMETER["central_meridian",-115.7499999894899],PARAMETER["scale_factor",0.999933333],PARAMETER["false_easting",2624666.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMIDAWM,PROJCS["TMIDAWM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.66666668590062],PARAMETER["central_meridian",-115.7499999894899],PARAMETER["scale_factor",0.99993333],PARAMETER["false_easting",800000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMILLEFT,PROJCS["TMILLEFT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",36.6666667],PARAMETER["central_meridian",-88.3333333],PARAMETER["scale_factor",0.999975],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMILLEFT83,PROJCS["TMILLEFT83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",36.66666672026184],PARAMETER["central_meridian",-88.33333328014425],PARAMETER["scale_factor",0.999975],PARAMETER["false_easting",984249.9998],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMILLEM,PROJCS["TMILLEM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",36.66666666296607],PARAMETER["central_meridian",-88.33333333744002],PARAMETER["scale_factor",0.999975],PARAMETER["false_easting",300000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMILLWFT,PROJCS["TMILLWFT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",36.6666667],PARAMETER["central_meridian",-90.16666669999996],PARAMETER["scale_factor",0.999941177],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMILLWFT83,PROJCS["TMILLWFT83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",36.66666672026184],PARAMETER["central_meridian",-90.16666667345312],PARAMETER["scale_factor",0.999941177],PARAMETER["false_easting",2296583.333],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMILLWM,PROJCS["TMILLWM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",36.66666666296607],PARAMETER["central_meridian",-90.16666667345312],PARAMETER["scale_factor",0.99994118],PARAMETER["false_easting",700000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMIND114,PROJCS["TMIND114",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",114],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMINDEFT,PROJCS["TMINDEFT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",37.5],PARAMETER["central_meridian",-85.66666669999996],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMINDEFT83,PROJCS["TMINDEFT83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",37.49999997147393],PARAMETER["central_meridian",-85.66666669291907],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",820208.3332],PARAMETER["false_northing",328083.3333],UNIT["US Foot",0.30480061]]
+TMINDEM,PROJCS["TMINDEM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",37.49999997147393],PARAMETER["central_meridian",-85.66666669291907],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",100000],PARAMETER["false_northing",250000],UNIT["unnamed",1]]
+TMINDWFT,PROJCS["TMINDWFT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",37.5],PARAMETER["central_meridian",-87.08333329999992],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMINDWFT83,PROJCS["TMINDWFT83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",37.49999997147393],PARAMETER["central_meridian",-87.08333331738244],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",820208.3332],PARAMETER["false_northing",2952749.999],UNIT["US Foot",0.30480061]]
+TMINDWM,PROJCS["TMINDWM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",37.49999997147393],PARAMETER["central_meridian",-87.08333331738244],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",900000],PARAMETER["false_northing",250000],UNIT["unnamed",1]]
+TMIRAQ,PROJCS["TMIRAQ",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",29.0262683],PARAMETER["central_meridian",46.5],PARAMETER["scale_factor",0.9994],PARAMETER["false_easting",800000],PARAMETER["false_northing",0]]
+TMIRAQC,PROJCS["TMIRAQC",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",43],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMISG541,PROJCS["TMISG541",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",139],PARAMETER["scale_factor",0.99994],PARAMETER["false_easting",300000],PARAMETER["false_northing",5000000]]
+TMISG542,PROJCS["TMISG542",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",141],PARAMETER["scale_factor",0.99994],PARAMETER["false_easting",300000],PARAMETER["false_northing",5000000]]
+TMISG543,PROJCS["TMISG543",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",143],PARAMETER["scale_factor",0.99994],PARAMETER["false_easting",300000],PARAMETER["false_northing",5000000]]
+TMISG551,PROJCS["TMISG551",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",145],PARAMETER["scale_factor",0.99994],PARAMETER["false_easting",300000],PARAMETER["false_northing",5000000]]
+TMISG552,PROJCS["TMISG552",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",147],PARAMETER["scale_factor",0.99994],PARAMETER["false_easting",300000],PARAMETER["false_northing",5000000]]
+TMISG553,PROJCS["TMISG553",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",149],PARAMETER["scale_factor",0.99994],PARAMETER["false_easting",300000],PARAMETER["false_northing",5000000]]
+TMISG561,PROJCS["TMISG561",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",151],PARAMETER["scale_factor",0.99994],PARAMETER["false_easting",300000],PARAMETER["false_northing",5000000]]
+TMISG562,PROJCS["TMISG562",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",153],PARAMETER["scale_factor",0.99994],PARAMETER["false_easting",300000],PARAMETER["false_northing",5000000]]
+TMISG563,PROJCS["TMISG563",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",155],PARAMETER["scale_factor",0.99994],PARAMETER["false_easting",300000],PARAMETER["false_northing",5000000]]
+TMISRAEL,PROJCS["TMISRAEL",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",35.20451694444443],PARAMETER["central_meridian",57.29616339480504],PARAMETER["scale_factor",529.584],PARAMETER["false_easting",219],PARAMETER["false_northing",626907.39]]
+TMJORDAN,PROJCS["TMJORDAN",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",31.7340969],PARAMETER["central_meridian",35.2120806],PARAMETER["scale_factor",1],PARAMETER["false_easting",170251.56],PARAMETER["false_northing",126867.91]]
+TMKOREA,PROJCS["TMKOREA",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",127],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]]
+TMMAINEF,PROJCS["TMMAINEF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",43.8333333],PARAMETER["central_meridian",-68.49999999999994],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMAINEF83,PROJCS["TMMAINEF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",43.66988819611419],PARAMETER["central_meridian",-68.50000002199494],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",984249.9998],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMAINEM,PROJCS["TMMAINEM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",43.66666666666665],PARAMETER["central_meridian",-68.49999999999994],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",300000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMMAINWF,PROJCS["TMMAINWF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",42.8333333],PARAMETER["central_meridian",-70.16666669999996],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMAINWF83,PROJCS["TMMAINWF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",42.83333331781164],PARAMETER["central_meridian",-70.16666669630645],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",2952749.999],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMAINWM,PROJCS["TMMAINWM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",42.83333331781164],PARAMETER["central_meridian",-70.16666663901067],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",900000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMMCBO4,PROJCS["TMMCBO4",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",10.6449092],PARAMETER["central_meridian",-71.60515809999998],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+TMMICHCF,PROJCS["TMMICHCF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.5],PARAMETER["central_meridian",-85.74999999999996],PARAMETER["scale_factor",0.999909091],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",0.304788967]]
+TMMICHEF,PROJCS["TMMICHEF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.5],PARAMETER["central_meridian",-83.66666669999994],PARAMETER["scale_factor",0.999942857],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",0.304788967]]
+TMMICHWF,PROJCS["TMMICHWF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.5],PARAMETER["central_meridian",-88.74999999999996],PARAMETER["scale_factor",0.999909091],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",0.304788967]]
+TMMISOCF,PROJCS["TMMISOCF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",35.8333333],PARAMETER["central_meridian",-92.49999999999994],PARAMETER["scale_factor",0.999933333],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMISOCF83,PROJCS["TMMISOCF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",35.83333329716242],PARAMETER["central_meridian",-92.49999999457094],PARAMETER["scale_factor",0.999933333],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMISOCM,PROJCS["TMMISOCM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",35.8333333544582],PARAMETER["central_meridian",-92.49999999457094],PARAMETER["scale_factor",0.99993333],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMMISOEF,PROJCS["TMMISOEF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",35.8333333],PARAMETER["central_meridian",-90.5],PARAMETER["scale_factor",0.999933333],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMISOEF83,PROJCS["TMMISOEF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",35.83333329716242],PARAMETER["central_meridian",-90.49999999685626],PARAMETER["scale_factor",0.999933333],PARAMETER["false_easting",820208.3332],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMISOEM,PROJCS["TMMISOEM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",35.8333333544582],PARAMETER["central_meridian",-90.49999999685626],PARAMETER["scale_factor",0.99993333],PARAMETER["false_easting",250000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMMISOWF,PROJCS["TMMISOWF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",36.1666667],PARAMETER["central_meridian",-94.49999999999996],PARAMETER["scale_factor",0.999941177],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMISOWF83,PROJCS["TMMISOWF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",36.16666667786134],PARAMETER["central_meridian",-94.49999999228559],PARAMETER["scale_factor",0.999941177],PARAMETER["false_easting",2788708.333],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMISOWM,PROJCS["TMMISOWM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",36.16666667786134],PARAMETER["central_meridian",-94.49999999228559],PARAMETER["scale_factor",0.99994118],PARAMETER["false_easting",850000],PARAMETER["false_northing",0],UNIT["unnamed",1]]
+TMMISSEF,PROJCS["TMMISSEF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",29.6666667],PARAMETER["central_meridian",-88.83333329999995],PARAMETER["scale_factor",0.99996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMISSEF83,PROJCS["TMMISSEF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",29.49999998061527],PARAMETER["central_meridian",-88.83333332254475],PARAMETER["scale_factor",0.99995],PARAMETER["false_easting",984249.9998],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMISSEM,PROJCS["TMMISSEM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",29.5],PARAMETER["central_meridian",-88.83333329999995],PARAMETER["scale_factor",0.99995],PARAMETER["false_easting",300000],PARAMETER["false_northing",0]]
+TMMISSWF,PROJCS["TMMISSWF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.5],PARAMETER["central_meridian",-90.33333329999995],PARAMETER["scale_factor",0.999941177],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMISSWF83,PROJCS["TMMISSWF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",29.49999998061527],PARAMETER["central_meridian",-90.33333327785891],PARAMETER["scale_factor",0.99995],PARAMETER["false_easting",2296583.333],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMMISSWM,PROJCS["TMMISSWM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",29.5],PARAMETER["central_meridian",-90.33333329999995],PARAMETER["scale_factor",0.99995],PARAMETER["false_easting",700000],PARAMETER["false_northing",0]]
+TMMON087,PROJCS["TMMON087",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",86.99999999999997],PARAMETER["scale_factor",1],PARAMETER["false_easting",15500000],PARAMETER["false_northing",0]]
+TMMON093,PROJCS["TMMON093",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",93],PARAMETER["scale_factor",1],PARAMETER["false_easting",16500000],PARAMETER["false_northing",0]]
+TMMON099,PROJCS["TMMON099",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",98.99999999999997],PARAMETER["scale_factor",1],PARAMETER["false_easting",17500000],PARAMETER["false_northing",0]]
+TMMON105,PROJCS["TMMON105",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",105],PARAMETER["scale_factor",1],PARAMETER["false_easting",18500000],PARAMETER["false_northing",0]]
+TMMON111,PROJCS["TMMON111",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",111],PARAMETER["scale_factor",1],PARAMETER["false_easting",19500000],PARAMETER["false_northing",0]]
+TMMON117,PROJCS["TMMON117",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",117],PARAMETER["scale_factor",1],PARAMETER["false_easting",20500000],PARAMETER["false_northing",0]]
+TMMRD,PROJCS["TMMRD",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+TMNAMIBIAM13,PROJCS["TMNAMIBIAM13",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-21.99999997486133],PARAMETER["central_meridian",12.99999995649744],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+TMNAMIBIAM17,PROJCS["TMNAMIBIAM17",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-21.99999997486133],PARAMETER["central_meridian",16.99999995192677],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+TMNAMIBIAM19,PROJCS["TMNAMIBIAM19",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-21.99999997486133],PARAMETER["central_meridian",18.99999994964144],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]]
+TMNEVACF,PROJCS["TMNEVACF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",34.75],PARAMETER["central_meridian",-116.6666667],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]]
+TMNEVACF83,PROJCS["TMNEVACF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",34.74999999610218],PARAMETER["central_meridian",-116.6666666861443],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",19685000],UNIT["US Foot",0.30480061]]
+TMNEVACM,PROJCS["TMNEVACM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",34.75],PARAMETER["central_meridian",-116.6666667],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",6000000]] +TMNEVAEF,PROJCS["TMNEVAEF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",34.75],PARAMETER["central_meridian",-115.5833333],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEVAEF83,PROJCS["TMNEVAEF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",34.74999999610218],PARAMETER["central_meridian",-115.5833333277883],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",656166.6665],PARAMETER["false_northing",26246666.66],UNIT["US Foot",0.30480061]] +TMNEVAEM,PROJCS["TMNEVAEM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",34.75],PARAMETER["central_meridian",-115.5833333],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",200000],PARAMETER["false_northing",8000000]] +TMNEVAWF,PROJCS["TMNEVAWF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",34.75],PARAMETER["central_meridian",-118.5833333],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEVAWF83,PROJCS["TMNEVAWF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",34.74999999610218],PARAMETER["central_meridian",-118.5833332957124],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",2624666.666],PARAMETER["false_northing",13123333.33],UNIT["US Foot",0.30480061]] +TMNEVAWM,PROJCS["TMNEVAWM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",34.75],PARAMETER["central_meridian",-118.5833333],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",800000],PARAMETER["false_northing",4000000]] +TMNEWHFT,PROJCS["TMNEWHFT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",42.5],PARAMETER["central_meridian",-71.66666669999998],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWHFT83,PROJCS["TMNEWHFT83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",42.4999999944085],PARAMETER["central_meridian",-71.6666667089164],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",984249.9998],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWHM,PROJCS["TMNEWHM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",42.4999999944085],PARAMETER["central_meridian",-71.66666665162062],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",300000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +TMNEWJFT,PROJCS["TMNEWJFT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",38.8333333],PARAMETER["central_meridian",-74.66666669999998],PARAMETER["scale_factor",0.999975],PARAMETER["false_easting",2000000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWJFT83,PROJCS["TMNEWJFT83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",38.83333332238231],PARAMETER["central_meridian",-74.49999999999996],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",492124.9999],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] 
+TMNEWJM,PROJCS["TMNEWJM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",38.83333332238231],PARAMETER["central_meridian",-74.50000001513894],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",150000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +TMNEWMCF,PROJCS["TMNEWMCF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",31],PARAMETER["central_meridian",-106.25],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWMCF83,PROJCS["TMNEWMCF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.99999999322522],PARAMETER["central_meridian",-106.2499999860212],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWMCM,PROJCS["TMNEWMCM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.99999999322522],PARAMETER["central_meridian",-106.2499999860212],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +TMNEWMEF,PROJCS["TMNEWMEF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",31],PARAMETER["central_meridian",-104.3333333],PARAMETER["scale_factor",0.999909091],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWMEF83,PROJCS["TMNEWMEF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.99999999322522],PARAMETER["central_meridian",-104.3333333191573],PARAMETER["scale_factor",0.999909091],PARAMETER["false_easting",541337.4999],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWMEM,PROJCS["TMNEWMEM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",31],PARAMETER["central_meridian",-104.3333333],PARAMETER["scale_factor",0.999909091],PARAMETER["false_easting",165000],PARAMETER["false_northing",0]] +TMNEWMWF,PROJCS["TMNEWMWF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",31],PARAMETER["central_meridian",-107.8333333],PARAMETER["scale_factor",0.999916667],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWMWF83,PROJCS["TMNEWMWF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.99999999322522],PARAMETER["central_meridian",-107.8333332721862],PARAMETER["scale_factor",0.999916667],PARAMETER["false_easting",2723091.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWMWM,PROJCS["TMNEWMWM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",30.99999999322522],PARAMETER["central_meridian",-107.833333329482],PARAMETER["scale_factor",0.999916667],PARAMETER["false_easting",830000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +TMNEWYCF,PROJCS["TMNEWYCF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40],PARAMETER["central_meridian",-76.58333329999996],PARAMETER["scale_factor",0.9999375],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWYCF83,PROJCS["TMNEWYCF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.0000000115891],PARAMETER["central_meridian",-76.5833332864086],PARAMETER["scale_factor",0.9999375],PARAMETER["false_easting",820208.3332],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] 
+TMNEWYCM,PROJCS["TMNEWYCM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.0000000115891],PARAMETER["central_meridian",-76.5833333437044],PARAMETER["scale_factor",0.9999375],PARAMETER["false_easting",250000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +TMNEWYEF,PROJCS["TMNEWYEF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40],PARAMETER["central_meridian",-74.33333329999998],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWYEF83,PROJCS["TMNEWYEF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",38.83333332238231],PARAMETER["central_meridian",-74.4999999979502],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",492124.9999],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWYEM,PROJCS["TMNEWYEM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",38.83333332238231],PARAMETER["central_meridian",-74.50000001513894],PARAMETER["scale_factor",0.999966667],PARAMETER["false_easting",150000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +TMNEWYWF,PROJCS["TMNEWYWF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40],PARAMETER["central_meridian",-78.58333329999994],PARAMETER["scale_factor",0.9999375],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWYWF83,PROJCS["TMNEWYWF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.0000000115891],PARAMETER["central_meridian",-78.58333328412327],PARAMETER["scale_factor",0.9999375],PARAMETER["false_easting",1148291.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMNEWYWM,PROJCS["TMNEWYWM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.0000000115891],PARAMETER["central_meridian",-78.58333334141905],PARAMETER["scale_factor",0.9999375],PARAMETER["false_easting",350000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +TMNIGE,PROJCS["TMNIGE",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",4],PARAMETER["central_meridian",12.5],PARAMETER["scale_factor",0.99975],PARAMETER["false_easting",1110369.7],PARAMETER["false_northing",0]] +TMNIGM,PROJCS["TMNIGM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",4],PARAMETER["central_meridian",8.499999999999998],PARAMETER["scale_factor",0.99975],PARAMETER["false_easting",670553.98],PARAMETER["false_northing",0]] +TMNIGW,PROJCS["TMNIGW",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",4],PARAMETER["central_meridian",4.499999999999997],PARAMETER["scale_factor",0.99975],PARAMETER["false_easting",230738.26],PARAMETER["false_northing",0]] +TMNORAND,PROJCS["TMNORAND",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-79.5],PARAMETER["scale_factor",0.999861],PARAMETER["false_easting",304800],PARAMETER["false_northing",0]] +TMNSEA,PROJCS["TMNSEA",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMNYEMEN,PROJCS["TMNYEMEN",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",42],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] 
+TMNZAMUR,PROJCS["TMNZAMUR",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-42.68888888888888],PARAMETER["central_meridian",173.01],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZBLUF,PROJCS["TMNZBLUF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-46.60000000000002],PARAMETER["central_meridian",168.342777777778],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZBULL,PROJCS["TMNZBULL",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-41.81055555555552],PARAMETER["central_meridian",171.5811111111108],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZBYPL,PROJCS["TMNZBYPL",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-37.76111111111108],PARAMETER["central_meridian",176.4661111111112],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZCOLL,PROJCS["TMNZCOLL",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-40.71472222222221],PARAMETER["central_meridian",172.6719444444446],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZGAWL,PROJCS["TMNZGAWL",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-43.74861111111112],PARAMETER["central_meridian",171.3605555555558],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZGREY,PROJCS["TMNZGREY",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-42.33361111111111],PARAMETER["central_meridian",171.5497222222221],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZHAWK,PROJCS["TMNZHAWK",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-39.65083333333333],PARAMETER["central_meridian",176.6736111111113],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZHOKI,PROJCS["TMNZHOKI",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-42.88611111111113],PARAMETER["central_meridian",170.979722222222],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZJACK,PROJCS["TMNZJACK",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-43.97777777777775],PARAMETER["central_meridian",168.6061111111109],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZKARA,PROJCS["TMNZKARA",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-41.28972222222218],PARAMETER["central_meridian",172.1088888888891],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZLIND,PROJCS["TMNZLIND",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-44.73499999999998],PARAMETER["central_meridian",169.4675],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] 
+TMNZMARL,PROJCS["TMNZMARL",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-41.54444444444444],PARAMETER["central_meridian",173.8019444444443],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZMTED,PROJCS["TMNZMTED",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-36.87972222222223],PARAMETER["central_meridian",174.7641666666668],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZMTNI,PROJCS["TMNZMTNI",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-45.13277777777778],PARAMETER["central_meridian",168.3986111111113],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZMTPL,PROJCS["TMNZMTPL",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-43.59055555555558],PARAMETER["central_meridian",172.7269444444442],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZMTYO,PROJCS["TMNZMTYO",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-45.56361111111111],PARAMETER["central_meridian",167.7386111111109],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZNELS,PROJCS["TMNZNELS",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-41.27444444444441],PARAMETER["central_meridian",173.2991666666666],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZNI,PROJCS["TMNZNI",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-38.99999999999996],PARAMETER["central_meridian",175.5],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",400000],UNIT["unnamed",0.914398415]] +TMNZNTAI,PROJCS["TMNZNTAI",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-45.86138888888888],PARAMETER["central_meridian",170.2824999999997],PARAMETER["scale_factor",0.99996],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZOBSE,PROJCS["TMNZOBSE",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-45.81611111111108],PARAMETER["central_meridian",170.6283333333334],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZOKAR,PROJCS["TMNZOKAR",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-43.11000000000002],PARAMETER["central_meridian",170.2608333333334],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZPOVE,PROJCS["TMNZPOVE",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-38.62444444444446],PARAMETER["central_meridian",177.8855555555558],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZSI,PROJCS["TMNZSI",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-44],PARAMETER["central_meridian",171.5],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",500000],UNIT["unnamed",0.914398415]] 
+TMNZTARA,PROJCS["TMNZTARA",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-39.13555555555555],PARAMETER["central_meridian",174.2277777777776],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZTIMA,PROJCS["TMNZTIMA",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-44.40194444444444],PARAMETER["central_meridian",171.0572222222222],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZTUHI,PROJCS["TMNZTUHI",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-39.51222222222219],PARAMETER["central_meridian",175.64],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZWAIR,PROJCS["TMNZWAIR",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-40.92527777777774],PARAMETER["central_meridian",175.6472222222223],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZWANG,PROJCS["TMNZWANG",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-40.24194444444444],PARAMETER["central_meridian",175.4880555555555],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMNZWELL,PROJCS["TMNZWELL",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-41.30111111111108],PARAMETER["central_meridian",174.7763888888886],PARAMETER["scale_factor",1],PARAMETER["false_easting",400000],PARAMETER["false_northing",800000],UNIT["unnamed",1]] +TMOGADEN,PROJCS["TMOGADEN",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",43],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMOMAN,PROJCS["TMOMAN",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",54],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMOSGB,PROJCS["TMOSGB",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",49],PARAMETER["central_meridian",-1.999999999999966],PARAMETER["scale_factor",0.999601272],PARAMETER["false_easting",400000],PARAMETER["false_northing",-100000]] +TMOSIRL,PROJCS["TMOSIRL",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",53.5],PARAMETER["central_meridian",-7.999999999999978],PARAMETER["scale_factor",1.000035],PARAMETER["false_easting",200000],PARAMETER["false_northing",250000]] +TMPARAG1,PROJCS["TMPARAG1",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-62.99999999999995],PARAMETER["scale_factor",1],PARAMETER["false_easting",4500000],PARAMETER["false_northing",10002288.3]] +TMPARAG2,PROJCS["TMPARAG2",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-59.99999999999994],PARAMETER["scale_factor",1],PARAMETER["false_easting",5500000],PARAMETER["false_northing",10002288.3]] +TMPARAG3,PROJCS["TMPARAG3",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-56.99999999999994],PARAMETER["scale_factor",1],PARAMETER["false_easting",6500000],PARAMETER["false_northing",10002288.3]] 
+TMPARAG4,PROJCS["TMPARAG4",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-54],PARAMETER["scale_factor",1],PARAMETER["false_easting",7500000],PARAMETER["false_northing",10002288.3]] +TMPERUBE,PROJCS["TMPERUBE",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-4.670833299999996],PARAMETER["central_meridian",-81.33497219999994],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +TMPERUC,PROJCS["TMPERUC",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-9.49999999999998],PARAMETER["central_meridian",-75.99999999999996],PARAMETER["scale_factor",0.99933],PARAMETER["false_easting",720000],PARAMETER["false_northing",1039979.16]] +TMPERUE,PROJCS["TMPERUE",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-9.49999999999998],PARAMETER["central_meridian",-70.49999999999996],PARAMETER["scale_factor",0.9995299],PARAMETER["false_easting",1324000],PARAMETER["false_northing",1040084.56]] +TMPERUW,PROJCS["TMPERUW",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-5.999999999999955],PARAMETER["central_meridian",-80.49999999999996],PARAMETER["scale_factor",0.9998301],PARAMETER["false_easting",222000],PARAMETER["false_northing",1426834.74]] +TMPHIL1,PROJCS["TMPHIL1",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",117],PARAMETER["scale_factor",0.99995],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMPHIL2,PROJCS["TMPHIL2",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",119],PARAMETER["scale_factor",0.99995],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMPHIL3,PROJCS["TMPHIL3",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",121],PARAMETER["scale_factor",0.99995],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMPHIL4,PROJCS["TMPHIL4",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",123],PARAMETER["scale_factor",0.99995],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMPHIL5,PROJCS["TMPHIL5",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",125],PARAMETER["scale_factor",0.99995],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMPHIL6,PROJCS["TMPHIL6",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",127],PARAMETER["scale_factor",0.99995],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMPNG55S,PROJCS["TMPNG55S",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",147],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]] +TMPOLAND,PROJCS["TMPOLAND",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",18],PARAMETER["scale_factor",0.999923],PARAMETER["false_easting",6500000],PARAMETER["false_northing",0]] +TMPORT,PROJCS["TMPORT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",39.66666666666666],PARAMETER["central_meridian",-8.13190611111111],PARAMETER["scale_factor",1],PARAMETER["false_easting",200000],PARAMETER["false_northing",300000]] 
+TMPORTL,PROJCS["TMPORTL",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",39.66666666666666],PARAMETER["central_meridian",1],PARAMETER["scale_factor",1],PARAMETER["false_easting",200000],PARAMETER["false_northing",300000]] +TMPORT_SHG73,PROJCS["TMPORT_SHG73",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",39.66666666666666],PARAMETER["central_meridian",-8.13190611111111],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +TMQATAR,PROJCS["TMQATAR",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",24.45],PARAMETER["central_meridian",51.2166666],PARAMETER["scale_factor",1],PARAMETER["false_easting",200000],PARAMETER["false_northing",300000]] +TMRHODIF,PROJCS["TMRHODIF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.0833333],PARAMETER["central_meridian",-71.49999999999996],PARAMETER["scale_factor",0.9999938],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMRHODIF83,PROJCS["TMRHODIF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.08333331264934],PARAMETER["central_meridian",-71.49999998991905],PARAMETER["scale_factor",0.99999375],PARAMETER["false_easting",328083.3333],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMRHODIM,PROJCS["TMRHODIM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",41.0833333],PARAMETER["central_meridian",-71.49999999999996],PARAMETER["scale_factor",0.99999375],PARAMETER["false_easting",100000],PARAMETER["false_northing",0]] +TMRT90,PROJCS["TMRT90",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15.80827799022624],PARAMETER["scale_factor",1],PARAMETER["false_easting",1500000],PARAMETER["false_northing",0]] +TMS114E,PROJCS["TMS114E",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",114],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]] +TMS116E,PROJCS["TMS116E",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",116],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]] +TMSAM19S,PROJCS["TMSAM19S",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-69.00000000709966],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]] +TMSAM20S,PROJCS["TMSAM20S",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-19.99999998860582],PARAMETER["central_meridian",-59.99999998873577],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +TMSAMER,PROJCS["TMSAMER",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-59.99999999999994],PARAMETER["scale_factor",0.99],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +TMSAMERA,PROJCS["TMSAMERA",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-54],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +TMSHABWA,PROJCS["TMSHABWA",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",45],PARAMETER["scale_factor",1],PARAMETER["false_easting",8500000],PARAMETER["false_northing",0]] 
+TMSHK167,PROJCS["TMSHK167",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",167],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]] +TMSHLCNS,PROJCS["TMSHLCNS",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",0],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMSHLHOL,PROJCS["TMSHLHOL",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",5],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMSHLYEM,PROJCS["TMSHLYEM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",42],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMSLO,PROJCS["TMSLO",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",500000],PARAMETER["false_northing",-5000000]] +TMSUDAN,PROJCS["TMSUDAN",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",30],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMSURNAM,PROJCS["TMSURNAM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-54],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMSVIET,PROJCS["TMSVIET",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",106],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMSVNM,PROJCS["TMSVNM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",106],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMTIBU,PROJCS["TMTIBU",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",8.3847502],PARAMETER["central_meridian",-72.42263859999996],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",500000]] +TMTRUCST,PROJCS["TMTRUCST",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",55],PARAMETER["scale_factor",1],PARAMETER["false_easting",1200000],PARAMETER["false_northing",0]] +TMTUNIS,PROJCS["TMTUNIS",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",11],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMTURK,PROJCS["TMTURK",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",33],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMTYRRE,PROJCS["TMTYRRE",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",14],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMTYRRW,PROJCS["TMTYRRW",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",11],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] 
+TMUNZ170,PROJCS["TMUNZ170",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",170],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]] +TMVERMTF,PROJCS["TMVERMTF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",42.5],PARAMETER["central_meridian",-72.5],PARAMETER["scale_factor",0.999964286],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMVERMTF83,PROJCS["TMVERMTF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",42.4999999944085],PARAMETER["central_meridian",-72.50000001742427],PARAMETER["scale_factor",0.999964286],PARAMETER["false_easting",1640416.666],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMVERMTM,PROJCS["TMVERMTM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",42.4999999944085],PARAMETER["central_meridian",-72.50000001742427],PARAMETER["scale_factor",0.999964286],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["unnamed",1]] +TMVICMAP,PROJCS["TMVICMAP",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",145],PARAMETER["scale_factor",1],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]] +TMVIETS,PROJCS["TMVIETS",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",106],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMWTOECF83,LOCAL_CS["TMWTOECF83 - (unsupported)"] +TMWYO1FT,PROJCS["TMWYO1FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.6666667],PARAMETER["central_meridian",-105.1666667],PARAMETER["scale_factor",0.999941177],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMWYO2FT,PROJCS["TMWYO2FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.6666667],PARAMETER["central_meridian",-107.3333333],PARAMETER["scale_factor",0.999941177],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMWYO3FT,PROJCS["TMWYO3FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.6666667],PARAMETER["central_meridian",-108.75],PARAMETER["scale_factor",0.999941177],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMWYO4FT,PROJCS["TMWYO4FT",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.6666667],PARAMETER["central_meridian",-110.0833333],PARAMETER["scale_factor",0.999941177],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] +TMWYOE83,LOCAL_CS["TMWYOE83 - (unsupported)"] +TMWYOECM,PROJCS["TMWYOECM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.5],PARAMETER["central_meridian",-107.3333333],PARAMETER["scale_factor",0.9999375],PARAMETER["false_easting",400000],PARAMETER["false_northing",100000]] +TMWYOEM,PROJCS["TMWYOEM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.5],PARAMETER["central_meridian",-105.1666667],PARAMETER["scale_factor",0.9999375],PARAMETER["false_easting",200000],PARAMETER["false_northing",0]] +TMWYOWCF83,PROJCS["TMWYOWCF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.49999999669383],PARAMETER["central_meridian",-108.7500000261364],PARAMETER["scale_factor",0.9999375],PARAMETER["false_easting",1968500],PARAMETER["false_northing",0],UNIT["US Foot",0.30480061]] 
+TMWYOWCM,PROJCS["TMWYOWCM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.5],PARAMETER["central_meridian",-108.75],PARAMETER["scale_factor",0.9999375],PARAMETER["false_easting",600000],PARAMETER["false_northing",0]] +TMWYOWF83,PROJCS["TMWYOWF83",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.49999999669383],PARAMETER["central_meridian",-110.083333319749],PARAMETER["scale_factor",0.9999375],PARAMETER["false_easting",2624666.666],PARAMETER["false_northing",328083.3333],UNIT["US Foot",0.30480061]] +TMWYOWM,PROJCS["TMWYOWM",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",40.49999999669383],PARAMETER["central_meridian",-110.083333319749],PARAMETER["scale_factor",0.9999375],PARAMETER["false_easting",800000],PARAMETER["false_northing",100000],UNIT["unnamed",1]] +TMYEMEN,PROJCS["TMYEMEN",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",42],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]] +TMYUG5,PROJCS["TMYUG5",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",1],PARAMETER["false_easting",5500000],PARAMETER["false_northing",0]] +TMYUG5SF,PROJCS["TMYUG5SF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",5500000],PARAMETER["false_northing",0]] +TMYUG6,PROJCS["TMYUG6",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",18],PARAMETER["scale_factor",1],PARAMETER["false_easting",6500000],PARAMETER["false_northing",0]] +TMYUG6SF,PROJCS["TMYUG6SF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",18],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",6500000],PARAMETER["false_northing",0]] +TMYUG7,PROJCS["TMYUG7",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",21],PARAMETER["scale_factor",1],PARAMETER["false_easting",7500000],PARAMETER["false_northing",0]] +TMYUG7SF,PROJCS["TMYUG7SF",PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",21],PARAMETER["scale_factor",0.9999],PARAMETER["false_easting",7500000],PARAMETER["false_northing",0]] +VG120E,PROJCS["VG120E",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",120],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +VG120W,PROJCS["VG120W",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",-120],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +VG150E,PROJCS["VG150E",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",150],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +VG150W,PROJCS["VG150W",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",-150],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +VG180E,PROJCS["VG180E",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",180],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +VG30E,PROJCS["VG30E",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",30],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +VG30W,PROJCS["VG30W",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",-29.99999999999995],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] 
+VG60E,PROJCS["VG60E",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",60],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +VG60W,PROJCS["VG60W",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",-59.99999999999994],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +VG90E,PROJCS["VG90E",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",90],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +VG90EAST,PROJCS["VG90EAST",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",90],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +VG90W,PROJCS["VG90W",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",-89.99999999999994],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +VG90WEST,PROJCS["VG90WEST",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",-89.99999999999994],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +VGSPHERE,PROJCS["VGSPHERE",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",0],PARAMETER["false_easting",0],PARAMETER["false_northing",0]] +VGWORLD,PROJCS["VGWORLD",PROJECTION["VanDerGrinten"],PARAMETER["central_meridian",0],PARAMETER["false_easting",20000000],PARAMETER["false_northing",20000000]] +W3SPHERE,LOCAL_CS["W3SPHERE - (unsupported)"] +ACCRA,GEOGCS["ACCRA",DATUM["ACCRA",SPHEROID["WAROFFFT",20926201,296]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +ADINDAN,GEOGCS["ADINDAN",DATUM["ADINDAN",SPHEROID["CLA80MOD",6378249.145,293.465]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +AGD66,GEOGCS["AUSTRALIAN GEODETIC",DATUM["AGD66",SPHEROID["ANS",6378160,298.25]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +AGD66NTV,GEOGCS["AUSTRALIAN GEODETIC",DATUM["AGD66NTV",SPHEROID["ANS",6378160,298.25]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +AGD84,GEOGCS["AUSTRALIAN GEODETIC",DATUM["AGD84",SPHEROID["ANS",6378160,298.25]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +AINABD70,GEOGCS["AIN EL ABD (1970)",DATUM["AINABD70",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +ARATU,GEOGCS["ARATU",DATUM["ARATU",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +ARC1950,GEOGCS["NEW ARC 1950",DATUM["ARC1950",SPHEROID["CLA80RSA",6378249.145,293.4663077]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +ARC1960,GEOGCS["NEW ARC 1960",DATUM["ARC1960",SPHEROID["CLA80MOD",6378249.145,293.465]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +BAHRAIN,GEOGCS["BAHRAIN (AIN EL ABD)",DATUM["BAHRAIN",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +BATAVIA,GEOGCS["BATAVIA(JAKARTA)",DATUM["BATAVIA",SPHEROID["BESS1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +BEDUARAM,GEOGCS["BEDUARAM",DATUM["BEDUARAM",SPHEROID["CLA80IGN",6378249.2,293.4660213]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +BEIJING,GEOGCS["BEIJING 1954",DATUM["BEIJING",SPHEROID["KRAS1940",6378245,298.3]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +BELG50,GEOGCS["BELGIUM 1950",DATUM["BELG50",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +BERNNEW,GEOGCS["BERN",DATUM["BERNNEW",SPHEROID["BESS1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +BOGOTA,GEOGCS["BOGOTA",DATUM["BOGOTA",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] 
+CAMACUPA,GEOGCS["CAMACUPA",DATUM["CAMACUPA",SPHEROID["CLA80MOD",6378249.145,293.465]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +CAPE,GEOGCS["CAPE DATUM",DATUM["CAPE",SPHEROID["CLA80MOD",6378249.145,293.465]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +CARTHAGE,GEOGCS["CARTHAGE",DATUM["CARTHAGE",SPHEROID["CLA80IGN",6378249.2,293.4660213]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +CHUA,GEOGCS["CHUA ASTRONOMIC",DATUM["CHUA",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +CLRK1866,GEOGCS["NORTH AMERICAN 1927",DATUM["CLRK1866",SPHEROID["CLA66MTR",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +CMPOINCH,GEOGCS["CAMPO INCHAUSPE",DATUM["CMPOINCH",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +CORRALEG,GEOGCS["CORREGO ALEGRE",DATUM["CORRALEG",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +DEIR,GEOGCS["DEIR EZ ZOR",DATUM["DEIR",SPHEROID["CLA80IGN",6378249.2,293.4660213]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +ED50,GEOGCS["EUROPEAN DATUM 1950",DATUM["ED50",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +ED50EGYT,GEOGCS["EUROPEAN DATUM 1950",DATUM["ED50EGYT",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +ED50SVAL,GEOGCS["ED50 (SVALBARD)",DATUM["ED50SVAL",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +EDMCE75,GEOGCS["EUROPEAN [ED(MCE)75]",DATUM["EDMCE75",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +EGSA87,GEOGCS["GREEK DATUM (1989)",DATUM["EGSA87",SPHEROID["GRS80",6378137,298.2572236]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +EGYPT07,GEOGCS["EGYPT 1907",DATUM["EGYPT07",SPHEROID["HELM1906",6378200,298.3]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +EGYPT24,GEOGCS["NEW EGYPT 1930",DATUM["EGYPT24",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +EVVIETNM,GEOGCS["EVEREST-VIETNAM",DATUM["EVVIETNM",SPHEROID["EV37ADJ",6377276.345,300.8017]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +FAHUD,GEOGCS["FAHUD",DATUM["FAHUD",SPHEROID["CLA80MOD",6378249.145,293.465]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +FINKKJ,GEOGCS["FINKKJ (Finland)",DATUM["FINKKJ",SPHEROID["HAYF1910",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +GDA94,GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +GDA2020,GEOGCS["GDA2020",DATUM["Geocentric_Datum_of_Australia_2020",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AXIS["Latitude",NORTH],AXIS["Longitude",EAST],AUTHORITY["EPSG","7844"]] +GEM6,GEOGCS["GEM6",DATUM["GEM6",SPHEROID[,6378144,298.257]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +HGRS87,GEOGCS["GREEK DATUM (1989)",DATUM["HGRS87",SPHEROID["GRS80",6378137,298.2572236]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +HK80,GEOGCS["HONG KONG 1980",DATUM["HK80",SPHEROID["HAYF1910",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +IND74,GEOGCS["INDONESIAN 
1974",DATUM["IND74",SPHEROID["INDNAT",6378160,298.247]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +INDIAN54,GEOGCS["INDIAN 1954",DATUM["INDIAN54",SPHEROID["EV37ADJ",6377276.345,300.8017]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +INDIAN60,GEOGCS["INDIAN 1960",DATUM["INDIAN60",SPHEROID["EVERST1830",6377276.345,300.8017]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +INDIAN75,GEOGCS["INDIAN 1975",DATUM["INDIAN75",SPHEROID["EV37ADJ",6377276.345,300.8017]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +ISRLURIM,GEOGCS["ISRAEL URIM",DATUM["ISRLURIM",SPHEROID["CLA80BEN",6378300.79,293.4663696]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +JA1875,GEOGCS["JAMAICA 1875",DATUM["JA1875",SPHEROID["CLA80IFT",20926202,293.4663077]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +JAD69,GEOGCS["JAMAICA 1969",DATUM["JAD69",SPHEROID["CLA66MTR",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +KKJ,GEOGCS["KKJ (Finland)",DATUM["KKJ",SPHEROID["HAYF1910",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +KALIANPR,GEOGCS["KALIANPUR",DATUM["KALIANPR",SPHEROID["EVINDMTR",6377301.243,300.8017255]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +KARBALA,GEOGCS["KARBALA",DATUM["KARBALA",SPHEROID["CLA80MOD",6378249.145,293.465]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +KERTAU,GEOGCS["KERTAU",DATUM["KERTAU",SPHEROID["EVMODMAL",6377304.063,300.8017]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +KOC,GEOGCS["KUWAIT OIL COMPANY",DATUM["KOC",SPHEROID["CLA80MOD",6378249.145,293.465]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +KOREA,GEOGCS["KOREA TM",DATUM["KOREA",SPHEROID["BESS1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +LACANOA,GEOGCS["LA CANOA",DATUM["LACANOA",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +LEIGON,GEOGCS["LEIGON",DATUM["LEIGON",SPHEROID["CLA80MOD",6378249.145,293.465]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +LISBOA,GEOGCS["LISBOA (LISBON)",DATUM["LISBOA",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +LISBON,GEOGCS["LISBON (LISBOA)",DATUM["LISBON",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +LISBONBESSEL,GEOGCS["LISBON (LISBOA)BESSEL",DATUM["LISBONBESSEL",SPHEROID["BESSELPORT",6377397.155,297.15281285]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +LUZON11,GEOGCS["LUZON 1911",DATUM["LUZON11",SPHEROID["CLA66MTR",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +MAHE71,GEOGCS["MAHE 1971",DATUM["MAHE71",SPHEROID["CLA80MOD",6378249.145,293.465]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +MAKASSAR,GEOGCS["MAKASSAR",DATUM["MAKASSAR",SPHEROID["BESS1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +MALONG79,GEOGCS["MALONGO 1979",DATUM["MALONG79",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +MALONG90,GEOGCS["MALONGO 1990",DATUM["MALONG90",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +MANOKA,GEOGCS["MANOKA",DATUM["MANOKA",SPHEROID["CLA80MOD",6378249.145,293.465]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +MELRICA,GEOGCS["MELRICA (PORTUGAL)",DATUM["MELRICA",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] 
+MGIBESS,GEOGCS["MGIBESS",DATUM["MGIBESS",SPHEROID["BESS1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +MINAA,GEOGCS["MINAA",DATUM["MINAA",SPHEROID["CLA80MOD",6378249.145,293.465]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +MONTEMAR,GEOGCS["MONTE MARIO",DATUM["MONTEMAR",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +MONTROME,GEOGCS["MONTE MARIO",DATUM["MONTROME",SPHEROID["INT24",6378388,297]],PRIMEM["Rome",12.45233333333333],UNIT["degree",0.0174532925199433]] +MPORO,GEOGCS["M'PORALOKO",DATUM["MPORO",SPHEROID["CLA80IGN",6378249.2,293.4660213]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +NAD27,GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.978698213898,AUTHORITY["EPSG","7008"]],TOWGS84[-3,142,183,0,0,0,0],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9108"]],AXIS["Lat",NORTH],AXIS["Long",EAST],AUTHORITY["EPSG","4267"]] +NAD27A74,GEOGCS["NORTH AMERICAN 1927 (Adjusted 1974)",DATUM["NAD27A74",SPHEROID["CLA66MTR",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +NAD27A76,GEOGCS["NORTH AMERICAN 1927 (Adjusted 1976)",DATUM["NAD27A76",SPHEROID["CLA66MTR",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +NAD27AFT,GEOGCS["NORTH AMERICAN 1927",DATUM["NAD27AFT",SPHEROID["CLA66AFT",20925832.16,294.9786982]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +NAD27MOD,GEOGCS["NORTH AMERICAN 1927",DATUM["NAD27MOD",SPHEROID["CLA66MOD",20926631.53,294.9786982]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +NAD27MTR,GEOGCS["NORTH AMERICAN 1927",DATUM["NAD27MTR",SPHEROID["CLA66MTR",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +NAD83,GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9108"]],AXIS["Lat",NORTH],AXIS["Long",EAST],AUTHORITY["EPSG","4269"]] +NAHRWAN,GEOGCS["NAHRWAN 1967",DATUM["NAHRWAN",SPHEROID["CLA80MOD",6378249.145,293.465]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +NAMIBIA,GEOGCS["NAMIBIA",DATUM["NAMIBIA",SPHEROID["BESS1841",6377483.865,299.1528128]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +NTF,GEOGCS["N.T.F.",DATUM["NTF",SPHEROID["CLA80IGN",6378249.2,293.4660213],TOWGS84[-168,-60,320,0,0,0,0]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +NTFPARG,GEOGCS["N.T.F",DATUM["NTFPARG",SPHEROID["CLA80IGN",6378249.2,293.4660213],TOWGS84[-168,-60,320,0,0,0,0]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +NTFPARIS,GEOGCS["N.T.F",DATUM["NTFPARIS",SPHEROID["CLA80IGN",6378249.2,293.4660213],TOWGS84[-168,-60,320,0,0,0,0]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +NWL9D,GEOGCS["NWL-9D",DATUM["NWL9D",SPHEROID["NWL9D",6378145,298.25]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +NZGD49,GEOGCS["NEW ZEALAND 1949",DATUM["NZGD49",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +NZGD2000,GEOGCS["NEW ZEALAND 2000",DATUM["NZGD2000",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +OSGB36,GEOGCS["ORDNANCE SURVEY 
1936",DATUM["OSGB36",SPHEROID["AIRY",6377563.396,299.3249646]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +OSGB70,GEOGCS["OSGB 1970 (SN)",DATUM["OSGB70",SPHEROID["AIRY",6377563.396,299.3249646]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +OSSN80,GEOGCS["OS (SN) 1980",DATUM["OSSN80",SPHEROID["AIRY",6377563.396,299.3249646]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +PADANG,GEOGCS["PADANG 1884",DATUM["PADANG",SPHEROID["BESS1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +PALEST23,GEOGCS["PALESTINE 1923",DATUM["PALEST23",SPHEROID["CLA80BEN",6378300.79,293.4663696]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +PLESSIS,GEOGCS["FRANCE 1822",DATUM["PLESSIS",SPHEROID["PLES1822",6376523,308.64]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +POTSDAM,GEOGCS["POTSDAM",DATUM["POTSDAM",SPHEROID["BESS1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +PRS92,GEOGCS["PHILIPPINES REFERENCE SYSTEM 1992",DATUM["PRS92",SPHEROID["CLA66MTR",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +PSAD56,GEOGCS["PSAD 1956",DATUM["PSAD56",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +PTNOIRE,GEOGCS["POINT NOIRE (ASTRO)",DATUM["PTNOIRE",SPHEROID["CLA80IGN",6378249.2,293.4660213]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +PULKOVO,GEOGCS["PULKOVO 1942",DATUM["PULKOVO",SPHEROID["KRAS1940",6378245,298.3]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +QATAR,GEOGCS["QATAR",DATUM["QATAR",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +QATAR51,GEOGCS["QATAR GRID 1948",DATUM["QATAR51",SPHEROID["HELM1906",6378200,298.3]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +QORNOQ,GEOGCS["QORNOQ",DATUM["QORNOQ",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +RD,GEOGCS["RIJKDRIEHOEKSMETING",DATUM["RD",SPHEROID["BESS1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +RGF93,GEOGCS["RESEAU GEODESIQUE FRANCAIS 1993",DATUM["RESEAU GEODESIQUE FRANCAIS 1993",SPHEROID["IAG GRS 1980",6378137.0000,298.2572221010000,AUTHORITY["IGNF","ELG037"]],TOWGS84[0.0000,0.0000,0.0000,0,0,0,0],AUTHORITY["IGNF","REG024"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","RGF93G"]] +SAD69,GEOGCS["SOUTH AMERICAN 1969",DATUM["SAD69",SPHEROID["INT67",6378160,298.25]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +SECLV,GEOGCS["QASCO",DATUM["SECLV",SPHEROID["ANS",6378160,298.25]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +SLOVENIA,GEOGCS["SLOVENIAN DATUM",DATUM["SLOVENIA",SPHEROID["BESS1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +SPHERE,GEOGCS["NOT SPECIFIED",DATUM["SPHERE",SPHEROID["SPHERE",6371000,0]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +SPHERE2,GEOGCS["NOT SPECIFIED",DATUM["SPHERE2",SPHEROID["SPHERE",6370997,0]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +SUDAN,GEOGCS["SUDAN DATUM",DATUM["SUDAN",SPHEROID["CLA80IGN",6378249.2,293.4660213]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +TANANAR,GEOGCS["TANANARIVE 1925",DATUM["TANANAR",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +TANANPAR,GEOGCS["TANANARIVE 
1925",DATUM["TANANPAR",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +TIMBALAI,GEOGCS["TIMBALAI",DATUM["TIMBALAI",SPHEROID["EVERST67",6377298.556,300.8017]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +TIMBALFT,GEOGCS["TIMBALAI",DATUM["TIMBALFT",SPHEROID["EVIMPFT",20922931.8,300.8017]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +TM65,GEOGCS["TM65",DATUM["TM65",SPHEROID["AIRYMOD",6377340.189,299.3249646]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +TM75,GEOGCS["TM75",DATUM["TM75",SPHEROID["AIRYMOD",6377340.189,299.3249646]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +TOKYO,GEOGCS["TOKYO",DATUM["TOKYO",SPHEROID["BESS1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +TRUCIAL,GEOGCS["TRUCIAL COAST 1948",DATUM["TRUCIAL",SPHEROID["HELM1906",6378200,298.3]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USAIRMOD,GEOGCS["NOT SPECIFIED",DATUM["USAIRMOD",SPHEROID["AIRYMOD",6377340.189,299.3249646]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USAIRY,GEOGCS["NOT SPECIFIED",DATUM["USAIRY",SPHEROID["AIRY",6377563.396,299.3249646]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USANS,GEOGCS["NOT SPECIFIED",DATUM["USANS",SPHEROID["ANS",6378160,298.25]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USBESMOD,GEOGCS["NOT SPECIFIED",DATUM["USBESMOD",SPHEROID["BESSMOD",6377492.018,299.1528128]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USBESS,GEOGCS["NOT SPECIFIED",DATUM["USBESS",SPHEROID["BESS1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USC58MTR,GEOGCS["NOT SPECIFIED",DATUM["USC58MTR",SPHEROID["CLA58MTR",6378293.645,294.2606764]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USC66AFT,GEOGCS["NOT SPECIFIED",DATUM["USC66AFT",SPHEROID["CLA66AFT",20925832.16,294.9786982]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USC66MTR,GEOGCS["NOT SPECIFIED",DATUM["USC66MTR",SPHEROID["CLA66MTR",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USC80IGN,GEOGCS["NOT SPECIFIED",DATUM["USC80IGN",SPHEROID["CLA80IGN",6378249.2,293.4660213]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USC80MOD,GEOGCS["NOT SPECIFIED",DATUM["USC80MOD",SPHEROID["CLA80MOD",6378249.145,293.465]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USC80RSA,GEOGCS["NOT SPECIFIED",DATUM["USC80RSA",SPHEROID["CLA80RSA",6378249.145,293.4663077]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USEV37AD,GEOGCS["NOT SPECIFIED",DATUM["USEV37AD",SPHEROID["EV37ADJ",6377276.345,300.8017]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USEV67,GEOGCS["NOT SPECIFIED",DATUM["USEV67",SPHEROID["EVERST67",6377298.556,300.8017]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USGRS67,GEOGCS["NOT SPECIFIED",DATUM["USGRS67",SPHEROID["GRS67",6378160,298.2471674]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USGRS80,GEOGCS["NOT SPECIFIED",DATUM["USGRS80",SPHEROID["GRS80",6378137,298.2572221]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USHAYF10,GEOGCS["NOT SPECIFIED",DATUM["USHAYF10",SPHEROID["HAYF1910",6378388,296.9592625]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USHELM,GEOGCS["NOT SPECIFIED",DATUM["USHELM",SPHEROID["HELM1906",6378200,298.3]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USINT24,GEOGCS["NOT 
SPECIFIED",DATUM["USINT24",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USINT67,GEOGCS["NOT SPECIFIED",DATUM["USINT67",SPHEROID["INT67",6378160,298.25]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USKRAS40,GEOGCS["NOT SPECIFIED",DATUM["USKRAS40",SPHEROID["KRAS1940",6378245,298.3]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USNWL10D,GEOGCS["NOT SPECIFIED",DATUM["USNWL10D",SPHEROID["NWL10D",6378135,298.26]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USNWL9D,GEOGCS["NOT SPECIFIED",DATUM["USNWL9D",SPHEROID["NWL9D",6378145,298.25]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +USSPHERE,GEOGCS["NOT SPECIFIED",DATUM["USSPHERE",SPHEROID["SPHERE",6371000,0]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +WGS72BE,GEOGCS["BROADCAST EPHEMERIS",DATUM["WGS72BE",SPHEROID["NWL10D",6378135,298.26]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +WGS72DOD,GEOGCS["WGS 72 (DoD)",DATUM["WGS72DOD",SPHEROID["NWL10D",6378135,298.26]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +WGS84,GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9108"]],AXIS["Lat",NORTH],AXIS["Long",EAST],AUTHORITY["EPSG","4326"]] +XIAN80,GEOGCS["XIAN 1980",DATUM["XIAN80",SPHEROID["GRS80",6378137,298.2572221]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +YEMHSL,GEOGCS["YEMEN HSL (LOCAL)",DATUM["YEMHSL",SPHEROID["INT24",6378388,297]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +YOFF2000,GEOGCS["YOFF2000",DATUM["YOFF2000",SPHEROID["CLA80IGN",6378249.2,293.4660213]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +VENUS,GEOGCS["VENUS MGN",DATUM["VENUS",SPHEROID["VENUS",6051920,1]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]] +PLESSIS,GEOGCS["ANCIENNE TRIANGULATION DES INGENIEURS",DATUM["ANCIENNE TRIANGULATION DES INGENIEURS GEOGRAPHES",SPHEROID["PLESSIS 1817",6376523.0000,308.6400000000000,AUTHORITY["IGNF","ELG017"]],TOWGS84[1127.0000,22.0000,57.0000,0,0,0,0],AUTHORITY["IGNF","REG008"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","ATIGEO"]] +CSG67,GEOGCS["GUYANE CSG67",DATUM["CSG 1967",SPHEROID["International-Hayford 1909",6378388.0000,297.0000000000000,AUTHORITY["IGNF","ELG001"]],TOWGS84[-193.0660,236.9930,105.4470,0.4814,-0.8074,0.1276,1.564900],AUTHORITY["IGNF","REG407"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","CSG67GEO"]] +ED50FRA,GEOGCS["ED50 FRANCE",DATUM["ED50 FRANCE",SPHEROID["International-Hayford 1909",6378388.0000,297.0000000000000,AUTHORITY["IGNF","ELG001"]],TOWGS84[-84.0000,-97.0000,-117.0000,0,0,0,0],AUTHORITY["IGNF","REG101"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","ED50G"]] +GUAD48,GEOGCS["GUADELOUPE STE ANNE",DATUM["GUADELOUPE STE ANNE",SPHEROID["International-Hayford 
1909",6378388.0000,297.0000000000000,AUTHORITY["IGNF","ELG001"]],TOWGS84[-472.2900,-5.6300,-304.1200,0.4362,-0.8374,0.2563,1.898400],AUTHORITY["IGNF","REG425"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","GUAD48GEO"]] +STMART,GEOGCS["GUADELOUPE FORT MARIGOT",DATUM["GUADELOUPE FORT MARIGOT",SPHEROID["International-Hayford 1909",6378388.0000,297.0000000000000,AUTHORITY["IGNF","ELG001"]],TOWGS84[136.5960,248.1480,-429.7890,0,0,0,0],AUTHORITY["IGNF","REG426"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","GUADFM49GEO"]] +IGN72,GEOGCS["IGN 1972 GRANDE-TERRE / ILE DES PINS",DATUM["IGN72 GRANDE-TERRE / ILE DES PINS",SPHEROID["International-Hayford 1909",6378388.0000,297.0000000000000,AUTHORITY["IGNF","ELG001"]],TOWGS84[-11.6400,-348.6000,291.6800,0,0,0,0],AUTHORITY["IGNF","REG548"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","IGN72GEO"]] +MART38,GEOGCS["MARTINIQUE FORT-DESAIX",DATUM["MARTINIQUE FOT-DESAIX",SPHEROID["International-Hayford 1909",6378388.0000,297.0000000000000,AUTHORITY["IGNF","ELG001"]],TOWGS84[126.9260,547.9390,130.4090,-2.7867,5.1612,-0.8584,13.822650],AUTHORITY["IGNF","REG424"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","MART38GEO"]] +MCBN50,GEOGCS["MAYOTTE COMBANI",DATUM["Combani",SPHEROID["International-Hayford 1909",6378388.0000,297.0000000000000,AUTHORITY["IGNF","ELG001"]],TOWGS84[-599.9280,-275.5520,-195.6650,-0.0835,-0.4715,0.0602,49.281400],AUTHORITY["IGNF","REG318"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","MAYO50GEO"]] +REUN47,GEOGCS["REUNION 1947",DATUM["REUNION-PITON-DES-NEIGES",SPHEROID["International-Hayford 1909",6378388.0000,297.0000000000000,AUTHORITY["IGNF","ELG001"]],TOWGS84[789.5240,-626.4860,-89.9040,0.6006,76.7946,-10.5788,-32.324100],AUTHORITY["IGNF","REG317"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","REUN47GEO"]] +RGFG95,GEOGCS["RESEAU GEODESIQUE FRANCAIS DE GUYANE 1995",DATUM["RESEAU GEODESIQUE FRANCAIS DE GUYANE 1995",SPHEROID["IAG GRS 1980",6378137.0000,298.2572221010000,AUTHORITY["IGNF","ELG037"]],TOWGS84[0.0000,0.0000,0.0000,0,0,0,0],AUTHORITY["IGNF","REG486"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","RGFG95GEO"]] +RGM04,GEOGCS["RGM04 (RESEAU GEODESIQUE DE MAYOTTE 2004)",DATUM["RGM04 (RESEAU GEODESIQUE DE MAYOTTE 2004)",SPHEROID["IAG GRS 1980",6378137.0000,298.2572221010000,AUTHORITY["IGNF","ELG037"]],TOWGS84[0.0000,0.0000,0.0000,0,0,0,0],AUTHORITY["IGNF","REG702"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","RGM04GEO"]] +RGNC,GEOGCS["RESEAU GEODESIQUE DE NOUVELLE-CALEDONIE",DATUM["RESEAU GEODESIQUE DE NOUVELLE-CALEDONIE (RGNC 1991)",SPHEROID["IAG GRS 
1980",6378137.0000,298.2572221010000,AUTHORITY["IGNF","ELG037"]],TOWGS84[0.0000,0.0000,0.0000,0,0,0,0],AUTHORITY["IGNF","REG547"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","RGNCGEO"]] +RGPF,GEOGCS["RGPF (RESEAU GEODESIQUE DE POLYNESIE FRANCAISE)",DATUM["RGPF (RESEAU GEODESIQUE DE POLYNESIE FRANCAISE)",SPHEROID["IAG GRS 1980",6378137.0000,298.2572221010000,AUTHORITY["IGNF","ELG037"]],TOWGS84[0.0000,0.0000,0.0000,0,0,0,0],AUTHORITY["IGNF","REG032"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","RGPFGEO"]] +RGR92,GEOGCS["RESEAU GEODESIQUE DE LA REUNION 1992",DATUM["RESEAU GEODESIQUE DE LA REUNION 1992 (RGR92)",SPHEROID["IAG GRS 1980",6378137.0000,298.2572221010000,AUTHORITY["IGNF","ELG037"]],TOWGS84[0.0000,0.0000,0.0000,0,0,0,0],AUTHORITY["IGNF","REG700"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","RGR92GEO"]] +RGSPM06,GEOGCS["SAINT-PIERRE-ET-MIQUELON (2006)",DATUM["ST PIERRE ET MIQUELON 2006",SPHEROID["IAG GRS 1980",6378137.0000,298.2572221010000,AUTHORITY["IGNF","ELG037"]],TOWGS84[0.0000,0.0000,0.0000,0,0,0,0],AUTHORITY["IGNF","REG706"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","RGSPM06GEO"]] +RGTAAF07,GEOGCS["RESEAU GEODESIQUE DES TAAF (2007)",DATUM["RESEAU GEODESIQUE DES TERRES AUSTRALES ET ANTARCTIQUES FRANCAISES 2007",SPHEROID["IAG GRS 1980",6378137.0000,298.2572221010000,AUTHORITY["IGNF","ELG037"]],TOWGS84[0.0000,0.0000,0.0000,0,0,0,0],AUTHORITY["IGNF","REG036"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","RGTAAF07"]]',' +RRAF,GEOGCS["RESEAU DE REFERENCE DES ANTILLES FRANCAISES (1988-1991)",DATUM["RESEAU DE REFERENCE DES ANTILLES FRANCAISES (1988-1991)",SPHEROID["IAG GRS 1980",6378137.0000,298.2572221010000,AUTHORITY["IGNF","ELG037"]],TOWGS84[0.0000,0.0000,0.0000,0,0,0,0],AUTHORITY["IGNF","REG495"]],PRIMEM["Greenwich",0.000000000,AUTHORITY["IGNF","LGO01"]],UNIT["degree",0.01745329251994330],AXIS["Longitude",EAST],AXIS["Latitude",NORTH],AUTHORITY["IGNF","WGS84RRAFGEO"]] +GEOPORTALANF,PROJCS["GEOPORTAIL - ANTILLES FRANCAISES",PROJECTION["Equirectangular",AUTHORITY["IGNF","PRC9002"]],PARAMETER["latitude_of_origin",0.000000000],PARAMETER["central_meridian",0.000000000],PARAMETER["standard_parallel_1",15.000000000],PARAMETER["false_easting",0.000],PARAMETER["false_northing",0.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","GEOPORTALANF"]] +GEOPORTALASP,PROJCS["GEOPORTAIL - AMSTERDAM ET SAINT-PAUL",PROJECTION["Equirectangular",AUTHORITY["IGNF","PRC9012"]],PARAMETER["latitude_of_origin",0.000000000],PARAMETER["central_meridian",0.000000000],PARAMETER["standard_parallel_1",-38.000000000],PARAMETER["false_easting",0.000],PARAMETER["false_northing",0.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","GEOPORTALASP"]] +GEOPORTALCRZ,PROJCS["GEOPORTAIL - 
CROZET",PROJECTION["Equirectangular",AUTHORITY["IGNF","PRC9011"]],PARAMETER["latitude_of_origin",0.000000000],PARAMETER["central_meridian",0.000000000],PARAMETER["standard_parallel_1",-46.000000000],PARAMETER["false_easting",0.000],PARAMETER["false_northing",0.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","GEOPORTALCRZ"]] +GEOPORTALFXX,PROJCS["GEOPORTAIL - FRANCE METROPOLITAINE",PROJECTION["Equirectangular",AUTHORITY["IGNF","PRC9001"]],PARAMETER["latitude_of_origin",0.000000000],PARAMETER["central_meridian",0.000000000],PARAMETER["standard_parallel_1",46.500000000],PARAMETER["false_easting",0.000],PARAMETER["false_northing",0.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","GEOPORTALFXX"]] +GEOPORTALGUF,PROJCS["GEOPORTAIL - GUYANE",PROJECTION["Equirectangular",AUTHORITY["IGNF","PRC9003"]],PARAMETER["latitude_of_origin",0.000000000],PARAMETER["central_meridian",0.000000000],PARAMETER["standard_parallel_1",4.000000000],PARAMETER["false_easting",0.000],PARAMETER["false_northing",0.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","GEOPORTALGUF"]] +GEOPORTALKER,PROJCS["GEOPORTAIL - KERGUELEN",PROJECTION["Equirectangular",AUTHORITY["IGNF","PRC9010"]],PARAMETER["latitude_of_origin",0.000000000],PARAMETER["central_meridian",0.000000000],PARAMETER["standard_parallel_1",-49.500000000],PARAMETER["false_easting",0.000],PARAMETER["false_northing",0.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","GEOPORTALKER"]] +GEOPORTALMYT,PROJCS["GEOPORTAIL - MAYOTTE",PROJECTION["Equirectangular",AUTHORITY["IGNF","PRC9005"]],PARAMETER["latitude_of_origin",0.000000000],PARAMETER["central_meridian",0.000000000],PARAMETER["standard_parallel_1",-12.000000000],PARAMETER["false_easting",0.000],PARAMETER["false_northing",0.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","GEOPORTALMYT"]] +GEOPORTALNCL,PROJCS["GEOPORTAIL - NOUVELLE-CALEDONIE",PROJECTION["Equirectangular",AUTHORITY["IGNF","PRC9007"]],PARAMETER["latitude_of_origin",0.000000000],PARAMETER["central_meridian",0.000000000],PARAMETER["standard_parallel_1",-22.000000000],PARAMETER["false_easting",0.000],PARAMETER["false_northing",0.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","GEOPORTALNCL"]] +GEOPORTALPYF,PROJCS["GEOPORTAIL - POLYNESIE FRANCAISE",PROJECTION["Equirectangular",AUTHORITY["IGNF","PRC9009"]],PARAMETER["latitude_of_origin",0.000000000],PARAMETER["central_meridian",0.000000000],PARAMETER["standard_parallel_1",-15.000000000],PARAMETER["false_easting",0.000],PARAMETER["false_northing",0.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","GEOPORTALPYF"]] +GEOPORTALREU,PROJCS["GEOPORTAIL - REUNION ET DEPENDANCES",PROJECTION["Equirectangular",AUTHORITY["IGNF","PRC9004"]],PARAMETER["latitude_of_origin",0.000000000],PARAMETER["central_meridian",0.000000000],PARAMETER["standard_parallel_1",-21.000000000],PARAMETER["false_easting",0.000],PARAMETER["false_northing",0.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","GEOPORTALREU"]] +GEOPORTALSPM,PROJCS["GEOPORTAIL - SAINT-PIERRE ET 
MIQUELON",PROJECTION["Equirectangular",AUTHORITY["IGNF","PRC9006"]],PARAMETER["latitude_of_origin",0.000000000],PARAMETER["central_meridian",0.000000000],PARAMETER["standard_parallel_1",47.000000000],PARAMETER["false_easting",0.000],PARAMETER["false_northing",0.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","GEOPORTALSPM"]] +GEOPORTALWLF,PROJCS["GEOPORTAIL - WALLIS ET FUTUNA",PROJECTION["Equirectangular",AUTHORITY["IGNF","PRC9008"]],PARAMETER["latitude_of_origin",0.000000000],PARAMETER["central_meridian",0.000000000],PARAMETER["standard_parallel_1",-14.000000000],PARAMETER["false_easting",0.000],PARAMETER["false_northing",0.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","GEOPORTALWLF"]] +MILLER,PROJCS["GEOPORTAIL - MONDE",PROJECTION["Miller_Cylindrical",AUTHORITY["IGNF","PRC9901"]],PARAMETER["central_meridian",0.000000000],PARAMETER["false_easting",0.000],PARAMETER["false_northing",0.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","MILLER"]] +GLABREUN,PROJCS["REUNION GAUSS LABORDE",PROJECTION["Gauss_Schreiber_Transverse_Mercator",AUTHORITY["IGNF","PRC0508"]],PARAMETER["latitude_of_origin",-21.116666667],PARAMETER["central_meridian",55.533333333],PARAMETER["scale_factor",1.00000000],PARAMETER["false_easting",160000.000],PARAMETER["false_northing",50000.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","REUN47GAUSSL"]] +LAMBERTNC,PROJCS["LAMBERT NOUVELLE CALEDONIE",PROJECTION["Lambert_Conformal_Conic_2SP",AUTHORITY["IGNF","PRC0149"]],PARAMETER["latitude_of_origin",-21.500000000],PARAMETER["central_meridian",166.000000000],PARAMETER["standard_parallel_1",-20.666666667],PARAMETER["standard_parallel_2",-22.333333333],PARAMETER["false_easting",400000.000],PARAMETER["false_northing",300000.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","RGNCLAM"]] +LMCC42Z1,PROJCS["Projection conique conforme Zone 1",PROJECTION["Lambert_Conformal_Conic_2SP",AUTHORITY["IGNF","PRC8142"]],PARAMETER["latitude_of_origin",42.000000000],PARAMETER["central_meridian",3.000000000],PARAMETER["standard_parallel_1",41.250000000],PARAMETER["standard_parallel_2",42.750000000],PARAMETER["false_easting",1700000.000],PARAMETER["false_northing",1200000.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","RGF93CC42"]] +LMCC43Z2,PROJCS["Projection conique conforme Zone 2",PROJECTION["Lambert_Conformal_Conic_2SP",AUTHORITY["IGNF","PRC8143"]],PARAMETER["latitude_of_origin",43.000000000],PARAMETER["central_meridian",3.000000000],PARAMETER["standard_parallel_1",42.250000000],PARAMETER["standard_parallel_2",43.750000000],PARAMETER["false_easting",1700000.000],PARAMETER["false_northing",2200000.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","RGF93CC43"]] +LMCC44Z3,PROJCS["Projection conique conforme Zone 3",PROJECTION["Lambert_Conformal_Conic_2SP",AUTHORITY["IGNF","PRC8144"]],PARAMETER["latitude_of_origin",44.000000000],PARAMETER["central_meridian",3.000000000],PARAMETER["standard_parallel_1",43.250000000],PARAMETER["standard_parallel_2",44.750000000],PARAMETER["false_easting",1700000.000],PARAMETER["false_northing",3200000.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","RGF93CC44"]] +LMCC45Z4,PROJCS["Projection conique conforme Zone 
4",PROJECTION["Lambert_Conformal_Conic_2SP",AUTHORITY["IGNF","PRC8145"]],PARAMETER["latitude_of_origin",45.000000000],PARAMETER["central_meridian",3.000000000],PARAMETER["standard_parallel_1",44.250000000],PARAMETER["standard_parallel_2",45.750000000],PARAMETER["false_easting",1700000.000],PARAMETER["false_northing",4200000.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","RGF93CC45"]] +LMCC46Z5,PROJCS["Projection conique conforme Zone 5",PROJECTION["Lambert_Conformal_Conic_2SP",AUTHORITY["IGNF","PRC8146"]],PARAMETER["latitude_of_origin",46.000000000],PARAMETER["central_meridian",3.000000000],PARAMETER["standard_parallel_1",45.250000000],PARAMETER["standard_parallel_2",46.750000000],PARAMETER["false_easting",1700000.000],PARAMETER["false_northing",5200000.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","RGF93CC46"]] +LMCC47Z6,PROJCS["Projection conique conforme Zone 6",PROJECTION["Lambert_Conformal_Conic_2SP",AUTHORITY["IGNF","PRC8147"]],PARAMETER["latitude_of_origin",47.000000000],PARAMETER["central_meridian",3.000000000],PARAMETER["standard_parallel_1",46.250000000],PARAMETER["standard_parallel_2",47.750000000],PARAMETER["false_easting",1700000.000],PARAMETER["false_northing",6200000.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","RGF93CC47"]] +LMCC48Z7,PROJCS["Projection conique conforme Zone 7",PROJECTION["Lambert_Conformal_Conic_2SP",AUTHORITY["IGNF","PRC8148"]],PARAMETER["latitude_of_origin",48.000000000],PARAMETER["central_meridian",3.000000000],PARAMETER["standard_parallel_1",47.250000000],PARAMETER["standard_parallel_2",48.750000000],PARAMETER["false_easting",1700000.000],PARAMETER["false_northing",7200000.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","RGF93CC48"]] +LMCC49Z8,PROJCS["Projection conique conforme Zone 8",PROJECTION["Lambert_Conformal_Conic_2SP",AUTHORITY["IGNF","PRC8149"]],PARAMETER["latitude_of_origin",49.000000000],PARAMETER["central_meridian",3.000000000],PARAMETER["standard_parallel_1",48.250000000],PARAMETER["standard_parallel_2",49.750000000],PARAMETER["false_easting",1700000.000],PARAMETER["false_northing",8200000.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","RGF93CC49"]] +LMCC50Z9,PROJCS["Projection conique conforme Zone 9",PROJECTION["Lambert_Conformal_Conic_2SP",AUTHORITY["IGNF","PRC8150"]],PARAMETER["latitude_of_origin",50.000000000],PARAMETER["central_meridian",3.000000000],PARAMETER["standard_parallel_1",49.250000000],PARAMETER["standard_parallel_2",50.750000000],PARAMETER["false_easting",1700000.000],PARAMETER["false_northing",9200000.000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["IGNF","RGF93CC50"]] +ETRS89,GEOGCS["ETRS89",DATUM["European_Terrestrial_Reference_System_1989",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6258"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4258"]] diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/epsg.wkt b/.venv/lib/python3.12/site-packages/fiona/gdal_data/epsg.wkt new file mode 100644 index 00000000..64898f46 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/epsg.wkt @@ -0,0 +1 @@ +include cubewerx_extra.wkt diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/esri_StatePlane_extra.wkt 
b/.venv/lib/python3.12/site-packages/fiona/gdal_data/esri_StatePlane_extra.wkt new file mode 100644 index 00000000..edbfa83c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/esri_StatePlane_extra.wkt @@ -0,0 +1,631 @@ +1010,PROJCS["NAD_1983_HARN_StatePlane_Alabama_East_FIPS_0101",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-85.83333333333333],PARAMETER["Scale_Factor",0.99996],PARAMETER["Latitude_Of_Origin",30.5],UNIT["Meter",1]] +1011,PROJCS["NAD_1983_StatePlane_Alabama_East_FIPS_0101",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-85.83333333333333],PARAMETER["Scale_Factor",0.99996],PARAMETER["Latitude_Of_Origin",30.5],UNIT["Meter",1]] +1012,PROJCS["NAD_1983_StatePlane_Alabama_East_FIPS_0101_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-85.83333333333333],PARAMETER["Scale_Factor",0.99996],PARAMETER["Latitude_Of_Origin",30.5],UNIT["Foot_US",0.304800609601219241]] +1014,PROJCS["NAD_1927_StatePlane_Alabama_East_FIPS_0101",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-85.83333333333333],PARAMETER["Scale_Factor",0.99996],PARAMETER["Latitude_Of_Origin",30.5],UNIT["Foot_US",0.304800609601219241]] +1020,PROJCS["NAD_1983_HARN_StatePlane_Alabama_West_FIPS_0102",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-87.5],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",30],UNIT["Meter",1]] +1021,PROJCS["NAD_1983_StatePlane_Alabama_West_FIPS_0102",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-87.5],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",30],UNIT["Meter",1]] 
+1022,PROJCS["NAD_1983_StatePlane_Alabama_West_FIPS_0102_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-87.5],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",30],UNIT["Foot_US",0.304800609601219241]] +1024,PROJCS["NAD_1927_StatePlane_Alabama_West_FIPS_0102",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-87.5],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",30],UNIT["Foot_US",0.304800609601219241]] +2010,PROJCS["NAD_1983_HARN_StatePlane_Arizona_East_FIPS_0201",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",213360],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-110.1666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31],UNIT["Meter",1]] +2011,PROJCS["NAD_1983_StatePlane_Arizona_East_FIPS_0201",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",213360],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-110.1666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31],UNIT["Meter",1]] +2012,PROJCS["NAD_1983_StatePlane_Arizona_East_FIPS_0201_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",699998.5999999999],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-110.1666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31],UNIT["Foot_US",0.304800609601219241]] +2013,PROJCS["NAD_1983_HARN_StatePlane_Arizona_East_FIPS_0201_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-110.1666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot",0.3048]] +2014,PROJCS["NAD_1927_StatePlane_Arizona_East_FIPS_0201",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-110.1666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31],UNIT["Foot_US",0.304800609601219241]] 
+2015,PROJCS["NAD_1983_HARN_StatePlane_Arizona_East_FIPS_0201_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-110.1666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot",0.3048]] +2016,PROJCS["NAD_1983_StatePlane_Arizona_East_FIPS_0201_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-110.1666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot",0.3048]] +2020,PROJCS["NAD_1983_HARN_StatePlane_Arizona_Central_FIPS_0202",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",213360],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-111.9166666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31],UNIT["Meter",1]] +2021,PROJCS["NAD_1983_StatePlane_Arizona_Central_FIPS_0202",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",213360],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-111.9166666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31],UNIT["Meter",1]] +2022,PROJCS["NAD_1983_StatePlane_Arizona_Central_FIPS_0202_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",699998.5999999999],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-111.9166666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31],UNIT["Foot_US",0.304800609601219241]] +2023,PROJCS["NAD_1983_HARN_StatePlane_Arizona_Central_FIPS_0202_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-111.9166666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot",0.3048]] +2024,PROJCS["NAD_1927_StatePlane_Arizona_Central_FIPS_0202",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-111.9166666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31],UNIT["Foot_US",0.304800609601219241]] 
+2025,PROJCS["NAD_1983_HARN_StatePlane_Arizona_Central_FIPS_0202_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-111.9166666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot",0.3048]] +2026,PROJCS["NAD_1983_StatePlane_Arizona_Central_FIPS_0202_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-111.9166666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot",0.3048]] +2030,PROJCS["NAD_1983_HARN_StatePlane_Arizona_West_FIPS_0203",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",213360],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-113.75],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",31],UNIT["Meter",1]] +2031,PROJCS["NAD_1983_StatePlane_Arizona_West_FIPS_0203",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",213360],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-113.75],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",31],UNIT["Meter",1]] +2032,PROJCS["NAD_1983_StatePlane_Arizona_West_FIPS_0203_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",699998.5999999999],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-113.75],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",31],UNIT["Foot_US",0.304800609601219241]] +2033,PROJCS["NAD_1983_HARN_StatePlane_Arizona_West_FIPS_0203_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-113.75],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot",0.3048]] +2034,PROJCS["NAD_1927_StatePlane_Arizona_West_FIPS_0203",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-113.75],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",31],UNIT["Foot_US",0.304800609601219241]] 
+2035,PROJCS["NAD_1983_HARN_StatePlane_Arizona_West_FIPS_0203_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-113.75],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot",0.3048]] +2036,PROJCS["NAD_1983_StatePlane_Arizona_West_FIPS_0203_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-113.75],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot",0.3048]] +3010,PROJCS["NAD_1983_HARN_StatePlane_Arkansas_North_FIPS_0301",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",400000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-92.0],PARAMETER["Standard_Parallel_1",34.93333333333333],PARAMETER["Standard_Parallel_2",36.23333333333333],PARAMETER["Latitude_Of_Origin",34.33333333333334],UNIT["Meter",1.0]] +3011,PROJCS["NAD_1983_StatePlane_Arkansas_North_FIPS_0301",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",400000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-92],PARAMETER["Standard_Parallel_1",34.93333333333333],PARAMETER["Standard_Parallel_2",36.23333333333333],PARAMETER["Latitude_Of_Origin",34.33333333333334],UNIT["Meter",1]] +3012,PROJCS["NAD_1983_StatePlane_Arkansas_North_FIPS_0301_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1312333.333333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-92],PARAMETER["Standard_Parallel_1",34.93333333333333],PARAMETER["Standard_Parallel_2",36.23333333333333],PARAMETER["Latitude_Of_Origin",34.33333333333334],UNIT["Foot_US",0.304800609601219241]] +3013,PROJCS["NAD_1983_HARN_StatePlane_Arkansas_North_FIPS_0301_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1312333.333333333],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-92.0],PARAMETER["Standard_Parallel_1",34.93333333333333],PARAMETER["Standard_Parallel_2",36.23333333333333],PARAMETER["Latitude_Of_Origin",34.33333333333334],UNIT["Foot_US",0.3048006096012192]] 
+3014,PROJCS["NAD_1927_StatePlane_Arkansas_North_FIPS_0301",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-92],PARAMETER["Standard_Parallel_1",34.93333333333333],PARAMETER["Standard_Parallel_2",36.23333333333333],PARAMETER["Latitude_Of_Origin",34.33333333333334],UNIT["Foot_US",0.304800609601219241]] +3020,PROJCS["NAD_1983_HARN_StatePlane_Arkansas_South_FIPS_0302",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",400000.0],PARAMETER["False_Northing",400000.0],PARAMETER["Central_Meridian",-92.0],PARAMETER["Standard_Parallel_1",33.3],PARAMETER["Standard_Parallel_2",34.76666666666667],PARAMETER["Latitude_Of_Origin",32.66666666666666],UNIT["Meter",1.0]] +3021,PROJCS["NAD_1983_StatePlane_Arkansas_South_FIPS_0302",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",400000],PARAMETER["False_Northing",400000],PARAMETER["Central_Meridian",-92],PARAMETER["Standard_Parallel_1",33.3],PARAMETER["Standard_Parallel_2",34.76666666666667],PARAMETER["Latitude_Of_Origin",32.66666666666666],UNIT["Meter",1]] +3022,PROJCS["NAD_1983_StatePlane_Arkansas_South_FIPS_0302_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1312333.333333333],PARAMETER["False_Northing",1312333.333333333],PARAMETER["Central_Meridian",-92],PARAMETER["Standard_Parallel_1",33.3],PARAMETER["Standard_Parallel_2",34.76666666666667],PARAMETER["Latitude_Of_Origin",32.66666666666666],UNIT["Foot_US",0.304800609601219241]] +3023,PROJCS["NAD_1983_HARN_StatePlane_Arkansas_South_FIPS_0302_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1312333.333333333],PARAMETER["False_Northing",1312333.333333333],PARAMETER["Central_Meridian",-92.0],PARAMETER["Standard_Parallel_1",33.3],PARAMETER["Standard_Parallel_2",34.76666666666667],PARAMETER["Latitude_Of_Origin",32.66666666666666],UNIT["Foot_US",0.3048006096012192]] +3024,PROJCS["NAD_1927_StatePlane_Arkansas_South_FIPS_0302",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-92],PARAMETER["Standard_Parallel_1",33.3],PARAMETER["Standard_Parallel_2",34.76666666666667],PARAMETER["Latitude_Of_Origin",32.66666666666666],UNIT["Foot_US",0.304800609601219241]] 
+4010,PROJCS["NAD_1983_HARN_StatePlane_California_I_FIPS_0401",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-122],PARAMETER["Standard_Parallel_1",40],PARAMETER["Standard_Parallel_2",41.66666666666666],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Meter",1]] +4011,PROJCS["NAD_1983_StatePlane_California_I_FIPS_0401",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-122],PARAMETER["Standard_Parallel_1",40],PARAMETER["Standard_Parallel_2",41.66666666666666],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Meter",1]] +4012,PROJCS["NAD_1983_StatePlane_California_I_FIPS_0401_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6561666.666666666],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-122],PARAMETER["Standard_Parallel_1",40],PARAMETER["Standard_Parallel_2",41.66666666666666],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Foot_US",0.304800609601219241]] +4013,PROJCS["NAD_1983_HARN_StatePlane_California_I_FIPS_0401_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6561666.666666666],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-122.0],PARAMETER["Standard_Parallel_1",40.0],PARAMETER["Standard_Parallel_2",41.66666666666666],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Foot_US",0.3048006096012192]] +4014,PROJCS["NAD_1927_StatePlane_California_I_FIPS_0401",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-122],PARAMETER["Standard_Parallel_1",40],PARAMETER["Standard_Parallel_2",41.66666666666666],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Foot_US",0.304800609601219241]] +4020,PROJCS["NAD_1983_HARN_StatePlane_California_II_FIPS_0402",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-122],PARAMETER["Standard_Parallel_1",38.33333333333334],PARAMETER["Standard_Parallel_2",39.83333333333334],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Meter",1]] 
+4021,PROJCS["NAD_1983_StatePlane_California_II_FIPS_0402",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-122],PARAMETER["Standard_Parallel_1",38.33333333333334],PARAMETER["Standard_Parallel_2",39.83333333333334],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Meter",1]] +4022,PROJCS["NAD_1983_StatePlane_California_II_FIPS_0402_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6561666.666666666],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-122],PARAMETER["Standard_Parallel_1",38.33333333333334],PARAMETER["Standard_Parallel_2",39.83333333333334],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Foot_US",0.304800609601219241]] +4023,PROJCS["NAD_1983_HARN_StatePlane_California_II_FIPS_0402_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6561666.666666666],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-122.0],PARAMETER["Standard_Parallel_1",38.33333333333334],PARAMETER["Standard_Parallel_2",39.83333333333334],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Foot_US",0.3048006096012192]] +4024,PROJCS["NAD_1927_StatePlane_California_II_FIPS_0402",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-122],PARAMETER["Standard_Parallel_1",38.33333333333334],PARAMETER["Standard_Parallel_2",39.83333333333334],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Foot_US",0.304800609601219241]] +4030,PROJCS["NAD_1983_HARN_StatePlane_California_III_FIPS_0403",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",37.06666666666667],PARAMETER["Standard_Parallel_2",38.43333333333333],PARAMETER["Latitude_Of_Origin",36.5],UNIT["Meter",1]] +4031,PROJCS["NAD_1983_StatePlane_California_III_FIPS_0403",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",37.06666666666667],PARAMETER["Standard_Parallel_2",38.43333333333333],PARAMETER["Latitude_Of_Origin",36.5],UNIT["Meter",1]] 
+4032,PROJCS["NAD_1983_StatePlane_California_III_FIPS_0403_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6561666.666666666],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",37.06666666666667],PARAMETER["Standard_Parallel_2",38.43333333333333],PARAMETER["Latitude_Of_Origin",36.5],UNIT["Foot_US",0.304800609601219241]] +4033,PROJCS["NAD_1983_HARN_StatePlane_California_III_FIPS_0403_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6561666.666666666],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",37.06666666666667],PARAMETER["Standard_Parallel_2",38.43333333333333],PARAMETER["Latitude_Of_Origin",36.5],UNIT["Foot_US",0.3048006096012192]] +4034,PROJCS["NAD_1927_StatePlane_California_III_FIPS_0403",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",37.06666666666667],PARAMETER["Standard_Parallel_2",38.43333333333333],PARAMETER["Latitude_Of_Origin",36.5],UNIT["Foot_US",0.304800609601219241]] +4040,PROJCS["NAD_1983_HARN_StatePlane_California_IV_FIPS_0404",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-119],PARAMETER["Standard_Parallel_1",36],PARAMETER["Standard_Parallel_2",37.25],PARAMETER["Latitude_Of_Origin",35.33333333333334],UNIT["Meter",1]] +4041,PROJCS["NAD_1983_StatePlane_California_IV_FIPS_0404",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-119],PARAMETER["Standard_Parallel_1",36],PARAMETER["Standard_Parallel_2",37.25],PARAMETER["Latitude_Of_Origin",35.33333333333334],UNIT["Meter",1]] +4042,PROJCS["NAD_1983_StatePlane_California_IV_FIPS_0404_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6561666.666666666],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-119],PARAMETER["Standard_Parallel_1",36],PARAMETER["Standard_Parallel_2",37.25],PARAMETER["Latitude_Of_Origin",35.33333333333334],UNIT["Foot_US",0.304800609601219241]] 
+4043,PROJCS["NAD_1983_HARN_StatePlane_California_IV_FIPS_0404_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6561666.666666666],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-119.0],PARAMETER["Standard_Parallel_1",36.0],PARAMETER["Standard_Parallel_2",37.25],PARAMETER["Latitude_Of_Origin",35.33333333333334],UNIT["Foot_US",0.3048006096012192]] +4044,PROJCS["NAD_1927_StatePlane_California_IV_FIPS_0404",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-119],PARAMETER["Standard_Parallel_1",36],PARAMETER["Standard_Parallel_2",37.25],PARAMETER["Latitude_Of_Origin",35.33333333333334],UNIT["Foot_US",0.304800609601219241]] +4050,PROJCS["NAD_1983_HARN_StatePlane_California_V_FIPS_0405",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-118],PARAMETER["Standard_Parallel_1",34.03333333333333],PARAMETER["Standard_Parallel_2",35.46666666666667],PARAMETER["Latitude_Of_Origin",33.5],UNIT["Meter",1]] +4051,PROJCS["NAD_1983_StatePlane_California_V_FIPS_0405",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-118],PARAMETER["Standard_Parallel_1",34.03333333333333],PARAMETER["Standard_Parallel_2",35.46666666666667],PARAMETER["Latitude_Of_Origin",33.5],UNIT["Meter",1]] +4052,PROJCS["NAD_1983_StatePlane_California_V_FIPS_0405_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6561666.666666666],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-118],PARAMETER["Standard_Parallel_1",34.03333333333333],PARAMETER["Standard_Parallel_2",35.46666666666667],PARAMETER["Latitude_Of_Origin",33.5],UNIT["Foot_US",0.304800609601219241]] +4053,PROJCS["NAD_1983_HARN_StatePlane_California_V_FIPS_0405_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6561666.666666666],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-118.0],PARAMETER["Standard_Parallel_1",34.03333333333333],PARAMETER["Standard_Parallel_2",35.46666666666667],PARAMETER["Latitude_Of_Origin",33.5],UNIT["Foot_US",0.3048006096012192]] 
+4054,PROJCS["NAD_1927_StatePlane_California_V_FIPS_0405",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-118],PARAMETER["Standard_Parallel_1",34.03333333333333],PARAMETER["Standard_Parallel_2",35.46666666666667],PARAMETER["Latitude_Of_Origin",33.5],UNIT["Foot_US",0.304800609601219241]]
+4060,PROJCS["NAD_1983_HARN_StatePlane_California_VI_FIPS_0406",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-116.25],PARAMETER["Standard_Parallel_1",32.78333333333333],PARAMETER["Standard_Parallel_2",33.88333333333333],PARAMETER["Latitude_Of_Origin",32.16666666666666],UNIT["Meter",1]]
+4061,PROJCS["NAD_1983_StatePlane_California_VI_FIPS_0406",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-116.25],PARAMETER["Standard_Parallel_1",32.78333333333333],PARAMETER["Standard_Parallel_2",33.88333333333333],PARAMETER["Latitude_Of_Origin",32.16666666666666],UNIT["Meter",1]]
+4062,PROJCS["NAD_1983_StatePlane_California_VI_FIPS_0406_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6561666.666666666],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-116.25],PARAMETER["Standard_Parallel_1",32.78333333333333],PARAMETER["Standard_Parallel_2",33.88333333333333],PARAMETER["Latitude_Of_Origin",32.16666666666666],UNIT["Foot_US",0.304800609601219241]]
+4063,PROJCS["NAD_1983_HARN_StatePlane_California_VI_FIPS_0406_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6561666.666666666],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-116.25],PARAMETER["Standard_Parallel_1",32.78333333333333],PARAMETER["Standard_Parallel_2",33.88333333333333],PARAMETER["Latitude_Of_Origin",32.16666666666666],UNIT["Foot_US",0.3048006096012192]]
+4064,PROJCS["NAD_1927_StatePlane_California_VI_FIPS_0406",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-116.25],PARAMETER["Standard_Parallel_1",32.78333333333333],PARAMETER["Standard_Parallel_2",33.88333333333333],PARAMETER["Latitude_Of_Origin",32.16666666666666],UNIT["Foot_US",0.304800609601219241]]
+4074,PROJCS["NAD_1927_StatePlane_California_VII_FIPS_0407",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",4186692.58],PARAMETER["False_Northing",4160926.74],PARAMETER["Central_Meridian",-118.3333333333333],PARAMETER["Standard_Parallel_1",33.86666666666667],PARAMETER["Standard_Parallel_2",34.41666666666666],PARAMETER["Latitude_Of_Origin",34.13333333333333],UNIT["Foot_US",0.304800609601219241]]
+5010,PROJCS["NAD_1983_HARN_StatePlane_Colorado_North_FIPS_0501",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",914401.8289],PARAMETER["False_Northing",304800.6096],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",39.71666666666667],PARAMETER["Standard_Parallel_2",40.78333333333333],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Meter",1]]
+5011,PROJCS["NAD_1983_StatePlane_Colorado_North_FIPS_0501",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",914401.8289],PARAMETER["False_Northing",304800.6096],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",39.71666666666667],PARAMETER["Standard_Parallel_2",40.78333333333333],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Meter",1]]
+5012,PROJCS["NAD_1983_StatePlane_Colorado_North_FIPS_0501_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3000000.000316083],PARAMETER["False_Northing",999999.999996],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",39.71666666666667],PARAMETER["Standard_Parallel_2",40.78333333333333],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Foot_US",0.304800609601219241]]
+5013,PROJCS["NAD_1983_HARN_StatePlane_Colorado_North_FIPS_0501_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3000000.000316083],PARAMETER["False_Northing",999999.999996],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",39.71666666666667],PARAMETER["Standard_Parallel_2",40.78333333333333],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Foot_US",0.3048006096012192]]
+5014,PROJCS["NAD_1927_StatePlane_Colorado_North_FIPS_0501",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",39.71666666666667],PARAMETER["Standard_Parallel_2",40.78333333333333],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Foot_US",0.304800609601219241]]
+5020,PROJCS["NAD_1983_HARN_StatePlane_Colorado_Central_FIPS_0502",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",914401.8289],PARAMETER["False_Northing",304800.6096],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",38.45],PARAMETER["Standard_Parallel_2",39.75],PARAMETER["Latitude_Of_Origin",37.83333333333334],UNIT["Meter",1]]
+5021,PROJCS["NAD_1983_StatePlane_Colorado_Central_FIPS_0502",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",914401.8289],PARAMETER["False_Northing",304800.6096],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",38.45],PARAMETER["Standard_Parallel_2",39.75],PARAMETER["Latitude_Of_Origin",37.83333333333334],UNIT["Meter",1]]
+5022,PROJCS["NAD_1983_StatePlane_Colorado_Central_FIPS_0502_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3000000.000316083],PARAMETER["False_Northing",999999.999996],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",38.45],PARAMETER["Standard_Parallel_2",39.75],PARAMETER["Latitude_Of_Origin",37.83333333333334],UNIT["Foot_US",0.304800609601219241]]
+5023,PROJCS["NAD_1983_HARN_StatePlane_Colorado_Central_FIPS_0502_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3000000.000316083],PARAMETER["False_Northing",999999.999996],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",38.45],PARAMETER["Standard_Parallel_2",39.75],PARAMETER["Latitude_Of_Origin",37.83333333333334],UNIT["Foot_US",0.3048006096012192]]
+5024,PROJCS["NAD_1927_StatePlane_Colorado_Central_FIPS_0502",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",38.45],PARAMETER["Standard_Parallel_2",39.75],PARAMETER["Latitude_Of_Origin",37.83333333333334],UNIT["Foot_US",0.304800609601219241]]
+5030,PROJCS["NAD_1983_HARN_StatePlane_Colorado_South_FIPS_0503",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",914401.8289],PARAMETER["False_Northing",304800.6096],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",37.23333333333333],PARAMETER["Standard_Parallel_2",38.43333333333333],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Meter",1]]
+5031,PROJCS["NAD_1983_StatePlane_Colorado_South_FIPS_0503",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",914401.8289],PARAMETER["False_Northing",304800.6096],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",37.23333333333333],PARAMETER["Standard_Parallel_2",38.43333333333333],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Meter",1]]
+5032,PROJCS["NAD_1983_StatePlane_Colorado_South_FIPS_0503_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3000000.000316083],PARAMETER["False_Northing",999999.999996],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",37.23333333333333],PARAMETER["Standard_Parallel_2",38.43333333333333],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+5033,PROJCS["NAD_1983_HARN_StatePlane_Colorado_South_FIPS_0503_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3000000.000316083],PARAMETER["False_Northing",999999.999996],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",37.23333333333333],PARAMETER["Standard_Parallel_2",38.43333333333333],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.3048006096012192]]
+5034,PROJCS["NAD_1927_StatePlane_Colorado_South_FIPS_0503",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-105.5],PARAMETER["Standard_Parallel_1",37.23333333333333],PARAMETER["Standard_Parallel_2",38.43333333333333],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+6000,PROJCS["NAD_1983_HARN_StatePlane_Connecticut_FIPS_0600",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",304800.6096],PARAMETER["False_Northing",152400.3048],PARAMETER["Central_Meridian",-72.75],PARAMETER["Standard_Parallel_1",41.2],PARAMETER["Standard_Parallel_2",41.86666666666667],PARAMETER["Latitude_Of_Origin",40.83333333333334],UNIT["Meter",1]]
+6001,PROJCS["NAD_1983_StatePlane_Connecticut_FIPS_0600",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",304800.6096],PARAMETER["False_Northing",152400.3048],PARAMETER["Central_Meridian",-72.75],PARAMETER["Standard_Parallel_1",41.2],PARAMETER["Standard_Parallel_2",41.86666666666667],PARAMETER["Latitude_Of_Origin",40.83333333333334],UNIT["Meter",1]]
+6002,PROJCS["NAD_1983_StatePlane_Connecticut_FIPS_0600_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",999999.999996],PARAMETER["False_Northing",499999.999998],PARAMETER["Central_Meridian",-72.75],PARAMETER["Standard_Parallel_1",41.2],PARAMETER["Standard_Parallel_2",41.86666666666667],PARAMETER["Latitude_Of_Origin",40.83333333333334],UNIT["Foot_US",0.304800609601219241]]
+6003,PROJCS["NAD_1983_HARN_StatePlane_Connecticut_FIPS_0600_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",999999.999996],PARAMETER["False_Northing",499999.999998],PARAMETER["Central_Meridian",-72.75],PARAMETER["Standard_Parallel_1",41.2],PARAMETER["Standard_Parallel_2",41.86666666666667],PARAMETER["Latitude_Of_Origin",40.83333333333334],UNIT["Foot_US",0.3048006096012192]]
+6004,PROJCS["NAD_1927_StatePlane_Connecticut_FIPS_0600",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-72.75],PARAMETER["Standard_Parallel_1",41.2],PARAMETER["Standard_Parallel_2",41.86666666666667],PARAMETER["Latitude_Of_Origin",40.83333333333334],UNIT["Foot_US",0.304800609601219241]]
+7000,PROJCS["NAD_1983_HARN_StatePlane_Delaware_FIPS_0700",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-75.41666666666667],PARAMETER["Scale_Factor",0.999995],PARAMETER["Latitude_Of_Origin",38],UNIT["Meter",1]]
+7001,PROJCS["NAD_1983_StatePlane_Delaware_FIPS_0700",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-75.41666666666667],PARAMETER["Scale_Factor",0.999995],PARAMETER["Latitude_Of_Origin",38],UNIT["Meter",1]]
+7002,PROJCS["NAD_1983_StatePlane_Delaware_FIPS_0700_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-75.41666666666667],PARAMETER["Scale_Factor",0.999995],PARAMETER["Latitude_Of_Origin",38],UNIT["Foot_US",0.304800609601219241]]
+7003,PROJCS["NAD_1983_HARN_StatePlane_Delaware_FIPS_0700_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-75.41666666666667],PARAMETER["Scale_Factor",0.999995],PARAMETER["Latitude_Of_Origin",38.0],UNIT["Foot_US",0.3048006096012192]]
+7004,PROJCS["NAD_1927_StatePlane_Delaware_FIPS_0700",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-75.41666666666667],PARAMETER["Scale_Factor",0.999995],PARAMETER["Latitude_Of_Origin",38],UNIT["Foot_US",0.304800609601219241]]
+9010,PROJCS["NAD_1983_HARN_StatePlane_Florida_East_FIPS_0901",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-81],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",24.33333333333333],UNIT["Meter",1]]
+9011,PROJCS["NAD_1983_StatePlane_Florida_East_FIPS_0901",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-81],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",24.33333333333333],UNIT["Meter",1]]
+9012,PROJCS["NAD_1983_StatePlane_Florida_East_FIPS_0901_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-81],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",24.33333333333333],UNIT["Foot_US",0.304800609601219241]]
+9013,PROJCS["NAD_1983_HARN_StatePlane_Florida_East_FIPS_0901_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-81.0],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",24.33333333333333],UNIT["Foot_US",0.3048006096012192]]
+9014,PROJCS["NAD_1927_StatePlane_Florida_East_FIPS_0901",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-81],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",24.33333333333333],UNIT["Foot_US",0.304800609601219241]]
+9020,PROJCS["NAD_1983_HARN_StatePlane_Florida_West_FIPS_0902",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",24.33333333333333],UNIT["Meter",1]]
+9021,PROJCS["NAD_1983_StatePlane_Florida_West_FIPS_0902",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",24.33333333333333],UNIT["Meter",1]]
+9022,PROJCS["NAD_1983_StatePlane_Florida_West_FIPS_0902_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",24.33333333333333],UNIT["Foot_US",0.304800609601219241]]
+9023,PROJCS["NAD_1983_HARN_StatePlane_Florida_West_FIPS_0902_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-82.0],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",24.33333333333333],UNIT["Foot_US",0.3048006096012192]]
+9024,PROJCS["NAD_1927_StatePlane_Florida_West_FIPS_0902",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",24.33333333333333],UNIT["Foot_US",0.304800609601219241]]
+9030,PROJCS["NAD_1983_HARN_StatePlane_Florida_North_FIPS_0903",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.5],PARAMETER["Standard_Parallel_1",29.58333333333333],PARAMETER["Standard_Parallel_2",30.75],PARAMETER["Latitude_Of_Origin",29],UNIT["Meter",1]]
+9031,PROJCS["NAD_1983_StatePlane_Florida_North_FIPS_0903",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.5],PARAMETER["Standard_Parallel_1",29.58333333333333],PARAMETER["Standard_Parallel_2",30.75],PARAMETER["Latitude_Of_Origin",29],UNIT["Meter",1]]
+9032,PROJCS["NAD_1983_StatePlane_Florida_North_FIPS_0903_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.5],PARAMETER["Standard_Parallel_1",29.58333333333333],PARAMETER["Standard_Parallel_2",30.75],PARAMETER["Latitude_Of_Origin",29],UNIT["Foot_US",0.304800609601219241]]
+9033,PROJCS["NAD_1983_HARN_StatePlane_Florida_North_FIPS_0903_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-84.5],PARAMETER["Standard_Parallel_1",29.58333333333333],PARAMETER["Standard_Parallel_2",30.75],PARAMETER["Latitude_Of_Origin",29.0],UNIT["Foot_US",0.3048006096012192]]
+9034,PROJCS["NAD_1927_StatePlane_Florida_North_FIPS_0903",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.5],PARAMETER["Standard_Parallel_1",29.58333333333333],PARAMETER["Standard_Parallel_2",30.75],PARAMETER["Latitude_Of_Origin",29],UNIT["Foot_US",0.304800609601219241]]
+10010,PROJCS["NAD_1983_HARN_StatePlane_Georgia_East_FIPS_1001",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82.16666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",30],UNIT["Meter",1]]
+10011,PROJCS["NAD_1983_StatePlane_Georgia_East_FIPS_1001",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82.16666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",30],UNIT["Meter",1]]
+10012,PROJCS["NAD_1983_StatePlane_Georgia_East_FIPS_1001_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82.16666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",30],UNIT["Foot_US",0.304800609601219241]]
+10013,PROJCS["NAD_1983_HARN_StatePlane_Georgia_East_FIPS_1001_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-82.16666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",30.0],UNIT["Foot_US",0.3048006096012192]]
+10014,PROJCS["NAD_1927_StatePlane_Georgia_East_FIPS_1001",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82.16666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",30],UNIT["Foot_US",0.304800609601219241]]
+10020,PROJCS["NAD_1983_HARN_StatePlane_Georgia_West_FIPS_1002",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.16666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",30],UNIT["Meter",1]]
+10021,PROJCS["NAD_1983_StatePlane_Georgia_West_FIPS_1002",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.16666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",30],UNIT["Meter",1]]
+10022,PROJCS["NAD_1983_StatePlane_Georgia_West_FIPS_1002_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2296583.333333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.16666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",30],UNIT["Foot_US",0.304800609601219241]]
+10023,PROJCS["NAD_1983_HARN_StatePlane_Georgia_West_FIPS_1002_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2296583.333333333],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-84.16666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",30.0],UNIT["Foot_US",0.3048006096012192]]
+10024,PROJCS["NAD_1927_StatePlane_Georgia_West_FIPS_1002",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.16666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",30],UNIT["Foot_US",0.304800609601219241]]
+11010,PROJCS["NAD_1983_HARN_StatePlane_Idaho_East_FIPS_1101",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-112.1666666666667],PARAMETER["Scale_Factor",0.9999473684210526],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Meter",1]]
+11011,PROJCS["NAD_1983_StatePlane_Idaho_East_FIPS_1101",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-112.1666666666667],PARAMETER["Scale_Factor",0.9999473684210526],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Meter",1]]
+11012,PROJCS["NAD_1983_StatePlane_Idaho_East_FIPS_1101_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-112.1666666666667],PARAMETER["Scale_Factor",0.9999473684210526],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+11013,PROJCS["NAD_1983_HARN_StatePlane_Idaho_East_FIPS_1101_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-112.1666666666667],PARAMETER["Scale_Factor",0.9999473684210526],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot_US",0.3048006096012192]]
+11014,PROJCS["NAD_1927_StatePlane_Idaho_East_FIPS_1101",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-112.1666666666667],PARAMETER["Scale_Factor",0.9999473684210526],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+11020,PROJCS["NAD_1983_HARN_StatePlane_Idaho_Central_FIPS_1102",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-114],PARAMETER["Scale_Factor",0.9999473684210526],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Meter",1]]
+11021,PROJCS["NAD_1983_StatePlane_Idaho_Central_FIPS_1102",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-114],PARAMETER["Scale_Factor",0.9999473684210526],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Meter",1]]
+11022,PROJCS["NAD_1983_StatePlane_Idaho_Central_FIPS_1102_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-114],PARAMETER["Scale_Factor",0.9999473684210526],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+11023,PROJCS["NAD_1983_HARN_StatePlane_Idaho_Central_FIPS_1102_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-114.0],PARAMETER["Scale_Factor",0.9999473684210526],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot_US",0.3048006096012192]]
+11024,PROJCS["NAD_1927_StatePlane_Idaho_Central_FIPS_1102",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-114],PARAMETER["Scale_Factor",0.9999473684210526],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+11030,PROJCS["NAD_1983_HARN_StatePlane_Idaho_West_FIPS_1103",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",800000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-115.75],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Meter",1]]
+11031,PROJCS["NAD_1983_StatePlane_Idaho_West_FIPS_1103",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",800000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-115.75],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Meter",1]]
+11032,PROJCS["NAD_1983_StatePlane_Idaho_West_FIPS_1103_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2624666.666666666],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-115.75],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+11033,PROJCS["NAD_1983_HARN_StatePlane_Idaho_West_FIPS_1103_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2624666.666666666],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-115.75],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot_US",0.3048006096012192]]
+11034,PROJCS["NAD_1927_StatePlane_Idaho_West_FIPS_1103",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-115.75],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+12010,PROJCS["NAD_1983_HARN_StatePlane_Illinois_East_FIPS_1201",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",300000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-88.33333333333333],PARAMETER["Scale_Factor",0.9999749999999999],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Meter",1]]
+12011,PROJCS["NAD_1983_StatePlane_Illinois_East_FIPS_1201",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",300000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-88.33333333333333],PARAMETER["Scale_Factor",0.9999749999999999],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Meter",1]]
+12012,PROJCS["NAD_1983_StatePlane_Illinois_East_FIPS_1201_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",984249.9999999999],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-88.33333333333333],PARAMETER["Scale_Factor",0.9999749999999999],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+12013,PROJCS["NAD_1983_HARN_StatePlane_Illinois_East_FIPS_1201_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",984250.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-88.33333333333333],PARAMETER["Scale_Factor",0.999975],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.3048006096012192]]
+12014,PROJCS["NAD_1927_StatePlane_Illinois_East_FIPS_1201",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-88.33333333333333],PARAMETER["Scale_Factor",0.9999749999999999],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+12020,PROJCS["NAD_1983_HARN_StatePlane_Illinois_West_FIPS_1202",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90.16666666666667],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Meter",1]]
+12021,PROJCS["NAD_1983_StatePlane_Illinois_West_FIPS_1202",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90.16666666666667],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Meter",1]]
+12022,PROJCS["NAD_1983_StatePlane_Illinois_West_FIPS_1202_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2296583.333333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90.16666666666667],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+12023,PROJCS["NAD_1983_HARN_StatePlane_Illinois_West_FIPS_1202_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2296583.333333333],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-90.16666666666667],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.3048006096012192]]
+12024,PROJCS["NAD_1927_StatePlane_Illinois_West_FIPS_1202",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90.16666666666667],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+13010,PROJCS["NAD_1983_HARN_StatePlane_Indiana_East_FIPS_1301",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",100000],PARAMETER["False_Northing",250000],PARAMETER["Central_Meridian",-85.66666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Meter",1]]
+13011,PROJCS["NAD_1983_StatePlane_Indiana_East_FIPS_1301",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",100000],PARAMETER["False_Northing",250000],PARAMETER["Central_Meridian",-85.66666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Meter",1]]
+13012,PROJCS["NAD_1983_StatePlane_Indiana_East_FIPS_1301_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",328083.3333333333],PARAMETER["False_Northing",820208.3333333333],PARAMETER["Central_Meridian",-85.66666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Foot_US",0.304800609601219241]]
+13013,PROJCS["NAD_1983_HARN_StatePlane_Indiana_East_FIPS_1301_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",328083.3333333333],PARAMETER["False_Northing",820208.3333333333],PARAMETER["Central_Meridian",-85.66666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Foot_US",0.3048006096012192]]
+13014,PROJCS["NAD_1927_StatePlane_Indiana_East_FIPS_1301",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-85.66666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Foot_US",0.304800609601219241]]
+13020,PROJCS["NAD_1983_HARN_StatePlane_Indiana_West_FIPS_1302",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",900000],PARAMETER["False_Northing",250000],PARAMETER["Central_Meridian",-87.08333333333333],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Meter",1]]
+13021,PROJCS["NAD_1983_StatePlane_Indiana_West_FIPS_1302",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",900000],PARAMETER["False_Northing",250000],PARAMETER["Central_Meridian",-87.08333333333333],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Meter",1]]
+13022,PROJCS["NAD_1983_StatePlane_Indiana_West_FIPS_1302_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2952750],PARAMETER["False_Northing",820208.3333333333],PARAMETER["Central_Meridian",-87.08333333333333],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Foot_US",0.304800609601219241]]
+13023,PROJCS["NAD_1983_HARN_StatePlane_Indiana_West_FIPS_1302_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2952750.0],PARAMETER["False_Northing",820208.3333333333],PARAMETER["Central_Meridian",-87.08333333333333],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Foot_US",0.3048006096012192]]
+13024,PROJCS["NAD_1927_StatePlane_Indiana_West_FIPS_1302",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-87.08333333333333],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Foot_US",0.304800609601219241]]
+14010,PROJCS["NAD_1983_HARN_StatePlane_Iowa_North_FIPS_1401",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1500000.0],PARAMETER["False_Northing",1000000.0],PARAMETER["Central_Meridian",-93.5],PARAMETER["Standard_Parallel_1",42.06666666666667],PARAMETER["Standard_Parallel_2",43.26666666666667],PARAMETER["Latitude_Of_Origin",41.5],UNIT["Meter",1.0]]
+14011,PROJCS["NAD_1983_StatePlane_Iowa_North_FIPS_1401",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1500000],PARAMETER["False_Northing",1000000],PARAMETER["Central_Meridian",-93.5],PARAMETER["Standard_Parallel_1",42.06666666666667],PARAMETER["Standard_Parallel_2",43.26666666666667],PARAMETER["Latitude_Of_Origin",41.5],UNIT["Meter",1]]
+14012,PROJCS["NAD_1983_StatePlane_Iowa_North_FIPS_1401_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",4921249.999999999],PARAMETER["False_Northing",3280833.333333333],PARAMETER["Central_Meridian",-93.5],PARAMETER["Standard_Parallel_1",42.06666666666667],PARAMETER["Standard_Parallel_2",43.26666666666667],PARAMETER["Latitude_Of_Origin",41.5],UNIT["Foot_US",0.304800609601219241]]
+14013,PROJCS["NAD_1983_HARN_StatePlane_Iowa_North_FIPS_1401_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",4921250.0],PARAMETER["False_Northing",3280833.333333333],PARAMETER["Central_Meridian",-93.5],PARAMETER["Standard_Parallel_1",42.06666666666667],PARAMETER["Standard_Parallel_2",43.26666666666667],PARAMETER["Latitude_Of_Origin",41.5],UNIT["Foot_US",0.3048006096012192]]
+14014,PROJCS["NAD_1927_StatePlane_Iowa_North_FIPS_1401",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-93.5],PARAMETER["Standard_Parallel_1",42.06666666666667],PARAMETER["Standard_Parallel_2",43.26666666666667],PARAMETER["Latitude_Of_Origin",41.5],UNIT["Foot_US",0.304800609601219241]]
+14020,PROJCS["NAD_1983_HARN_StatePlane_Iowa_South_FIPS_1402",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-93.5],PARAMETER["Standard_Parallel_1",40.61666666666667],PARAMETER["Standard_Parallel_2",41.78333333333333],PARAMETER["Latitude_Of_Origin",40.0],UNIT["Meter",1.0]]
+14021,PROJCS["NAD_1983_StatePlane_Iowa_South_FIPS_1402",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-93.5],PARAMETER["Standard_Parallel_1",40.61666666666667],PARAMETER["Standard_Parallel_2",41.78333333333333],PARAMETER["Latitude_Of_Origin",40],UNIT["Meter",1]]
+14022,PROJCS["NAD_1983_StatePlane_Iowa_South_FIPS_1402_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-93.5],PARAMETER["Standard_Parallel_1",40.61666666666667],PARAMETER["Standard_Parallel_2",41.78333333333333],PARAMETER["Latitude_Of_Origin",40],UNIT["Foot_US",0.304800609601219241]]
+14023,PROJCS["NAD_1983_HARN_StatePlane_Iowa_South_FIPS_1402_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-93.5],PARAMETER["Standard_Parallel_1",40.61666666666667],PARAMETER["Standard_Parallel_2",41.78333333333333],PARAMETER["Latitude_Of_Origin",40.0],UNIT["Foot_US",0.3048006096012192]]
+14024,PROJCS["NAD_1927_StatePlane_Iowa_South_FIPS_1402",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-93.5],PARAMETER["Standard_Parallel_1",40.61666666666667],PARAMETER["Standard_Parallel_2",41.78333333333333],PARAMETER["Latitude_Of_Origin",40],UNIT["Foot_US",0.304800609601219241]]
+15010,PROJCS["NAD_1983_HARN_StatePlane_Kansas_North_FIPS_1501",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",400000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98],PARAMETER["Standard_Parallel_1",38.71666666666667],PARAMETER["Standard_Parallel_2",39.78333333333333],PARAMETER["Latitude_Of_Origin",38.33333333333334],UNIT["Meter",1]]
+15011,PROJCS["NAD_1983_StatePlane_Kansas_North_FIPS_1501",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",400000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98],PARAMETER["Standard_Parallel_1",38.71666666666667],PARAMETER["Standard_Parallel_2",39.78333333333333],PARAMETER["Latitude_Of_Origin",38.33333333333334],UNIT["Meter",1]]
+15012,PROJCS["NAD_1983_StatePlane_Kansas_North_FIPS_1501_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1312333.333333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98],PARAMETER["Standard_Parallel_1",38.71666666666667],PARAMETER["Standard_Parallel_2",39.78333333333333],PARAMETER["Latitude_Of_Origin",38.33333333333334],UNIT["Foot_US",0.304800609601219241]]
+15013,PROJCS["NAD_1983_HARN_StatePlane_Kansas_North_FIPS_1501_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1312333.333333333],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-98.0],PARAMETER["Standard_Parallel_1",38.71666666666667],PARAMETER["Standard_Parallel_2",39.78333333333333],PARAMETER["Latitude_Of_Origin",38.33333333333334],UNIT["Foot_US",0.3048006096012192]]
+15014,PROJCS["NAD_1927_StatePlane_Kansas_North_FIPS_1501",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98],PARAMETER["Standard_Parallel_1",38.71666666666667],PARAMETER["Standard_Parallel_2",39.78333333333333],PARAMETER["Latitude_Of_Origin",38.33333333333334],UNIT["Foot_US",0.304800609601219241]]
+15020,PROJCS["NAD_1983_HARN_StatePlane_Kansas_South_FIPS_1502",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",400000],PARAMETER["False_Northing",400000],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",37.26666666666667],PARAMETER["Standard_Parallel_2",38.56666666666667],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Meter",1]]
+15021,PROJCS["NAD_1983_StatePlane_Kansas_South_FIPS_1502",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",400000],PARAMETER["False_Northing",400000],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",37.26666666666667],PARAMETER["Standard_Parallel_2",38.56666666666667],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Meter",1]]
+15022,PROJCS["NAD_1983_StatePlane_Kansas_South_FIPS_1502_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1312333.333333333],PARAMETER["False_Northing",1312333.333333333],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",37.26666666666667],PARAMETER["Standard_Parallel_2",38.56666666666667],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+15023,PROJCS["NAD_1983_HARN_StatePlane_Kansas_South_FIPS_1502_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1312333.333333333],PARAMETER["False_Northing",1312333.333333333],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",37.26666666666667],PARAMETER["Standard_Parallel_2",38.56666666666667],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.3048006096012192]]
+15024,PROJCS["NAD_1927_StatePlane_Kansas_South_FIPS_1502",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",37.26666666666667],PARAMETER["Standard_Parallel_2",38.56666666666667],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.304800609601219241]]
+16000,PROJCS["NAD_1983_HARN_StatePlane_Kentucky_FIPS_1600",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1500000.0],PARAMETER["False_Northing",1000000.0],PARAMETER["Central_Meridian",-85.75],PARAMETER["Standard_Parallel_1",37.08333333333334],PARAMETER["Standard_Parallel_2",38.66666666666666],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Meter",1.0]]
+16001,PROJCS["NAD_1983_StatePlane_Kentucky_FIPS_1600",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1500000.0],PARAMETER["False_Northing",1000000.0],PARAMETER["Central_Meridian",-85.75],PARAMETER["Standard_Parallel_1",37.08333333333334],PARAMETER["Standard_Parallel_2",38.66666666666666],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Meter",1.0]]
+16002,PROJCS["NAD_1983_StatePlane_Kentucky_FIPS_1600_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",4921250.0],PARAMETER["False_Northing",3280833.333333333],PARAMETER["Central_Meridian",-85.75],PARAMETER["Standard_Parallel_1",37.08333333333334],PARAMETER["Standard_Parallel_2",38.66666666666666],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Foot_US",0.3048006096012192]]
+16003,PROJCS["NAD_1983_HARN_StatePlane_Kentucky_FIPS_1600_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",4921250.0],PARAMETER["False_Northing",3280833.333333333],PARAMETER["Central_Meridian",-85.75],PARAMETER["Standard_Parallel_1",37.08333333333334],PARAMETER["Standard_Parallel_2",38.66666666666666],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Foot_US",0.3048006096012192]]
+16010,PROJCS["NAD_1983_HARN_StatePlane_Kentucky_North_FIPS_1601",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.25],PARAMETER["Standard_Parallel_1",37.96666666666667],PARAMETER["Standard_Parallel_2",38.96666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Meter",1]]
+16011,PROJCS["NAD_1983_StatePlane_Kentucky_North_FIPS_1601",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.25],PARAMETER["Standard_Parallel_1",37.96666666666667],PARAMETER["Standard_Parallel_2",38.96666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Meter",1]]
+16012,PROJCS["NAD_1983_StatePlane_Kentucky_North_FIPS_1601_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.25],PARAMETER["Standard_Parallel_1",37.96666666666667],PARAMETER["Standard_Parallel_2",38.96666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Foot_US",0.304800609601219241]]
+16013,PROJCS["NAD_1983_HARN_StatePlane_Kentucky_North_FIPS_1601_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-84.25],PARAMETER["Standard_Parallel_1",37.96666666666667],PARAMETER["Standard_Parallel_2",38.96666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Foot_US",0.3048006096012192]]
+16014,PROJCS["NAD_1927_StatePlane_Kentucky_North_FIPS_1601",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.25],PARAMETER["Standard_Parallel_1",37.96666666666667],PARAMETER["Standard_Parallel_2",38.96666666666667],PARAMETER["Latitude_Of_Origin",37.5],UNIT["Foot_US",0.304800609601219241]]
+16020,PROJCS["NAD_1983_HARN_StatePlane_Kentucky_South_FIPS_1602",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-85.75],PARAMETER["Standard_Parallel_1",36.73333333333333],PARAMETER["Standard_Parallel_2",37.93333333333333],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Meter",1]]
+16021,PROJCS["NAD_1983_StatePlane_Kentucky_South_FIPS_1602",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",500000],PARAMETER["Central_Meridian",-85.75],PARAMETER["Standard_Parallel_1",36.73333333333333],PARAMETER["Standard_Parallel_2",37.93333333333333],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Meter",1]]
+16022,PROJCS["NAD_1983_StatePlane_Kentucky_South_FIPS_1602_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-85.75],PARAMETER["Standard_Parallel_1",36.73333333333333],PARAMETER["Standard_Parallel_2",37.93333333333333],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Foot_US",0.304800609601219241]]
+16023,PROJCS["NAD_1983_HARN_StatePlane_Kentucky_South_FIPS_1602_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",1640416.666666667],PARAMETER["Central_Meridian",-85.75],PARAMETER["Standard_Parallel_1",36.73333333333333],PARAMETER["Standard_Parallel_2",37.93333333333333],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Foot_US",0.3048006096012192]]
+16024,PROJCS["NAD_1927_StatePlane_Kentucky_South_FIPS_1602",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-85.75],PARAMETER["Standard_Parallel_1",36.73333333333333],PARAMETER["Standard_Parallel_2",37.93333333333333],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Foot_US",0.304800609601219241]]
+17010,PROJCS["NAD_1983_HARN_StatePlane_Louisiana_North_FIPS_1701",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-92.5],PARAMETER["Standard_Parallel_1",31.16666666666667],PARAMETER["Standard_Parallel_2",32.66666666666666],PARAMETER["Latitude_Of_Origin",30.5],UNIT["Meter",1]]
+17011,PROJCS["NAD_1983_StatePlane_Louisiana_North_FIPS_1701",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-92.5],PARAMETER["Standard_Parallel_1",31.16666666666667],PARAMETER["Standard_Parallel_2",32.66666666666666],PARAMETER["Latitude_Of_Origin",30.5],UNIT["Meter",1]]
+17012,PROJCS["NAD_1983_StatePlane_Louisiana_North_FIPS_1701_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3280833.333333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-92.5],PARAMETER["Standard_Parallel_1",31.16666666666667],PARAMETER["Standard_Parallel_2",32.66666666666666],PARAMETER["Latitude_Of_Origin",30.5],UNIT["Foot_US",0.304800609601219241]]
+17013,PROJCS["NAD_1983_HARN_StatePlane_Louisiana_North_FIPS_1701_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3280833.333333333],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-92.5],PARAMETER["Standard_Parallel_1",31.16666666666667],PARAMETER["Standard_Parallel_2",32.66666666666666],PARAMETER["Latitude_Of_Origin",30.5],UNIT["Foot_US",0.3048006096012192]]
+17014,PROJCS["NAD_1927_StatePlane_Louisiana_North_FIPS_1701",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-92.5],PARAMETER["Standard_Parallel_1",31.16666666666667],PARAMETER["Standard_Parallel_2",32.66666666666666],PARAMETER["Latitude_Of_Origin",30.66666666666667],UNIT["Foot_US",0.304800609601219241]]
+17020,PROJCS["NAD_1983_HARN_StatePlane_Louisiana_South_FIPS_1702",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-91.33333333333333],PARAMETER["Standard_Parallel_1",29.3],PARAMETER["Standard_Parallel_2",30.7],PARAMETER["Latitude_Of_Origin",28.5],UNIT["Meter",1]]
+17021,PROJCS["NAD_1983_StatePlane_Louisiana_South_FIPS_1702",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-91.33333333333333],PARAMETER["Standard_Parallel_1",29.3],PARAMETER["Standard_Parallel_2",30.7],PARAMETER["Latitude_Of_Origin",28.5],UNIT["Meter",1]]
+17022,PROJCS["NAD_1983_StatePlane_Louisiana_South_FIPS_1702_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3280833.333333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-91.33333333333333],PARAMETER["Standard_Parallel_1",29.3],PARAMETER["Standard_Parallel_2",30.7],PARAMETER["Latitude_Of_Origin",28.5],UNIT["Foot_US",0.304800609601219241]]
+17023,PROJCS["NAD_1983_HARN_StatePlane_Louisiana_South_FIPS_1702_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3280833.333333333],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-91.33333333333333],PARAMETER["Standard_Parallel_1",29.3],PARAMETER["Standard_Parallel_2",30.7],PARAMETER["Latitude_Of_Origin",28.5],UNIT["Foot_US",0.3048006096012192]] +17024,PROJCS["NAD_1927_StatePlane_Louisiana_South_FIPS_1702",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-91.33333333333333],PARAMETER["Standard_Parallel_1",29.3],PARAMETER["Standard_Parallel_2",30.7],PARAMETER["Latitude_Of_Origin",28.66666666666667],UNIT["Foot_US",0.304800609601219241]] +17031,PROJCS["NAD_1983_StatePlane_Louisiana_Offshore_FIPS_1703",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1000000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-91.33333333333333],PARAMETER["Standard_Parallel_1",26.16666666666667],PARAMETER["Standard_Parallel_2",27.83333333333333],PARAMETER["Latitude_Of_Origin",25.5],UNIT["Meter",1.0]] +17032,PROJCS["NAD_1983_StatePlane_Louisiana_Offshore_FIPS_1703_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3280833.333333333],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-91.33333333333333],PARAMETER["Standard_Parallel_1",26.16666666666667],PARAMETER["Standard_Parallel_2",27.83333333333333],PARAMETER["Latitude_Of_Origin",25.5],UNIT["Foot_US",0.3048006096012192]] +17034,PROJCS["NAD_1927_StatePlane_Louisiana_Offshore_FIPS_1703",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-91.33333333333333],PARAMETER["Standard_Parallel_1",26.16666666666667],PARAMETER["Standard_Parallel_2",27.83333333333333],PARAMETER["Latitude_Of_Origin",25.66666666666667],UNIT["Foot_US",0.3048006096012192]] +18010,PROJCS["NAD_1983_HARN_StatePlane_Maine_East_FIPS_1801",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",300000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-68.5],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",43.66666666666666],UNIT["Meter",1]] 
+18011,PROJCS["NAD_1983_StatePlane_Maine_East_FIPS_1801",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",300000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-68.5],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",43.66666666666666],UNIT["Meter",1]] +18012,PROJCS["NAD_1983_StatePlane_Maine_East_FIPS_1801_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",984249.9999999999],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-68.5],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",43.66666666666666],UNIT["Foot_US",0.304800609601219241]] +18014,PROJCS["NAD_1927_StatePlane_Maine_East_FIPS_1801",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-68.5],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",43.83333333333334],UNIT["Foot_US",0.304800609601219241]] +18020,PROJCS["NAD_1983_HARN_StatePlane_Maine_West_FIPS_1802",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",900000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-70.16666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",42.83333333333334],UNIT["Meter",1]] +18021,PROJCS["NAD_1983_StatePlane_Maine_West_FIPS_1802",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",900000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-70.16666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",42.83333333333334],UNIT["Meter",1]] +18022,PROJCS["NAD_1983_StatePlane_Maine_West_FIPS_1802_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2952750],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-70.16666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",42.83333333333334],UNIT["Foot_US",0.304800609601219241]] +18024,PROJCS["NAD_1927_StatePlane_Maine_West_FIPS_1802",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-70.16666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",42.83333333333334],UNIT["Foot_US",0.304800609601219241]] 
+19000,PROJCS["NAD_1983_HARN_StatePlane_Maryland_FIPS_1900",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",400000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-77],PARAMETER["Standard_Parallel_1",38.3],PARAMETER["Standard_Parallel_2",39.45],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Meter",1]] +19001,PROJCS["NAD_1983_StatePlane_Maryland_FIPS_1900",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",400000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-77],PARAMETER["Standard_Parallel_1",38.3],PARAMETER["Standard_Parallel_2",39.45],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Meter",1]] +19002,PROJCS["NAD_1983_StatePlane_Maryland_FIPS_1900_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1312333.333333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-77],PARAMETER["Standard_Parallel_1",38.3],PARAMETER["Standard_Parallel_2",39.45],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Foot_US",0.304800609601219241]] +19003,PROJCS["NAD_1983_HARN_StatePlane_Maryland_FIPS_1900_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1312333.333333333],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-77.0],PARAMETER["Standard_Parallel_1",38.3],PARAMETER["Standard_Parallel_2",39.45],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Foot_US",0.3048006096012192]] +19004,PROJCS["NAD_1927_StatePlane_Maryland_FIPS_1900",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",800000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-77],PARAMETER["Standard_Parallel_1",38.3],PARAMETER["Standard_Parallel_2",39.45],PARAMETER["Latitude_Of_Origin",37.83333333333334],UNIT["Foot_US",0.304800609601219241]] +20010,PROJCS["NAD_1983_HARN_StatePlane_Massachusetts_Mainland_FIPS_2001",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",750000],PARAMETER["Central_Meridian",-71.5],PARAMETER["Standard_Parallel_1",41.71666666666667],PARAMETER["Standard_Parallel_2",42.68333333333333],PARAMETER["Latitude_Of_Origin",41],UNIT["Meter",1]] 
+20011,PROJCS["NAD_1983_StatePlane_Massachusetts_Mainland_FIPS_2001",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",750000],PARAMETER["Central_Meridian",-71.5],PARAMETER["Standard_Parallel_1",41.71666666666667],PARAMETER["Standard_Parallel_2",42.68333333333333],PARAMETER["Latitude_Of_Origin",41],UNIT["Meter",1]] +20012,PROJCS["NAD_1983_StatePlane_Massachusetts_Mainland_FIPS_2001_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",2460625],PARAMETER["Central_Meridian",-71.5],PARAMETER["Standard_Parallel_1",41.71666666666667],PARAMETER["Standard_Parallel_2",42.68333333333333],PARAMETER["Latitude_Of_Origin",41],UNIT["Foot_US",0.304800609601219241]] +20013,PROJCS["NAD_1983_HARN_StatePlane_Massachusetts_Mainland_FIPS_2001_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",2460625.0],PARAMETER["Central_Meridian",-71.5],PARAMETER["Standard_Parallel_1",41.71666666666667],PARAMETER["Standard_Parallel_2",42.68333333333333],PARAMETER["Latitude_Of_Origin",41.0],UNIT["Foot_US",0.3048006096012192]] +20014,PROJCS["NAD_1927_StatePlane_Massachusetts_Mainland_FIPS_2001",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-71.5],PARAMETER["Standard_Parallel_1",41.71666666666667],PARAMETER["Standard_Parallel_2",42.68333333333333],PARAMETER["Latitude_Of_Origin",41],UNIT["Foot_US",0.304800609601219241]] +20020,PROJCS["NAD_1983_HARN_StatePlane_Massachusetts_Island_FIPS_2002",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-70.5],PARAMETER["Standard_Parallel_1",41.28333333333333],PARAMETER["Standard_Parallel_2",41.48333333333333],PARAMETER["Latitude_Of_Origin",41],UNIT["Meter",1]] +20021,PROJCS["NAD_1983_StatePlane_Massachusetts_Island_FIPS_2002",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-70.5],PARAMETER["Standard_Parallel_1",41.28333333333333],PARAMETER["Standard_Parallel_2",41.48333333333333],PARAMETER["Latitude_Of_Origin",41],UNIT["Meter",1]] 
+20022,PROJCS["NAD_1983_StatePlane_Massachusetts_Island_FIPS_2002_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-70.5],PARAMETER["Standard_Parallel_1",41.28333333333333],PARAMETER["Standard_Parallel_2",41.48333333333333],PARAMETER["Latitude_Of_Origin",41],UNIT["Foot_US",0.304800609601219241]] +20023,PROJCS["NAD_1983_HARN_StatePlane_Massachusetts_Island_FIPS_2002_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-70.5],PARAMETER["Standard_Parallel_1",41.28333333333333],PARAMETER["Standard_Parallel_2",41.48333333333333],PARAMETER["Latitude_Of_Origin",41.0],UNIT["Foot_US",0.3048006096012192]] +20024,PROJCS["NAD_1927_StatePlane_Massachusetts_Island_FIPS_2002",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-70.5],PARAMETER["Standard_Parallel_1",41.28333333333333],PARAMETER["Standard_Parallel_2",41.48333333333333],PARAMETER["Latitude_Of_Origin",41],UNIT["Foot_US",0.304800609601219241]] +21110,PROJCS["NAD_1983_HARN_StatePlane_Michigan_North_FIPS_2111",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",8000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-87],PARAMETER["Standard_Parallel_1",45.48333333333333],PARAMETER["Standard_Parallel_2",47.08333333333334],PARAMETER["Latitude_Of_Origin",44.78333333333333],UNIT["Meter",1]] +21111,PROJCS["NAD_1983_StatePlane_Michigan_North_FIPS_2111",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",8000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-87],PARAMETER["Standard_Parallel_1",45.48333333333333],PARAMETER["Standard_Parallel_2",47.08333333333334],PARAMETER["Latitude_Of_Origin",44.78333333333333],UNIT["Meter",1]] +21112,PROJCS["NAD_1983_StatePlane_Michigan_North_FIPS_2111_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",26246666.66666666],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-87],PARAMETER["Standard_Parallel_1",45.48333333333333],PARAMETER["Standard_Parallel_2",47.08333333333334],PARAMETER["Latitude_Of_Origin",44.78333333333333],UNIT["Foot_US",0.304800609601219241]] 
+21113,PROJCS["NAD_1983_HARN_StatePlane_Michigan_North_FIPS_2111_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",26246719.16010498],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-87.0],PARAMETER["Standard_Parallel_1",45.48333333333333],PARAMETER["Standard_Parallel_2",47.08333333333334],PARAMETER["Latitude_Of_Origin",44.78333333333333],UNIT["Foot",0.3048]] +21114,PROJCS["NAD_1927_StatePlane_Michigan_North_FIPS_2111",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-87],PARAMETER["Standard_Parallel_1",45.48333333333333],PARAMETER["Standard_Parallel_2",47.08333333333334],PARAMETER["Latitude_Of_Origin",44.78333333333333],UNIT["Foot_US",0.304800609601219241]] +21115,PROJCS["NAD_1983_HARN_StatePlane_Michigan_North_FIPS_2111_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",26246719.16010498],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-87.0],PARAMETER["Standard_Parallel_1",45.48333333333333],PARAMETER["Standard_Parallel_2",47.08333333333334],PARAMETER["Latitude_Of_Origin",44.78333333333333],UNIT["Foot",0.3048]] +21116,PROJCS["NAD_1983_StatePlane_Michigan_North_FIPS_2111_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",26246719.16010498],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-87.0],PARAMETER["Standard_Parallel_1",45.48333333333333],PARAMETER["Standard_Parallel_2",47.08333333333334],PARAMETER["Latitude_Of_Origin",44.78333333333333],UNIT["Foot",0.3048]] +21120,PROJCS["NAD_1983_HARN_StatePlane_Michigan_Central_FIPS_2112",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.36666666666666],PARAMETER["Standard_Parallel_1",44.18333333333333],PARAMETER["Standard_Parallel_2",45.7],PARAMETER["Latitude_Of_Origin",43.31666666666667],UNIT["Meter",1]] +21121,PROJCS["NAD_1983_StatePlane_Michigan_Central_FIPS_2112",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.36666666666666],PARAMETER["Standard_Parallel_1",44.18333333333333],PARAMETER["Standard_Parallel_2",45.7],PARAMETER["Latitude_Of_Origin",43.31666666666667],UNIT["Meter",1]] 
+21122,PROJCS["NAD_1983_StatePlane_Michigan_Central_FIPS_2112_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",19685000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.36666666666666],PARAMETER["Standard_Parallel_1",44.18333333333333],PARAMETER["Standard_Parallel_2",45.7],PARAMETER["Latitude_Of_Origin",43.31666666666667],UNIT["Foot_US",0.304800609601219241]] +21123,PROJCS["NAD_1983_HARN_StatePlane_Michigan_Central_FIPS_2112_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",19685039.37007874],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-84.36666666666666],PARAMETER["Standard_Parallel_1",44.18333333333333],PARAMETER["Standard_Parallel_2",45.7],PARAMETER["Latitude_Of_Origin",43.31666666666667],UNIT["Foot",0.3048]] +21124,PROJCS["NAD_1927_StatePlane_Michigan_Central_FIPS_2112",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.33333333333333],PARAMETER["Standard_Parallel_1",44.18333333333333],PARAMETER["Standard_Parallel_2",45.7],PARAMETER["Latitude_Of_Origin",43.31666666666667],UNIT["Foot_US",0.304800609601219241]] +21125,PROJCS["NAD_1983_HARN_StatePlane_Michigan_Central_FIPS_2112_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",19685039.37007874],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-84.36666666666666],PARAMETER["Standard_Parallel_1",44.18333333333333],PARAMETER["Standard_Parallel_2",45.7],PARAMETER["Latitude_Of_Origin",43.31666666666667],UNIT["Foot",0.3048]] +21126,PROJCS["NAD_1983_StatePlane_Michigan_Central_FIPS_2112_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",19685039.37007874],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-84.36666666666666],PARAMETER["Standard_Parallel_1",44.18333333333333],PARAMETER["Standard_Parallel_2",45.7],PARAMETER["Latitude_Of_Origin",43.31666666666667],UNIT["Foot",0.3048]] +21130,PROJCS["NAD_1983_HARN_StatePlane_Michigan_South_FIPS_2113",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",4000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.36666666666666],PARAMETER["Standard_Parallel_1",42.1],PARAMETER["Standard_Parallel_2",43.66666666666666],PARAMETER["Latitude_Of_Origin",41.5],UNIT["Meter",1]] 
+21131,PROJCS["NAD_1983_StatePlane_Michigan_South_FIPS_2113",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",4000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.36666666666666],PARAMETER["Standard_Parallel_1",42.1],PARAMETER["Standard_Parallel_2",43.66666666666666],PARAMETER["Latitude_Of_Origin",41.5],UNIT["Meter",1]] +21132,PROJCS["NAD_1983_StatePlane_Michigan_South_FIPS_2113_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",13123333.33333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.36666666666666],PARAMETER["Standard_Parallel_1",42.1],PARAMETER["Standard_Parallel_2",43.66666666666666],PARAMETER["Latitude_Of_Origin",41.5],UNIT["Foot_US",0.304800609601219241]] +21133,PROJCS["NAD_1983_HARN_StatePlane_Michigan_South_FIPS_2113_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",13123359.58005249],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-84.36666666666666],PARAMETER["Standard_Parallel_1",42.1],PARAMETER["Standard_Parallel_2",43.66666666666666],PARAMETER["Latitude_Of_Origin",41.5],UNIT["Foot",0.3048]] +21134,PROJCS["NAD_1927_StatePlane_Michigan_South_FIPS_2113",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-84.33333333333333],PARAMETER["Standard_Parallel_1",42.1],PARAMETER["Standard_Parallel_2",43.66666666666666],PARAMETER["Latitude_Of_Origin",41.5],UNIT["Foot_US",0.304800609601219241]] +21135,PROJCS["NAD_1983_HARN_StatePlane_Michigan_South_FIPS_2113_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",13123359.58005249],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-84.36666666666666],PARAMETER["Standard_Parallel_1",42.1],PARAMETER["Standard_Parallel_2",43.66666666666666],PARAMETER["Latitude_Of_Origin",41.5],UNIT["Foot",0.3048]] +21136,PROJCS["NAD_1983_StatePlane_Michigan_South_FIPS_2113_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",13123359.58005249],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-84.36666666666666],PARAMETER["Standard_Parallel_1",42.1],PARAMETER["Standard_Parallel_2",43.66666666666666],PARAMETER["Latitude_Of_Origin",41.5],UNIT["Foot",0.3048]] 
+22010,PROJCS["NAD_1983_HARN_StatePlane_Minnesota_North_FIPS_2201",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",800000.0],PARAMETER["False_Northing",100000.0],PARAMETER["Central_Meridian",-93.1],PARAMETER["Standard_Parallel_1",47.03333333333333],PARAMETER["Standard_Parallel_2",48.63333333333333],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Meter",1.0]] +22011,PROJCS["NAD_1983_StatePlane_Minnesota_North_FIPS_2201",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",800000],PARAMETER["False_Northing",100000],PARAMETER["Central_Meridian",-93.09999999999999],PARAMETER["Standard_Parallel_1",47.03333333333333],PARAMETER["Standard_Parallel_2",48.63333333333333],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Meter",1]] +22012,PROJCS["NAD_1983_StatePlane_Minnesota_North_FIPS_2201_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2624666.666666666],PARAMETER["False_Northing",328083.3333333333],PARAMETER["Central_Meridian",-93.09999999999999],PARAMETER["Standard_Parallel_1",47.03333333333333],PARAMETER["Standard_Parallel_2",48.63333333333333],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Foot_US",0.304800609601219241]] +22013,PROJCS["NAD_1983_HARN_StatePlane_Minnesota_North_FIPS_2201_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2624666.666666666],PARAMETER["False_Northing",328083.3333333333],PARAMETER["Central_Meridian",-93.1],PARAMETER["Standard_Parallel_1",47.03333333333333],PARAMETER["Standard_Parallel_2",48.63333333333333],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Foot_US",0.3048006096012192]] +22014,PROJCS["NAD_1927_StatePlane_Minnesota_North_FIPS_2201",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-93.09999999999999],PARAMETER["Standard_Parallel_1",47.03333333333333],PARAMETER["Standard_Parallel_2",48.63333333333333],PARAMETER["Latitude_Of_Origin",46.5],UNIT["Foot_US",0.304800609601219241]] +22020,PROJCS["NAD_1983_HARN_StatePlane_Minnesota_Central_FIPS_2202",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",800000.0],PARAMETER["False_Northing",100000.0],PARAMETER["Central_Meridian",-94.25],PARAMETER["Standard_Parallel_1",45.61666666666667],PARAMETER["Standard_Parallel_2",47.05],PARAMETER["Latitude_Of_Origin",45.0],UNIT["Meter",1.0]] 
+22021,PROJCS["NAD_1983_StatePlane_Minnesota_Central_FIPS_2202",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",800000],PARAMETER["False_Northing",100000],PARAMETER["Central_Meridian",-94.25],PARAMETER["Standard_Parallel_1",45.61666666666667],PARAMETER["Standard_Parallel_2",47.05],PARAMETER["Latitude_Of_Origin",45],UNIT["Meter",1]] +22022,PROJCS["NAD_1983_StatePlane_Minnesota_Central_FIPS_2202_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2624666.666666666],PARAMETER["False_Northing",328083.3333333333],PARAMETER["Central_Meridian",-94.25],PARAMETER["Standard_Parallel_1",45.61666666666667],PARAMETER["Standard_Parallel_2",47.05],PARAMETER["Latitude_Of_Origin",45],UNIT["Foot_US",0.304800609601219241]] +22023,PROJCS["NAD_1983_HARN_StatePlane_Minnesota_Central_FIPS_2202_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2624666.666666666],PARAMETER["False_Northing",328083.3333333333],PARAMETER["Central_Meridian",-94.25],PARAMETER["Standard_Parallel_1",45.61666666666667],PARAMETER["Standard_Parallel_2",47.05],PARAMETER["Latitude_Of_Origin",45.0],UNIT["Foot_US",0.3048006096012192]] +22024,PROJCS["NAD_1927_StatePlane_Minnesota_Central_FIPS_2202",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-94.25],PARAMETER["Standard_Parallel_1",45.61666666666667],PARAMETER["Standard_Parallel_2",47.05],PARAMETER["Latitude_Of_Origin",45],UNIT["Foot_US",0.304800609601219241]] +22030,PROJCS["NAD_1983_HARN_StatePlane_Minnesota_South_FIPS_2203",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",800000.0],PARAMETER["False_Northing",100000.0],PARAMETER["Central_Meridian",-94.0],PARAMETER["Standard_Parallel_1",43.78333333333333],PARAMETER["Standard_Parallel_2",45.21666666666667],PARAMETER["Latitude_Of_Origin",43.0],UNIT["Meter",1.0]] +22031,PROJCS["NAD_1983_StatePlane_Minnesota_South_FIPS_2203",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",800000],PARAMETER["False_Northing",100000],PARAMETER["Central_Meridian",-94],PARAMETER["Standard_Parallel_1",43.78333333333333],PARAMETER["Standard_Parallel_2",45.21666666666667],PARAMETER["Latitude_Of_Origin",43],UNIT["Meter",1]] 
+22032,PROJCS["NAD_1983_StatePlane_Minnesota_South_FIPS_2203_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2624666.666666666],PARAMETER["False_Northing",328083.3333333333],PARAMETER["Central_Meridian",-94],PARAMETER["Standard_Parallel_1",43.78333333333333],PARAMETER["Standard_Parallel_2",45.21666666666667],PARAMETER["Latitude_Of_Origin",43],UNIT["Foot_US",0.304800609601219241]] +22033,PROJCS["NAD_1983_HARN_StatePlane_Minnesota_South_FIPS_2203_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2624666.666666666],PARAMETER["False_Northing",328083.3333333333],PARAMETER["Central_Meridian",-94.0],PARAMETER["Standard_Parallel_1",43.78333333333333],PARAMETER["Standard_Parallel_2",45.21666666666667],PARAMETER["Latitude_Of_Origin",43.0],UNIT["Foot_US",0.3048006096012192]] +22034,PROJCS["NAD_1927_StatePlane_Minnesota_South_FIPS_2203",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-94],PARAMETER["Standard_Parallel_1",43.78333333333333],PARAMETER["Standard_Parallel_2",45.21666666666667],PARAMETER["Latitude_Of_Origin",43],UNIT["Foot_US",0.304800609601219241]] +23010,PROJCS["NAD_1983_HARN_StatePlane_Mississippi_East_FIPS_2301",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",300000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-88.83333333333333],PARAMETER["Scale_Factor",0.99995],PARAMETER["Latitude_Of_Origin",29.5],UNIT["Meter",1]] +23011,PROJCS["NAD_1983_StatePlane_Mississippi_East_FIPS_2301",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",300000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-88.83333333333333],PARAMETER["Scale_Factor",0.99995],PARAMETER["Latitude_Of_Origin",29.5],UNIT["Meter",1]] +23012,PROJCS["NAD_1983_StatePlane_Mississippi_East_FIPS_2301_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",984249.9999999999],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-88.83333333333333],PARAMETER["Scale_Factor",0.99995],PARAMETER["Latitude_Of_Origin",29.5],UNIT["Foot_US",0.304800609601219241]] 
+23013,PROJCS["NAD_1983_HARN_StatePlane_Mississippi_East_FIPS_2301_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",984250.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-88.83333333333333],PARAMETER["Scale_Factor",0.99995],PARAMETER["Latitude_Of_Origin",29.5],UNIT["Foot_US",0.3048006096012192]] +23014,PROJCS["NAD_1927_StatePlane_Mississippi_East_FIPS_2301",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-88.83333333333333],PARAMETER["Scale_Factor",0.99996],PARAMETER["Latitude_Of_Origin",29.66666666666667],UNIT["Foot_US",0.304800609601219241]] +23020,PROJCS["NAD_1983_HARN_StatePlane_Mississippi_West_FIPS_2302",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90.33333333333333],PARAMETER["Scale_Factor",0.99995],PARAMETER["Latitude_Of_Origin",29.5],UNIT["Meter",1]] +23021,PROJCS["NAD_1983_StatePlane_Mississippi_West_FIPS_2302",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90.33333333333333],PARAMETER["Scale_Factor",0.99995],PARAMETER["Latitude_Of_Origin",29.5],UNIT["Meter",1]] +23022,PROJCS["NAD_1983_StatePlane_Mississippi_West_FIPS_2302_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2296583.333333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90.33333333333333],PARAMETER["Scale_Factor",0.99995],PARAMETER["Latitude_Of_Origin",29.5],UNIT["Foot_US",0.304800609601219241]] +23023,PROJCS["NAD_1983_HARN_StatePlane_Mississippi_West_FIPS_2302_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2296583.333333333],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-90.33333333333333],PARAMETER["Scale_Factor",0.99995],PARAMETER["Latitude_Of_Origin",29.5],UNIT["Foot_US",0.3048006096012192]] +23024,PROJCS["NAD_1927_StatePlane_Mississippi_West_FIPS_2302",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90.33333333333333],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",30.5],UNIT["Foot_US",0.304800609601219241]] 
+24010,PROJCS["NAD_1983_HARN_StatePlane_Missouri_East_FIPS_2401",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",250000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-90.5],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",35.83333333333334],UNIT["Meter",1.0]] +24011,PROJCS["NAD_1983_StatePlane_Missouri_East_FIPS_2401",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",250000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90.5],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",35.83333333333334],UNIT["Meter",1]] +24012,PROJCS["NAD_1983_StatePlane_Missouri_East_FIPS_2401_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",820208.3333333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90.5],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",35.83333333333334],UNIT["Foot_US",0.304800609601219241]] +24014,PROJCS["NAD_1927_StatePlane_Missouri_East_FIPS_2401",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90.5],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",35.83333333333334],UNIT["Foot_US",0.304800609601219241]] +24020,PROJCS["NAD_1983_HARN_StatePlane_Missouri_Central_FIPS_2402",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-92.5],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",35.83333333333334],UNIT["Meter",1.0]] +24021,PROJCS["NAD_1983_StatePlane_Missouri_Central_FIPS_2402",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-92.5],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",35.83333333333334],UNIT["Meter",1]] +24022,PROJCS["NAD_1983_StatePlane_Missouri_Central_FIPS_2402_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-92.5],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",35.83333333333334],UNIT["Foot_US",0.304800609601219241]] 
+24024,PROJCS["NAD_1927_StatePlane_Missouri_Central_FIPS_2402",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-92.5],PARAMETER["Scale_Factor",0.9999333333333333],PARAMETER["Latitude_Of_Origin",35.83333333333334],UNIT["Foot_US",0.304800609601219241]] +24030,PROJCS["NAD_1983_HARN_StatePlane_Missouri_West_FIPS_2403",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",850000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-94.5],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",36.16666666666666],UNIT["Meter",1.0]] +24031,PROJCS["NAD_1983_StatePlane_Missouri_West_FIPS_2403",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",850000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-94.5],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",36.16666666666666],UNIT["Meter",1]] +24032,PROJCS["NAD_1983_StatePlane_Missouri_West_FIPS_2403_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2788708.333333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-94.5],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",36.16666666666666],UNIT["Foot_US",0.304800609601219241]] +24034,PROJCS["NAD_1927_StatePlane_Missouri_West_FIPS_2403",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-94.5],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",36.16666666666666],UNIT["Foot_US",0.304800609601219241]] +25000,PROJCS["NAD_1983_HARN_StatePlane_Montana_FIPS_2500",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-109.5],PARAMETER["Standard_Parallel_1",45],PARAMETER["Standard_Parallel_2",49],PARAMETER["Latitude_Of_Origin",44.25],UNIT["Meter",1]] +25001,PROJCS["NAD_1983_StatePlane_Montana_FIPS_2500",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-109.5],PARAMETER["Standard_Parallel_1",45],PARAMETER["Standard_Parallel_2",49],PARAMETER["Latitude_Of_Origin",44.25],UNIT["Meter",1]] 
+25002,PROJCS["NAD_1983_StatePlane_Montana_FIPS_2500_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-109.5],PARAMETER["Standard_Parallel_1",45],PARAMETER["Standard_Parallel_2",49],PARAMETER["Latitude_Of_Origin",44.25],UNIT["Foot_US",0.304800609601219241]] +25003,PROJCS["NAD_1983_HARN_StatePlane_Montana_FIPS_2500_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968503.937007874],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-109.5],PARAMETER["Standard_Parallel_1",45.0],PARAMETER["Standard_Parallel_2",49.0],PARAMETER["Latitude_Of_Origin",44.25],UNIT["Foot",0.3048]] +25005,PROJCS["NAD_1983_HARN_StatePlane_Montana_FIPS_2500_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968503.937007874],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-109.5],PARAMETER["Standard_Parallel_1",45.0],PARAMETER["Standard_Parallel_2",49.0],PARAMETER["Latitude_Of_Origin",44.25],UNIT["Foot",0.3048]] +25006,PROJCS["NAD_1983_StatePlane_Montana_FIPS_2500_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968503.937007874],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-109.5],PARAMETER["Standard_Parallel_1",45.0],PARAMETER["Standard_Parallel_2",49.0],PARAMETER["Latitude_Of_Origin",44.25],UNIT["Foot",0.3048]] +25014,PROJCS["NAD_1927_StatePlane_Montana_North_FIPS_2501",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-109.5],PARAMETER["Standard_Parallel_1",47.85],PARAMETER["Standard_Parallel_2",48.71666666666667],PARAMETER["Latitude_Of_Origin",47],UNIT["Foot_US",0.304800609601219241]] +25024,PROJCS["NAD_1927_StatePlane_Montana_Central_FIPS_2502",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-109.5],PARAMETER["Standard_Parallel_1",46.45],PARAMETER["Standard_Parallel_2",47.88333333333333],PARAMETER["Latitude_Of_Origin",45.83333333333334],UNIT["Foot_US",0.304800609601219241]] 
+25034,PROJCS["NAD_1927_StatePlane_Montana_South_FIPS_2503",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-109.5],PARAMETER["Standard_Parallel_1",44.86666666666667],PARAMETER["Standard_Parallel_2",46.4],PARAMETER["Latitude_Of_Origin",44],UNIT["Foot_US",0.304800609601219241]] +26000,PROJCS["NAD_1983_HARN_StatePlane_Nebraska_FIPS_2600",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100],PARAMETER["Standard_Parallel_1",40],PARAMETER["Standard_Parallel_2",43],PARAMETER["Latitude_Of_Origin",39.83333333333334],UNIT["Meter",1]] +26001,PROJCS["NAD_1983_StatePlane_Nebraska_FIPS_2600",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100],PARAMETER["Standard_Parallel_1",40],PARAMETER["Standard_Parallel_2",43],PARAMETER["Latitude_Of_Origin",39.83333333333334],UNIT["Meter",1]] +26002,PROJCS["NAD_1983_StatePlane_Nebraska_FIPS_2600_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100],PARAMETER["Standard_Parallel_1",40],PARAMETER["Standard_Parallel_2",43],PARAMETER["Latitude_Of_Origin",39.83333333333334],UNIT["Foot_US",0.304800609601219241]] +26014,PROJCS["NAD_1927_StatePlane_Nebraska_North_FIPS_2601",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100],PARAMETER["Standard_Parallel_1",41.85],PARAMETER["Standard_Parallel_2",42.81666666666667],PARAMETER["Latitude_Of_Origin",41.33333333333334],UNIT["Foot_US",0.304800609601219241]] +26024,PROJCS["NAD_1927_StatePlane_Nebraska_South_FIPS_2602",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-99.5],PARAMETER["Standard_Parallel_1",40.28333333333333],PARAMETER["Standard_Parallel_2",41.71666666666667],PARAMETER["Latitude_Of_Origin",39.66666666666666],UNIT["Foot_US",0.304800609601219241]] 
+27010,PROJCS["NAD_1983_HARN_StatePlane_Nevada_East_FIPS_2701",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",8000000],PARAMETER["Central_Meridian",-115.5833333333333],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Meter",1]] +27011,PROJCS["NAD_1983_StatePlane_Nevada_East_FIPS_2701",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",8000000],PARAMETER["Central_Meridian",-115.5833333333333],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Meter",1]] +27012,PROJCS["NAD_1983_StatePlane_Nevada_East_FIPS_2701_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",26246666.66666666],PARAMETER["Central_Meridian",-115.5833333333333],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Foot_US",0.304800609601219241]] +27013,PROJCS["NAD_1983_HARN_StatePlane_Nevada_East_FIPS_2701_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",26246666.66666666],PARAMETER["Central_Meridian",-115.5833333333333],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Foot_US",0.3048006096012192]] +27014,PROJCS["NAD_1927_StatePlane_Nevada_East_FIPS_2701",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-115.5833333333333],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Foot_US",0.304800609601219241]] +27020,PROJCS["NAD_1983_HARN_StatePlane_Nevada_Central_FIPS_2702",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",6000000],PARAMETER["Central_Meridian",-116.6666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Meter",1]] +27021,PROJCS["NAD_1983_StatePlane_Nevada_Central_FIPS_2702",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",6000000],PARAMETER["Central_Meridian",-116.6666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Meter",1]] 
+27022,PROJCS["NAD_1983_StatePlane_Nevada_Central_FIPS_2702_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",19685000],PARAMETER["Central_Meridian",-116.6666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Foot_US",0.304800609601219241]] +27023,PROJCS["NAD_1983_HARN_StatePlane_Nevada_Central_FIPS_2702_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",19685000.0],PARAMETER["Central_Meridian",-116.6666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Foot_US",0.3048006096012192]] +27024,PROJCS["NAD_1927_StatePlane_Nevada_Central_FIPS_2702",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-116.6666666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Foot_US",0.304800609601219241]] +27030,PROJCS["NAD_1983_HARN_StatePlane_Nevada_West_FIPS_2703",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",800000],PARAMETER["False_Northing",4000000],PARAMETER["Central_Meridian",-118.5833333333333],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Meter",1]] +27031,PROJCS["NAD_1983_StatePlane_Nevada_West_FIPS_2703",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",800000],PARAMETER["False_Northing",4000000],PARAMETER["Central_Meridian",-118.5833333333333],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Meter",1]] +27032,PROJCS["NAD_1983_StatePlane_Nevada_West_FIPS_2703_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2624666.666666666],PARAMETER["False_Northing",13123333.33333333],PARAMETER["Central_Meridian",-118.5833333333333],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Foot_US",0.304800609601219241]] +27033,PROJCS["NAD_1983_HARN_StatePlane_Nevada_West_FIPS_2703_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2624666.666666666],PARAMETER["False_Northing",13123333.33333333],PARAMETER["Central_Meridian",-118.5833333333333],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Foot_US",0.3048006096012192]] 
+27034,PROJCS["NAD_1927_StatePlane_Nevada_West_FIPS_2703",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-118.5833333333333],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",34.75],UNIT["Foot_US",0.304800609601219241]] +28000,PROJCS["NAD_1983_HARN_StatePlane_New_Hampshire_FIPS_2800",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",300000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-71.66666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",42.5],UNIT["Meter",1]] +28001,PROJCS["NAD_1983_StatePlane_New_Hampshire_FIPS_2800",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",300000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-71.66666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",42.5],UNIT["Meter",1]] +28002,PROJCS["NAD_1983_StatePlane_New_Hampshire_FIPS_2800_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",984249.9999999999],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-71.66666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",42.5],UNIT["Foot_US",0.304800609601219241]] +28003,PROJCS["NAD_1983_HARN_StatePlane_New_Hampshire_FIPS_2800_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",984250.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-71.66666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",42.5],UNIT["Foot_US",0.3048006096012192]] +28004,PROJCS["NAD_1927_StatePlane_New_Hampshire_FIPS_2800",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-71.66666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",42.5],UNIT["Foot_US",0.304800609601219241]] +29000,PROJCS["NAD_1983_HARN_StatePlane_New_Jersey_FIPS_2900",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",150000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-74.5],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",38.83333333333334],UNIT["Meter",1]] 
+29001,PROJCS["NAD_1983_StatePlane_New_Jersey_FIPS_2900",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",150000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-74.5],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",38.83333333333334],UNIT["Meter",1]] +29002,PROJCS["NAD_1983_StatePlane_New_Jersey_FIPS_2900_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",492124.9999999999],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-74.5],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",38.83333333333334],UNIT["Foot_US",0.304800609601219241]] +29003,PROJCS["NAD_1983_HARN_StatePlane_New_Jersey_FIPS_2900_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",492125.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-74.5],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",38.83333333333334],UNIT["Foot_US",0.3048006096012192]] +29004,PROJCS["NAD_1927_StatePlane_New_Jersey_FIPS_2900",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-74.66666666666667],PARAMETER["Scale_Factor",0.9999749999999999],PARAMETER["Latitude_Of_Origin",38.83333333333334],UNIT["Foot_US",0.304800609601219241]] +30010,PROJCS["NAD_1983_HARN_StatePlane_New_Mexico_East_FIPS_3001",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",165000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-104.3333333333333],PARAMETER["Scale_Factor",0.9999090909090909],PARAMETER["Latitude_Of_Origin",31],UNIT["Meter",1]] +30011,PROJCS["NAD_1983_StatePlane_New_Mexico_East_FIPS_3001",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",165000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-104.3333333333333],PARAMETER["Scale_Factor",0.9999090909090909],PARAMETER["Latitude_Of_Origin",31],UNIT["Meter",1]] +30012,PROJCS["NAD_1983_StatePlane_New_Mexico_East_FIPS_3001_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",541337.4999999999],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-104.3333333333333],PARAMETER["Scale_Factor",0.9999090909090909],PARAMETER["Latitude_Of_Origin",31],UNIT["Foot_US",0.304800609601219241]] 
+30013,PROJCS["NAD_1983_HARN_StatePlane_New_Mexico_East_FIPS_3001_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",541337.5],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-104.3333333333333],PARAMETER["Scale_Factor",0.9999090909090909],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot_US",0.3048006096012192]] +30014,PROJCS["NAD_1927_StatePlane_New_Mexico_East_FIPS_3001",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-104.3333333333333],PARAMETER["Scale_Factor",0.9999090909090909],PARAMETER["Latitude_Of_Origin",31],UNIT["Foot_US",0.304800609601219241]] +30020,PROJCS["NAD_1983_HARN_StatePlane_New_Mexico_Central_FIPS_3002",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-106.25],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31],UNIT["Meter",1]] +30021,PROJCS["NAD_1983_StatePlane_New_Mexico_Central_FIPS_3002",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-106.25],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31],UNIT["Meter",1]] +30022,PROJCS["NAD_1983_StatePlane_New_Mexico_Central_FIPS_3002_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-106.25],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31],UNIT["Foot_US",0.304800609601219241]] +30023,PROJCS["NAD_1983_HARN_StatePlane_New_Mexico_Central_FIPS_3002_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-106.25],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot_US",0.3048006096012192]] +30024,PROJCS["NAD_1927_StatePlane_New_Mexico_Central_FIPS_3002",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-106.25],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31],UNIT["Foot_US",0.304800609601219241]] 
+30030,PROJCS["NAD_1983_HARN_StatePlane_New_Mexico_West_FIPS_3003",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",830000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-107.8333333333333],PARAMETER["Scale_Factor",0.9999166666666667],PARAMETER["Latitude_Of_Origin",31],UNIT["Meter",1]] +30031,PROJCS["NAD_1983_StatePlane_New_Mexico_West_FIPS_3003",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",830000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-107.8333333333333],PARAMETER["Scale_Factor",0.9999166666666667],PARAMETER["Latitude_Of_Origin",31],UNIT["Meter",1]] +30032,PROJCS["NAD_1983_StatePlane_New_Mexico_West_FIPS_3003_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2723091.666666666],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-107.8333333333333],PARAMETER["Scale_Factor",0.9999166666666667],PARAMETER["Latitude_Of_Origin",31],UNIT["Foot_US",0.304800609601219241]] +30033,PROJCS["NAD_1983_HARN_StatePlane_New_Mexico_West_FIPS_3003_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2723091.666666666],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-107.8333333333333],PARAMETER["Scale_Factor",0.9999166666666667],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot_US",0.3048006096012192]] +30034,PROJCS["NAD_1927_StatePlane_New_Mexico_West_FIPS_3003",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-107.8333333333333],PARAMETER["Scale_Factor",0.9999166666666667],PARAMETER["Latitude_Of_Origin",31],UNIT["Foot_US",0.304800609601219241]] +31010,PROJCS["NAD_1983_HARN_StatePlane_New_York_East_FIPS_3101",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",150000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-74.5],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",38.83333333333334],UNIT["Meter",1]] +31011,PROJCS["NAD_1983_StatePlane_New_York_East_FIPS_3101",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",150000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-74.5],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",38.83333333333334],UNIT["Meter",1]] 
+31012,PROJCS["NAD_1983_StatePlane_New_York_East_FIPS_3101_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",492124.9999999999],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-74.5],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",38.83333333333334],UNIT["Foot_US",0.304800609601219241]] +31013,PROJCS["NAD_1983_HARN_StatePlane_New_York_East_FIPS_3101_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",492125.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-74.5],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",38.83333333333334],UNIT["Foot_US",0.3048006096012192]] +31014,PROJCS["NAD_1927_StatePlane_New_York_East_FIPS_3101",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-74.33333333333333],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",40],UNIT["Foot_US",0.304800609601219241]] +31020,PROJCS["NAD_1983_HARN_StatePlane_New_York_Central_FIPS_3102",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",250000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-76.58333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40],UNIT["Meter",1]] +31021,PROJCS["NAD_1983_StatePlane_New_York_Central_FIPS_3102",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",250000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-76.58333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40],UNIT["Meter",1]] +31022,PROJCS["NAD_1983_StatePlane_New_York_Central_FIPS_3102_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",820208.3333333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-76.58333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40],UNIT["Foot_US",0.304800609601219241]] +31023,PROJCS["NAD_1983_HARN_StatePlane_New_York_Central_FIPS_3102_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",820208.3333333333],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-76.58333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.0],UNIT["Foot_US",0.3048006096012192]] 
+31024,PROJCS["NAD_1927_StatePlane_New_York_Central_FIPS_3102",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-76.58333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40],UNIT["Foot_US",0.304800609601219241]] +31030,PROJCS["NAD_1983_HARN_StatePlane_New_York_West_FIPS_3103",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",350000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-78.58333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40],UNIT["Meter",1]] +31031,PROJCS["NAD_1983_StatePlane_New_York_West_FIPS_3103",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",350000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-78.58333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40],UNIT["Meter",1]] +31032,PROJCS["NAD_1983_StatePlane_New_York_West_FIPS_3103_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1148291.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-78.58333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40],UNIT["Foot_US",0.304800609601219241]] +31033,PROJCS["NAD_1983_HARN_StatePlane_New_York_West_FIPS_3103_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1148291.666666667],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-78.58333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.0],UNIT["Foot_US",0.3048006096012192]] +31034,PROJCS["NAD_1927_StatePlane_New_York_West_FIPS_3103",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-78.58333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40],UNIT["Foot_US",0.304800609601219241]] +31040,PROJCS["NAD_1983_HARN_StatePlane_New_York_Long_Island_FIPS_3104",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",300000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-74],PARAMETER["Standard_Parallel_1",40.66666666666666],PARAMETER["Standard_Parallel_2",41.03333333333333],PARAMETER["Latitude_Of_Origin",40.16666666666666],UNIT["Meter",1]] 
+31041,PROJCS["NAD_1983_StatePlane_New_York_Long_Island_FIPS_3104",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",300000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-74],PARAMETER["Standard_Parallel_1",40.66666666666666],PARAMETER["Standard_Parallel_2",41.03333333333333],PARAMETER["Latitude_Of_Origin",40.16666666666666],UNIT["Meter",1]] +31042,PROJCS["NAD_1983_StatePlane_New_York_Long_Island_FIPS_3104_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",984249.9999999999],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-74],PARAMETER["Standard_Parallel_1",40.66666666666666],PARAMETER["Standard_Parallel_2",41.03333333333333],PARAMETER["Latitude_Of_Origin",40.16666666666666],UNIT["Foot_US",0.304800609601219241]] +31043,PROJCS["NAD_1983_HARN_StatePlane_New_York_Long_Island_FIPS_3104_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",984250.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-74.0],PARAMETER["Standard_Parallel_1",40.66666666666666],PARAMETER["Standard_Parallel_2",41.03333333333333],PARAMETER["Latitude_Of_Origin",40.16666666666666],UNIT["Foot_US",0.3048006096012192]] +31044,PROJCS["NAD_1927_StatePlane_New_York_Long_Island_FIPS_3104",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",100000],PARAMETER["Central_Meridian",-74],PARAMETER["Standard_Parallel_1",40.66666666666666],PARAMETER["Standard_Parallel_2",41.03333333333333],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Foot_US",0.304800609601219241]] +32000,PROJCS["NAD_1983_HARN_StatePlane_North_Carolina_FIPS_3200",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",609601.2192024384],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-79.0],PARAMETER["Standard_Parallel_1",34.33333333333334],PARAMETER["Standard_Parallel_2",36.16666666666666],PARAMETER["Latitude_Of_Origin",33.75],UNIT["Meter",1.0]] +32001,PROJCS["NAD_1983_StatePlane_North_Carolina_FIPS_3200",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",609601.22],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-79],PARAMETER["Standard_Parallel_1",34.33333333333334],PARAMETER["Standard_Parallel_2",36.16666666666666],PARAMETER["Latitude_Of_Origin",33.75],UNIT["Meter",1]] 
+32002,PROJCS["NAD_1983_StatePlane_North_Carolina_FIPS_3200_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000.002616666],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-79],PARAMETER["Standard_Parallel_1",34.33333333333334],PARAMETER["Standard_Parallel_2",36.16666666666666],PARAMETER["Latitude_Of_Origin",33.75],UNIT["Foot_US",0.304800609601219241]] +32003,PROJCS["NAD_1983_HARN_StatePlane_North_Carolina_FIPS_3200_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-79.0],PARAMETER["Standard_Parallel_1",34.33333333333334],PARAMETER["Standard_Parallel_2",36.16666666666666],PARAMETER["Latitude_Of_Origin",33.75],UNIT["Foot_US",0.3048006096012192]] +32004,PROJCS["NAD_1927_StatePlane_North_Carolina_FIPS_3200",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-79],PARAMETER["Standard_Parallel_1",34.33333333333334],PARAMETER["Standard_Parallel_2",36.16666666666666],PARAMETER["Latitude_Of_Origin",33.75],UNIT["Foot_US",0.304800609601219241]] +33010,PROJCS["NAD_1983_HARN_StatePlane_North_Dakota_North_FIPS_3301",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",47.43333333333333],PARAMETER["Standard_Parallel_2",48.73333333333333],PARAMETER["Latitude_Of_Origin",47],UNIT["Meter",1]] +33011,PROJCS["NAD_1983_StatePlane_North_Dakota_North_FIPS_3301",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",47.43333333333333],PARAMETER["Standard_Parallel_2",48.73333333333333],PARAMETER["Latitude_Of_Origin",47],UNIT["Meter",1]] +33012,PROJCS["NAD_1983_StatePlane_North_Dakota_North_FIPS_3301_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",47.43333333333333],PARAMETER["Standard_Parallel_2",48.73333333333333],PARAMETER["Latitude_Of_Origin",47],UNIT["Foot_US",0.304800609601219241]] 
+33013,PROJCS["NAD_1983_HARN_StatePlane_North_Dakota_North_FIPS_3301_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968503.937007874],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",47.43333333333333],PARAMETER["Standard_Parallel_2",48.73333333333333],PARAMETER["Latitude_Of_Origin",47.0],UNIT["Foot",0.3048]] +33014,PROJCS["NAD_1927_StatePlane_North_Dakota_North_FIPS_3301",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",47.43333333333333],PARAMETER["Standard_Parallel_2",48.73333333333333],PARAMETER["Latitude_Of_Origin",47],UNIT["Foot_US",0.304800609601219241]] +33015,PROJCS["NAD_1983_HARN_StatePlane_North_Dakota_North_FIPS_3301_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968503.937007874],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",47.43333333333333],PARAMETER["Standard_Parallel_2",48.73333333333333],PARAMETER["Latitude_Of_Origin",47.0],UNIT["Foot",0.3048]] +33016,PROJCS["NAD_1983_StatePlane_North_Dakota_North_FIPS_3301_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968503.937007874],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",47.43333333333333],PARAMETER["Standard_Parallel_2",48.73333333333333],PARAMETER["Latitude_Of_Origin",47.0],UNIT["Foot",0.3048]] +33020,PROJCS["NAD_1983_HARN_StatePlane_North_Dakota_South_FIPS_3302",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",46.18333333333333],PARAMETER["Standard_Parallel_2",47.48333333333333],PARAMETER["Latitude_Of_Origin",45.66666666666666],UNIT["Meter",1]] +33021,PROJCS["NAD_1983_StatePlane_North_Dakota_South_FIPS_3302",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",46.18333333333333],PARAMETER["Standard_Parallel_2",47.48333333333333],PARAMETER["Latitude_Of_Origin",45.66666666666666],UNIT["Meter",1]] 
+33022,PROJCS["NAD_1983_StatePlane_North_Dakota_South_FIPS_3302_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",46.18333333333333],PARAMETER["Standard_Parallel_2",47.48333333333333],PARAMETER["Latitude_Of_Origin",45.66666666666666],UNIT["Foot_US",0.304800609601219241]] +33023,PROJCS["NAD_1983_HARN_StatePlane_North_Dakota_South_FIPS_3302_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968503.937007874],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",46.18333333333333],PARAMETER["Standard_Parallel_2",47.48333333333333],PARAMETER["Latitude_Of_Origin",45.66666666666666],UNIT["Foot",0.3048]] +33024,PROJCS["NAD_1927_StatePlane_North_Dakota_South_FIPS_3302",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",46.18333333333333],PARAMETER["Standard_Parallel_2",47.48333333333333],PARAMETER["Latitude_Of_Origin",45.66666666666666],UNIT["Foot_US",0.304800609601219241]] +33025,PROJCS["NAD_1983_HARN_StatePlane_North_Dakota_South_FIPS_3302_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968503.937007874],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",46.18333333333333],PARAMETER["Standard_Parallel_2",47.48333333333333],PARAMETER["Latitude_Of_Origin",45.66666666666666],UNIT["Foot",0.3048]] +33026,PROJCS["NAD_1983_StatePlane_North_Dakota_South_FIPS_3302_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968503.937007874],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-100.5],PARAMETER["Standard_Parallel_1",46.18333333333333],PARAMETER["Standard_Parallel_2",47.48333333333333],PARAMETER["Latitude_Of_Origin",45.66666666666666],UNIT["Foot",0.3048]] +34004,PROJCS["NAD_1927_StatePlane_Vermont_FIPS_3400",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-72.5],PARAMETER["Scale_Factor",0.9999642857142857],PARAMETER["Latitude_Of_Origin",42.5],UNIT["Foot_US",0.304800609601219241]] 
+34010,PROJCS["NAD_1983_HARN_StatePlane_Ohio_North_FIPS_3401",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82.5],PARAMETER["Standard_Parallel_1",40.43333333333333],PARAMETER["Standard_Parallel_2",41.7],PARAMETER["Latitude_Of_Origin",39.66666666666666],UNIT["Meter",1]] +34011,PROJCS["NAD_1983_StatePlane_Ohio_North_FIPS_3401",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82.5],PARAMETER["Standard_Parallel_1",40.43333333333333],PARAMETER["Standard_Parallel_2",41.7],PARAMETER["Latitude_Of_Origin",39.66666666666666],UNIT["Meter",1]] +34012,PROJCS["NAD_1983_StatePlane_Ohio_North_FIPS_3401_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82.5],PARAMETER["Standard_Parallel_1",40.43333333333333],PARAMETER["Standard_Parallel_2",41.7],PARAMETER["Latitude_Of_Origin",39.66666666666666],UNIT["Foot_US",0.304800609601219241]] +34013,PROJCS["NAD_1983_HARN_StatePlane_Ohio_North_FIPS_3401_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-82.5],PARAMETER["Standard_Parallel_1",40.43333333333333],PARAMETER["Standard_Parallel_2",41.7],PARAMETER["Latitude_Of_Origin",39.66666666666666],UNIT["Foot_US",0.3048006096012192]] +34014,PROJCS["NAD_1927_StatePlane_Ohio_North_FIPS_3401",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82.5],PARAMETER["Standard_Parallel_1",40.43333333333333],PARAMETER["Standard_Parallel_2",41.7],PARAMETER["Latitude_Of_Origin",39.66666666666666],UNIT["Foot_US",0.304800609601219241]] +34020,PROJCS["NAD_1983_HARN_StatePlane_Ohio_South_FIPS_3402",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82.5],PARAMETER["Standard_Parallel_1",38.73333333333333],PARAMETER["Standard_Parallel_2",40.03333333333333],PARAMETER["Latitude_Of_Origin",38],UNIT["Meter",1]] 
+34021,PROJCS["NAD_1983_StatePlane_Ohio_South_FIPS_3402",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82.5],PARAMETER["Standard_Parallel_1",38.73333333333333],PARAMETER["Standard_Parallel_2",40.03333333333333],PARAMETER["Latitude_Of_Origin",38],UNIT["Meter",1]] +34022,PROJCS["NAD_1983_StatePlane_Ohio_South_FIPS_3402_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82.5],PARAMETER["Standard_Parallel_1",38.73333333333333],PARAMETER["Standard_Parallel_2",40.03333333333333],PARAMETER["Latitude_Of_Origin",38],UNIT["Foot_US",0.304800609601219241]] +34023,PROJCS["NAD_1983_HARN_StatePlane_Ohio_South_FIPS_3402_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-82.5],PARAMETER["Standard_Parallel_1",38.73333333333333],PARAMETER["Standard_Parallel_2",40.03333333333333],PARAMETER["Latitude_Of_Origin",38.0],UNIT["Foot_US",0.3048006096012192]] +34024,PROJCS["NAD_1927_StatePlane_Ohio_South_FIPS_3402",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-82.5],PARAMETER["Standard_Parallel_1",38.73333333333333],PARAMETER["Standard_Parallel_2",40.03333333333333],PARAMETER["Latitude_Of_Origin",38],UNIT["Foot_US",0.304800609601219241]] +35010,PROJCS["NAD_1983_HARN_StatePlane_Oklahoma_North_FIPS_3501",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98],PARAMETER["Standard_Parallel_1",35.56666666666667],PARAMETER["Standard_Parallel_2",36.76666666666667],PARAMETER["Latitude_Of_Origin",35],UNIT["Meter",1]] +35011,PROJCS["NAD_1983_StatePlane_Oklahoma_North_FIPS_3501",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98],PARAMETER["Standard_Parallel_1",35.56666666666667],PARAMETER["Standard_Parallel_2",36.76666666666667],PARAMETER["Latitude_Of_Origin",35],UNIT["Meter",1]] 
+35012,PROJCS["NAD_1983_StatePlane_Oklahoma_North_FIPS_3501_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98],PARAMETER["Standard_Parallel_1",35.56666666666667],PARAMETER["Standard_Parallel_2",36.76666666666667],PARAMETER["Latitude_Of_Origin",35],UNIT["Foot_US",0.304800609601219241]] +35013,PROJCS["NAD_1983_HARN_StatePlane_Oklahoma_North_FIPS_3501_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-98.0],PARAMETER["Standard_Parallel_1",35.56666666666667],PARAMETER["Standard_Parallel_2",36.76666666666667],PARAMETER["Latitude_Of_Origin",35.0],UNIT["Foot_US",0.3048006096012192]] +35014,PROJCS["NAD_1927_StatePlane_Oklahoma_North_FIPS_3501",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98],PARAMETER["Standard_Parallel_1",35.56666666666667],PARAMETER["Standard_Parallel_2",36.76666666666667],PARAMETER["Latitude_Of_Origin",35],UNIT["Foot_US",0.304800609601219241]] +35020,PROJCS["NAD_1983_HARN_StatePlane_Oklahoma_South_FIPS_3502",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98],PARAMETER["Standard_Parallel_1",33.93333333333333],PARAMETER["Standard_Parallel_2",35.23333333333333],PARAMETER["Latitude_Of_Origin",33.33333333333334],UNIT["Meter",1]] +35021,PROJCS["NAD_1983_StatePlane_Oklahoma_South_FIPS_3502",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98],PARAMETER["Standard_Parallel_1",33.93333333333333],PARAMETER["Standard_Parallel_2",35.23333333333333],PARAMETER["Latitude_Of_Origin",33.33333333333334],UNIT["Meter",1]] +35022,PROJCS["NAD_1983_StatePlane_Oklahoma_South_FIPS_3502_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98],PARAMETER["Standard_Parallel_1",33.93333333333333],PARAMETER["Standard_Parallel_2",35.23333333333333],PARAMETER["Latitude_Of_Origin",33.33333333333334],UNIT["Foot_US",0.304800609601219241]] 
+35023,PROJCS["NAD_1983_HARN_StatePlane_Oklahoma_South_FIPS_3502_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-98.0],PARAMETER["Standard_Parallel_1",33.93333333333333],PARAMETER["Standard_Parallel_2",35.23333333333333],PARAMETER["Latitude_Of_Origin",33.33333333333334],UNIT["Foot_US",0.3048006096012192]] +35024,PROJCS["NAD_1927_StatePlane_Oklahoma_South_FIPS_3502",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98],PARAMETER["Standard_Parallel_1",33.93333333333333],PARAMETER["Standard_Parallel_2",35.23333333333333],PARAMETER["Latitude_Of_Origin",33.33333333333334],UNIT["Foot_US",0.304800609601219241]] +36010,PROJCS["NAD_1983_HARN_StatePlane_Oregon_North_FIPS_3601",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",44.33333333333334],PARAMETER["Standard_Parallel_2",46],PARAMETER["Latitude_Of_Origin",43.66666666666666],UNIT["Meter",1]] +36011,PROJCS["NAD_1983_StatePlane_Oregon_North_FIPS_3601",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",44.33333333333334],PARAMETER["Standard_Parallel_2",46],PARAMETER["Latitude_Of_Origin",43.66666666666666],UNIT["Meter",1]] +36012,PROJCS["NAD_1983_StatePlane_Oregon_North_FIPS_3601_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",8202083.333333332],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",44.33333333333334],PARAMETER["Standard_Parallel_2",46],PARAMETER["Latitude_Of_Origin",43.66666666666666],UNIT["Foot_US",0.304800609601219241]] +36013,PROJCS["NAD_1983_HARN_StatePlane_Oregon_North_FIPS_3601_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",8202099.737532808],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",44.33333333333334],PARAMETER["Standard_Parallel_2",46.0],PARAMETER["Latitude_Of_Origin",43.66666666666666],UNIT["Foot",0.3048]] 
+36014,PROJCS["NAD_1927_StatePlane_Oregon_North_FIPS_3601",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",44.33333333333334],PARAMETER["Standard_Parallel_2",46],PARAMETER["Latitude_Of_Origin",43.66666666666666],UNIT["Foot_US",0.304800609601219241]] +36015,PROJCS["NAD_1983_HARN_StatePlane_Oregon_North_FIPS_3601_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",8202099.737532808],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",44.33333333333334],PARAMETER["Standard_Parallel_2",46.0],PARAMETER["Latitude_Of_Origin",43.66666666666666],UNIT["Foot",0.3048]] +36016,PROJCS["NAD_1983_StatePlane_Oregon_North_FIPS_3601_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",8202099.737532808],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",44.33333333333334],PARAMETER["Standard_Parallel_2",46.0],PARAMETER["Latitude_Of_Origin",43.66666666666666],UNIT["Foot",0.3048]] +36020,PROJCS["NAD_1983_HARN_StatePlane_Oregon_South_FIPS_3602",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",42.33333333333334],PARAMETER["Standard_Parallel_2",44],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Meter",1]] +36021,PROJCS["NAD_1983_StatePlane_Oregon_South_FIPS_3602",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",42.33333333333334],PARAMETER["Standard_Parallel_2",44],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Meter",1]] +36022,PROJCS["NAD_1983_StatePlane_Oregon_South_FIPS_3602_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",4921249.999999999],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",42.33333333333334],PARAMETER["Standard_Parallel_2",44],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot_US",0.304800609601219241]] 
+36023,PROJCS["NAD_1983_HARN_StatePlane_Oregon_South_FIPS_3602_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",4921259.842519685],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",42.33333333333334],PARAMETER["Standard_Parallel_2",44.0],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot",0.3048]] +36024,PROJCS["NAD_1927_StatePlane_Oregon_South_FIPS_3602",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",42.33333333333334],PARAMETER["Standard_Parallel_2",44],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot_US",0.304800609601219241]] +36025,PROJCS["NAD_1983_HARN_StatePlane_Oregon_South_FIPS_3602_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",4921259.842519685],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",42.33333333333334],PARAMETER["Standard_Parallel_2",44.0],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot",0.3048]] +36026,PROJCS["NAD_1983_StatePlane_Oregon_South_FIPS_3602_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",4921259.842519685],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",42.33333333333334],PARAMETER["Standard_Parallel_2",44.0],PARAMETER["Latitude_Of_Origin",41.66666666666666],UNIT["Foot",0.3048]] +37010,PROJCS["NAD_1983_HARN_StatePlane_Pennsylvania_North_FIPS_3701",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-77.75],PARAMETER["Standard_Parallel_1",40.88333333333333],PARAMETER["Standard_Parallel_2",41.95],PARAMETER["Latitude_Of_Origin",40.16666666666666],UNIT["Meter",1.0]] +37011,PROJCS["NAD_1983_StatePlane_Pennsylvania_North_FIPS_3701",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-77.75],PARAMETER["Standard_Parallel_1",40.88333333333333],PARAMETER["Standard_Parallel_2",41.95],PARAMETER["Latitude_Of_Origin",40.16666666666666],UNIT["Meter",1]] 
+37012,PROJCS["NAD_1983_StatePlane_Pennsylvania_North_FIPS_3701_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-77.75],PARAMETER["Standard_Parallel_1",40.88333333333333],PARAMETER["Standard_Parallel_2",41.95],PARAMETER["Latitude_Of_Origin",40.16666666666666],UNIT["Foot_US",0.304800609601219241]] +37013,PROJCS["NAD_1983_HARN_StatePlane_Pennsylvania_North_FIPS_3701_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-77.75],PARAMETER["Standard_Parallel_1",40.88333333333333],PARAMETER["Standard_Parallel_2",41.95],PARAMETER["Latitude_Of_Origin",40.16666666666666],UNIT["Foot_US",0.3048006096012192]] +37014,PROJCS["NAD_1927_StatePlane_Pennsylvania_North_FIPS_3701",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-77.75],PARAMETER["Standard_Parallel_1",40.88333333333333],PARAMETER["Standard_Parallel_2",41.95],PARAMETER["Latitude_Of_Origin",40.16666666666666],UNIT["Foot_US",0.304800609601219241]] +37020,PROJCS["NAD_1983_HARN_StatePlane_Pennsylvania_South_FIPS_3702",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-77.75],PARAMETER["Standard_Parallel_1",39.93333333333333],PARAMETER["Standard_Parallel_2",40.96666666666667],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Meter",1.0]] +37021,PROJCS["NAD_1983_StatePlane_Pennsylvania_South_FIPS_3702",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-77.75],PARAMETER["Standard_Parallel_1",39.93333333333333],PARAMETER["Standard_Parallel_2",40.96666666666667],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Meter",1]] +37022,PROJCS["NAD_1983_StatePlane_Pennsylvania_South_FIPS_3702_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-77.75],PARAMETER["Standard_Parallel_1",39.93333333333333],PARAMETER["Standard_Parallel_2",40.96666666666667],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Foot_US",0.304800609601219241]] 
+37023,PROJCS["NAD_1983_HARN_StatePlane_Pennsylvania_South_FIPS_3702_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-77.75],PARAMETER["Standard_Parallel_1",39.93333333333333],PARAMETER["Standard_Parallel_2",40.96666666666667],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Foot_US",0.3048006096012192]] +37024,PROJCS["NAD_1927_StatePlane_Pennsylvania_South_FIPS_3702",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-77.75],PARAMETER["Standard_Parallel_1",39.93333333333333],PARAMETER["Standard_Parallel_2",40.96666666666667],PARAMETER["Latitude_Of_Origin",39.33333333333334],UNIT["Foot_US",0.304800609601219241]] +38000,PROJCS["NAD_1983_HARN_StatePlane_Rhode_Island_FIPS_3800",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",100000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-71.5],PARAMETER["Scale_Factor",0.99999375],PARAMETER["Latitude_Of_Origin",41.08333333333334],UNIT["Meter",1]] +38001,PROJCS["NAD_1983_StatePlane_Rhode_Island_FIPS_3800",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",100000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-71.5],PARAMETER["Scale_Factor",0.99999375],PARAMETER["Latitude_Of_Origin",41.08333333333334],UNIT["Meter",1]] +38002,PROJCS["NAD_1983_StatePlane_Rhode_Island_FIPS_3800_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",328083.3333333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-71.5],PARAMETER["Scale_Factor",0.99999375],PARAMETER["Latitude_Of_Origin",41.08333333333334],UNIT["Foot_US",0.304800609601219241]] +38003,PROJCS["NAD_1983_HARN_StatePlane_Rhode_Island_FIPS_3800_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",328083.3333333333],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-71.5],PARAMETER["Scale_Factor",0.99999375],PARAMETER["Latitude_Of_Origin",41.08333333333334],UNIT["Foot_US",0.3048006096012192]] 
+38004,PROJCS["NAD_1927_StatePlane_Rhode_Island_FIPS_3800",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-71.5],PARAMETER["Scale_Factor",0.99999375],PARAMETER["Latitude_Of_Origin",41.08333333333334],UNIT["Foot_US",0.304800609601219241]] +39000,PROJCS["NAD_1983_HARN_StatePlane_South_Carolina_FIPS_3900",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",609600.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-81.0],PARAMETER["Standard_Parallel_1",32.5],PARAMETER["Standard_Parallel_2",34.83333333333334],PARAMETER["Latitude_Of_Origin",31.83333333333333],UNIT["Meter",1.0]] +39001,PROJCS["NAD_1983_StatePlane_South_Carolina_FIPS_3900",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",609600],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-81],PARAMETER["Standard_Parallel_1",32.5],PARAMETER["Standard_Parallel_2",34.83333333333334],PARAMETER["Latitude_Of_Origin",31.83333333333333],UNIT["Meter",1]] +39002,PROJCS["NAD_1983_StatePlane_South_Carolina_FIPS_3900_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1999996],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-81],PARAMETER["Standard_Parallel_1",32.5],PARAMETER["Standard_Parallel_2",34.83333333333334],PARAMETER["Latitude_Of_Origin",31.83333333333333],UNIT["Foot_US",0.304800609601219241]] +39003,PROJCS["NAD_1983_HARN_StatePlane_South_Carolina_FIPS_3900_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-81.0],PARAMETER["Standard_Parallel_1",32.5],PARAMETER["Standard_Parallel_2",34.83333333333334],PARAMETER["Latitude_Of_Origin",31.83333333333333],UNIT["Foot",0.3048]] +39005,PROJCS["NAD_1983_HARN_StatePlane_South_Carolina_FIPS_3900_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-81.0],PARAMETER["Standard_Parallel_1",32.5],PARAMETER["Standard_Parallel_2",34.83333333333334],PARAMETER["Latitude_Of_Origin",31.83333333333333],UNIT["Foot",0.3048]] 
+39006,PROJCS["NAD_1983_StatePlane_South_Carolina_FIPS_3900_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-81.0],PARAMETER["Standard_Parallel_1",32.5],PARAMETER["Standard_Parallel_2",34.83333333333334],PARAMETER["Latitude_Of_Origin",31.83333333333333],UNIT["Foot",0.3048]] +39014,PROJCS["NAD_1927_StatePlane_South_Carolina_North_FIPS_3901",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-81],PARAMETER["Standard_Parallel_1",33.76666666666667],PARAMETER["Standard_Parallel_2",34.96666666666667],PARAMETER["Latitude_Of_Origin",33],UNIT["Foot_US",0.304800609601219241]] +39024,PROJCS["NAD_1927_StatePlane_South_Carolina_South_FIPS_3902",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-81],PARAMETER["Standard_Parallel_1",32.33333333333334],PARAMETER["Standard_Parallel_2",33.66666666666666],PARAMETER["Latitude_Of_Origin",31.83333333333333],UNIT["Foot_US",0.304800609601219241]] +40010,PROJCS["NAD_1983_HARN_StatePlane_South_Dakota_North_FIPS_4001",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100],PARAMETER["Standard_Parallel_1",44.41666666666666],PARAMETER["Standard_Parallel_2",45.68333333333333],PARAMETER["Latitude_Of_Origin",43.83333333333334],UNIT["Meter",1]] +40011,PROJCS["NAD_1983_StatePlane_South_Dakota_North_FIPS_4001",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100],PARAMETER["Standard_Parallel_1",44.41666666666666],PARAMETER["Standard_Parallel_2",45.68333333333333],PARAMETER["Latitude_Of_Origin",43.83333333333334],UNIT["Meter",1]] +40012,PROJCS["NAD_1983_StatePlane_South_Dakota_North_FIPS_4001_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100],PARAMETER["Standard_Parallel_1",44.41666666666666],PARAMETER["Standard_Parallel_2",45.68333333333333],PARAMETER["Latitude_Of_Origin",43.83333333333334],UNIT["Foot_US",0.304800609601219241]] 
+40013,PROJCS["NAD_1983_HARN_StatePlane_South_Dakota_North_FIPS_4001_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-100.0],PARAMETER["Standard_Parallel_1",44.41666666666666],PARAMETER["Standard_Parallel_2",45.68333333333333],PARAMETER["Latitude_Of_Origin",43.83333333333334],UNIT["Foot_US",0.3048006096012192]] +40014,PROJCS["NAD_1927_StatePlane_South_Dakota_North_FIPS_4001",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100],PARAMETER["Standard_Parallel_1",44.41666666666666],PARAMETER["Standard_Parallel_2",45.68333333333333],PARAMETER["Latitude_Of_Origin",43.83333333333334],UNIT["Foot_US",0.304800609601219241]] +40020,PROJCS["NAD_1983_HARN_StatePlane_South_Dakota_South_FIPS_4002",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100.3333333333333],PARAMETER["Standard_Parallel_1",42.83333333333334],PARAMETER["Standard_Parallel_2",44.4],PARAMETER["Latitude_Of_Origin",42.33333333333334],UNIT["Meter",1]] +40021,PROJCS["NAD_1983_StatePlane_South_Dakota_South_FIPS_4002",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100.3333333333333],PARAMETER["Standard_Parallel_1",42.83333333333334],PARAMETER["Standard_Parallel_2",44.4],PARAMETER["Latitude_Of_Origin",42.33333333333334],UNIT["Meter",1]] +40022,PROJCS["NAD_1983_StatePlane_South_Dakota_South_FIPS_4002_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100.3333333333333],PARAMETER["Standard_Parallel_1",42.83333333333334],PARAMETER["Standard_Parallel_2",44.4],PARAMETER["Latitude_Of_Origin",42.33333333333334],UNIT["Foot_US",0.304800609601219241]] +40023,PROJCS["NAD_1983_HARN_StatePlane_South_Dakota_South_FIPS_4002_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-100.3333333333333],PARAMETER["Standard_Parallel_1",42.83333333333334],PARAMETER["Standard_Parallel_2",44.4],PARAMETER["Latitude_Of_Origin",42.33333333333334],UNIT["Foot_US",0.3048006096012192]] 
+40024,PROJCS["NAD_1927_StatePlane_South_Dakota_South_FIPS_4002",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100.3333333333333],PARAMETER["Standard_Parallel_1",42.83333333333334],PARAMETER["Standard_Parallel_2",44.4],PARAMETER["Latitude_Of_Origin",42.33333333333334],UNIT["Foot_US",0.304800609601219241]] +41000,PROJCS["NAD_1983_HARN_StatePlane_Tennessee_FIPS_4100",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-86],PARAMETER["Standard_Parallel_1",35.25],PARAMETER["Standard_Parallel_2",36.41666666666666],PARAMETER["Latitude_Of_Origin",34.33333333333334],UNIT["Meter",1]] +41001,PROJCS["NAD_1983_StatePlane_Tennessee_FIPS_4100",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-86],PARAMETER["Standard_Parallel_1",35.25],PARAMETER["Standard_Parallel_2",36.41666666666666],PARAMETER["Latitude_Of_Origin",34.33333333333334],UNIT["Meter",1]] +41002,PROJCS["NAD_1983_StatePlane_Tennessee_FIPS_4100_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-86],PARAMETER["Standard_Parallel_1",35.25],PARAMETER["Standard_Parallel_2",36.41666666666666],PARAMETER["Latitude_Of_Origin",34.33333333333334],UNIT["Foot_US",0.304800609601219241]] +41003,PROJCS["NAD_1983_HARN_StatePlane_Tennessee_FIPS_4100_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-86.0],PARAMETER["Standard_Parallel_1",35.25],PARAMETER["Standard_Parallel_2",36.41666666666666],PARAMETER["Latitude_Of_Origin",34.33333333333334],UNIT["Foot_US",0.3048006096012192]] +41004,PROJCS["NAD_1927_StatePlane_Tennessee_FIPS_4100",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",100000],PARAMETER["Central_Meridian",-86],PARAMETER["Standard_Parallel_1",35.25],PARAMETER["Standard_Parallel_2",36.41666666666666],PARAMETER["Latitude_Of_Origin",34.66666666666666],UNIT["Foot_US",0.304800609601219241]] 
+42010,PROJCS["NAD_1983_HARN_StatePlane_Texas_North_FIPS_4201",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",1000000],PARAMETER["Central_Meridian",-101.5],PARAMETER["Standard_Parallel_1",34.65],PARAMETER["Standard_Parallel_2",36.18333333333333],PARAMETER["Latitude_Of_Origin",34],UNIT["Meter",1]] +42011,PROJCS["NAD_1983_StatePlane_Texas_North_FIPS_4201",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",1000000],PARAMETER["Central_Meridian",-101.5],PARAMETER["Standard_Parallel_1",34.65],PARAMETER["Standard_Parallel_2",36.18333333333333],PARAMETER["Latitude_Of_Origin",34],UNIT["Meter",1]] +42012,PROJCS["NAD_1983_StatePlane_Texas_North_FIPS_4201_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",3280833.333333333],PARAMETER["Central_Meridian",-101.5],PARAMETER["Standard_Parallel_1",34.65],PARAMETER["Standard_Parallel_2",36.18333333333333],PARAMETER["Latitude_Of_Origin",34],UNIT["Foot_US",0.304800609601219241]] +42013,PROJCS["NAD_1983_HARN_StatePlane_Texas_North_FIPS_4201_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",3280833.333333333],PARAMETER["Central_Meridian",-101.5],PARAMETER["Standard_Parallel_1",34.65],PARAMETER["Standard_Parallel_2",36.18333333333333],PARAMETER["Latitude_Of_Origin",34.0],UNIT["Foot_US",0.3048006096012192]] +42014,PROJCS["NAD_1927_StatePlane_Texas_North_FIPS_4201",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-101.5],PARAMETER["Standard_Parallel_1",34.65],PARAMETER["Standard_Parallel_2",36.18333333333333],PARAMETER["Latitude_Of_Origin",34],UNIT["Foot_US",0.304800609601219241]] +42020,PROJCS["NAD_1983_HARN_StatePlane_Texas_North_Central_FIPS_4202",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",2000000],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",32.13333333333333],PARAMETER["Standard_Parallel_2",33.96666666666667],PARAMETER["Latitude_Of_Origin",31.66666666666667],UNIT["Meter",1]] 
+42021,PROJCS["NAD_1983_StatePlane_Texas_North_Central_FIPS_4202",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",2000000],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",32.13333333333333],PARAMETER["Standard_Parallel_2",33.96666666666667],PARAMETER["Latitude_Of_Origin",31.66666666666667],UNIT["Meter",1]] +42022,PROJCS["NAD_1983_StatePlane_Texas_North_Central_FIPS_4202_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",6561666.666666666],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",32.13333333333333],PARAMETER["Standard_Parallel_2",33.96666666666667],PARAMETER["Latitude_Of_Origin",31.66666666666667],UNIT["Foot_US",0.304800609601219241]] +42023,PROJCS["NAD_1983_HARN_StatePlane_Texas_North_Central_FIPS_4202_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",6561666.666666666],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",32.13333333333333],PARAMETER["Standard_Parallel_2",33.96666666666667],PARAMETER["Latitude_Of_Origin",31.66666666666667],UNIT["Foot_US",0.3048006096012192]] +42024,PROJCS["NAD_1927_StatePlane_Texas_North_Central_FIPS_4202",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-97.5],PARAMETER["Standard_Parallel_1",32.13333333333333],PARAMETER["Standard_Parallel_2",33.96666666666667],PARAMETER["Latitude_Of_Origin",31.66666666666667],UNIT["Foot_US",0.304800609601219241]] +42030,PROJCS["NAD_1983_HARN_StatePlane_Texas_Central_FIPS_4203",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",700000],PARAMETER["False_Northing",3000000],PARAMETER["Central_Meridian",-100.3333333333333],PARAMETER["Standard_Parallel_1",30.11666666666667],PARAMETER["Standard_Parallel_2",31.88333333333333],PARAMETER["Latitude_Of_Origin",29.66666666666667],UNIT["Meter",1]] +42031,PROJCS["NAD_1983_StatePlane_Texas_Central_FIPS_4203",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",700000],PARAMETER["False_Northing",3000000],PARAMETER["Central_Meridian",-100.3333333333333],PARAMETER["Standard_Parallel_1",30.11666666666667],PARAMETER["Standard_Parallel_2",31.88333333333333],PARAMETER["Latitude_Of_Origin",29.66666666666667],UNIT["Meter",1]] 
+42032,PROJCS["NAD_1983_StatePlane_Texas_Central_FIPS_4203_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2296583.333333333],PARAMETER["False_Northing",9842499.999999998],PARAMETER["Central_Meridian",-100.3333333333333],PARAMETER["Standard_Parallel_1",30.11666666666667],PARAMETER["Standard_Parallel_2",31.88333333333333],PARAMETER["Latitude_Of_Origin",29.66666666666667],UNIT["Foot_US",0.304800609601219241]] +42033,PROJCS["NAD_1983_HARN_StatePlane_Texas_Central_FIPS_4203_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2296583.333333333],PARAMETER["False_Northing",9842500.0],PARAMETER["Central_Meridian",-100.3333333333333],PARAMETER["Standard_Parallel_1",30.11666666666667],PARAMETER["Standard_Parallel_2",31.88333333333333],PARAMETER["Latitude_Of_Origin",29.66666666666667],UNIT["Foot_US",0.3048006096012192]] +42034,PROJCS["NAD_1927_StatePlane_Texas_Central_FIPS_4203",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-100.3333333333333],PARAMETER["Standard_Parallel_1",30.11666666666667],PARAMETER["Standard_Parallel_2",31.88333333333333],PARAMETER["Latitude_Of_Origin",29.66666666666667],UNIT["Foot_US",0.304800609601219241]] +42040,PROJCS["NAD_1983_HARN_StatePlane_Texas_South_Central_FIPS_4204",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",4000000],PARAMETER["Central_Meridian",-99],PARAMETER["Standard_Parallel_1",28.38333333333333],PARAMETER["Standard_Parallel_2",30.28333333333333],PARAMETER["Latitude_Of_Origin",27.83333333333333],UNIT["Meter",1]] +42041,PROJCS["NAD_1983_StatePlane_Texas_South_Central_FIPS_4204",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",4000000],PARAMETER["Central_Meridian",-99],PARAMETER["Standard_Parallel_1",28.38333333333333],PARAMETER["Standard_Parallel_2",30.28333333333333],PARAMETER["Latitude_Of_Origin",27.83333333333333],UNIT["Meter",1]] +42042,PROJCS["NAD_1983_StatePlane_Texas_South_Central_FIPS_4204_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",13123333.33333333],PARAMETER["Central_Meridian",-99],PARAMETER["Standard_Parallel_1",28.38333333333333],PARAMETER["Standard_Parallel_2",30.28333333333333],PARAMETER["Latitude_Of_Origin",27.83333333333333],UNIT["Foot_US",0.304800609601219241]] 
+42043,PROJCS["NAD_1983_HARN_StatePlane_Texas_South_Central_FIPS_4204_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",13123333.33333333],PARAMETER["Central_Meridian",-99.0],PARAMETER["Standard_Parallel_1",28.38333333333333],PARAMETER["Standard_Parallel_2",30.28333333333333],PARAMETER["Latitude_Of_Origin",27.83333333333333],UNIT["Foot_US",0.3048006096012192]] +42044,PROJCS["NAD_1927_StatePlane_Texas_South_Central_FIPS_4204",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-99],PARAMETER["Standard_Parallel_1",28.38333333333333],PARAMETER["Standard_Parallel_2",30.28333333333333],PARAMETER["Latitude_Of_Origin",27.83333333333333],UNIT["Foot_US",0.304800609601219241]] +42050,PROJCS["NAD_1983_HARN_StatePlane_Texas_South_FIPS_4205",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",300000],PARAMETER["False_Northing",5000000],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",26.16666666666667],PARAMETER["Standard_Parallel_2",27.83333333333333],PARAMETER["Latitude_Of_Origin",25.66666666666667],UNIT["Meter",1]] +42051,PROJCS["NAD_1983_StatePlane_Texas_South_FIPS_4205",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",300000],PARAMETER["False_Northing",5000000],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",26.16666666666667],PARAMETER["Standard_Parallel_2",27.83333333333333],PARAMETER["Latitude_Of_Origin",25.66666666666667],UNIT["Meter",1]] +42052,PROJCS["NAD_1983_StatePlane_Texas_South_FIPS_4205_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",984249.9999999999],PARAMETER["False_Northing",16404166.66666666],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",26.16666666666667],PARAMETER["Standard_Parallel_2",27.83333333333333],PARAMETER["Latitude_Of_Origin",25.66666666666667],UNIT["Foot_US",0.304800609601219241]] +42053,PROJCS["NAD_1983_HARN_StatePlane_Texas_South_FIPS_4205_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",984250.0],PARAMETER["False_Northing",16404166.66666666],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",26.16666666666667],PARAMETER["Standard_Parallel_2",27.83333333333333],PARAMETER["Latitude_Of_Origin",25.66666666666667],UNIT["Foot_US",0.3048006096012192]] 
+42054,PROJCS["NAD_1927_StatePlane_Texas_South_FIPS_4205",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-98.5],PARAMETER["Standard_Parallel_1",26.16666666666667],PARAMETER["Standard_Parallel_2",27.83333333333333],PARAMETER["Latitude_Of_Origin",25.66666666666667],UNIT["Foot_US",0.304800609601219241]] +43010,PROJCS["NAD_1983_HARN_StatePlane_Utah_North_FIPS_4301",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",1000000],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",40.71666666666667],PARAMETER["Standard_Parallel_2",41.78333333333333],PARAMETER["Latitude_Of_Origin",40.33333333333334],UNIT["Meter",1]] +43011,PROJCS["NAD_1983_StatePlane_Utah_North_FIPS_4301",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",1000000],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",40.71666666666667],PARAMETER["Standard_Parallel_2",41.78333333333333],PARAMETER["Latitude_Of_Origin",40.33333333333334],UNIT["Meter",1]] +43012,PROJCS["NAD_1983_StatePlane_Utah_North_FIPS_4301_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",3280833.333333333],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",40.71666666666667],PARAMETER["Standard_Parallel_2",41.78333333333333],PARAMETER["Latitude_Of_Origin",40.33333333333334],UNIT["Foot_US",0.304800609601219241]] +43013,PROJCS["NAD_1983_HARN_StatePlane_Utah_North_FIPS_4301_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",3280833.333333333],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",40.71666666666667],PARAMETER["Standard_Parallel_2",41.78333333333333],PARAMETER["Latitude_Of_Origin",40.33333333333334],UNIT["Foot_US",0.3048006096012192]] +43014,PROJCS["NAD_1927_StatePlane_Utah_North_FIPS_4301",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",40.71666666666667],PARAMETER["Standard_Parallel_2",41.78333333333333],PARAMETER["Latitude_Of_Origin",40.33333333333334],UNIT["Foot_US",0.304800609601219241]] 
+43015,PROJCS["NAD_1983_HARN_StatePlane_Utah_North_FIPS_4301_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640419.947506561],PARAMETER["False_Northing",3280839.895013123],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",40.71666666666667],PARAMETER["Standard_Parallel_2",41.78333333333333],PARAMETER["Latitude_Of_Origin",40.33333333333334],UNIT["Foot",0.3048]] +43016,PROJCS["NAD_1983_StatePlane_Utah_North_FIPS_4301_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640419.947506561],PARAMETER["False_Northing",3280839.895013123],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",40.71666666666667],PARAMETER["Standard_Parallel_2",41.78333333333333],PARAMETER["Latitude_Of_Origin",40.33333333333334],UNIT["Foot",0.3048]] +43020,PROJCS["NAD_1983_HARN_StatePlane_Utah_Central_FIPS_4302",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",2000000],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",39.01666666666667],PARAMETER["Standard_Parallel_2",40.65],PARAMETER["Latitude_Of_Origin",38.33333333333334],UNIT["Meter",1]] +43021,PROJCS["NAD_1983_StatePlane_Utah_Central_FIPS_4302",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",2000000],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",39.01666666666667],PARAMETER["Standard_Parallel_2",40.65],PARAMETER["Latitude_Of_Origin",38.33333333333334],UNIT["Meter",1]] +43022,PROJCS["NAD_1983_StatePlane_Utah_Central_FIPS_4302_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",6561666.666666666],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",39.01666666666667],PARAMETER["Standard_Parallel_2",40.65],PARAMETER["Latitude_Of_Origin",38.33333333333334],UNIT["Foot_US",0.304800609601219241]] +43023,PROJCS["NAD_1983_HARN_StatePlane_Utah_Central_FIPS_4302_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",6561666.666666666],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",39.01666666666667],PARAMETER["Standard_Parallel_2",40.65],PARAMETER["Latitude_Of_Origin",38.33333333333334],UNIT["Foot_US",0.3048006096012192]] 
+43024,PROJCS["NAD_1927_StatePlane_Utah_Central_FIPS_4302",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",39.01666666666667],PARAMETER["Standard_Parallel_2",40.65],PARAMETER["Latitude_Of_Origin",38.33333333333334],UNIT["Foot_US",0.304800609601219241]] +43025,PROJCS["NAD_1983_HARN_StatePlane_Utah_Central_FIPS_4302_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640419.947506561],PARAMETER["False_Northing",6561679.790026246],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",39.01666666666667],PARAMETER["Standard_Parallel_2",40.65],PARAMETER["Latitude_Of_Origin",38.33333333333334],UNIT["Foot",0.3048]] +43026,PROJCS["NAD_1983_StatePlane_Utah_Central_FIPS_4302_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640419.947506561],PARAMETER["False_Northing",6561679.790026246],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",39.01666666666667],PARAMETER["Standard_Parallel_2",40.65],PARAMETER["Latitude_Of_Origin",38.33333333333334],UNIT["Foot",0.3048]] +43030,PROJCS["NAD_1983_HARN_StatePlane_Utah_South_FIPS_4303",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",3000000],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",37.21666666666667],PARAMETER["Standard_Parallel_2",38.35],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Meter",1]] +43031,PROJCS["NAD_1983_StatePlane_Utah_South_FIPS_4303",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",3000000],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",37.21666666666667],PARAMETER["Standard_Parallel_2",38.35],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Meter",1]] +43032,PROJCS["NAD_1983_StatePlane_Utah_South_FIPS_4303_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",9842499.999999998],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",37.21666666666667],PARAMETER["Standard_Parallel_2",38.35],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.304800609601219241]] 
+43033,PROJCS["NAD_1983_HARN_StatePlane_Utah_South_FIPS_4303_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",9842500.0],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",37.21666666666667],PARAMETER["Standard_Parallel_2",38.35],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.3048006096012192]] +43034,PROJCS["NAD_1927_StatePlane_Utah_South_FIPS_4303",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",37.21666666666667],PARAMETER["Standard_Parallel_2",38.35],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot_US",0.304800609601219241]] +43035,PROJCS["NAD_1983_HARN_StatePlane_Utah_South_FIPS_4303_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640419.947506561],PARAMETER["False_Northing",9842519.685039369],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",37.21666666666667],PARAMETER["Standard_Parallel_2",38.35],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot",0.3048]] +43036,PROJCS["NAD_1983_StatePlane_Utah_South_FIPS_4303_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640419.947506561],PARAMETER["False_Northing",9842519.685039369],PARAMETER["Central_Meridian",-111.5],PARAMETER["Standard_Parallel_1",37.21666666666667],PARAMETER["Standard_Parallel_2",38.35],PARAMETER["Latitude_Of_Origin",36.66666666666666],UNIT["Foot",0.3048]] +44000,PROJCS["NAD_1983_HARN_StatePlane_Vermont_FIPS_4400",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-72.5],PARAMETER["Scale_Factor",0.9999642857142857],PARAMETER["Latitude_Of_Origin",42.5],UNIT["Meter",1]] +44001,PROJCS["NAD_1983_StatePlane_Vermont_FIPS_4400",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-72.5],PARAMETER["Scale_Factor",0.9999642857142857],PARAMETER["Latitude_Of_Origin",42.5],UNIT["Meter",1]] 
+44002,PROJCS["NAD_1983_StatePlane_Vermont_FIPS_4400_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-72.5],PARAMETER["Scale_Factor",0.9999642857142857],PARAMETER["Latitude_Of_Origin",42.5],UNIT["Foot_US",0.304800609601219241]] +45010,PROJCS["NAD_1983_HARN_StatePlane_Virginia_North_FIPS_4501",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3500000],PARAMETER["False_Northing",2000000],PARAMETER["Central_Meridian",-78.5],PARAMETER["Standard_Parallel_1",38.03333333333333],PARAMETER["Standard_Parallel_2",39.2],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Meter",1]] +45011,PROJCS["NAD_1983_StatePlane_Virginia_North_FIPS_4501",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3500000],PARAMETER["False_Northing",2000000],PARAMETER["Central_Meridian",-78.5],PARAMETER["Standard_Parallel_1",38.03333333333333],PARAMETER["Standard_Parallel_2",39.2],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Meter",1]] +45012,PROJCS["NAD_1983_StatePlane_Virginia_North_FIPS_4501_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",11482916.66666666],PARAMETER["False_Northing",6561666.666666666],PARAMETER["Central_Meridian",-78.5],PARAMETER["Standard_Parallel_1",38.03333333333333],PARAMETER["Standard_Parallel_2",39.2],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Foot_US",0.304800609601219241]] +45013,PROJCS["NAD_1983_HARN_StatePlane_Virginia_North_FIPS_4501_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",11482916.66666666],PARAMETER["False_Northing",6561666.666666666],PARAMETER["Central_Meridian",-78.5],PARAMETER["Standard_Parallel_1",38.03333333333333],PARAMETER["Standard_Parallel_2",39.2],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Foot_US",0.3048006096012192]] +45014,PROJCS["NAD_1927_StatePlane_Virginia_North_FIPS_4501",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-78.5],PARAMETER["Standard_Parallel_1",38.03333333333333],PARAMETER["Standard_Parallel_2",39.2],PARAMETER["Latitude_Of_Origin",37.66666666666666],UNIT["Foot_US",0.304800609601219241]] 
+45020,PROJCS["NAD_1983_HARN_StatePlane_Virginia_South_FIPS_4502",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3500000],PARAMETER["False_Northing",1000000],PARAMETER["Central_Meridian",-78.5],PARAMETER["Standard_Parallel_1",36.76666666666667],PARAMETER["Standard_Parallel_2",37.96666666666667],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Meter",1]] +45021,PROJCS["NAD_1983_StatePlane_Virginia_South_FIPS_4502",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3500000],PARAMETER["False_Northing",1000000],PARAMETER["Central_Meridian",-78.5],PARAMETER["Standard_Parallel_1",36.76666666666667],PARAMETER["Standard_Parallel_2",37.96666666666667],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Meter",1]] +45022,PROJCS["NAD_1983_StatePlane_Virginia_South_FIPS_4502_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",11482916.66666666],PARAMETER["False_Northing",3280833.333333333],PARAMETER["Central_Meridian",-78.5],PARAMETER["Standard_Parallel_1",36.76666666666667],PARAMETER["Standard_Parallel_2",37.96666666666667],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Foot_US",0.304800609601219241]] +45023,PROJCS["NAD_1983_HARN_StatePlane_Virginia_South_FIPS_4502_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",11482916.66666666],PARAMETER["False_Northing",3280833.333333333],PARAMETER["Central_Meridian",-78.5],PARAMETER["Standard_Parallel_1",36.76666666666667],PARAMETER["Standard_Parallel_2",37.96666666666667],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Foot_US",0.3048006096012192]] +45024,PROJCS["NAD_1927_StatePlane_Virginia_South_FIPS_4502",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-78.5],PARAMETER["Standard_Parallel_1",36.76666666666667],PARAMETER["Standard_Parallel_2",37.96666666666667],PARAMETER["Latitude_Of_Origin",36.33333333333334],UNIT["Foot_US",0.304800609601219241]] +46010,PROJCS["NAD_1983_HARN_StatePlane_Washington_North_FIPS_4601",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.8333333333333],PARAMETER["Standard_Parallel_1",47.5],PARAMETER["Standard_Parallel_2",48.73333333333333],PARAMETER["Latitude_Of_Origin",47],UNIT["Meter",1]] 
+46011,PROJCS["NAD_1983_StatePlane_Washington_North_FIPS_4601",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.8333333333333],PARAMETER["Standard_Parallel_1",47.5],PARAMETER["Standard_Parallel_2",48.73333333333333],PARAMETER["Latitude_Of_Origin",47],UNIT["Meter",1]] +46012,PROJCS["NAD_1983_StatePlane_Washington_North_FIPS_4601_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.8333333333333],PARAMETER["Standard_Parallel_1",47.5],PARAMETER["Standard_Parallel_2",48.73333333333333],PARAMETER["Latitude_Of_Origin",47],UNIT["Foot_US",0.304800609601219241]] +46013,PROJCS["NAD_1983_HARN_StatePlane_Washington_North_FIPS_4601_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-120.8333333333333],PARAMETER["Standard_Parallel_1",47.5],PARAMETER["Standard_Parallel_2",48.73333333333333],PARAMETER["Latitude_Of_Origin",47.0],UNIT["Foot_US",0.3048006096012192]] +46014,PROJCS["NAD_1927_StatePlane_Washington_North_FIPS_4601",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.8333333333333],PARAMETER["Standard_Parallel_1",47.5],PARAMETER["Standard_Parallel_2",48.73333333333333],PARAMETER["Latitude_Of_Origin",47],UNIT["Foot_US",0.304800609601219241]] +46020,PROJCS["NAD_1983_HARN_StatePlane_Washington_South_FIPS_4602",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",45.83333333333334],PARAMETER["Standard_Parallel_2",47.33333333333334],PARAMETER["Latitude_Of_Origin",45.33333333333334],UNIT["Meter",1]] +46021,PROJCS["NAD_1983_StatePlane_Washington_South_FIPS_4602",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",45.83333333333334],PARAMETER["Standard_Parallel_2",47.33333333333334],PARAMETER["Latitude_Of_Origin",45.33333333333334],UNIT["Meter",1]] 
+46022,PROJCS["NAD_1983_StatePlane_Washington_South_FIPS_4602_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",45.83333333333334],PARAMETER["Standard_Parallel_2",47.33333333333334],PARAMETER["Latitude_Of_Origin",45.33333333333334],UNIT["Foot_US",0.304800609601219241]] +46023,PROJCS["NAD_1983_HARN_StatePlane_Washington_South_FIPS_4602_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",45.83333333333334],PARAMETER["Standard_Parallel_2",47.33333333333334],PARAMETER["Latitude_Of_Origin",45.33333333333334],UNIT["Foot_US",0.3048006096012192]] +46024,PROJCS["NAD_1927_StatePlane_Washington_South_FIPS_4602",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",45.83333333333334],PARAMETER["Standard_Parallel_2",47.33333333333334],PARAMETER["Latitude_Of_Origin",45.33333333333334],UNIT["Foot_US",0.304800609601219241]] +47010,PROJCS["NAD_1983_HARN_StatePlane_West_Virginia_North_FIPS_4701",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-79.5],PARAMETER["Standard_Parallel_1",39],PARAMETER["Standard_Parallel_2",40.25],PARAMETER["Latitude_Of_Origin",38.5],UNIT["Meter",1]] +47011,PROJCS["NAD_1983_StatePlane_West_Virginia_North_FIPS_4701",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-79.5],PARAMETER["Standard_Parallel_1",39],PARAMETER["Standard_Parallel_2",40.25],PARAMETER["Latitude_Of_Origin",38.5],UNIT["Meter",1]] +47012,PROJCS["NAD_1983_StatePlane_West_Virginia_North_FIPS_4701_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-79.5],PARAMETER["Standard_Parallel_1",39],PARAMETER["Standard_Parallel_2",40.25],PARAMETER["Latitude_Of_Origin",38.5],UNIT["Foot_US",0.304800609601219241]] 
+47014,PROJCS["NAD_1927_StatePlane_West_Virginia_North_FIPS_4701",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-79.5],PARAMETER["Standard_Parallel_1",39],PARAMETER["Standard_Parallel_2",40.25],PARAMETER["Latitude_Of_Origin",38.5],UNIT["Foot_US",0.304800609601219241]] +47020,PROJCS["NAD_1983_HARN_StatePlane_West_Virginia_South_FIPS_4702",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-81],PARAMETER["Standard_Parallel_1",37.48333333333333],PARAMETER["Standard_Parallel_2",38.88333333333333],PARAMETER["Latitude_Of_Origin",37],UNIT["Meter",1]] +47021,PROJCS["NAD_1983_StatePlane_West_Virginia_South_FIPS_4702",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-81],PARAMETER["Standard_Parallel_1",37.48333333333333],PARAMETER["Standard_Parallel_2",38.88333333333333],PARAMETER["Latitude_Of_Origin",37],UNIT["Meter",1]] +47022,PROJCS["NAD_1983_StatePlane_West_Virginia_South_FIPS_4702_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-81],PARAMETER["Standard_Parallel_1",37.48333333333333],PARAMETER["Standard_Parallel_2",38.88333333333333],PARAMETER["Latitude_Of_Origin",37],UNIT["Foot_US",0.304800609601219241]] +47024,PROJCS["NAD_1927_StatePlane_West_Virginia_South_FIPS_4702",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-81],PARAMETER["Standard_Parallel_1",37.48333333333333],PARAMETER["Standard_Parallel_2",38.88333333333333],PARAMETER["Latitude_Of_Origin",37],UNIT["Foot_US",0.304800609601219241]] +48010,PROJCS["NAD_1983_HARN_StatePlane_Wisconsin_North_FIPS_4801",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90],PARAMETER["Standard_Parallel_1",45.56666666666667],PARAMETER["Standard_Parallel_2",46.76666666666667],PARAMETER["Latitude_Of_Origin",45.16666666666666],UNIT["Meter",1]] 
+48011,PROJCS["NAD_1983_StatePlane_Wisconsin_North_FIPS_4801",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90],PARAMETER["Standard_Parallel_1",45.56666666666667],PARAMETER["Standard_Parallel_2",46.76666666666667],PARAMETER["Latitude_Of_Origin",45.16666666666666],UNIT["Meter",1]] +48012,PROJCS["NAD_1983_StatePlane_Wisconsin_North_FIPS_4801_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90],PARAMETER["Standard_Parallel_1",45.56666666666667],PARAMETER["Standard_Parallel_2",46.76666666666667],PARAMETER["Latitude_Of_Origin",45.16666666666666],UNIT["Foot_US",0.304800609601219241]] +48013,PROJCS["NAD_1983_HARN_StatePlane_Wisconsin_North_FIPS_4801_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-90.0],PARAMETER["Standard_Parallel_1",45.56666666666667],PARAMETER["Standard_Parallel_2",46.76666666666667],PARAMETER["Latitude_Of_Origin",45.16666666666666],UNIT["Foot_US",0.3048006096012192]] +48014,PROJCS["NAD_1927_StatePlane_Wisconsin_North_FIPS_4801",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90],PARAMETER["Standard_Parallel_1",45.56666666666667],PARAMETER["Standard_Parallel_2",46.76666666666667],PARAMETER["Latitude_Of_Origin",45.16666666666666],UNIT["Foot_US",0.304800609601219241]] +48020,PROJCS["NAD_1983_HARN_StatePlane_Wisconsin_Central_FIPS_4802",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90],PARAMETER["Standard_Parallel_1",44.25],PARAMETER["Standard_Parallel_2",45.5],PARAMETER["Latitude_Of_Origin",43.83333333333334],UNIT["Meter",1]] +48021,PROJCS["NAD_1983_StatePlane_Wisconsin_Central_FIPS_4802",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90],PARAMETER["Standard_Parallel_1",44.25],PARAMETER["Standard_Parallel_2",45.5],PARAMETER["Latitude_Of_Origin",43.83333333333334],UNIT["Meter",1]] 
+48022,PROJCS["NAD_1983_StatePlane_Wisconsin_Central_FIPS_4802_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90],PARAMETER["Standard_Parallel_1",44.25],PARAMETER["Standard_Parallel_2",45.5],PARAMETER["Latitude_Of_Origin",43.83333333333334],UNIT["Foot_US",0.304800609601219241]] +48023,PROJCS["NAD_1983_HARN_StatePlane_Wisconsin_Central_FIPS_4802_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-90.0],PARAMETER["Standard_Parallel_1",44.25],PARAMETER["Standard_Parallel_2",45.5],PARAMETER["Latitude_Of_Origin",43.83333333333334],UNIT["Foot_US",0.3048006096012192]] +48024,PROJCS["NAD_1927_StatePlane_Wisconsin_Central_FIPS_4802",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90],PARAMETER["Standard_Parallel_1",44.25],PARAMETER["Standard_Parallel_2",45.5],PARAMETER["Latitude_Of_Origin",43.83333333333334],UNIT["Foot_US",0.304800609601219241]] +48030,PROJCS["NAD_1983_HARN_StatePlane_Wisconsin_South_FIPS_4803",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90],PARAMETER["Standard_Parallel_1",42.73333333333333],PARAMETER["Standard_Parallel_2",44.06666666666667],PARAMETER["Latitude_Of_Origin",42],UNIT["Meter",1]] +48031,PROJCS["NAD_1983_StatePlane_Wisconsin_South_FIPS_4803",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90],PARAMETER["Standard_Parallel_1",42.73333333333333],PARAMETER["Standard_Parallel_2",44.06666666666667],PARAMETER["Latitude_Of_Origin",42],UNIT["Meter",1]] +48032,PROJCS["NAD_1983_StatePlane_Wisconsin_South_FIPS_4803_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90],PARAMETER["Standard_Parallel_1",42.73333333333333],PARAMETER["Standard_Parallel_2",44.06666666666667],PARAMETER["Latitude_Of_Origin",42],UNIT["Foot_US",0.304800609601219241]] 
+48033,PROJCS["NAD_1983_HARN_StatePlane_Wisconsin_South_FIPS_4803_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-90.0],PARAMETER["Standard_Parallel_1",42.73333333333333],PARAMETER["Standard_Parallel_2",44.06666666666667],PARAMETER["Latitude_Of_Origin",42.0],UNIT["Foot_US",0.3048006096012192]] +48034,PROJCS["NAD_1927_StatePlane_Wisconsin_South_FIPS_4803",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",2000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-90],PARAMETER["Standard_Parallel_1",42.73333333333333],PARAMETER["Standard_Parallel_2",44.06666666666667],PARAMETER["Latitude_Of_Origin",42],UNIT["Foot_US",0.304800609601219241]] +49010,PROJCS["NAD_1983_HARN_StatePlane_Wyoming_East_FIPS_4901",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-105.1666666666667],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Meter",1]] +49011,PROJCS["NAD_1983_StatePlane_Wyoming_East_FIPS_4901",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-105.1666666666667],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Meter",1]] +49012,PROJCS["NAD_1983_StatePlane_Wyoming_East_FIPS_4901_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-105.1666666666667],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Foot_US",0.304800609601219241]] +49013,PROJCS["NAD_1983_HARN_StatePlane_Wyoming_East_FIPS_4901_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-105.1666666666667],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Foot_US",0.3048006096012192]] 
+49014,PROJCS["NAD_1927_StatePlane_Wyoming_East_FIPS_4901",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-105.1666666666667],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",40.66666666666666],UNIT["Foot_US",0.304800609601219241]] +49020,PROJCS["NAD_1983_HARN_StatePlane_Wyoming_East_Central_FIPS_4902",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",400000],PARAMETER["False_Northing",100000],PARAMETER["Central_Meridian",-107.3333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Meter",1]] +49021,PROJCS["NAD_1983_StatePlane_Wyoming_East_Central_FIPS_4902",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",400000],PARAMETER["False_Northing",100000],PARAMETER["Central_Meridian",-107.3333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Meter",1]] +49022,PROJCS["NAD_1983_StatePlane_Wyoming_East_Central_FIPS_4902_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1312333.333333333],PARAMETER["False_Northing",328083.3333333333],PARAMETER["Central_Meridian",-107.3333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Foot_US",0.304800609601219241]] +49023,PROJCS["NAD_1983_HARN_StatePlane_Wyoming_East_Central_FIPS_4902_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1312333.333333333],PARAMETER["False_Northing",328083.3333333333],PARAMETER["Central_Meridian",-107.3333333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Foot_US",0.3048006096012192]] +49024,PROJCS["NAD_1927_StatePlane_Wyoming_East_Central_FIPS_4902",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-107.3333333333333],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",40.66666666666666],UNIT["Foot_US",0.304800609601219241]] +49030,PROJCS["NAD_1983_HARN_StatePlane_Wyoming_West_Central_FIPS_4903",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-108.75],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Meter",1]] 
+49031,PROJCS["NAD_1983_StatePlane_Wyoming_West_Central_FIPS_4903",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-108.75],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Meter",1]] +49032,PROJCS["NAD_1983_StatePlane_Wyoming_West_Central_FIPS_4903_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1968500],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-108.75],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Foot_US",0.304800609601219241]] +49033,PROJCS["NAD_1983_HARN_StatePlane_Wyoming_West_Central_FIPS_4903_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-108.75],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Foot_US",0.3048006096012192]] +49034,PROJCS["NAD_1927_StatePlane_Wyoming_West_Central_FIPS_4903",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-108.75],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",40.66666666666666],UNIT["Foot_US",0.304800609601219241]] +49040,PROJCS["NAD_1983_HARN_StatePlane_Wyoming_West_FIPS_4904",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",800000],PARAMETER["False_Northing",100000],PARAMETER["Central_Meridian",-110.0833333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Meter",1]] +49041,PROJCS["NAD_1983_StatePlane_Wyoming_West_FIPS_4904",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",800000],PARAMETER["False_Northing",100000],PARAMETER["Central_Meridian",-110.0833333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Meter",1]] +49042,PROJCS["NAD_1983_StatePlane_Wyoming_West_FIPS_4904_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2624666.666666666],PARAMETER["False_Northing",328083.3333333333],PARAMETER["Central_Meridian",-110.0833333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Foot_US",0.304800609601219241]] 
+49043,PROJCS["NAD_1983_HARN_StatePlane_Wyoming_West_FIPS_4904_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",2624666.666666666],PARAMETER["False_Northing",328083.3333333333],PARAMETER["Central_Meridian",-110.0833333333333],PARAMETER["Scale_Factor",0.9999375],PARAMETER["Latitude_Of_Origin",40.5],UNIT["Foot_US",0.3048006096012192]] +49044,PROJCS["NAD_1927_StatePlane_Wyoming_West_FIPS_4904",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-110.0833333333333],PARAMETER["Scale_Factor",0.9999411764705882],PARAMETER["Latitude_Of_Origin",40.66666666666666],UNIT["Foot_US",0.304800609601219241]] +50011,PROJCS["NAD_1983_StatePlane_Alaska_1_FIPS_5001",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Hotine_Oblique_Mercator_Azimuth_Natural_Origin"],PARAMETER["False_Easting",5000000],PARAMETER["False_Northing",-5000000],PARAMETER["Scale_Factor",0.9999],PARAMETER["Azimuth",-36.86989764583333],PARAMETER["Longitude_Of_Center",-133.6666666666667],PARAMETER["Latitude_Of_Center",57],UNIT["Meter",1]] +50012,PROJCS["NAD_1983_StatePlane_Alaska_1_FIPS_5001_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Hotine_Oblique_Mercator_Azimuth_Natural_Origin"],PARAMETER["False_Easting",16404166.66666666],PARAMETER["False_Northing",-16404166.66666666],PARAMETER["Scale_Factor",0.9999],PARAMETER["Azimuth",-36.86989764583333],PARAMETER["Longitude_Of_Center",-133.6666666666667],PARAMETER["Latitude_Of_Center",57],UNIT["Foot_US",0.304800609601219241]] +50014,PROJCS["NAD_1927_StatePlane_Alaska_1_FIPS_5001",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Hotine_Oblique_Mercator_Azimuth_Natural_Origin"],PARAMETER["False_Easting",16404166.666667],PARAMETER["False_Northing",-16404166.666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Azimuth",-36.86989764583333],PARAMETER["Longitude_Of_Center",-133.6666666666667],PARAMETER["Latitude_Of_Center",57],UNIT["Foot_US",0.304800609601219241]] +50021,PROJCS["NAD_1983_StatePlane_Alaska_2_FIPS_5002",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-142],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Meter",1]] 
+50022,PROJCS["NAD_1983_StatePlane_Alaska_2_FIPS_5002_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-142],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50024,PROJCS["NAD_1927_StatePlane_Alaska_2_FIPS_5002",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-142],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50031,PROJCS["NAD_1983_StatePlane_Alaska_3_FIPS_5003",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-146],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Meter",1]] +50032,PROJCS["NAD_1983_StatePlane_Alaska_3_FIPS_5003_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-146],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50034,PROJCS["NAD_1927_StatePlane_Alaska_3_FIPS_5003",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-146],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50041,PROJCS["NAD_1983_StatePlane_Alaska_4_FIPS_5004",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-150],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Meter",1]] +50042,PROJCS["NAD_1983_StatePlane_Alaska_4_FIPS_5004_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-150],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] 
+50044,PROJCS["NAD_1927_StatePlane_Alaska_4_FIPS_5004",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-150],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50051,PROJCS["NAD_1983_StatePlane_Alaska_5_FIPS_5005",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-154],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Meter",1]] +50052,PROJCS["NAD_1983_StatePlane_Alaska_5_FIPS_5005_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-154],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50054,PROJCS["NAD_1927_StatePlane_Alaska_5_FIPS_5005",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-154],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50061,PROJCS["NAD_1983_StatePlane_Alaska_6_FIPS_5006",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-158],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Meter",1]] +50062,PROJCS["NAD_1983_StatePlane_Alaska_6_FIPS_5006_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-158],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50064,PROJCS["NAD_1927_StatePlane_Alaska_6_FIPS_5006",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-158],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] 
+50071,PROJCS["NAD_1983_StatePlane_Alaska_7_FIPS_5007",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-162],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Meter",1]] +50072,PROJCS["NAD_1983_StatePlane_Alaska_7_FIPS_5007_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-162],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50074,PROJCS["NAD_1927_StatePlane_Alaska_7_FIPS_5007",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",700000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-162],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50081,PROJCS["NAD_1983_StatePlane_Alaska_8_FIPS_5008",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-166],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Meter",1]] +50082,PROJCS["NAD_1983_StatePlane_Alaska_8_FIPS_5008_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-166],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50084,PROJCS["NAD_1927_StatePlane_Alaska_8_FIPS_5008",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-166],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50091,PROJCS["NAD_1983_StatePlane_Alaska_9_FIPS_5009",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-170],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Meter",1]] 
+50092,PROJCS["NAD_1983_StatePlane_Alaska_9_FIPS_5009_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-170],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50094,PROJCS["NAD_1927_StatePlane_Alaska_9_FIPS_5009",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",600000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-170],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",54],UNIT["Foot_US",0.304800609601219241]] +50101,PROJCS["NAD_1983_StatePlane_Alaska_10_FIPS_5010",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-176],PARAMETER["Standard_Parallel_1",51.83333333333334],PARAMETER["Standard_Parallel_2",53.83333333333334],PARAMETER["Latitude_Of_Origin",51],UNIT["Meter",1]] +50102,PROJCS["NAD_1983_StatePlane_Alaska_10_FIPS_5010_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3280833.333333333],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-176],PARAMETER["Standard_Parallel_1",51.83333333333334],PARAMETER["Standard_Parallel_2",53.83333333333334],PARAMETER["Latitude_Of_Origin",51],UNIT["Foot_US",0.304800609601219241]] +50104,PROJCS["NAD_1927_StatePlane_Alaska_10_FIPS_5010",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",3000000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-176],PARAMETER["Standard_Parallel_1",51.83333333333334],PARAMETER["Standard_Parallel_2",53.83333333333334],PARAMETER["Latitude_Of_Origin",51],UNIT["Foot_US",0.304800609601219241]] +51010,PROJCS["NAD_1983_HARN_StatePlane_Hawaii_1_FIPS_5101",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-155.5],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",18.83333333333333],UNIT["Meter",1]] +51011,PROJCS["NAD_1983_StatePlane_Hawaii_1_FIPS_5101",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-155.5],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",18.83333333333333],UNIT["Meter",1]] 
+51012,PROJCS["NAD_1983_StatePlane_Hawaii_1_FIPS_5101_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-155.5],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",18.83333333333333],UNIT["Foot_US",0.304800609601219241]] +51013,PROJCS["NAD_1983_HARN_StatePlane_Hawaii_1_FIPS_5101_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-155.5],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",18.83333333333333],UNIT["Foot_US",0.3048006096012192]] +51014,PROJCS["Old_Hawaiian_StatePlane_Hawaii_1_FIPS_5101",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-155.5],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",18.83333333333333],UNIT["Foot_US",0.304800609601219241]] +51020,PROJCS["NAD_1983_HARN_StatePlane_Hawaii_2_FIPS_5102",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-156.6666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",20.33333333333333],UNIT["Meter",1]] +51021,PROJCS["NAD_1983_StatePlane_Hawaii_2_FIPS_5102",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-156.6666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",20.33333333333333],UNIT["Meter",1]] +51022,PROJCS["NAD_1983_StatePlane_Hawaii_2_FIPS_5102_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-156.6666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",20.33333333333333],UNIT["Foot_US",0.304800609601219241]] +51023,PROJCS["NAD_1983_HARN_StatePlane_Hawaii_2_FIPS_5102_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-156.6666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",20.33333333333333],UNIT["Foot_US",0.3048006096012192]] 
+51024,PROJCS["Old_Hawaiian_StatePlane_Hawaii_2_FIPS_5102",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-156.6666666666667],PARAMETER["Scale_Factor",0.9999666666666667],PARAMETER["Latitude_Of_Origin",20.33333333333333],UNIT["Foot_US",0.304800609601219241]] +51030,PROJCS["NAD_1983_HARN_StatePlane_Hawaii_3_FIPS_5103",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-158],PARAMETER["Scale_Factor",0.99999],PARAMETER["Latitude_Of_Origin",21.16666666666667],UNIT["Meter",1]] +51031,PROJCS["NAD_1983_StatePlane_Hawaii_3_FIPS_5103",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-158],PARAMETER["Scale_Factor",0.99999],PARAMETER["Latitude_Of_Origin",21.16666666666667],UNIT["Meter",1]] +51032,PROJCS["NAD_1983_StatePlane_Hawaii_3_FIPS_5103_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-158],PARAMETER["Scale_Factor",0.99999],PARAMETER["Latitude_Of_Origin",21.16666666666667],UNIT["Foot_US",0.304800609601219241]] +51033,PROJCS["NAD_1983_HARN_StatePlane_Hawaii_3_FIPS_5103_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-158.0],PARAMETER["Scale_Factor",0.99999],PARAMETER["Latitude_Of_Origin",21.16666666666667],UNIT["Foot_US",0.3048006096012192]] +51034,PROJCS["Old_Hawaiian_StatePlane_Hawaii_3_FIPS_5103",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-158],PARAMETER["Scale_Factor",0.99999],PARAMETER["Latitude_Of_Origin",21.16666666666667],UNIT["Foot_US",0.304800609601219241]] +51040,PROJCS["NAD_1983_HARN_StatePlane_Hawaii_4_FIPS_5104",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-159.5],PARAMETER["Scale_Factor",0.99999],PARAMETER["Latitude_Of_Origin",21.83333333333333],UNIT["Meter",1]] 
+51041,PROJCS["NAD_1983_StatePlane_Hawaii_4_FIPS_5104",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-159.5],PARAMETER["Scale_Factor",0.99999],PARAMETER["Latitude_Of_Origin",21.83333333333333],UNIT["Meter",1]] +51042,PROJCS["NAD_1983_StatePlane_Hawaii_4_FIPS_5104_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-159.5],PARAMETER["Scale_Factor",0.99999],PARAMETER["Latitude_Of_Origin",21.83333333333333],UNIT["Foot_US",0.304800609601219241]] +51043,PROJCS["NAD_1983_HARN_StatePlane_Hawaii_4_FIPS_5104_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-159.5],PARAMETER["Scale_Factor",0.99999],PARAMETER["Latitude_Of_Origin",21.83333333333333],UNIT["Foot_US",0.3048006096012192]] +51044,PROJCS["Old_Hawaiian_StatePlane_Hawaii_4_FIPS_5104",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-159.5],PARAMETER["Scale_Factor",0.99999],PARAMETER["Latitude_Of_Origin",21.83333333333333],UNIT["Foot_US",0.304800609601219241]] +51050,PROJCS["NAD_1983_HARN_StatePlane_Hawaii_5_FIPS_5105",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-160.1666666666667],PARAMETER["Scale_Factor",1],PARAMETER["Latitude_Of_Origin",21.66666666666667],UNIT["Meter",1]] +51051,PROJCS["NAD_1983_StatePlane_Hawaii_5_FIPS_5105",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-160.1666666666667],PARAMETER["Scale_Factor",1],PARAMETER["Latitude_Of_Origin",21.66666666666667],UNIT["Meter",1]] +51052,PROJCS["NAD_1983_StatePlane_Hawaii_5_FIPS_5105_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-160.1666666666667],PARAMETER["Scale_Factor",1],PARAMETER["Latitude_Of_Origin",21.66666666666667],UNIT["Foot_US",0.304800609601219241]] 
+51053,PROJCS["NAD_1983_HARN_StatePlane_Hawaii_5_FIPS_5105_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1640416.666666667],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-160.1666666666667],PARAMETER["Scale_Factor",1.0],PARAMETER["Latitude_Of_Origin",21.66666666666667],UNIT["Foot_US",0.3048006096012192]] +51054,PROJCS["Old_Hawaiian_StatePlane_Hawaii_5_FIPS_5105",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-160.1666666666667],PARAMETER["Scale_Factor",1],PARAMETER["Latitude_Of_Origin",21.66666666666667],UNIT["Foot_US",0.304800609601219241]] +52000,PROJCS["NAD_1983_HARN_StatePlane_Puerto_Rico_Virgin_Islands_FIPS_5200",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",200000],PARAMETER["Central_Meridian",-66.43333333333334],PARAMETER["Standard_Parallel_1",18.03333333333333],PARAMETER["Standard_Parallel_2",18.43333333333333],PARAMETER["Latitude_Of_Origin",17.83333333333333],UNIT["Meter",1]] +52001,PROJCS["NAD_1983_StatePlane_Puerto_Rico_Virgin_Islands_FIPS_5200",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",200000],PARAMETER["False_Northing",200000],PARAMETER["Central_Meridian",-66.43333333333334],PARAMETER["Standard_Parallel_1",18.03333333333333],PARAMETER["Standard_Parallel_2",18.43333333333333],PARAMETER["Latitude_Of_Origin",17.83333333333333],UNIT["Meter",1]] +52002,PROJCS["NAD_1983_StatePlane_Puerto_Rico_Virgin_Islands_FIPS_5200_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",656166.6666666665],PARAMETER["False_Northing",656166.6666666665],PARAMETER["Central_Meridian",-66.43333333333334],PARAMETER["Standard_Parallel_1",18.03333333333333],PARAMETER["Standard_Parallel_2",18.43333333333333],PARAMETER["Latitude_Of_Origin",17.83333333333333],UNIT["Foot_US",0.3048006096012192]] +52014,PROJCS["NAD_1927_StatePlane_Puerto_Rico_FIPS_5201",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",-66.43333333333334],PARAMETER["Standard_Parallel_1",18.03333333333333],PARAMETER["Standard_Parallel_2",18.43333333333333],PARAMETER["Latitude_Of_Origin",17.83333333333333],UNIT["Foot_US",0.304800609601219241]] 
+52020,PROJCS["Puerto_Rico_StatePlane_Virgin_Islands_St_Croix_FIPS_5202",GEOGCS["GCS_Puerto_Rico",DATUM["D_Puerto_Rico",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",100000],PARAMETER["Central_Meridian",-66.43333333333334],PARAMETER["Standard_Parallel_1",18.03333333333333],PARAMETER["Standard_Parallel_2",18.43333333333333],PARAMETER["Latitude_Of_Origin",17.83333333333333],UNIT["Foot_US",0.304800609601219241]] +52024,PROJCS["Puerto_Rico_StatePlane_Virgin_Islands_St_Croix_FIPS_5202",GEOGCS["GCS_Puerto_Rico",DATUM["D_Puerto_Rico",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",100000],PARAMETER["Central_Meridian",-66.43333333333334],PARAMETER["Standard_Parallel_1",18.03333333333333],PARAMETER["Standard_Parallel_2",18.43333333333333],PARAMETER["Latitude_Of_Origin",17.83333333333333],UNIT["Foot_US",0.304800609601219241]] +54000,PROJCS["NAD_1983_StatePlane_Guam_FIPS_5400",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Polyconic"],PARAMETER["False_Easting",50000],PARAMETER["False_Northing",50000],PARAMETER["Central_Meridian",144.7487507055556],PARAMETER["Latitude_Of_Origin",13.47246635277778],UNIT["Meter",1]] +54001,PROJCS["NAD_1983_StatePlane_Guam_FIPS_5400",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Polyconic"],PARAMETER["False_Easting",50000],PARAMETER["False_Northing",50000],PARAMETER["Central_Meridian",144.7487507055556],PARAMETER["Latitude_Of_Origin",13.47246635277778],UNIT["Meter",1]] +54002,PROJCS["NAD_1983_StatePlane_Guam_FIPS_5400_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Polyconic"],PARAMETER["False_Easting",164041.6666666666],PARAMETER["False_Northing",164041.6666666666],PARAMETER["Central_Meridian",144.7487507055556],PARAMETER["Latitude_Of_Origin",13.47246635277778],UNIT["Foot_US",0.304800609601219241]] +54004,PROJCS["NAD_1927_StatePlane_Guam_FIPS_5400",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Polyconic"],PARAMETER["False_Easting",164041.6666666667],PARAMETER["False_Northing",164041.6666666667],PARAMETER["Central_Meridian",144.7487507055556],PARAMETER["Latitude_Of_Origin",13.47246635277778],UNIT["Foot_US",0.304800609601219241]] +102964,PROJCS["NAD_1927_Alaska_Albers_Feet",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Albers"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-154.0],PARAMETER["Standard_Parallel_1",55.0],PARAMETER["Standard_Parallel_2",65.0],PARAMETER["Latitude_Of_Origin",50.0],UNIT["Foot_US",0.3048006096012192]] 
+102991,PROJCS["NAD_1983_Oregon_Statewide_Lambert",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",400000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",43.0],PARAMETER["Standard_Parallel_2",45.5],PARAMETER["Latitude_Of_Origin",41.75],UNIT["Meter",1.0]]
+102993,PROJCS["NAD_1983_HARN_Oregon_Statewide_Lambert",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",400000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",43.0],PARAMETER["Standard_Parallel_2",45.5],PARAMETER["Latitude_Of_Origin",41.75],UNIT["Meter",1.0]]
+102994,PROJCS["NAD_1983_HARN_Oregon_Statewide_Lambert_Feet_Intl",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1312335.958005249],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",43.0],PARAMETER["Standard_Parallel_2",45.5],PARAMETER["Latitude_Of_Origin",41.75],UNIT["Foot",0.3048]]
+102996,PROJCS["NAD_1983_Oregon_Statewide_Lambert_Feet_Intl",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",1312335.958005249],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-120.5],PARAMETER["Standard_Parallel_1",43.0],PARAMETER["Standard_Parallel_2",45.5],PARAMETER["Latitude_Of_Origin",41.75],UNIT["Foot",0.3048]]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdalicon.png b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdalicon.png
new file mode 100644
index 00000000..8e7731d8
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdalicon.png differ
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdalinfo_output.schema.json b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdalinfo_output.schema.json
new file mode 100644
index 00000000..a4c50c78
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdalinfo_output.schema.json
@@ -0,0 +1,346 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "description": "Schema for gdalinfo -json output",
+
+  "oneOf": [
+    {
+      "$ref": "#/definitions/dataset"
+    }
+  ],
+
+  "definitions": {
+
+    "arrayOfTwoIntegers": {
+      "type": "array",
+      "minItems": 2,
+      "maxItems": 2,
+      "items": {
+        "type": "integer"
+      }
+    },
+
+    "arrayOfTwoNumbers": {
+      "type": "array",
+      "minItems": 2,
+      "maxItems": 2,
+      "items": {
+        "type": "number"
+      }
+    },
+
+    "band": {
+      "type": "object",
+      "properties": {
+        "band": {
+          "type": "integer"
+        },
+        "block": {
+          "$ref": "#/definitions/arrayOfTwoIntegers"
+        },
+        "checksum": {
+          "type": "integer"
+        },
+        "colorInterpretation": {
+          "type": "string"
+        },
+        "type": {
+          "enum": [
+            "Byte",
+            "Int8",
+            "UInt16",
+            "Int16",
+            "UInt32",
+            "Int32",
+            "UInt64",
+            "Int64",
+            "Float32",
+            "Float64",
+            "CInt16",
+            "CInt32",
+            "CFloat32",
+            "CFloat64"
+          ]
+ },
+ "histogram": {
+ "type": "object",
+ "properties": {
+ "buckets": {
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
+ },
+ "count": {
+ "type": "integer"
+ },
+ "min": {
+ "type": "number"
+ },
+ "max": {
+ "type": "number"
+ }
+ }
+ },
+ "min": {
+ "type": "number"
+ },
+ "max": {
+ "type": "number"
+ },
+ "computedMin": {
+ "type": "number"
+ },
+ "computedMax": {
+ "type": "number"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "mean": {
+ "type": "number"
+ },
+ "stdDev": {
+ "type": "number"
+ },
+ "overviews": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "size": {
+ "$ref": "#/definitions/arrayOfTwoIntegers"
+ }
+ }
+ }
+ },
+ "metadata": {
+ "$ref": "#/definitions/metadata"
+ }
+ },
+ "required": [
+ "band",
+ "block",
+ "type"
+ ],
+ "additionalProperties": false
+ },
+
+ "cornerCoordinates": {
+ "type": "object",
+ "properties": {
+ "upperLeft": {
+ "$ref": "#/definitions/arrayOfTwoNumbers"
+ },
+ "lowerLeft": {
+ "$ref": "#/definitions/arrayOfTwoNumbers"
+ },
+ "lowerRight": {
+ "$ref": "#/definitions/arrayOfTwoNumbers"
+ },
+ "upperRight": {
+ "$ref": "#/definitions/arrayOfTwoNumbers"
+ },
+ "center": {
+ "$ref": "#/definitions/arrayOfTwoNumbers"
+ }
+ },
+ "required": [
+ "upperLeft",
+ "lowerLeft",
+ "lowerRight",
+ "upperRight",
+ "center"
+ ],
+ "additionalProperties": false
+ },
+
+ "dataset": {
+ "type": "object",
+ "properties": {
+ "description": {
+ "type": "string"
+ },
+ "driverShortName": {
+ "type": "string"
+ },
+ "driverLongName": {
+ "type": "string"
+ },
+ "files": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "size": {
+ "$comment": "note that the order of items in size is width,height",
+ "$ref": "#/definitions/arrayOfTwoIntegers"
+ },
+ "coordinateSystem": {
+ "$ref": "#/definitions/coordinateSystem"
+ },
+ "geoTransform": {
+ "type": "array",
+ "minItems": 6,
+ "maxItems": 6,
+ "items": {
+ "type": "number"
+ }
+ },
+ "cornerCoordinates": {
+ "$ref": "#/definitions/cornerCoordinates"
+ },
+ "wgs84Extent": {
+ "$ref": "https://geojson.org/schema/Geometry.json"
+ },
+ "bands": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/band"
+ }
+ },
+ "stac": {
+ "$ref": "#/definitions/stac"
+ },
+ "metadata": {
+ "$ref": "#/definitions/metadata"
+ }
+ },
+ "required": [
+ "size",
+ "bands"
+ ],
+ "additionalProperties": false
+ },
+
+ "metadata": {
+ "type": "object",
+ "$comment": "Object whose keys are metadata domain names. The empty string is a valid metadata domain name, and is used for the default domain.",
+ "patternProperties": {
+ "^.*$": {
+ "$ref": "#/definitions/metadataDomain"
+ }
+ }
+ },
+
+ "metadataDomain": {
+ "$comment": "The values of a metadata domain are key: string pairs, or arbitrary JSON objects for metadata domain names starting with the \"json:\" prefix.",
+ "anyOf": [
+ {
+ "type": "object"
+ },
+ {
+ "$ref": "#/definitions/keyValueDict"
+ }
+ ]
+ },
+
+ "coordinateSystem": {
+ "type": "object",
+ "properties": {
+ "wkt": {
+ "type": "string"
+ },
+ "proj4": {
+ "type": "string"
+ },
+ "projjson": {
+ "$ref": "https://proj.org/schemas/v0.5/projjson.schema.json"
+ },
+ "dataAxisToSRSAxisMapping": {
+ "type": "array",
+ "minItems": 2,
+ "maxItems": 3,
+ "items": {
+ "type": "number"
+ }
+ },
+ "coordinateEpoch": {
+ "type": "number"
+ }
+ },
+ "required": [
+ "wkt",
+ "dataAxisToSRSAxisMapping"
+ ],
+ "additionalProperties": false
+ },
+
+ "keyValueDict": {
+ "type": "object",
+ "patternProperties": {
+ "^.*$": {}
+ }
+ },
+
+ "stac": {
+ "$comment": "Derived from https://raw.githubusercontent.com/stac-extensions/projection/main/json-schema/schema.json#/definitions/fields, https://raw.githubusercontent.com/stac-extensions/eo/v1.1.0/json-schema/schema.json#/definitions/bands and https://raw.githubusercontent.com/stac-extensions/raster/v1.1.0/json-schema/schema.json#/definitions/bands",
+ "type": "object",
+ "properties": {
+ "proj:epsg": {
+ "title": "EPSG code",
+ "type": [
+ "integer",
+ "null"
+ ]
+ },
+ "proj:wkt2": {
+ "title": "Coordinate Reference System in WKT2 format",
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "proj:projjson": {
+ "title": "Coordinate Reference System in PROJJSON format",
+ "oneOf": [
+ {
+ "$ref": "https://proj.org/schemas/v0.5/projjson.schema.json"
+ },
+ {
+ "type": "null"
+ }
+ ]
+ },
+
+ "proj:shape": {
+ "$comment": "note that the order of items in proj:shape is height,width starting with GDAL 3.8.5 (previous versions ordered it wrongly as width,height)",
+ "title": "Shape",
+ "type": "array",
+ "minItems": 2,
+ "maxItems": 2,
+ "items": {
+ "type": "integer"
+ }
+ },
+ "proj:transform": {
+ "title": "Transform",
+ "type": "array",
+ "oneOf": [
+ {
+ "minItems": 6,
+ "maxItems": 6
+ },
+ {
+ "minItems": 9,
+ "maxItems": 9
+ }
+ ],
+ "items": {
+ "type": "number"
+ }
+ },
+ "eo:bands": {
+ "$ref": "https://raw.githubusercontent.com/stac-extensions/eo/v1.1.0/json-schema/schema.json#/definitions/bands"
+ },
+ "raster:bands": {
+ "$ref": "https://raw.githubusercontent.com/stac-extensions/raster/v1.1.0/json-schema/schema.json#/definitions/bands"
+ }
+ },
+ "additionalProperties": false
+ }
+ }
+}
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdalmdiminfo_output.schema.json b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdalmdiminfo_output.schema.json
new file mode 100644
index 00000000..d2fa75da
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdalmdiminfo_output.schema.json
@@ -0,0 +1,321 @@
+{
+ "$id": "https://gdal.org/gdalmdiminfo_output.schema.json",
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "description": "Validate the output of the gdalmdiminfo utility",
+
+ "anyOf": [
+ {
+ "$ref": "#/definitions/group"
+ },
+ {
+ "$ref": "#/definitions/array"
+ }
+ ],
+
+ "definitions": {
+
+ "array": {
+ "type": "object",
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "type": {
+ "type": "string",
+ "enum": [
+ "array"
+ ]
+ },
+ "datatype": {
+ "$ref": "#/definitions/datatype"
+ },
+ "dimensions": {
"$ref": "#/definitions/dimensions" + }, + "dimension_size": { + "type": "array", + "items": { + "type": "number" + } + }, + "block_size": { + "type": "array", + "items": { + "type": "number" + } + }, + "attributes": { + "$ref": "#/definitions/attributes" + }, + "srs": { + "$ref": "#/definitions/srs" + }, + "nodata_value": { + "$ref": "#/definitions/value" + }, + "scale": { + "type": "number" + }, + "offset": { + "type": "number" + }, + "values": { + "$ref": "#/definitions/value" + }, + "unit": { + "type": "string" + }, + "structural_info": { + "$ref": "#/definitions/structural_info" + } + }, + "additionalProperties": false + }, + + "arrays": { + "type": "object", + "properties": {}, + "additionalProperties": { + "$ref": "#/definitions/array" + } + }, + + "attribute": { + "anyOf": [ + { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "type": { + "type": "string", + "enum": [ + "attribute" + ] + }, + "datatype": { + "$ref": "#/definitions/datatype" + }, + "value": { + "$ref": "#/definitions/value" + } + }, + "additionalProperties": false + }, + { + "$ref": "#/definitions/value" + } + ] + }, + + "attributes": { + "anyOf": [ + { + "type": "object", + "properties": {}, + "additionalProperties": { + "$ref": "#/definitions/attribute" + } + }, + { + "type": "array", + "items": { + "$ref": "#/definitions/attribute" + } + } + ] + }, + + "compound_datatype": { + "type": "object", + "properties": + { + "name": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "components": { + "type": "array", + "items": { + "type": "object", + "properties": + { + "name": { + "type": "string" + }, + "offset": { + "type": "integer" + }, + "type": { + "$ref": "#/definitions/datatype" + } + }, + "additionalProperties": false + } + } + }, + "additionalProperties": false + }, + + "datatype": { + "anyOf": [ + { + "type": "string", + "enum": [ + "String", + "Byte", + "Int16", + "UInt16", + "Int32", + "UInt32", + "Float32", + "Float64", + "CInt16", + "CInt32", + "CFloat32", + "CFloat64" + ] + }, + { + "$ref": "#/definitions/compound_datatype" + } + ] + }, + + "dimension": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "full_name": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "type": { + "type": "string" + }, + "direction": { + "type": "string" + }, + "indexing_variable": { + "type": "string" + } + }, + "additionalProperties": false + }, + + "dimensions": { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/dimension" + }, + { + "description": "Full qualified name of a dimension", + "type": "string" + } + ] + } + }, + + "group": { + "type": "object", + "properties": { + "driver": { + "type": "string" + }, + "name": { + "type": "string" + }, + "type": { + "type": "string", + "enum": [ + "group" + ] + }, + "dimensions": { + "$ref": "#/definitions/dimensions" + }, + "arrays": { + "$ref": "#/definitions/arrays" + }, + "attributes": { + "$ref": "#/definitions/attributes" + }, + "groups": { + "$ref": "#/definitions/groups" + }, + "structural_info": { + "$ref": "#/definitions/structural_info" + } + }, + "additionalProperties": false + }, + + "groups": { + "anyOf": [ + { + "type": "object", + "properties": {}, + "additionalProperties": { + "$ref": "#/definitions/group" + } + }, + { + "type": "array", + "items": { + "$ref": "#/definitions/group" + } + } + ] + }, + + "srs": { + "type": "object", + "properties": { + "wkt": { + "type": "string" + }, + "data_axis_to_srs_axis_mapping": { + "type": "array", + "items": { + 
"type": "integer" + } + } + }, + "additionalProperties": false + }, + + "structural_info": { + "type": "object", + "properties": {}, + "additionalProperties": { + "type": "string" + } + }, + + "value": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "object" + }, + { + "type": "array" + } + ] + } + } +} diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdaltileindex.xsd b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdaltileindex.xsd new file mode 100644 index 00000000..7e0d38a35 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdaltileindex.xsd @@ -0,0 +1,269 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdalvrt.xsd b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdalvrt.xsd new file mode 100644 index 00000000..9f9a91d7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gdalvrt.xsd @@ -0,0 +1,880 @@ + + + + + + Root element + + + + + + + + + + + + + + May be repeated + + + + + May be repeated + + + + + + Allowed only if subClass="VRTWarpedDataset" + + + + + Allowed only if subClass="VRTPansharpenedDataset" + + + + + Allowed only if subClass="VRTProcessedDataset" + + + + + Allowed only if subClass="VRTProcessedDataset" + + + + + only for multidimensional dataset + + + + + + + + + + + + + + + + + Added in GDAL 3.9 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Processing step of a VRTPansharpenedDataset + + + + + Builtin allowed names are BandAffineCombination, LUT, LocalScaleOffset, Trimming. More algorithms can be registered at run-time. 
+ Argument of a processing function
+ Allowed names are specific of each processing function
+ [remaining gdalvrt.xsd markup stripped in extraction; no further annotation text survives]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/gfs.xsd b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gfs.xsd
new file mode 100644
index 00000000..936343b3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gfs.xsd
@@ -0,0 +1,246 @@
+ [XSD markup stripped in extraction; only the embedded annotation text below survives]
+ Set this element to true if all features belonging to the same layer are written sequentially in the file. The reader will then avoid unnecessary resets when layers are read completely one after the other. To get the best performance, the layers must be read in the order they appear in the file. Cf https://gdal.org/drivers/vector/gml.html#performance-issues-with-large-multi-layer-gml-files
+ Name of the feature type; essentially used as layer name. Can be different than the name of the XML element that represents such a feature in XML data. Examples: case can change, a prefix can be added to the name, and the name can be more human readable (e.g. the full name, rather than an abbreviation). Different GMLFeatureClass elements should have a different name.
+ Defines the path in a given XML document to the elements that represent the GML feature. Can use '|' as element separator. Namespace prefixes of path elements are insignificant. As multiple ElementPath-elements are not allowed per GMLFeatureClass, if a feature type was encoded in different places in an XML document (e.g. on collection member level, as well as inline in some other feature), the gfs file would have to contain multiple GMLFeatureClass entries, with different ElementPaths.
+ Defines a geometry column. This element may be repeated if there are several geometry columns. For backward compatibility with older GDAL versions, the GDAL .gfs writer will only write this element if there are several geometry columns, but it is allowed to use it if there is just a single geometry column. GeomPropertyDefn is mutually exclusive with GeometryName, GeometryElementPath and GeometryType
+ Name of a geometric property of the feature. Can be different than the name of the XML element that represents that property.
Examples: case can change, a prefix can be added to the name, and the name can be more human readable (e.g. the full name, rather than an abbreviation, or a combination of names in the element path). Mutually exclusive with GeomPropertyDefn
+ Defines the path to the XML element that represents the geometry property within the XML element of the GML feature. Can use '|' as element separator. Namespace prefixes of path elements are insignificant. NOTE: The path should not include the actual GML geometry element itself. Used in combination with the GeometryName. Mutually exclusive with GeomPropertyDefn.
+ Used in combination with the GeometryName. Mutually exclusive with GeomPropertyDefn.
+ Defines the SRS of all geometry columns of the layer. Typically a string of the form urn:ogc:def:crs:EPSG::XXXX
+ Contains optional information about the feature count of the layer and its extent. This should not be used in .gfs templates, but for specific instantiation of a .gfs on a given .gml file
+ Number of features in the layer
+ Minimum X value of the layer extent.
+ Maximum X value of the layer extent.
+ Minimum Y value of the layer extent.
+ Maximum Y value of the layer extent.
+ The names of all GMLFeatureClasses within the GMLFeatureClassList must be unique.
+ Geometry type, expressed either as a numeric value matching the OGRwkbGeometryType enumeration or a string ([Multi]?Point|[Multi]?LineString|[Multi]?Polygon|GeometryCollection|CircularString|CurvePolygon|[Multi]Curve|[Multi]Surface|Triangle|PolyhedralSurface|TIN)Z?M? or None to indicate a layer without geometry field.
+ Name of a geometric property of the feature. Can be different than the name of the XML element that represents that property. Examples: case can change, a prefix can be added to the name, and the name can be more human readable (e.g. the full name, rather than an abbreviation, or a combination of names in the element path).
+ Defines the path to the XML element that represents the geometry property within the XML element of the GML feature. Can use '|' as element separator. Namespace prefixes of path elements are insignificant. NOTE: The path should not include the actual GML geometry element itself.
+ Can be set to false to indicate that null/missing geometries are forbidden.
+ Name of a non-geometric property of the feature. Can be different than the name of the XML element that represents that property.
+
+NOTE: Properties with name suffix "_href" - typically used when the ElementPath ends in @xlink:href - can be used to build junction tables. For further details, see https://gdal.org/drivers/vector/gml.html#building-junction-tables.
+
+Examples: case can change, a prefix can be added to the name, and the name can be more human readable (e.g. the full name, rather than an abbreviation, or a combination of names in the element path).
+ Defines the path to the XML element that represents the property within the XML element of the GML feature. Can use '|' as element separator. The last path segment may have an XML attribute name as suffix, using '@' as separator (e.g., width@uom). Namespace prefixes of path elements are insignificant.
+ Field type. Complex may be used to indicate that the value of the element is not a simple type.
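Read together, the annotations above and below describe the full shape of a .gfs template: a GMLFeatureClassList of GMLFeatureClass entries, each with a Name, an ElementPath, a geometry definition, and PropertyDefn children (the Subtype, Condition, Width and Precision facets follow after this note). A minimal sketch, assuming a hypothetical cities.gml whose features are <City> elements with a population child:

    # Hedged sketch: a minimal .gfs template built from the elements the
    # annotations describe (Name, ElementPath, GeometryType, PropertyDefn).
    # The layer and element names are invented; GeometryType 1 = wkbPoint.
    import fiona  # this gdal_data directory ships with fiona

    GFS = """<GMLFeatureClassList>
      <GMLFeatureClass>
        <Name>cities</Name>
        <ElementPath>City</ElementPath>
        <GeometryType>1</GeometryType>
        <PropertyDefn>
          <Name>pop</Name>
          <ElementPath>population</ElementPath>
          <Type>Integer</Type>
        </PropertyDefn>
      </GMLFeatureClass>
    </GMLFeatureClassList>"""

    with open("cities.gfs", "w") as f:  # same basename as the data file
        f.write(GFS)

    # The GML driver picks up cities.gfs automatically when a matching
    # cities.gml is opened alongside it.
    with fiona.open("cities.gml") as src:
        print(src.schema)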
+ Can be set to false to indicate that null/missing values are forbidden.
+ Further specializes the property type. Allowed combinations are: (Type: Integer, Subtype: Short, Integer64), (Type: IntegerList, Subtype: Integer64), (Type: Real, Subtype: Float), (Type: String, Subtype: Boolean, Date, Time, Datetime), (Type: StringList, Subtype: Boolean)
+ Can be used to create multiple properties from the same XML element, based upon a set of mutually exclusive conditions. For further details, and examples, see https://gdal.org/drivers/vector/gml.html#using-conditions-on-xml-attributes
+ When set to true, indicates that values of that field are unique through all the features of the layer
+ Maximum width of the string representation of the values of the field. Supported use cases: (Type: String, Subtype is NOT Boolean, Date, Time, or Datetime), (Type: Integer), (Type: Real)
+ Only applies to Real. Maximum decimal precision (i.e. number of digits after the decimal point) of the values of the field.
+ Description of the field (added in GDAL 3.7)
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/gml_registry.xml b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gml_registry.xml
new file mode 100644
index 00000000..831a32d3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gml_registry.xml
@@ -0,0 +1,117 @@
+ [117 lines of XML markup: element tags were lost in extraction and no text content survives for this file]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/gml_registry.xsd b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gml_registry.xsd
new file mode 100644
index 00000000..717f674d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/gml_registry.xsd
@@ -0,0 +1,66 @@
+ [XSD markup stripped in extraction; only the embedded annotation text below survives]
+ The registry contains namespace definitions, which are used to find a single .gfs or XML schema file with which the GML driver shall process the whole GML data file. The GML driver uses the schema file defined for the first matching (prefix-)namespace-featureType combination that is found in the GML data. NOTE: The order of the namespaces within the GML registry file is important when loading a GML file that contains features from multiple namespaces. Only the feature types defined in the selected schema will be recognized. In other words, if the GML file contains features from multiple namespaces, then only the ones that match the definitions from the chosen schema will be read.
+ Defines a namespace, together with the feature types that belong to it.
+
+NOTE: Multiple feature types within the namespace may refer to the same schema (which should then include definitions for these feature types).
+ Definition of a feature type, with XML attributes to match given GML data and to define a .gfs or XML schema that contains the definition of the feature type. NOTE: The schema may contain definitions for multiple feature types.
+ Name of the XML element that is used to identify the feature type. Typically the local name of an XML element that encodes a feature. However, it can also be the local name of a different element that can be used in combination with @elementValue to produce a match for this feature type definition within the namespace.
+ Used in combination with @elementName.
If present, a match for this feature type element is only produced if an XML element whose local-name (and prefix, if defined in the namespace) equals the value of @elementName and whose value equals the value of @elementValue is found in the GML data. For example: If namespace/@prefix = 'ex', featureType/@elementName = 'typeOfFeature', and featureType/@elementValue = 'FT', then a match is produced if <ex:typeOfFeature>FT</ex:typeOfFeature> is present in the GML data.
+ OGR .gfs file in which the feature type is defined (possibly together with other feature types). The GML driver will parse the layer definitions from that file. The schema location can be given as URL (http and https), absolute file path, and relative file path (relative to the location of the registry file).
+ XML schema file in which the feature type is defined (possibly together with other feature types). The GML driver will attempt to parse it and create layer definitions. The schema location can be given as URL (http and https), absolute file path, and relative file path (relative to the location of the registry file).
+ Namespace prefix assigned to the features of this namespace in actual GML data. The prefix will be looked up in the first bytes of the GML file (e.g. xmlns:abc="http://example.org/abc"). It is used to confirm the match of the namespace uri (defined by @uri). NOTE-1: Case is important - the parser looks for the exact string, i.e., 'ex' is treated differently than 'EX'. If actual data may use different prefixes for the same namespace URI, multiple namespace elements must be added to the gml_registry - one for each applicable prefix. NOTE-2: The prefix can be omitted in the namespace definition. In that case, a match is only produced if a) the namespace uri is found in any namespace declaration in the GML file (typically as default namespace in the root element, e.g., xmlns="http://example.org/abc") and b) an XML element without XML namespace prefix, and local-name being equal to the @elementName of the feature type (plus a possibly defined @elementValue as textual value), is found.
+ Namespace URI as found in the first bytes of the GML file. (e.g. "http://example.org/abc" in the namespace declaration for xmlns:abc="http://example.org/abc").
+ When set to true, it means that the SRS defined by a srsName attribute found in the global gml:Envelope element applies to all features of the GML file.
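Combining the attribute descriptions above, a registry entry for the 'ex' example would look roughly as follows. The attribute spellings (prefix, uri, elementName, elementValue, gfsSchemaLocation) are inferred from the annotations, since the registry XML itself did not survive extraction:

    # Hedged sketch: one namespace/featureType entry shaped like the
    # annotations describe, parsed here just to show the structure.
    import xml.etree.ElementTree as ET

    ENTRY = """<gml_registry>
      <namespace prefix="ex" uri="http://example.org/abc">
        <featureType elementName="typeOfFeature"
                     elementValue="FT"
                     gfsSchemaLocation="example_ft.gfs"/>
      </namespace>
    </gml_registry>"""

    root = ET.fromstring(ENTRY)
    for ns in root.findall("namespace"):
        names = [ft.get("elementName") for ft in ns.findall("featureType")]
        print(ns.get("uri"), names)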
+ + + + + + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_center.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_center.csv new file mode 100644 index 00000000..be37edd4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_center.csv @@ -0,0 +1,251 @@ +code,name +0,"WMO Secretariat" +1,"Melbourne" +2,"Melbourne" +3,"Melbourne" +4,"Moscow" +5,"Moscow" +6,"Moscow" +7,"US-NCEP" +8,"US-NWSTG" +9,"US-Other" +10,"Cairo" +11,"Cairo" +12,"Dakar" +13,"Dakar" +14,"Nairobi" +15,"Nairobi" +16,"Casablanca" +17,"Tunis" +18,"Tunis Casablanca" +19,"Tunis Casablanca" +20,"Las Palmas" +21,"Algiers" +22,"ACMAD" +23,"Mozambique" +24,"Pretoria" +25,"La Réunion" +26,"Khabarovsk" +27,"Khabarovsk" +28,"New Delhi" +29,"New Delhi" +30,"Novosibirsk" +31,"Novosibirsk" +32,"Tashkent" +33,"Jeddah" +34,"Tokyo" +35,"Tokyo" +36,"Bangkok" +37,"Ulan Bator" +38,"Beijing" +39,"Beijing" +40,"Seoul" +41,"Buenos Aires" +42,"Buenos Aires" +43,"Brasilia" +44,"Brasilia" +45,"Santiago" +46,"Brazilian Space Agency" +47,"Colombia" +48,"Ecuador" +49,"Peru" +50,"Venezuela" +51,"Miami" +52,"Miami-NHC" +53,"Montreal" +54,"Montreal" +55,"San Francisco" +56,"ARINC Centre" +57,"US-Air Force Weather" +58,"US-Fleet Meteorology and Oceanography" +59,"US-FSL" +60,"US-NCAR" +61,"US-Service ARGOS" +62,"US-Naval Oceanographic Office" +64,"Honolulu" +65,"Darwin" +66,"Darwin" +67,"Melbourne" +69,"Wellington" +70,"Wellington" +71,"Nadi" +72,"Singapore" +73,"Malaysia" +74,"UK-Met-Exeter" +75,"UK-Met-Exeter" +76,"Moscow" +78,"Offenbach" +79,"Offenbach" +80,"Rome" +81,"Rome" +82,"Norrköping" +83,"Norrköping" +84,"Toulouse" +85,"Toulouse" +86,"Helsinki" +87,"Belgrade" +88,"Oslo" +89,"Prague" +90,"Episkopi" +91,"Ankara" +92,"Frankfurt/Main" +93,"London" +94,"Copenhagen" +95,"Rota" +96,"Athens" +97,"ESA-European Space Agency" +98,"ECMWF" +99,"DeBilt" +100,"Brazzaville" +101,"Abidjan" +102,"Libyan Arab Jamahiriya" +103,"Madagascar" +104,"Mauritius" +105,"Niger" +106,"Seychelles" +107,"Uganda" +108,"Tanzania" +109,"Zimbabwe" +110,"Hong-Kong, China" +111,"Afghanistan" +112,"Bahrain" +113,"Bangladesh" +114,"Bhutan" +115,"Cambodia" +116,"Democratic People's Republic of Korea" +117,"Islamic Republic of Iran" +118,"Iraq" +119,"Kazakhstan" +120,"Kuwait" +121,"Kyrgyz Republic" +122,"Lao People's Democratic Republic" +123,"Macao, China" +124,"Maldives" +125,"Myanmar" +126,"Nepal" +127,"Oman" +128,"Pakistan" +129,"Qatar" +130,"Republic of Yemen" +131,"Sri Lanka" +132,"Tajikistan" +133,"Turkmenistan" +134,"United Arab Emirates" +135,"Uzbekistan" +136,"Socialist Republic of Viet Nam" +140,"Bolivia" +141,"Guyana" +142,"Paraguay" +143,"Suriname" +144,"Uruguay" +145,"French Guyana" +146,"Brazilian Navy Hydrographic Centre" +150,"Antigua and Barbuda" +151,"Bahamas" +152,"Barbados" +153,"Belize" +154,"British Caribbean Territories" +155,"San Jose" +156,"Cuba" +157,"Dominica" +158,"Dominican Republic" +159,"El Salvador" +160,"US-NESDIS" +161,"US-OAR" +162,"Guatemala" +163,"Haiti" +164,"Honduras" +165,"Jamaica" +166,"Mexico" +167,"Netherlands Antilles and Aruba" +168,"Nicaragua" +169,"Panama" +170,"Saint Lucia NMC" +171,"Trinidad and Tobago" +172,"French Departments" +190,"Cook Islands" +191,"French Polynesia" +192,"Tonga" +193,"Vanuatu" +194,"Brunei" +195,"Indonesia" +196,"Kiribati" +197,"Federated States of Micronesia" +198,"New Caledonia" +199,"Niue" +200,"Papua New Guinea" +201,"Philippines" +202,"Samoa" +203,"Solomon Islands" +210,"Frascati (ESA/ESRIN)" +211,"Lanion" +212,"Lisboa" +213,"Reykiavik" 
+214,"Madrid" +215,"Zürich" +216,"Service ARGOS Toulouse" +217,"Bratislava" +218,"Budapest" +219,"Ljubljana" +220,"Warsaw" +221,"Zagreb" +222,"Albania" +223,"Armenia" +224,"Austria" +225,"Azerbaijan" +226,"Belarus" +227,"Belgium" +228,"Bosnia and Herzegovina" +229,"Bulgaria" +230,"Cyprus" +231,"Estonia" +232,"Georgia" +233,"Dublin" +234,"Israel" +235,"Jordan" +236,"Latvia" +237,"Lebanon" +238,"Lithuania" +239,"Luxembourg" +240,"Malta" +241,"Monaco" +242,"Romania" +243,"Syrian Arab Republic" +244,"The former Yugoslav Republic of Macedonia" +245,"Ukraine" +246,"Republic of Moldova" +254,"EUMETSAT Operation Centre" +256,"Angola" +257,"Benin" +258,"Botswana" +259,"Burkina Faso" +260,"Burundi" +261,"Cameroon" +262,"Cape Verde" +263,"Central African republic" +264,"Chad" +265,"Comoros" +266,"Democratic Republic of the Congo" +267,"Djibouti" +268,"Eritrea" +269,"Ethiopia" +270,"Gabon" +271,"Gambia" +272,"Ghana" +273,"Guinea" +274,"Guinea Bissau" +275,"Lesotho" +276,"Liberia" +277,"Malawi" +278,"Mali" +279,"Mauritania" +280,"Namibia" +281,"Nigeria" +282,"Rwanda" +283,"Sao Tome and Principe" +284,"Sierra Leone" +285,"Somalia" +286,"Sudan" +287,"Swaziland" +288,"Togo" +289,"Zambia" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_process.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_process.csv new file mode 100644 index 00000000..6d1aed1d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_process.csv @@ -0,0 +1,102 @@ +center_code,process_code,name +7,2,"Ultra Violet Index Model" +7,3,"NCEP/ARL Transport and Dispersion Model" +7,4,"NCEP/ARL Smoke Model" +7,5,"Satellite Derived Precipitation and temperatures, from IR" +7,6,"NCEP/ARL Dust Model" +7,10,"Global Wind-Wave Forecast Model" +7,11,"Global Multi-Grid Wave Model (Static Grids)" +7,12,"Probabilistic Storm Surge" +7,19,"Limited-area Fine Mesh (LFM) analysis" +7,25,"Snow Cover Analysis" +7,30,"Forecaster generated field" +7,31,"Value added post processed field" +7,39,"Nested Grid forecast Model (NGM)" +7,42,"Global Optimum Interpolation Analysis (GOI) from GFS model" +7,43,"Global Optimum Interpolation Analysis (GOI) from 'Final' run" +7,44,"Sea Surface Temperature Analysis" +7,45,"Coastal Ocean Circulation Model" +7,46,"HYCOM - Global" +7,47,"HYCOM - North Pacific basin" +7,48,"HYCOM - North Atlantic basin" +7,49,"Ozone Analysis from TIROS Observations" +7,52,"Ozone Analysis from Nimbus 7 Observations" +7,53,"LFM-Fourth Order Forecast Model" +7,64,"Regional Optimum Interpolation Analysis (ROI)" +7,68,"80 wave triangular, 18-layer Spectral model from GFS model" +7,69,"80 wave triangular, 18 layer Spectral model from 'Medium Range Forecast' run" +7,70,"Quasi-Lagrangian Hurricane Model (QLM)" +7,73,"Fog Forecast model - Ocean Prod. 
Center" +7,74,"Gulf of Mexico Wind/Wave" +7,75,"Gulf of Alaska Wind/Wave" +7,76,"Bias corrected Medium Range Forecast" +7,77,"126 wave triangular, 28 layer Spectral model from GFS model" +7,78,"126 wave triangular, 28 layer Spectral model from 'Medium Range Forecast' run" +7,79,"Backup from the previous run" +7,80,"62 wave triangular, 28 layer Spectral model from 'Medium Range Forecast' run" +7,81,"Analysis from GFS (Global Forecast System)" +7,82,"Analysis from GDAS (Global Data Assimilation System)" +7,84,"MESO ETA Model (currently 12 km)" +7,86,"RUC Model from FSL (isentropic; scale: 60km at 40N)" +7,87,"CAC Ensemble Forecasts from Spectral (ENSMB)" +7,88,"NOAA Wave Watch III (NWW3) Ocean Wave Model" +7,89,"Non-hydrostatic Meso Model (NMM) Currently 8 km)" +7,90,"62 wave triangular, 28 layer spectral model extension of the 'Medium Range Forecast' run" +7,91,"62 wave triangular, 28 layer spectral model extension of the GFS model" +7,92,"62 wave triangular, 28 layer spectral model run from the 'Medium Range Forecast' final analysis" +7,93,"62 wave triangular, 28 layer spectral model run from the T62 GDAS analysis of the 'Medium Range Forecast' run" +7,94,"T170/L42 Global Spectral Model from MRF run" +7,95,"T126/L42 Global Spectral Model from MRF run" +7,96,"Global Forecast System Model" +7,98,"Climate Forecast System Model" +7,100,"RUC Surface Analysis (scale: 60km at 40N)" +7,101,"RUC Surface Analysis (scale: 40km at 40N)" +7,105,"RUC Model from FSL (isentropic; scale: 20km at 40N)" +7,107,"Global Ensemble Forecast System (GEFS)" +7,108,"LAMP" +7,109,"RTMA (Real Time Mesoscale Analysis)" +7,110,"NAM Model - 15km version" +7,111,"NAM model, generic resolution" +7,112,"WRF-NMM (Nondydrostatic Mesoscale Model) model, generic resolution" +7,113,"Products from NCEP SREF processing" +7,114,"NAEFS Products from joined NCEP, CMC global ensembles" +7,115,"Downscaled GFS from NAM eXtension" +7,116,"WRF-EM (Eulerian Mass-core) model, generic resolution " +7,120,"Ice Concentration Analysis" +7,121,"Western North Atlantic Regional Wave Model" +7,122,"Alaska Waters Regional Wave Model" +7,123,"North Atlantic Hurricane Wave Model" +7,124,"Eastern North Pacific Regional Wave Model" +7,125,"North Pacific Hurricane Wave Model" +7,126,"Sea Ice Forecast Model" +7,127,"Lake Ice Forecast Model" +7,128,"Global Ocean Forecast Model" +7,129,"Global Ocean Data Analysis System (GODAS)" +7,130,"Merge of fields from the RUC, NAM, and Spectral Model" +7,131,"Great Lakes Wave Model" +7,140,"North American Regional Reanalysis (NARR)" +7,141,"Land Data Assimilation and Forecast System" +7,150,"NWS River Forecast System (NWSRFS)" +7,151,"NWS Flash Flood Guidance System (NWSFFGS)" +7,152,"WSR-88D Stage II Precipitation Analysis" +7,153,"WSR-88D Stage III Precipitation Analysis" +7,180,"Quantitative Precipitation Forecast" +7,181,"River Forecast Center Quantitative Precipitation Forecast mosaic" +7,182,"River Forecast Center Quantitative Precipitation estimate mosaic" +7,183,"NDFD product generated by NCEP/HPC" +7,184,"Climatological Calibrated Precipiation Analysis - CCPA" +7,190,"National Convective Weather Diagnostic" +7,191,"Current Icing Potential automated product" +7,192,"Analysis product from NCEP/AWC" +7,193,"Forecast product from NCEP/AWC" +7,195,"Climate Data Assimilation System 2 (CDAS2)" +7,196,"Climate Data Assimilation System 2 (CDAS2)" +7,197,"Climate Data Assimilation System (CDAS)" +7,198,"Climate Data Assimilation System (CDAS)" +7,199,"Climate Forecast System Reanalysis (CFSR)" +7,200,"CPC Manual 
Forecast Product" +7,201,"CPC Automated Product" +7,210,"EPA Air Quality Forecast" +7,211,"EPA Air Quality Forecast" +7,215,"SPC Manual Forecast Product" +7,220,"NCEP/OPC automated product" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_subcenter.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_subcenter.csv new file mode 100644 index 00000000..af8cf6f8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_subcenter.csv @@ -0,0 +1,63 @@ +center_code,subcenter_code,name +7,1,"NCEP Re-Analysis Project" +7,2,"NCEP Ensemble Products" +7,3,"NCEP Central Operations" +7,4,"Environmental Modeling Center" +7,5,"Hydrometeorological Prediction Center" +7,6,"Ocean Prediction Center" +7,7,"Climate Prediction Center" +7,8,"Aviation Weather Center" +7,9,"Storm Prediction Center" +7,10,"Tropical Prediction Center" +7,11,"Techniques Development Laboratory" +7,12,"NESDIS Office of Research and Applications" +7,13,"FAA" +7,14,"Meteorological Development Laboratory (MDL)" +7,15,"North American Regional Reanalysis (NARR) Project" +7,16,"Space Environment Center" +8,0,"National Digital Forecast Database" +161,1,"Great Lakes Environmental Research Laboratory" +161,2,"Forecast Systems Laboratory" +74,1,"Shanwick Oceanic Area Control Centre" +74,2,"Fucino" +74,3,"Gatineau" +74,4,"Maspalomas" +74,5,"ESA ERS Central Facility" +74,6,"Prince Albert" +74,7,"West Freugh" +74,13,"Tromso" +74,21,"Agenzia Spaziale Italiana (Italy)" +74,22,"Centre National de la Recherche Scientifique (France)" +74,23,"GeoForschungsZentrum (Germany)" +74,24,"Geodetic Observatory Pecny (Czech Republic)" +74,25,"Institut d'Estudis Espacials de Catalunya (Spain)" +74,26,"Swiss Federal Office of Topography" +74,27,"Nordic Commission of Geodesy (Norway)" +74,28,"Nordic Commission of Geodesy (Sweden)" +74,29,"Institute de Geodesie National (France)" +74,30,"Bundesamt für Kartographie und Geodäsie (Germany)" +74,31,"Institute of Engineering Satellite Surveying and Geodesy (U.K.)" +254,10,"Tromso (Norway)" +254,10,"Maspalomas (Spain)" +254,30,"Kangerlussuaq (Greenland)" +254,40,"Edmonton (Canada)" +254,50,"Bedford (Canada)" +254,60,"Gander (Canada)" +254,70,"Monterey (USA)" +254,80,"Wallops Island (USA)" +254,90,"Gilmor Creek (USA)" +254,100,"Athens (Greece)" +98,231,"CNRM, Meteo France Climate Centre (HIRETYCS)" +98,232,"MPI, Max Planck Institute Climate Centre (HIRETYCS)" +98,233,"UKMO Climate Centre (HIRETYCS)" +98,234,"ECMWF (DEMETER)" +98,235,"INGV-CNR (Bologna, Italy)(DEMETER)" +98,236,"LODYC (Paris, France)(DEMETER)" +98,237,"DMI (Copenhagen, Denmark)(DEMETER)" +98,238,"INM (Madrid, Spain)(DEMETER)" +98,239,"CERFACS (Toulouse, France)(DEMETER)" +98,240,"ECMWF (PROVOST)" +98,241,"Meteo France (PROVOST)" +98,242,"EDF (PROVOST)" +98,243,"UKMO (PROVOST)" +98,244,"Biometeorology group, University of Veterinary Medicine, Vienna (ELDAS)" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_0.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_0.csv new file mode 100644 index 00000000..0dc3f871 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_0.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"TMP","Temperature","K","UC_K2F" +1,"VTMP","Virtual temperature","K","UC_K2F" +2,"POT","Potential temperature","K","UC_K2F" +3,"EPOT","Pseudo-adiabatic potential temperature","K","UC_K2F" +4,"TMAX","Maximum temperature","K","UC_K2F" +5,"TMIN","Minimum temperature","K","UC_K2F" +6,"DPT","Dew point temperature","K","UC_K2F" +7,"DEPR","Dew point depression","K","UC_NONE" +8,"LAPR","Lapse rate","K/m","UC_NONE" +9,"TMPA","Temperature anomaly","K","UC_K2F" +10,"LHTFL","Latent heat net flux","W/(m^2)","UC_NONE" +11,"SHTFL","Sensible heat net flux","W/(m^2)","UC_NONE" +12,"HEATX","Heat index","K","UC_K2F" +13,"WCF","Wind chill factor","K","UC_K2F" +14,"MINDPD","Minimum dew point depression","K","UC_K2F" +15,"VPTMP","Virtual potential temperature","K","UC_K2F" +16,"SNOHF","Snow phase change heat flux","W/m^2","UC_NONE" +17,"SKINT","Skin temperature","K","UC_K2F" +18,"SNOT","Snow Temperature (top of snow)","K","UC_K2F" +19,"TTCHT","Turbulent Transfer Coefficient for Heat","Numeric","UC_NONE" +20,"TDCHT","Turbulent Diffusion Coefficient for Heat","m^2/s","UC_NONE" +21,"APTMP","Apparent Temperature","K","UC_K2F" +22,"TTSWR","Temperature Tendency due to Short-Wave Radiation","K/s","UC_NONE" +23,"TTLWR","Temperature Tendency due to Long-Wave Radiation","K/s","UC_NONE" +24,"TTSWRCS","Temperature Tendency due to Short-Wave Radiation, Clear Sky","K/s","UC_NONE" +25,"TTLWRCS","Temperature Tendency due to Long-Wave Radiation, Clear Sky","K/s","UC_NONE" +26,"TTPARM","Temperature Tendency due to parameterizations","K/s","UC_NONE" +27,"WETBT","Wet Bulb Temperature","K","UC_K2F" +28,"UCTMP","Unbalanced Component of Temperature","K","UC_K2F" +29,"TMPADV","Temperature Advection","K/s","UC_NONE" +30,"","Latent heat net flux due to evaporation","W m-2","UC_NONE" +31,"","Latent heat net flux due to sublimation","W m-2","UC_NONE" +32,"","Wet-bulb potential temperature","K","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" 
+76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" 
+187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_1.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_1.csv new file mode 100644 index 00000000..b728632c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_1.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" 
+-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"SPFH","Specific humidity","kg/kg","UC_NONE" +1,"RH","Relative humidity","%","UC_NONE" +2,"MIXR","Humidity mixing ratio","kg/kg","UC_NONE" +3,"PWAT","Precipitable water","kg/(m^2)","UC_NONE" +4,"VAPP","Vapor pressure","Pa","UC_NONE" +5,"SATD","Saturation deficit","Pa","UC_NONE" +6,"EVP","Evaporation","kg/(m^2)","UC_InchWater" +7,"PRATE","Precipitation rate","kg/(m^2 s)","UC_NONE" +8,"APCP","Total precipitation","kg/(m^2)","UC_InchWater" +9,"NCPCP","Large scale precipitation","kg/(m^2)","UC_NONE" +10,"ACPCP","Convective precipitation","kg/(m^2)","UC_NONE" +11,"SNOD","Snow depth","m","UC_M2Inch" +12,"SRWEQ","Snowfall rate water equivalent","kg/(m^2 s)","UC_NONE" +13,"WEASD","Water equivalent of accumulated snow depth","kg/(m^2)","UC_NONE" +14,"SNOC","Convective snow","kg/(m^2)","UC_NONE" +15,"SNOL","Large scale snow","kg/(m^2)","UC_NONE" +16,"SNOM","Snow melt","kg/(m^2)","UC_NONE" +17,"SNOAG","Snow age","day","UC_NONE" +18,"ABSH","Absolute humidity","kg/(m^3)","UC_NONE" +19,"PTYPE","Precipitation type","0=No precipitation; 1=Rain; 2=Thunderstorm; 3=Freezing rain; 4=Mixed/ice; 5=Snow; 6=Wet snow; 7=Mixture of rain and snow; 8=Ice pellets; 9=Graupel; 10=Hail; 11=Drizzle; 12=Freezing drizzle; 13-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +20,"ILIQW","Integrated liquid water","kg/(m^2)","UC_NONE" +21,"TCOND","Condensate","kg/kg","UC_NONE" +22,"CLWMR","Cloud mixing ratio","kg/kg","UC_NONE" +23,"ICMR","Ice water mixing ratio","kg/kg","UC_NONE" +24,"RWMR","Rain mixing ratio","kg/kg","UC_NONE" +25,"SNMR","Snow mixing ratio","kg/kg","UC_NONE" +26,"MCONV","Horizontal moisture convergence","kg/(kg s)","UC_NONE" +27,"MAXRH","Maximum relative humidity","%","UC_NONE" +28,"MAXAH","Maximum absolute humidity","kg/(m^3)","UC_NONE" +29,"ASNOW","Total snowfall","m","UC_M2Inch" +30,"PWCAT","Precipitable water category","0-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +31,"HAIL","Hail","m","UC_NONE" +32,"GRLE","Graupel (snow pellets)","kg/kg","UC_NONE" +33,"CRAIN","Categorical rain","0=No; 1=Yes; 2-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +34,"CFRZR","Categorical freezing rain","0=No; 1=Yes; 2-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +35,"CICEP","Categorical ice pellets","0=No; 1=Yes; 2-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +36,"CSNOW","Categorical snow","0=No; 1=Yes; 2-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +37,"CPRAT","Convective precipitation rate","kg/(m^2*s)","UC_NONE" +38,"MCONV","Horizontal moisture divergence","kg/(kg*s)","UC_NONE" +39,"CPOFP","Percent frozen precipitation","%","UC_NONE" +40,"PEVAP","Potential evaporation","kg/m^2","UC_NONE" +41,"PEVPR","Potential evaporation rate","W/m^2","UC_NONE" +42,"SNOWC","Snow cover","%","UC_NONE" +43,"FRAIN","Rain fraction of total cloud water","-","UC_NONE" +44,"RIME","Rime factor","-","UC_NONE" +45,"TCOLR","Total column integrated rain","kg/m^2","UC_NONE" +46,"TCOLS","Total column integrated snow","kg/m^2","UC_NONE" 
+47,"LSWP","Large scale water precipitation","kg/m^2","UC_NONE" +48,"CWP","Convective water precipitation","kg/m^2","UC_NONE" +49,"TWATP","Total water precipitation","kg/m^2","UC_NONE" +50,"TSNOWP","Total snow precipitation","kg/m^2","UC_NONE" +51,"TCWAT","Total column water","kg/m^2","UC_NONE" +52,"TPRATE","Total precipitation rate","kg/(m^2*s)","UC_NONE" +53,"TSRWE","Total snowfall rate water equivalent","kg/(m^2*s)","UC_NONE" +54,"LSPRATE","Large scale precipitation rate","kg/(m^2*s)","UC_NONE" +55,"CSRWE","Convective snowfall rate water equivalent","kg/(m^2*s)","UC_NONE" +56,"LSSRWE","Large scale snowfall rate water equivalent","kg/(m^2*s)","UC_NONE" +57,"TSRATE","Total snowfall rate","m/s","UC_NONE" +58,"CSRATE","Convective snowfall rate","m/s","UC_NONE" +59,"LSSRWE","Large scale snowfall rate","m/s","UC_NONE" +60,"SDWE","Snow depth water equivalent","kg/m^2","UC_NONE" +61,"SDEN","Snow density","kg/m^3","UC_NONE" +62,"SEVAP","Snow evaporation","kg/m^2","UC_NONE" +63,"","Reserved","-","UC_NONE" +64,"TCIWV","Total column integrated water vapour","kg/m^2","UC_NONE" +65,"RPRATE","Rain precipitation rate","kg/(m^2*s)","UC_NONE" +66,"SPRATE","Snow precipitation rate","kg/(m^2*s)","UC_NONE" +67,"FPRATE","Freezing rain precipitation rate","kg/(m^2*s)","UC_NONE" +68,"IPRATE","Ice pellets precipitation rate","kg/(m^2*s)","UC_NONE" +69,"TCOLW","Total Column Integrate Cloud Water","kg/m^2","UC_NONE" +70,"TCOLI","Total Column Integrate Cloud Ice","kg/m^2","UC_NONE" +71,"HAILMXR","Hail Mixing Ratio","kg/kg","UC_NONE" +72,"TCOLH","Total Column Integrate Hail","kg/m^2","UC_NONE" +73,"HAILPR","Hail Prepitation Rate","kg/(m^2*s)","UC_NONE" +74,"TCOLG","Total Column Integrate Graupel","kg/m^2","UC_NONE" +75,"GPRATE","Graupel (Snow Pellets) Prepitation Rate","kg/(m^2*s)","UC_NONE" +76,"CRRATE","Convective Rain Rate","kg/(m^2*s)","UC_NONE" +77,"LSRRATE","Large Scale Rain Rate","kg/(m^2*s)","UC_NONE" +78,"TCOLWA","Total Column Integrate Water (All components including precipitation)","kg/m^2","UC_NONE" +79,"EVARATE","Evaporation Rate","kg/(m^2*s)","UC_NONE" +80,"TOTCON","Total Condensate","kg/kg","UC_NONE" +81,"TCICON","Total Column-Integrate Condensate","kg/m^2","UC_NONE" +82,"CIMIXR","Cloud Ice Mixing Ratio","kg/kg","UC_NONE" +83,"SCLLWC","Specific Cloud Liquid Water Content","kg/kg","UC_NONE" +84,"SCLIWC","Specific Cloud Ice Water Content","kg/kg","UC_NONE" +85,"SRAINW","Specific Rain Water Content","kg/kg","UC_NONE" +86,"SSNOWW","Specific Snow Water Content","kg/kg","UC_NONE" +87,"SPRATE","Stratiform Precipitation Rate","kg/(m^2*s)","UC_NONE" +88,"CATCP","Categorical Convective Precipitation","0=No; 1=Yes; 2-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +89,"","Reserved","-","UC_NONE" +90,"TKMFLX","Total Kinematic Moisture Flux","kg/kg(m/s)","UC_NONE" +91,"UKMFLX","U-component (zonal) Kinematic Moisture Flux","kg/kg(m/s)","UC_NONE" +92,"VKMFLX","V-component (meridional) Kinematic Moisture Flux","kg/kg(m/s)","UC_NONE" +93,"RHWATER","Relative Humidity With Respect to Water","%","UC_NONE" +94,"RHICE","Relative Humidity With Respect to Ice","%","UC_NONE" +95,"FZPRATE","Freezing or Frozen Precipitation Rate","kg/(m^2*s)","UC_NONE" +96,"MASSDR","Mass Density of Rain","kg/m^3","UC_NONE" +97,"MASSDS","Mass Density of Snow","kg/m^3","UC_NONE" +98,"MASSDG","Mass Density of Graupel","kg/m^3","UC_NONE" +99,"MASSDH","Mass Density of Hail","kg/m^3","UC_NONE" +100,"SPNCR","Specific Number Concentration of Rain","kg^-1","UC_NONE" +101,"SPNCS","Specific Number Concentration of 
Snow","kg^-1","UC_NONE" +102,"SPNCG","Specific Number Concentration of Graupel","kg^-1","UC_NONE" +103,"SPNCH","Specific Number Concentration of Hail","kg^-1","UC_NONE" +104,"NUMDR","Number Density of Rain","m^-3","UC_NONE" +105,"NUMDS","Number Density of Snow","m^-3","UC_NONE" +106,"NUMDG","Number Density of Graupel","m^-3","UC_NONE" +107,"NUMDH","Number Density of Hail","m^-3","UC_NONE" +108,"SHTPRM","Specific Humidity Tendency due to Parameterizations","kg/kg(s)","UC_NONE" +109,"MDLWHVA","Mass Density of Liquid Water Coating on Hail Expressed as Mass of Liquid Water per Unit Volume of Air","kg/m^3","UC_NONE" +110,"SMLWHMA","Specific Mass of Liquid Water Coating on Hail Expressed as Mass of Liquid Water per Unit Mass of Moist Air","kg/kg","UC_NONE" +111,"MMLWHDA","Mass Mixing Ratio of Liquid Water Coating on Hail Expressed as Mass of Liquid Water per Unit Mass of Dry Air","kg/kg","UC_NONE" +112,"MDLWGVA","Mass Density of Liquid Water Coating on Graupel Expressed as Mass of Liquid Water per Unit Volume of Air","kg/m^3","UC_NONE" +113,"SMLWGMA","Specific Mass of Liquid Water Coating on Graupel Expressed as Mass of Liquid Water per Unit Mass of Moist Air","kg/kg","UC_NONE" +114,"MMLWGDA","Mass Mixing Ratio of Liquid Water Coating on Graupel Expressed as Mass of Liquid Water per Unit Mass of Dry Air","kg/kg","UC_NONE" +115,"MDLWSVA","Mass Density of Liquid Water Coating on Snow Expressed as Mass of Liquid Water per Unit Volume of Air","kg/m^3","UC_NONE" +116,"SMLWSMA","Specific Mass of Liquid Water Coating on Snow Expressed as Mass of Liquid Water per Unit Mass of Moist Air","kg/kg","UC_NONE" +117,"MMLWSDA","Mass Mixing Ratio of Liquid Water Coating on Snow Expressed as Mass of Liquid Water per Unit Mass of Dry Air","kg/kg","UC_NONE" +118,"UNCSH","Unbalanced Component of Specific Humidity","kg/kg","UC_NONE" +119,"UCSCLW","Unbalanced Component of Specific Cloud Liquid Water content","kg/kg","UC_NONE" +120,"UCSCIW","Unbalanced Component of Specific Cloud Ice Water content","kg/kg","UC_NONE" +121,"FSNOWC","Fraction of Snow Cover","Proportion","UC_NONE" +122,"","Precipitation intensity index","0=No precipitation occurrence; 1=Light precipitation; 2=Moderate precipitation; 3=Heavy precipitation; 4-254=Reserved; 255=Missing","UC_NONE" +123,"","Dominant precipitation type","0=No precipitation; 1=Rain; 2=Thunderstorm; 3=Freezing rain; 4=Mixed/ice; 5=Snow; 6=Wet snow; 7=Mixture of rain and snow; 8=Ice pellets; 9=Graupel; 10=Hail; 11=Drizzle; 12=Freezing drizzle; 13-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +124,"","Presence of showers","0=No; 1=Yes; 2-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +125,"","Presence of blowing snow","0=No; 1=Yes; 2-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +126,"","Presence of blizzard","0=No; 1=Yes; 2-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +127,"","Ice pellets (non-water equivalent) precipitation rate","m/s","UC_NONE" +128,"","Total solid precipitation rate","kg m-2 s-1","UC_NONE" +129,"","Effective radius of cloud water","m","UC_NONE" +130,"","Effective radius of rain","m","UC_NONE" +131,"","Effective radius of cloud ice","m","UC_NONE" +132,"","Effective radius of snow","m","UC_NONE" +133,"","Effective radius of graupel","m","UC_NONE" +134,"","Effective radius of hail","m","UC_NONE" +135,"","Effective radius of subgrid liquid clouds","m","UC_NONE" +136,"","Effective radius of subgrid ice clouds","m","UC_NONE" +137,"","Effective aspect ratio of 
rain","-","UC_NONE" +138,"","Effective aspect ratio of cloud ice","-","UC_NONE" +139,"","Effective aspect ratio of snow","-","UC_NONE" +140,"","Effective aspect ratio of graupel","-","UC_NONE" +141,"","Effective aspect ratio of hail","-","UC_NONE" +142,"","Effective aspect ratio of subgrid ice clouds","-","UC_NONE" +143,"","Potential evaporation rate","kg m-2 s-1","UC_NONE" +144,"","Specific rain water content (convective)","kg kg-1","UC_NONE" +145,"","Specific snow water content (convective)","kg kg-1","UC_NONE" +146,"","Cloud ice precipitation rate","kg m-2 s-1","UC_NONE" +147,"","Character of precipitation","0=None; 1=Showers; 2=Intermittent; 3=Continuous; 4-254=Reserved; 255=Missing","UC_NONE" +148,"","Snow evaporation rate","kg m-2 s-1","UC_NONE" +149,"","Cloud water mixing ratio","kg kg-1","UC_NONE" +150,"","Column integrated eastward water vapour mass flux","kg m-1 s-1","UC_NONE" +151,"","Column integrated northward water vapour mass flux","kg m-1 s-1","UC_NONE" +152,"","Column integrated eastward cloud liquid water mass flux","kg m-1 s-1","UC_NONE" +153,"","Column integrated northward cloud liquid water mass flux","kg m-1 s-1","UC_NONE" +154,"","Column integrated eastward cloud ice mass flux","kg m-1 s-1","UC_NONE" +155,"","Column integrated northward cloud ice mass flux","kg m-1 s-1","UC_NONE" +156,"","Column integrated eastward rain mass flux","kg m-1 s-1","UC_NONE" +157,"","Column integrated northward rain mass flux","kg m-1 s-1","UC_NONE" +158,"","Column integrated eastward snow mass flux","kg m-1 s-1","UC_NONE" +159,"","Column integrated northward snow mass flux","kg m-1 s-1","UC_NONE" +160,"","Column integrated divergence of water vapour mass flux","kg m-2 s-1","UC_NONE" +161,"","Column integrated divergence of cloud liquid water mass flux","kg m-2 s-1","UC_NONE" +162,"","Column integrated divergence of cloud ice mass flux","kg m-2 s-1","UC_NONE" +163,"","Column integrated divergence of rain mass flux","kg m-2 s-1","UC_NONE" +164,"","Column integrated divergence of snow mass flux","kg m-2 s-1","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local 
use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_13.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_13.csv new file mode 100644 index 00000000..15e88357 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_13.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"AEROT","Aerosol type","0=Aerosol not present; 1=Aerosol present; 2-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +1,"","Reserved","","UC_NONE" +2,"","Reserved","","UC_NONE" +3,"","Reserved","","UC_NONE" +4,"","Reserved","","UC_NONE" +5,"","Reserved","","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" 
+101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local 
use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_14.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_14.csv new file mode 100644 index 00000000..22fbc219 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_14.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"TOZNE","Total ozone","Dobson","UC_NONE" +1,"O3MR","Ozone mixing ratio","kg/kg","UC_NONE" +2,"TCIOZ","Total column integrated ozone","Dobson","UC_NONE" +3,"","Reserved","","UC_NONE" +4,"","Reserved","","UC_NONE" +5,"","Reserved","","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" 
+102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local 
use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_15.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_15.csv new file mode 100644 index 00000000..8a99899d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_15.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"BSWID","Base spectrum width","m/s","UC_NONE" +1,"BREF","Base reflectivity","dB","UC_NONE" +2,"BRVEL","Base radial velocity","m/s","UC_NONE" +3,"VERIL","Vertically-integrated liquid","kg/m","UC_NONE" +4,"LMAXBR","Layer maximum base reflectivity","dB","UC_NONE" +5,"PREC","Precipitation","kg/(m^2)","UC_NONE" +6,"RDSP1","Radar spectra (1)","-","UC_NONE" +7,"RDSP2","Radar spectra (2)","-","UC_NONE" +8,"RDSP3","Radar spectra (3)","-","UC_NONE" +9,"RFCD","Reflectivity of Cloud Droplets","dB","UC_NONE" +10,"RFCI","Reflectivity of Cloud Ice","dB","UC_NONE" +11,"RFSNOW","Reflectivity of Snow","dB","UC_NONE" +12,"RFRAIN","Reflectivity of Rain","dB","UC_NONE" +13,"RFGRPL","Reflectivity of Graupel","dB","UC_NONE" +14,"RFHAIL","Reflectivity of Hail","dB","UC_NONE" +15,"HSR","Hybrid Scan Reflectivity","dB","UC_NONE" +16,"HSRHT","Hybrid Scan Reflectivity Height","m","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" 
+92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local 
use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_16.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_16.csv new file mode 100644 index 00000000..661bb886 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_16.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"REFZR","Equivalent radar reflectivity for rain","mm^6/m^3","UC_NONE" +1,"REFZI","Equivalent radar reflectivity for snow","mm^6/m^3","UC_NONE" +2,"REFZC","Equivalent radar reflectivity for parameterized convection","mm^6/m^3","UC_NONE" +3,"RETOP","Echo Top","m","UC_NONE" +4,"REFD","Reflectivity","dB","UC_NONE" +5,"REFC","Composite reflectivity","dB","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" 
+98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local 
use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_17.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_17.csv new file mode 100644 index 00000000..2ceceea9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_17.csv @@ -0,0 +1,11 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"LTNGSD","Lightning Strike Density","m^2/s","UC_NONE" +1,"LTPINX","Lightning Potential Index (LPI) (see Note)","J/kg","UC_NONE" +2,"","Cloud-to-ground lightning flash density","km-2 day-1","UC_NONE" +3,"","Cloud-to-cloud lightning flash density","km-2 day-1","UC_NONE" +4,"","Total lightning flash density","km-2 day-1","UC_NONE" +5,"","Subgrid-scale lightning potential index","J kg-1","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_18.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_18.csv new file mode 100644 index 00000000..d316947a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_18.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"ACCES","Air concentration of Caesium 137","Bq/(m^3)","UC_NONE" +1,"ACIOD","Air concentration of Iodine 131","Bq/(m^3)","UC_NONE" +2,"ACRADP","Air concentration of radioactive pollutant","Bq/(m^3)","UC_NONE" +3,"GDCES","Ground deposition of Caesium 137","Bq/(m^2)","UC_NONE" +4,"GDIOD","Ground deposition of Iodine 131","Bq/(m^2)","UC_NONE" +5,"GDRADP","Ground deposition of radioactive pollutant","Bq/(m^2)","UC_NONE" +6,"TIACCP","Time-integrated air concentration of caesium pollutant","(Bq s)/(m^3)","UC_NONE" +7,"TIACIP","Time-integrated air concentration of iodine pollutant","(Bq s)/(m^3)","UC_NONE" +8,"TIACRP","Time-integrated air concentration of radioactive pollutant","(Bq s)/(m^3)","UC_NONE" +9,"","Reserved","-","UC_NONE" +10,"AIRCON","Air Concentration","Bq/(m^3)","UC_NONE" +11,"WETDEP","Wet Deposition","Bq/(m^2)","UC_NONE" +12,"DRYDEP","Dry Deposition","Bq/(m^2)","UC_NONE" +13,"TOTLWD","Total Deposition (Wet + Dry)","Bq/(m^2)","UC_NONE" +14,"SACON","Specific Activity Concentration","Bq/kg","UC_NONE" +15,"MAXACON","Maximum of Air Concentration in Layer","Bq/(m^3)","UC_NONE" +16,"HMXACON","Height of Maximum of Air Concentration","m","UC_NONE" +17,"CIAIRC","Column-Integrated Air Concentration","Bq/(m^2)","UC_NONE" +18,"CAACL","Column-Averaged Air Concentration in Layer","Bq/(m^3)","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" 
+40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" 
+152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved 
for local use","","UC_NONE"
+242,"","Reserved for local use","","UC_NONE"
+243,"","Reserved for local use","","UC_NONE"
+244,"","Reserved for local use","","UC_NONE"
+245,"","Reserved for local use","","UC_NONE"
+246,"","Reserved for local use","","UC_NONE"
+247,"","Reserved for local use","","UC_NONE"
+248,"","Reserved for local use","","UC_NONE"
+249,"","Reserved for local use","","UC_NONE"
+250,"","Reserved for local use","","UC_NONE"
+251,"","Reserved for local use","","UC_NONE"
+252,"","Reserved for local use","","UC_NONE"
+253,"","Reserved for local use","","UC_NONE"
+254,"","Reserved for local use","","UC_NONE"
+255,"","Missing","","UC_NONE"
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_19.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_19.csv
new file mode 100644
index 00000000..e155585d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_19.csv
@@ -0,0 +1,261 @@
+"subcat","short_name","name","unit","unit_conv"
+-4,"######################################################################################################","#","#","#"
+-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#"
+-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#"
+-1,"######################################################################################################","#","#","#"
+0,"VIS","Visibility","m","UC_M2StatuteMile"
+1,"ALBDO","Albedo","%","UC_NONE"
+2,"TSTM","Thunderstorm probability","%","UC_NONE"
+3,"MIXHT","Mixed layer depth","m","UC_NONE"
+4,"VOLASH","Volcanic ash","0=Not present; 1=Present; 2-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE"
+5,"ICIT","Icing top","m","UC_NONE"
+6,"ICIB","Icing base","m","UC_NONE"
+7,"ICI","Icing","0=None; 1=Light; 2=Moderate; 3=Severe; 4=Trace; 5=Heavy; 6-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE"
+8,"TURBT","Turbulence top","m","UC_NONE"
+9,"TURBB","Turbulence base","m","UC_NONE"
+10,"TURB","Turbulence","0=None (smooth); 1=Light; 2=Moderate; 3=Severe; 4=Extreme; 5-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE"
+11,"TKE","Turbulent kinetic energy","J/kg","UC_NONE"
+12,"PBLREG","Planetary boundary layer regime","0=Reserved; 1=Stable; 2=Mechanically driven turbulence; 3=Forced convection; 4=Free convection; 5-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE"
+13,"CONTI","Contrail intensity","0=Contrail not present; 1=Contrail present; 2-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE"
+14,"CONTET","Contrail engine type","0=Low bypass; 1=High bypass; 2=Non-bypass; 3-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE"
+15,"CONTT","Contrail top","m","UC_NONE"
+16,"CONTB","Contrail base","m","UC_NONE"
+17,"MXSALB","Maximum snow albedo","%","UC_NONE"
+18,"SNFALB","Snow free albedo","%","UC_NONE"
+19,"SALBD","Snow albedo","%","UC_NONE"
+20,"ICIP","Icing","%","UC_NONE"
+21,"CTP","In-Cloud Turbulence","%","UC_NONE"
+22,"CAT","Clear Air Turbulence","%","UC_NONE"
+23,"SLDP","Supercooled Large Droplet Probability","%","UC_NONE"
+24,"CONTKE","Convective Turbulent Kinetic Energy","J/kg","UC_NONE"
+25,"WIWW","Weather Interpretation ww (WMO)","=(see FM 94 BUFR/FM 95 CREX Code table 0 20 003 - Present weather)","UC_NONE"
+26,"CONVO","Convective Outlook","0=No risk area; 1=Reserved; 2=General thunderstorm risk area; 3=Reserved; 4=Slight
risk area; 5=Reserved; 6=Moderate risk area; 7=Reserved; 8=High risk area; 9-10=Reserved; 11=Dry thunderstorm (dry lightning) risk area; 12-13=Reserved; 14=Critical risk area; 15-17=Reserved; 18=Extremely critical risk area; 19-254=Reserved; 255=Missing","UC_NONE" +27,"ICESC","Icing Scenario","0=None; 1=General; 2=Convective; 3=Stratiform; 4=Freezing; 5-191=Reserved; 192-254=Reserved for local use; 255=Missing value","UC_NONE" +28,"MWTURB","Mountain Wave Turbulence (Eddy Dissipation Rate)","m^(2/3)/s","UC_NONE" +29,"CATEDR","Clear Air Turbulence (CAT) (Eddy Dissipation Rate)","m^(2/3)/s","UC_NONE" +30,"EDPARM","Eddy Dissipation Parameter","m^(2/3)/s","UC_NONE" +31,"MXEDPRM","Maximum of Eddy Dissipation Parameter in Layer","m^(2/3)/s","UC_NONE" +32,"HIFREL","Highest Freezing Level","m","UC_NONE" +33,"VISLFOG","Visibility Through Liquid Fog","m","UC_NONE" +34,"VISIFOG","Visibility Through Ice Fog","m","UC_NONE" +35,"VISBSN","Visibility Through Blowing Snow","m","UC_NONE" +36,"","Presence of snow squalls","0=No; 1=Yes; 2-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +37,"","Icing severity","0=None; 1=Trace; 2=Light; 3=Moderate; 4=Severe; 5-254=Reserved; 255=Missing value","UC_NONE" +38,"","Sky transparency index","0=Worst; 1=Very poor; 2=Poor; 3=Average; 4=Good; 5=Excellent; 6-190=Reserved; 191=Unknown; 192-254=Reserved for local use; 255=Missing","UC_NONE" +39,"","Seeing index","0=Worst; 1=Very poor; 2=Poor; 3=Average; 4=Good; 5=Excellent; 6-190=Reserved; 191=Unknown; 192-254=Reserved for local use; 255=Missing","UC_NONE" +40,"","Snow level","m","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" 
+103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" 
+207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_190.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_190.csv new file mode 100644 index 00000000..807b1e39 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_190.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Arbitrary text string","CCITTIA5","UC_NONE" +1,"","Reserved","","UC_NONE" +2,"","Reserved","","UC_NONE" +3,"","Reserved","","UC_NONE" +4,"","Reserved","","UC_NONE" +5,"","Reserved","","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" 
+103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" 
+207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_191.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_191.csv new file mode 100644 index 00000000..3a3cf17f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_191.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"TSEC","Seconds prior to initial reference time (defined in Section 1)","s","UC_NONE" +1,"GEOLAT","Geographical Latitude","deg N","UC_NONE" +2,"GEOLON","Geographical Longitude","deg E","UC_NONE" +3,"DSLOBS","Days Since Last Observation","d","UC_NONE" +4,"","Reserved","","UC_NONE" +5,"","Reserved","","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" 
+100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" 
+205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_2.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_2.csv new file mode 100644 index 00000000..1aad6efe --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_2.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#"
+-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#"
+-1,"######################################################################################################","#","#","#"
+0,"WDIR","Wind direction (from which blowing)","deg true","UC_NONE"
+1,"WIND","Wind speed","m/s","UC_MS2Knots"
+2,"UGRD","u-component of wind","m/s","UC_NONE"
+3,"VGRD","v-component of wind","m/s","UC_NONE"
+4,"STRM","Stream function","(m^2)/s","UC_NONE"
+5,"VPOT","Velocity potential","(m^2)/s","UC_NONE"
+6,"MNTSF","Montgomery stream function","(m^2)/(s^2)","UC_NONE"
+7,"SGCVV","Sigma coordinate vertical velocity","1/s","UC_NONE"
+8,"VVEL","Vertical velocity (pressure)","Pa/s","UC_NONE"
+9,"DZDT","Vertical velocity (geometric)","m/s","UC_NONE"
+10,"ABSV","Absolute vorticity","1/s","UC_NONE"
+11,"ABSD","Absolute divergence","1/s","UC_NONE"
+12,"RELV","Relative vorticity","1/s","UC_NONE"
+13,"RELD","Relative divergence","1/s","UC_NONE"
+14,"PVORT","Potential vorticity","K(m^2)/(kg s)","UC_NONE"
+15,"VUCSH","Vertical u-component shear","1/s","UC_NONE"
+16,"VVCSH","Vertical v-component shear","1/s","UC_NONE"
+17,"UFLX","Momentum flux; u component","N/(m^2)","UC_NONE"
+18,"VFLX","Momentum flux; v component","N/(m^2)","UC_NONE"
+19,"WMIXE","Wind mixing energy","J","UC_NONE"
+20,"BLYDP","Boundary layer dissipation","W/(m^2)","UC_NONE"
+21,"MAXGUST","Maximum wind speed","m/s","UC_NONE"
+22,"GUST","Wind speed (gust)","m/s","UC_MS2Knots"
+23,"UGUST","u-component of wind (gust)","m/s","UC_NONE"
+24,"VGUST","v-component of wind (gust)","m/s","UC_NONE"
+25,"VWSH","Vertical speed shear","1/s","UC_NONE"
+26,"MFLX","Horizontal momentum flux","N/(m^2)","UC_NONE"
+27,"USTM","U-component storm motion","m/s","UC_NONE"
+28,"VSTM","V-component storm motion","m/s","UC_NONE"
+29,"CD","Drag coefficient","-","UC_NONE"
+30,"FRICV","Frictional velocity","m/s","UC_NONE"
+31,"TDCMOM","Turbulent Diffusion Coefficient for Momentum","(m^2)/s","UC_NONE"
+32,"ETACVV","Eta Coordinate Vertical Velocity","1/s","UC_NONE"
+33,"WINDF","Wind Fetch","m","UC_NONE"
+34,"NWIND","Normal Wind Component","m/s","UC_NONE"
+35,"TWIND","Tangential Wind Component","m/s","UC_NONE"
+36,"AFRWE","Amplitude Function for Rossby Wave Envelope for Meridional Wind","m/s","UC_NONE"
+37,"NTSS","Northward Turbulent Surface Stress","N s/(m^2)","UC_NONE"
+38,"ETSS","Eastward Turbulent Surface Stress","N s/(m^2)","UC_NONE"
+39,"EWTPARM","Eastward Wind Tendency Due to Parameterizations","m/(s^2)","UC_NONE"
+40,"NWTPARM","Northward Wind Tendency Due to Parameterizations","m/(s^2)","UC_NONE"
+41,"UGWIND","U-Component of Geostrophic Wind","m/s","UC_NONE"
+42,"VGWIND","V-Component of Geostrophic Wind","m/s","UC_NONE"
+43,"GEOWD","Geostrophic Wind Direction","deg true","UC_NONE"
+44,"GEOWS","Geostrophic Wind Speed","m/s","UC_NONE"
+45,"UNDIV","Unbalanced Component of Divergence","1/s","UC_NONE"
+46,"VORTADV","Vorticity Advection","1/(s^2)","UC_NONE"
+47,"","Surface roughness for heat","m","UC_NONE"
+48,"","Surface roughness for moisture","m","UC_NONE"
+49,"","Wind stress","N m-2","UC_NONE"
+50,"","Eastward wind stress","N m-2","UC_NONE"
+51,"","Northward wind stress","N m-2","UC_NONE"
+52,"","u-component of wind stress","N m-2","UC_NONE"
+53,"","v-component of wind stress","N m-2","UC_NONE"
+54,"","Reserved","","UC_NONE"
+55,"","Reserved","","UC_NONE"
+56,"","Reserved","","UC_NONE"
+57,"","Reserved","","UC_NONE"
+58,"","Reserved","","UC_NONE"
+59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" 
+171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local 
use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_20.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_20.csv new file mode 100644 index 00000000..65c7d428 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_20.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"MASSDEN","Mass Density (Concentration)","kg/(m^3)","UC_NONE" +1,"COLMD","Column-Integrated Mass Density","kg/(m^2)","UC_NONE" +2,"MASSMR","Mass Mixing Ratio (Mass Fraction in Air)","kg/kg","UC_NONE" +3,"AEMFLX","Atmosphere Emission Mass Flux","kg/(m^2*s)","UC_NONE" +4,"ANPMFLX","Atmosphere Net Production Mass Flux","kg/(m^2*s)","UC_NONE" +5,"ANPEMFLX","Atmosphere Net Production and Emission Mass Flux","kg/(m^2*s)","UC_NONE" +6,"SDDMFLX","Surface Dry Deposition Mass Flux","kg/(m^2*s)","UC_NONE" +7,"SWDMFLX","Surface Wet Deposition Mass Flux","kg/(m^2*s)","UC_NONE" +8,"AREMFLX","Atmosphere Re-Emission Mass Flux","kg/(m^2*s)","UC_NONE" +9,"WLSMFLX","Wet Deposition by Large-Scale Precipitation Mass Flux","kg/(m^2*s)","UC_NONE" +10,"WDCPMFLX","Wet Deposition by Convective Precipitation Mass Flux","kg/(m^2*s)","UC_NONE" +11,"SEDMFLX","Sedimentation Mass Flux","kg/(m^2*s)","UC_NONE" +12,"DDMFLX","Dry Deposition Mass Flux","kg/(m^2*s)","UC_NONE" +13,"TRANHH","Transfer From Hydrophobic to Hydrophilic","kg(kg*s)","UC_NONE" +14,"TRSDS","Transfer From SO2 (Sulphur Dioxide) to SO4 (Sulphate)","kg(kg*s)","UC_NONE" +15,"DDVEL","Dry deposition velocity","m/s","UC_NONE" +16,"MSSRDRYA","Mass mixing ratio with respect to dry air","kg/kg","UC_NONE" +17,"MSSRWETA","Mass mixing ratio with respect to wet air","kg/kg","UC_NONE" +18,"","Potential of hydrogen (pH)","pH","UC_NONE" +19,"","Reserved","-","UC_NONE" +20,"","Reserved","-","UC_NONE" +21,"","Reserved","-","UC_NONE" +22,"","Reserved","-","UC_NONE" +23,"","Reserved","-","UC_NONE" +24,"","Reserved","-","UC_NONE" +25,"","Reserved","-","UC_NONE" +26,"","Reserved","-","UC_NONE" +27,"","Reserved","-","UC_NONE" +28,"","Reserved","-","UC_NONE" +29,"","Reserved","-","UC_NONE" +30,"","Reserved","-","UC_NONE" +31,"","Reserved","-","UC_NONE" +32,"","Reserved","-","UC_NONE" +33,"","Reserved","-","UC_NONE" +34,"","Reserved","-","UC_NONE" +35,"","Reserved","-","UC_NONE" +36,"","Reserved","-","UC_NONE" +37,"","Reserved","-","UC_NONE" +38,"","Reserved","-","UC_NONE" +39,"","Reserved","-","UC_NONE" +40,"","Reserved","-","UC_NONE" +41,"","Reserved","-","UC_NONE" +42,"","Reserved","-","UC_NONE" +43,"","Reserved","-","UC_NONE" +44,"","Reserved","-","UC_NONE" +45,"","Reserved","-","UC_NONE" +46,"","Reserved","-","UC_NONE" +47,"","Reserved","-","UC_NONE" +48,"","Reserved","-","UC_NONE" +49,"","Reserved","-","UC_NONE" +50,"AIA","Amount in Atmosphere","mol","UC_NONE" +51,"CONAIR","Concentration in Air","mol/(m^3)","UC_NONE" +52,"VMXR","Volume Mixing Ratio (Fraction in Air)","mol/mol","UC_NONE" +53,"CGPRC","Chemical Gross Production Rate of Concentration","mol/(m^3*s)","UC_NONE" 
+54,"CGDRC","Chemical Gross Destruction Rate of Concentration","mol/(m^3*s)","UC_NONE" +55,"SFLUX","Surface Flux","mol/(m^2*s)","UC_NONE" +56,"COAIA","Changes of Amount in Atmosphere","mol/s","UC_NONE" +57,"TYABA","Total Yearly Average Burden of the Atmosphere","mol","UC_NONE" +58,"TYAAL","Total Yearly Average Atmospheric Loss","mol/s","UC_NONE" +59,"ANCON","Aerosol Number Concentration","1/(m^3)","UC_NONE" +60,"ASNCON","Aerosol Specific Number Concentration","1/kg","UC_NONE" +61,"MXMASSD","Maximum of Mass Density","kg(/m^3)","UC_NONE" +62,"HGTMD","Height of Mass Density","m","UC_NONE" +63,"CAVEMDL","Column-Averaged Mass Density in Layer","kg/(m^3)","UC_NONE" +64,"MOLRDRYA","Mole fraction with respect to dry air","mol/mol","UC_NONE" +65,"MOLRWETA","Mole fraction with respect to dry air","mol/mol","UC_NONE" +66,"CINCLDSP","Column-integrated in-cloud scavenging rate by precipitation","kg/(m^2 s)","UC_NONE" +67,"CBLCLDSP","Column-integrated below-cloud scavenging rate by precipitation","kg/(m^2 s)","UC_NONE" +68,"CIRELREP","Column-integrated release rate from evaporating precipitation","kg/(m^2 s)","UC_NONE" +69,"CINCSLSP","Column-integrated in-cloud scavenging rate by large-scale precipitation","kg/(m^2 s)","UC_NONE" +70,"CBECSLSP","Column-integrated below-cloud scavenging rate by large-scale precipitation","kg/(m^2 s)","UC_NONE" +71,"CRERELSP","Column-integrated release rate from evaporating large-scale precipitation","kg/(m^2 s)-","UC_NONE" +72,"CINCSRCP","Column-integrated in-cloud scavenging rate by convective precipitation","kg/(m^2 s)","UC_NONE" +73,"CBLCSRCP","Column-integrated below-cloud scavenging rate by convective precipitation","kg/(m^2 s)","UC_NONE" +74,"CIRERECP","Column-integrated release rate from evaporating convective precipitation","kg/(m^2 s)","UC_NONE" +75,"WFIREFLX","Wildfire flux","kg/(m^2 s)","UC_NONE" +76,"","Emission rate","kg kg-1 s-1","UC_NONE" +77,"","Surface emission flux","kg m-2 s-1","UC_NONE" +78,"","Column integrated eastward mass flux","kg m-1 s-1","UC_NONE" +79,"","Column integrated northward mass flux","kg m-1 s-1","UC_NONE" +80,"","Column integrated divergence of mass flux","kg m-2 s-1","UC_NONE" +81,"","Column integrated net source","kg m-2 s-1","UC_NONE" +82,"","Reserved","-","UC_NONE" +83,"","Reserved","-","UC_NONE" +84,"","Reserved","-","UC_NONE" +85,"","Reserved","-","UC_NONE" +86,"","Reserved","-","UC_NONE" +87,"","Reserved","-","UC_NONE" +88,"","Reserved","-","UC_NONE" +89,"","Reserved","-","UC_NONE" +90,"","Reserved","-","UC_NONE" +91,"","Reserved","-","UC_NONE" +92,"","Reserved","-","UC_NONE" +93,"","Reserved","-","UC_NONE" +94,"","Reserved","-","UC_NONE" +95,"","Reserved","-","UC_NONE" +96,"","Reserved","-","UC_NONE" +97,"","Reserved","-","UC_NONE" +98,"","Reserved","-","UC_NONE" +99,"","Reserved","-","UC_NONE" +100,"SADEN","Surface Area Density (Aerosol)","1/m","UC_NONE" +101,"ATMTK","Vertical Visual Range","m","UC_NONE" +102,"AOTK","Atmosphere Optical Thickness","Numeric","UC_NONE" +103,"SSALBK","Single Scattering Albedo","Numeric","UC_NONE" +104,"ASYSFK","Asymmetry Factor","Numeric","UC_NONE" +105,"AECOEF","Aerosol Extinction Coefficient","1/m","UC_NONE" +106,"AACOEF","Aerosol Absorption Coefficient","1/m","UC_NONE" +107,"ALBSAT","Aerosol Lidar Backscatter from Satellite","1/(m*sr)","UC_NONE" +108,"ALBGRD","Aerosol Lidar Backscatter from the Ground","1/(m*sr)","UC_NONE" +109,"ALESAT","Aerosol Lidar Extinction from Satellite","1/m","UC_NONE" +110,"ALEGRD","Aerosol Lidar Extinction from the Ground","1/m","UC_NONE" +111,"ANGSTEXP","Angstrom 
Exponent","Numeric","UC_NONE" +112,"SCTAOTK","Scattering Aerosol Optical Thickness","Numeric","UC_NONE" +113,"","Reserved","-","UC_NONE" +114,"","Reserved","-","UC_NONE" +115,"","Reserved","-","UC_NONE" +116,"","Reserved","-","UC_NONE" +117,"","Reserved","-","UC_NONE" +118,"","Reserved","-","UC_NONE" +119,"","Reserved","-","UC_NONE" +120,"","Reserved","-","UC_NONE" +121,"","Reserved","-","UC_NONE" +122,"","Reserved","-","UC_NONE" +123,"","Reserved","-","UC_NONE" +124,"","Reserved","-","UC_NONE" +125,"","Reserved","-","UC_NONE" +126,"","Reserved","-","UC_NONE" +127,"","Reserved","-","UC_NONE" +128,"","Reserved","-","UC_NONE" +129,"","Reserved","-","UC_NONE" +130,"","Reserved","-","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local 
use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_21.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_21.csv new file mode 100644 index 00000000..02d627f1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_21.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Column integrated potential + internal energy","J m-2","UC_NONE" +1,"","Column integrated kinetic energy","J m-2","UC_NONE" +2,"","Column integrated total energy","J m-2","UC_NONE" +3,"","Column integrated enthalpy","J m-2","UC_NONE" +4,"","Column integrated water enthalpy","J m-2","UC_NONE" +5,"","Column integrated eastward enthalpy flux","W m-1","UC_NONE" +6,"","Column integrated northward enthalpy flux","W m-1","UC_NONE" +7,"","Column integrated eastward potential energy flux","W m-1","UC_NONE" +8,"","Column integrated northward potential energy flux","W m-1","UC_NONE" +9,"","Column integrated eastward kinetic energy flux","W m-1","UC_NONE" +10,"","Column integrated northward kinetic energy flux","W m-1","UC_NONE" +11,"","Column integrated eastward total energy flux","W m-1","UC_NONE" +12,"","Column integrated northward total energy flux","W m-1","UC_NONE" +13,"","Divergence of column integrated enthalpy flux","W m-2","UC_NONE" +14,"","Divergence of column integrated potential energy flux","W m-2","UC_NONE" +15,"","Divergence of column integrated water potential energy flux","W m-2","UC_NONE" +16,"","Divergence of column integrated kinetic energy flux","W m-2","UC_NONE" +17,"","Divergence of column integrated total energy flux","W m-2","UC_NONE" +18,"","Divergence of column integrated water enthalpy flux","W m-2","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" 
+79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" 
+190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_3.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_3.csv new file mode 100644 index 00000000..4cd4def6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_3.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"PRES","Pressure","Pa","UC_NONE" +1,"PRMSL","Pressure reduced to MSL","Pa","UC_NONE" +2,"PTEND","Pressure tendency","Pa/s","UC_NONE" +3,"ICAHT","ICAO Standard Atmosphere Reference Height","m","UC_NONE" +4,"GP","Geopotential","(m^2)/(s^2)","UC_NONE" +5,"HGT","Geopotential height","gpm","UC_NONE" +6,"DIST","Geometric height","m","UC_M2Feet" +7,"HSTDV","Standard deviation of height","m","UC_NONE" +8,"PRESA","Pressure anomaly","Pa","UC_NONE" +9,"GPA","Geopotential height anomaly","gpm","UC_NONE" +10,"DEN","Density","kg/(m^3)","UC_NONE" +11,"ALTS","Altimeter setting","Pa","UC_NONE" +12,"THICK","Thickness","m","UC_NONE" +13,"PRESALT","Pressure altitude","m","UC_NONE" +14,"DENALT","Density altitude","m","UC_NONE" +15,"5WAVH","5-wave geopotential height","gpm","UC_NONE" +16,"U-GWD","Zonal flux of gravity wave stress","N/(m^2)","UC_NONE" +17,"V-GWD","Meridional flux of gravity wave stress","N/(m^2)","UC_NONE" +18,"HPBL","Planetary boundary layer height","m","UC_NONE" +19,"5WAVA","5-wave geopotential height anomaly","gpm","UC_NONE" +20,"SDSGSO","Standard deviation of sub-grid scale orography","m","UC_NONE" +21,"AOSGSO","Angle of sub-gridscale orography","rad","UC_NONE" +22,"SSGSO","Slope of sub-gridscale orography","Numeric","UC_NONE" +23,"GSGSO","Gravity wave dissipation","W/m^2","UC_NONE" +24,"ASGSO","Anisotrophy of sub-gridscale orography","Numeric","UC_NONE" +25,"NLPRES","Natural Logarithm of Pressure in Pa","Numeric","UC_NONE" +26,"EXPRES","Exner Pressure","Numeric","UC_NONE" +27,"UMFLX","Updraught Mass Flux","kg/(m^2*s)","UC_NONE" +28,"DMFLX","Downdraught Mass Flux","kg/(m^2*s)","UC_NONE" +29,"UDRATE","Updraught Detrainment Rate","kg/(m^3*s)","UC_NONE" +30,"DDRATE","Downdraught Detrainment Rate","kg/(m^3*s)","UC_NONE" +31,"UCLSPRS","Unbalanced Component of Logarithm of Surface Pressure","","UC_NONE" +32,"","Saturation water vapour pressure","Pa","UC_NONE" +33,"","Geometric altitude above mean sea level","m","UC_NONE" +34,"","Geometric height above ground level","m","UC_NONE" +35,"","Column integrated divergence of total mass flux","kg m-2 s-1","UC_NONE" +36,"","Column integrated eastward total mass flux","kg m-1 s-1","UC_NONE" +37,"","Column integrated northward total mass flux","kg m-1 s-1","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" 
+70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" 
+182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_4.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_4.csv new file mode 100644 index 00000000..065de288 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_4.csv 
@@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"NSWRS","Net short-wave radiation flux (surface)","W/(m^2)","UC_NONE" +1,"NSWRT","Net short-wave radiation flux (top of atmosphere)","W/(m^2)","UC_NONE" +2,"SWAVR","Short wave radiation flux","W/(m^2)","UC_NONE" +3,"GRAD","Global radiation flux","W/(m^2)","UC_NONE" +4,"BRTMP","Brightness temperature","K","UC_NONE" +5,"LWRAD","Radiance (with respect to wave number)","W/(m sr)","UC_NONE" +6,"SWRAD","Radiance (with respect to wave length)","W/(m^3 sr)","UC_NONE" +7,"DSWRF","Downward short-wave radiation flux","W/(m^2)","UC_NONE" +8,"USWRF","Upward short-wave radiation flux","W/(m^2)","UC_NONE" +9,"NSWRF","Net short wave radiation flux","W/(m^2)","UC_NONE" +10,"PHOTAR","Photosynthetically active radiation","W/(m^2)","UC_NONE" +11,"NSWRFCS","Net short-wave radiation flux; clear sky","W/(m^2)","UC_NONE" +12,"DWUVR","Downward UV radiation","W/(m^2)","UC_NONE" +13,"DSWRFLX","Direct Short Wave Radiation Flux","W/(m^2)","UC_NONE" +14,"DIFSWRF","Diffuse Short Wave Radiation Flux","W/(m^2)","UC_NONE" +15,"","Upward UV radiation emitted/reflected from the Earth's surface","W m-2","UC_NONE" +16,"","Reserved","-","UC_NONE" +17,"","Reserved","-","UC_NONE" +18,"","Reserved","-","UC_NONE" +19,"","Reserved","-","UC_NONE" +20,"","Reserved","-","UC_NONE" +21,"","Reserved","-","UC_NONE" +22,"","Reserved","-","UC_NONE" +23,"","Reserved","-","UC_NONE" +24,"","Reserved","-","UC_NONE" +25,"","Reserved","-","UC_NONE" +26,"","Reserved","-","UC_NONE" +27,"","Reserved","-","UC_NONE" +28,"","Reserved","-","UC_NONE" +29,"","Reserved","-","UC_NONE" +30,"","Reserved","-","UC_NONE" +31,"","Reserved","-","UC_NONE" +32,"","Reserved","-","UC_NONE" +33,"","Reserved","-","UC_NONE" +34,"","Reserved","-","UC_NONE" +35,"","Reserved","-","UC_NONE" +36,"","Reserved","-","UC_NONE" +37,"","Reserved","-","UC_NONE" +38,"","Reserved","-","UC_NONE" +39,"","Reserved","-","UC_NONE" +40,"","Reserved","-","UC_NONE" +41,"","Reserved","-","UC_NONE" +42,"","Reserved","-","UC_NONE" +43,"","Reserved","-","UC_NONE" +44,"","Reserved","-","UC_NONE" +45,"","Reserved","-","UC_NONE" +46,"","Reserved","-","UC_NONE" +47,"","Reserved","-","UC_NONE" +48,"","Reserved","-","UC_NONE" +49,"","Reserved","-","UC_NONE" +50,"UVIUCS","UV index (under clear sky)","Numeric","UC_NONE" +51,"UVI","UV index","W/(m^2)","UC_UVIndex" +52,"DSWRFCS","Downward Short-Wave Radiation Flux, Clear Sky","W/(m^2)","UC_NONE" +53,"USWRFCS","Upward Short-Wave Radiation Flux, Clear Sky","W/(m^2)","UC_NONE" +54,"","Direct normal short-wave radiation flux","W m-2","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" 
+70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" 
+182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_5.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_5.csv new file mode 100644 index 00000000..198829cd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_5.csv 
@@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"NLWRS","Net long wave radiation flux (surface)","W/(m^2)","UC_NONE" +1,"NLWRT","Net long wave radiation flux (top of atmosphere)","W/(m^2)","UC_NONE" +2,"LWAVR","Long wave radiation flux","W/(m^2)","UC_NONE" +3,"DLWRF","Downward long-wave radiation flux","W/(m^2)","UC_NONE" +4,"ULWRF","Upward long-wave radiation flux","W/(m^2)","UC_NONE" +5,"NLWRF","Net long wave radiation flux","W/(m^2)","UC_NONE" +6,"NLWRCS","Net long-wave radiation flux; clear sky","W/(m^2)","UC_NONE" +7,"BRTEMP","Brightness Temperature","K","UC_K2F" +8,"DLWRFCS","Downward Long-Wave Radiation Flux, Clear Sky","W/(m^2)","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" 
+86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local 
use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_6.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_6.csv new file mode 100644 index 00000000..f02c05ad --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_6.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"CICE","Cloud Ice","kg/(m^2)","UC_NONE" +1,"TCDC","Total cloud cover","%","UC_NONE" +2,"CDCON","Convective cloud cover","%","UC_NONE" +3,"LCDC","Low cloud cover","%","UC_NONE" +4,"MCDC","Medium cloud cover","%","UC_NONE" +5,"HCDC","High cloud cover","%","UC_NONE" +6,"CWAT","Cloud water","kg/(m^2)","UC_NONE" +7,"CDCA","Cloud amount","%","UC_NONE" +8,"CDCT","Cloud type","0=Clear; 1=Cumulonimbus; 2=Stratus; 3=Stratocumulus; 4=Cumulus; 5=Altostratus; 6=Nimbostratus; 7=Altocumulus; 8=Cirrostratus; 9=Cirrocumulus; 10=Cirrus; 11=Cumulonimbus - ground-based fog beneath the lowest layer; 12=Stratus - ground-based fog beneath the lowest layer; 13=Stratocumulus - ground-based fog beneath the lowest layer; 14=Cumulus - ground-based fog beneath the lowest layer; 15=Altostratus - ground-based fog beneath the lowest layer; 16=Nimbostratus - ground-based fog beneath the lowest layer; 17=Altocumulus - ground-based fog beneath the lowest layer; 18=Cirrostratus - ground-based fog beneath the lowest layer; 19=Cirrocumulus - ground-based fog beneath the lowest layer; 20=Cirrus - ground-based fog beneath the lowest layer; 21-190=Reserved; 191=Unknown; 192-254=Reserved for local use; 255=Missing","UC_NONE" +9,"TMAXT","Thunderstorm maximum tops","m","UC_NONE" +10,"THUNC","Thunderstorm coverage","0=None; 1=Isolated (1-2%); 2=Few (3-5%); 3=Scattered (6-45%); 4=Numerous (> 45%); 5-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +11,"CDCB","Cloud base","m","UC_M2Feet" +12,"CDCT","Cloud top","m","UC_M2Feet" +13,"CEIL","Ceiling","m","UC_M2Feet" +14,"CDLYR","Non-convective cloud cover","%","UC_NONE" +15,"CWORK","Cloud work function","J/kg","UC_NONE" +16,"CUEFI","Convective cloud efficiency","-","UC_NONE" +17,"TCOND","Total condensate","kg/kg","UC_NONE" +18,"TCOLW","Total column-integrated cloud water","kg/(m^2)","UC_NONE" +19,"TCOLI","Total column-integrated cloud ice","kg/(m^2)","UC_NONE" +20,"TCOLC","Total column-integrated condensate","kg/(m^2)","UC_NONE" +21,"FICE","Ice fraction of total condensate","-","UC_NONE" +22,"CDCC","Cloud cover","%","UC_NONE" +23,"CDCIMR","Cloud ice mixing ratio","kg/kg","UC_NONE" +24,"SUNS","Sunshine","Numeric","UC_NONE" +25,"CBHE","Horizontal extent of cumulonimbus (CB)","%","UC_NONE" +26,"HCONCB","Height of Convective Cloud Base","m","UC_NONE" +27,"HCONCT","Height of Convective Cloud Top","m","UC_NONE" +28,"NCONCD","Number Concentration of Cloud Droplets","1/kg","UC_NONE" +29,"NCCICE","Number Concentration of Cloud Ice","1/kg","UC_NONE" +30,"NDENCD","Number Density of Cloud Droplets","1/(m^3)","UC_NONE" +31,"NDCICE","Number Density of Cloud Ice","1/(m^3)","UC_NONE" +32,"FRACCC","Fraction of Cloud Cover","Numeric","UC_NONE" +33,"SUNSD","SunShine Duration","s","UC_NONE" +34,"SLWTC","Surface Long Wave Effective Total Cloudiness","Numeric","UC_NONE" +35,"SSWTC","Surface Short Wave Effective Total Cloudiness","Numeric","UC_NONE" +36,"FSTRPC","Fraction of Stratiform Precipitation Cover","Proportion","UC_NONE" +37,"FCONPC","Fraction of Convective Precipitation Cover","Proportion","UC_NONE" +38,"MASSDCD","Mass Density of Cloud Droplets","kg/(m^3)","UC_NONE" +39,"MASSDCI","Mass Density of Cloud Ice","kg/(m^3)","UC_NONE" +40,"MDCCWD","Mass Density of 
Convective Cloud Water Droplets","kg/(m^3)","UC_NONE" +41,"","Reserved","-","UC_NONE" +42,"","Reserved","-","UC_NONE" +43,"","Reserved","-","UC_NONE" +44,"","Reserved","-","UC_NONE" +45,"","Reserved","-","UC_NONE" +46,"","Reserved","-","UC_NONE" +47,"VFRCWD","Volume Fraction of Cloud Water Droplets","Numeric","UC_NONE" +48,"VFRCICE","Volume Fraction of Cloud Ice Particles","Numeric","UC_NONE" +49,"VFRCIW","Volume Fraction of Cloud (Ice and/or Water)","Numeric","UC_NONE" +50,"","Fog","%","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" 
+147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local 
use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_7.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_7.csv new file mode 100644 index 00000000..c1793cac --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_0_7.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"PLI","Parcel lifted index (to 500 hPa)","K","UC_NONE" +1,"BLI","Best lifted index (to 500 hPa)","K","UC_NONE" +2,"KX","K index","K","UC_NONE" +3,"KOX","KO index","K","UC_NONE" +4,"TOTALX","Total totals index","K","UC_NONE" +5,"SX","Sweat index","numeric","UC_NONE" +6,"CAPE","Convective available potential energy","J/kg","UC_NONE" +7,"CIN","Convective inhibition","J/kg","UC_NONE" +8,"HLCY","Storm relative helicity","J/kg","UC_NONE" +9,"EHLX","Energy helicity index","numeric","UC_NONE" +10,"LFTX","Surface lifted index","K","UC_NONE" +11,"4LFTX","Best (4-layer) lifted index","K","UC_NONE" +12,"RI","Richardson number","-","UC_NONE" +13,"SHWINX","Showalter Index","K","UC_NONE" +14,"","Reserved","-","UC_NONE" +15,"UPHL","Updraft Helicity","m^2/s^2","UC_NONE" +16,"BLKRN","Bulk Richardson Number","-","UC_NONE" +17,"GRDRN","Gradient Richardson Number","-","UC_NONE" +18,"FLXRN","Flux Richardson Number","-","UC_NONE" +19,"CONAPES","Convective Available Potential Energy Shear","m^2/s^2","UC_NONE" +20,"","Thunderstorm intensity index","0=No thunderstorm occurrence; 1=Weak thunderstorm; 2=Moderate thunderstorm; 3=Severe thunderstorm; 4-254=Reserved; 255=Missing","UC_NONE" +21,"-","Reserved","-","UC_NONE" +22,"-","Reserved","-","UC_NONE" +23,"-","Reserved","-","UC_NONE" +24,"-","Reserved","-","UC_NONE" +25,"-","Reserved","-","UC_NONE" +26,"-","Reserved","-","UC_NONE" +27,"-","Reserved","-","UC_NONE" +28,"-","Reserved","-","UC_NONE" +29,"-","Reserved","-","UC_NONE" +30,"-","Reserved","-","UC_NONE" +31,"-","Reserved","-","UC_NONE" +32,"-","Reserved","-","UC_NONE" +33,"-","Reserved","-","UC_NONE" +34,"-","Reserved","-","UC_NONE" +35,"-","Reserved","-","UC_NONE" +36,"-","Reserved","-","UC_NONE" +37,"-","Reserved","-","UC_NONE" +38,"-","Reserved","-","UC_NONE" +39,"-","Reserved","-","UC_NONE" +40,"-","Reserved","-","UC_NONE" +41,"-","Reserved","-","UC_NONE" 
+42,"-","Reserved","-","UC_NONE" +43,"-","Reserved","-","UC_NONE" +44,"-","Reserved","-","UC_NONE" +45,"-","Reserved","-","UC_NONE" +46,"-","Reserved","-","UC_NONE" +47,"-","Reserved","-","UC_NONE" +48,"-","Reserved","-","UC_NONE" +49,"-","Reserved","-","UC_NONE" +50,"-","Reserved","-","UC_NONE" +51,"-","Reserved","-","UC_NONE" +52,"-","Reserved","-","UC_NONE" +53,"-","Reserved","-","UC_NONE" +54,"-","Reserved","-","UC_NONE" +55,"-","Reserved","-","UC_NONE" +56,"-","Reserved","-","UC_NONE" +57,"-","Reserved","-","UC_NONE" +58,"-","Reserved","-","UC_NONE" +59,"-","Reserved","-","UC_NONE" +60,"-","Reserved","-","UC_NONE" +61,"-","Reserved","-","UC_NONE" +62,"-","Reserved","-","UC_NONE" +63,"-","Reserved","-","UC_NONE" +64,"-","Reserved","-","UC_NONE" +65,"-","Reserved","-","UC_NONE" +66,"-","Reserved","-","UC_NONE" +67,"-","Reserved","-","UC_NONE" +68,"-","Reserved","-","UC_NONE" +69,"-","Reserved","-","UC_NONE" +70,"-","Reserved","-","UC_NONE" +71,"-","Reserved","-","UC_NONE" +72,"-","Reserved","-","UC_NONE" +73,"-","Reserved","-","UC_NONE" +74,"-","Reserved","-","UC_NONE" +75,"-","Reserved","-","UC_NONE" +76,"-","Reserved","-","UC_NONE" +77,"-","Reserved","-","UC_NONE" +78,"-","Reserved","-","UC_NONE" +79,"-","Reserved","-","UC_NONE" +80,"-","Reserved","-","UC_NONE" +81,"-","Reserved","-","UC_NONE" +82,"-","Reserved","-","UC_NONE" +83,"-","Reserved","-","UC_NONE" +84,"-","Reserved","-","UC_NONE" +85,"-","Reserved","-","UC_NONE" +86,"-","Reserved","-","UC_NONE" +87,"-","Reserved","-","UC_NONE" +88,"-","Reserved","-","UC_NONE" +89,"-","Reserved","-","UC_NONE" +90,"-","Reserved","-","UC_NONE" +91,"-","Reserved","-","UC_NONE" +92,"-","Reserved","-","UC_NONE" +93,"-","Reserved","-","UC_NONE" +94,"-","Reserved","-","UC_NONE" +95,"-","Reserved","-","UC_NONE" +96,"-","Reserved","-","UC_NONE" +97,"-","Reserved","-","UC_NONE" +98,"-","Reserved","-","UC_NONE" +99,"-","Reserved","-","UC_NONE" +100,"-","Reserved","-","UC_NONE" +101,"-","Reserved","-","UC_NONE" +102,"-","Reserved","-","UC_NONE" +103,"-","Reserved","-","UC_NONE" +104,"-","Reserved","-","UC_NONE" +105,"-","Reserved","-","UC_NONE" +106,"-","Reserved","-","UC_NONE" +107,"-","Reserved","-","UC_NONE" +108,"-","Reserved","-","UC_NONE" +109,"-","Reserved","-","UC_NONE" +110,"-","Reserved","-","UC_NONE" +111,"-","Reserved","-","UC_NONE" +112,"-","Reserved","-","UC_NONE" +113,"-","Reserved","-","UC_NONE" +114,"-","Reserved","-","UC_NONE" +115,"-","Reserved","-","UC_NONE" +116,"-","Reserved","-","UC_NONE" +117,"-","Reserved","-","UC_NONE" +118,"-","Reserved","-","UC_NONE" +119,"-","Reserved","-","UC_NONE" +120,"-","Reserved","-","UC_NONE" +121,"-","Reserved","-","UC_NONE" +122,"-","Reserved","-","UC_NONE" +123,"-","Reserved","-","UC_NONE" +124,"-","Reserved","-","UC_NONE" +125,"-","Reserved","-","UC_NONE" +126,"-","Reserved","-","UC_NONE" +127,"-","Reserved","-","UC_NONE" +128,"-","Reserved","-","UC_NONE" +129,"-","Reserved","-","UC_NONE" +130,"-","Reserved","-","UC_NONE" +131,"-","Reserved","-","UC_NONE" +132,"-","Reserved","-","UC_NONE" +133,"-","Reserved","-","UC_NONE" +134,"-","Reserved","-","UC_NONE" +135,"-","Reserved","-","UC_NONE" +136,"-","Reserved","-","UC_NONE" +137,"-","Reserved","-","UC_NONE" +138,"-","Reserved","-","UC_NONE" +139,"-","Reserved","-","UC_NONE" +140,"-","Reserved","-","UC_NONE" +141,"-","Reserved","-","UC_NONE" +142,"-","Reserved","-","UC_NONE" +143,"-","Reserved","-","UC_NONE" +144,"-","Reserved","-","UC_NONE" +145,"-","Reserved","-","UC_NONE" +146,"-","Reserved","-","UC_NONE" +147,"-","Reserved","-","UC_NONE" 
+148,"-","Reserved","-","UC_NONE" +149,"-","Reserved","-","UC_NONE" +150,"-","Reserved","-","UC_NONE" +151,"-","Reserved","-","UC_NONE" +152,"-","Reserved","-","UC_NONE" +153,"-","Reserved","-","UC_NONE" +154,"-","Reserved","-","UC_NONE" +155,"-","Reserved","-","UC_NONE" +156,"-","Reserved","-","UC_NONE" +157,"-","Reserved","-","UC_NONE" +158,"-","Reserved","-","UC_NONE" +159,"-","Reserved","-","UC_NONE" +160,"-","Reserved","-","UC_NONE" +161,"-","Reserved","-","UC_NONE" +162,"-","Reserved","-","UC_NONE" +163,"-","Reserved","-","UC_NONE" +164,"-","Reserved","-","UC_NONE" +165,"-","Reserved","-","UC_NONE" +166,"-","Reserved","-","UC_NONE" +167,"-","Reserved","-","UC_NONE" +168,"-","Reserved","-","UC_NONE" +169,"-","Reserved","-","UC_NONE" +170,"-","Reserved","-","UC_NONE" +171,"-","Reserved","-","UC_NONE" +172,"-","Reserved","-","UC_NONE" +173,"-","Reserved","-","UC_NONE" +174,"-","Reserved","-","UC_NONE" +175,"-","Reserved","-","UC_NONE" +176,"-","Reserved","-","UC_NONE" +177,"-","Reserved","-","UC_NONE" +178,"-","Reserved","-","UC_NONE" +179,"-","Reserved","-","UC_NONE" +180,"-","Reserved","-","UC_NONE" +181,"-","Reserved","-","UC_NONE" +182,"-","Reserved","-","UC_NONE" +183,"-","Reserved","-","UC_NONE" +184,"-","Reserved","-","UC_NONE" +185,"-","Reserved","-","UC_NONE" +186,"-","Reserved","-","UC_NONE" +187,"-","Reserved","-","UC_NONE" +188,"-","Reserved","-","UC_NONE" +189,"-","Reserved","-","UC_NONE" +190,"-","Reserved","-","UC_NONE" +191,"-","Reserved","-","UC_NONE" +192,"LFTX","Surface Lifted Index","K","UC_NONE" +193,"4LFTX","Best (4 layer) Lifted Index","K","UC_NONE" +194,"RI","Richardson Number","Numeric","UC_NONE" +195,"CWDI","Convective Weather Detection Index","-","UC_NONE" +196,"UVI","Ultra Violet Index","W/m^2","UC_NONE" +197,"UPHL","Updraft Helicity","m^2/s^2","UC_NONE" +198,"LAI","Leaf Area Index","Numeric","UC_NONE" +199,"MXUPHL","Hourly Maximum of Updraft Helicity over Layer 2km to 5 km AGL","m^2/s^2","UC_NONE" +200,"MNUPHL","Hourly Minimum of Updraft Helicity","m^2/s^2","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" 
+234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_0.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_0.csv new file mode 100644 index 00000000..31a83499 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_0.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"WVSP1","Wave spectra (1)","-","UC_NONE" +1,"WVSP2","Wave spectra (2)","-","UC_NONE" +2,"WVSP3","Wave spectra (3)","-","UC_NONE" +3,"HTSGW","Significant height of combined wind waves and swell","m","UC_M2Feet" +4,"WVDIR","Direction of wind waves","Degree true","UC_NONE" +5,"WVHGT","Significant height of wind waves","m","UC_M2Feet" +6,"WVPER","Mean period of wind waves","s","UC_NONE" +7,"SWDIR","Direction of swell waves","Degree true","UC_NONE" +8,"SWELL","Significant height of swell waves","m","UC_NONE" +9,"SWPER","Mean period of swell waves","s","UC_NONE" +10,"DIRPW","Primary wave direction","Degree true","UC_NONE" +11,"PERPW","Primary wave mean period","s","UC_NONE" +12,"DIRSW","Secondary wave direction","Degree true","UC_NONE" +13,"PERSW","Secondary wave mean period","s","UC_NONE" +14,"WWSDIR","Direction of Combined Wind Waves and Swell","Degree true","UC_NONE" +15,"MWSPER","Mean Period of Combined Wind Waves and Swell","s","UC_NONE" +16,"CDWW","Coefficient of Drag With Waves","-","UC_NONE" +17,"FRICV","Friction Velocity","m/s","UC_NONE" +18,"WSTR","Wave Stress","N/(m^2)","UC_NONE" +19,"NWSTR","Normalised Waves Stress","-","UC_NONE" +20,"MSSW","Mean Square Slope of Waves","-","UC_NONE" +21,"USSD","U-component Surface Stokes Drift","m/s","UC_NONE" +22,"VSSD","V-component Surface Stokes Drift","m/s","UC_NONE" +23,"PMAXWH","Period of Maximum Individual Wave Height","s","UC_NONE" +24,"MAXWH","Maximum Individual Wave Height","m","UC_NONE" +25,"IMWF","Inverse Mean Wave Frequency","s","UC_NONE" +26,"IMFWW","Inverse Mean Frequency of The Wind Waves","s","UC_NONE" +27,"IMFTSW","Inverse Mean Frequency of The Total Swell","s","UC_NONE" +28,"MZWPER","Mean Zero-Crossing Wave 
Period","s","UC_NONE" +29,"MZPWW","Mean Zero-Crossing Period of The Wind Waves","s","UC_NONE" +30,"MZPTSW","Mean Zero-Crossing Period of The Total Swell","s","UC_NONE" +31,"WDIRW","Wave Directional Width","-","UC_NONE" +32,"DIRWWW","Directional Width of The Wind Waves","-","UC_NONE" +33,"DIRWTS","Directional Width of The Total Swell","-","UC_NONE" +34,"PWPER","Peak Wave Period","s","UC_NONE" +35,"PPERWW","Peak Period of The Wind Waves","s","UC_NONE" +36,"PPERTS","Peak Period of The Total Swell","s","UC_NONE" +37,"ALTWH","Altimeter Wave Height","m","UC_NONE" +38,"ALCWH","Altimeter Corrected Wave Height","m","UC_NONE" +39,"ALRRC","Altimeter Range Relative Correction","-","UC_NONE" +40,"MNWSOW","10 Metre Neutral Wind Speed Over Waves","m/s","UC_NONE" +41,"MWDIRW","10 Metre Wind Direction Over Waves","Degree true","UC_NONE" +42,"WESP","Wave Energy Spectrum","s/((m^2)*rad)","UC_NONE" +43,"KSSEW","Kurtosis of The Sea Surface Elevation Due to Waves","-","UC_NONE" +44,"BENINX","Benjamin-Feir Index","-","UC_NONE" +45,"SPFTR","Spectral Peakedness Factor","1/s","UC_NONE" +46,"","Peak wave direction","deg","UC_NONE" +47,"","Significant wave height of first swell partition","m","UC_NONE" +48,"","Significant wave height of second swell partition","m","UC_NONE" +49,"","Significant wave height of third swell partition","m","UC_NONE" +50,"","Mean wave period of first swell partition","s","UC_NONE" +51,"","Mean wave period of second swell partition","s","UC_NONE" +52,"","Mean wave period of third swell partition","s","UC_NONE" +53,"","Mean wave direction of first swell partition","deg","UC_NONE" +54,"","Mean wave direction of second swell partition","deg","UC_NONE" +55,"","Mean wave direction of third swell partition","deg","UC_NONE" +56,"","Wave directional width of first swell partition","-","UC_NONE" +57,"","Wave directional width of second swell partition","-","UC_NONE" +58,"","Wave directional width of third swell partition","-","UC_NONE" +59,"","Wave frequency width of first swell partition","-","UC_NONE" +60,"","Wave frequency width of second swell partition","-","UC_NONE" +61,"","Wave frequency width of third swell partition","-","UC_NONE" +62,"","Wave frequency width","-","UC_NONE" +63,"","Frequency width of wind waves","-","UC_NONE" +64,"","Frequency width of total swell","-","UC_NONE" +65,"","Peak wave period of first swell partition","s","UC_NONE" +66,"","Peak wave period of second swell partition","s","UC_NONE" +67,"","Peak wave period of third swell partition","s","UC_NONE" +68,"","Peak wave direction of first swell partition","degree true","UC_NONE" +69,"","Peak wave direction of second swell partition","degree true","UC_NONE" +70,"","Peak wave direction of third swell partition","degree true","UC_NONE" +71,"","Peak direction of wind waves","degree true","UC_NONE" +72,"","Peak direction of total swell","degree true","UC_NONE" +73,"","Whitecap fraction","fraction","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" 
+94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" 
+201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_1.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_1.csv new file mode 100644 index 00000000..a98bd659 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_1.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"DIRC","Current direction","Degree true","UC_NONE" +1,"SPC","Current speed","m/s","UC_NONE" +2,"UOGRD","u-component of current","m/s","UC_NONE" +3,"VOGRD","v-component of current","m/s","UC_NONE" +4,"RIPCOP","Rip Current Occurrence Probability","%","UC_NONE" +5,"","Eastward current","m s-1","UC_NONE" +6,"","Northward current","m s-1","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" 
+100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" 
+205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_191.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_191.csv new file mode 100644 index 00000000..c44844d0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_191.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"TSEC","Seconds prior to initial reference time (defined in Section 1)","s","UC_NONE" +1,"MOSF","Meridonal Overturning Stream Function","m^3/s","UC_NONE" +2,"","Reserved","-","UC_NONE" +3,"DSLOBS","Days Since Last Observation","d","UC_NONE" +4,"","Barotropic stream function","m3 s-1","UC_NONE" +5,"","Reserved","","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" 
+99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local 
use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_2.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_2.csv new file mode 100644 index 00000000..39f52d80 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_2.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"ICEC","Ice cover","Proportion","UC_NONE" +1,"ICETK","Ice thinkness","m","UC_NONE" +2,"DICED","Direction of ice drift","Degree true","UC_NONE" +3,"SICED","Speed of ice drift","m/s","UC_NONE" +4,"UICE","u-component of ice drift","m/s","UC_NONE" +5,"VICE","v-component of ice drift","m/s","UC_NONE" +6,"ICEG","Ice growth rate","m/s","UC_NONE" +7,"ICED","Ice divergence","1/s","UC_NONE" +8,"ICET","Ice temperature","K","UC_NONE" +9,"ICEPRS","Module of Ice Internal Pressure","Pa*m","UC_NONE" +10,"ZVCICEP","Zonal Vector Component of Vertically Integrated Ice Internal Pressure","Pa*m","UC_NONE" +11,"MVCICEP","Meridional Vector Component of Vertically Integrated Ice Internal Pressure","Pa*m","UC_NONE" +12,"CICES","Compressive Ice Strength","N/m","UC_NONE" +13,"","Snow temperature (over sea ice)","K","UC_NONE" +14,"","Albedo","Numeric","UC_NONE" +15,"","Sea ice volume per unit area","m3 m-2","UC_NONE" +16,"","Snow volume over sea ice per unit area","m3 m-2","UC_NONE" +17,"","Sea ice heat content","J m-2","UC_NONE" +18,"","Snow over sea ice heat content","J m-2","UC_NONE" +19,"","Ice freeboard thickness","m","UC_NONE" +20,"","Ice melt pond fraction","fraction","UC_NONE" +21,"","Ice melt pond depth","m","UC_NONE" +22,"","Ice melt pond volume per unit area","m3 m-2","UC_NONE" +23,"","Sea ice fraction tendency due to parameterization","s-1","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" 
+84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for 
local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_3.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_3.csv new file mode 100644 index 00000000..702aab68 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_3.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"WTMP","Water temperature","K","UC_NONE" +1,"DSLM","Deviation of sea level from mean","m","UC_NONE" +2,"CH","Heat Exchange Coefficient","","UC_NONE" +3,"","Practical salinity","Numeric","UC_NONE" +4,"","Downward heat flux","W m-2","UC_NONE" +5,"","Eastward surface stress","N m-2","UC_NONE" +6,"","Northward surface stress","N m-2","UC_NONE" +7,"","x-component surface stress","N m-2","UC_NONE" +8,"","y-component surface stress","N m-2","UC_NONE" +9,"","Thermosteric change in sea surface height","m","UC_NONE" +10,"","Halosteric change in sea surface height","m","UC_NONE" +11,"","Steric change in sea surface height","m","UC_NONE" +12,"","Sea salt flux","kg m-2 s-1","UC_NONE" +13,"","Net upward water flux","kg m-2 s-1","UC_NONE" +14,"","Eastward surface water velocity","m s-1","UC_NONE" +15,"","Northward surface water velocity","m s-1","UC_NONE" +16,"","x-component of surface water velocity","m s-1","UC_NONE" +17,"","y-component of surface water velocity","m s-1","UC_NONE" +18,"","Heat flux correction","W m-2","UC_NONE" +19,"","Sea surface height tendency due to parameterization","m s-1","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" 
+88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" 
+197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_4.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_4.csv new file mode 100644 index 00000000..d661b864 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_10_4.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"MTHD","Main thermocline depth","m","UC_NONE" +1,"MTHA","Main thermocline anomaly","m","UC_NONE" +2,"TTHDP","Transient thermocline depth","m","UC_NONE" +3,"SALTY","Salinity","kg/kg","UC_NONE" +4,"OVHD","Ocean Vertical Heat Diffusivity","m^2/s","UC_NONE" +5,"OVSD","Ocean Vertical Salt Diffusivity","m^2/s","UC_NONE" +6,"OVMD","Ocean Vertical Momentum Diffusivity","m^2/s","UC_NONE" +7,"BATHY","Bathymetry","m","UC_NONE" +8,"","Reserved","-","UC_NONE" +9,"","Reserved","-","UC_NONE" +10,"","Reserved","-","UC_NONE" +11,"SFSALP","Shape Factor With Respect To Salinity Profile","","UC_NONE" +12,"SFTMPP","Shape Factor With Respect To Temperature Profile In Thermocline","","UC_NONE" +13,"ACWSRD","Attenuation Coefficient Of Water With Respect to Solar Radiation","1/m","UC_NONE" +14,"WDEPTH","Water Depth","m","UC_NONE" +15,"WTMPSS","Water Temperature","K","UC_NONE" +16,"","Water density (rho)","kg m-3","UC_NONE" +17,"","Water density anomaly (sigma)","kg m-3","UC_NONE" +18,"","Water potential temperature (theta)","K","UC_NONE" +19,"","Water potential density (rho theta)","kg m-3","UC_NONE" +20,"","Water potential density anomaly (sigma theta)","kg m-3","UC_NONE" +21,"","Practical salinity","Numeric","UC_NONE" +22,"","Water column-integrated heat content","J m-2","UC_NONE" +23,"","Eastward water velocity","m s-1","UC_NONE" +24,"","Northward water velocity","m s-1","UC_NONE" +25,"","x-component water velocity","m s-1","UC_NONE" +26,"","y-component water velocity","m s-1","UC_NONE" +27,"","Upward water velocity","m s-1","UC_NONE" +28,"","Vertical eddy diffusivity","m2 s-1","UC_NONE" +29,"","Bottom pressure equivalent height","m","UC_NONE" +30,"","Fresh water flux into sea water from rivers","kg m-2 s-1","UC_NONE" +31,"","Fresh water flux correction","kg m-2 s-1","UC_NONE" +32,"","Virtual salt flux into sea water","g kg-1 m-2 s-1","UC_NONE" +33,"","Virtual salt flux correction","g kg-1 m-2 s-1","UC_NONE" +34,"","Sea water temperature tendency due to Newtonian relaxation","K s-1","UC_NONE" +35,"","Sea water salinity tendency due to Newtonian relaxation","g kg-1 s-1","UC_NONE" +36,"","Sea water temperature tendency due to parameterization","K s-1","UC_NONE" +37,"","Sea water salinity tendency due to parameterization","g kg-1 s-1","UC_NONE" +38,"","Eastward sea water velocity tendency due to parameterization","m-2 s-1","UC_NONE" +39,"","Northward sea water velocity tendency due to parameterization","m-2 s-1","UC_NONE" +40,"","Sea water temperature tendency due to direct bias correction","K s-1","UC_NONE" +41,"","Sea water salinity tendency due to direct bias correction","g kg-1 s-1","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" 
+61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" 
+173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git 
a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_1_0.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_1_0.csv new file mode 100644 index 00000000..97dfcee8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_1_0.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"FFLDG","Flash flood guidance","kg/(m^2)","UC_NONE" +1,"FFLDRO","Flash flood runoff","kg/(m^2)","UC_NONE" +2,"RSSC","Remotely sensed snow cover","0-49=Reserved; 50=No-snow/no-cloud; 51-99=Reserved; 100=Clouds; 101-249=Reserved; 250=Snow; 251-254=Reserved for local use; 255=Missing","UC_NONE" +3,"ESCT","Elevation of snow covered terrain","0-90=Elevation in increments of 100 m; 91-253=Reserved; 254=Clouds; 255=Missing","UC_NONE" +4,"SWEPON","Snow water equivalent percent of normal","%","UC_NONE" +5,"BGRUN","Baseflow-groundwater runoff","kg/(m^2)","UC_NONE" +6,"SSRUN","Storm surface runoff","kg/(m^2)","UC_NONE" +7,"","Discharge from rivers or streams","m3/s","UC_NONE" +8,"","Groundwater upper storage","kg m-2","UC_NONE" +9,"","Groundwater lower storage","kg m-2","UC_NONE" +10,"","Side flow into river channel","m3 s-1 m-1","UC_NONE" +11,"","River storage of water","m3","UC_NONE" +12,"","Floodplain storage of water","m3","UC_NONE" +13,"","Depth of water on soil surface","kg m-2","UC_NONE" +14,"","Upstream accumulated precipitation","kg m-2","UC_NONE" +15,"","Upstream accumulated snow melt","kg m-2","UC_NONE" +16,"","Percolation rate","kg m-2 s-1","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" 
+66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" 
+178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_1_1.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_1_1.csv new file 
mode 100644 index 00000000..46d8db1a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_1_1.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"CPPOP","Conditional percent precipitation amount fractile for an overall period","kg/(m^2)","UC_NONE" +1,"PPOSP","Percent precipitation in a sub-period of an overall period","%","UC_NONE" +2,"PoP","Probability of 0.01 inch of precipitation","%","UC_NONE" +3,"","Reserved","","UC_NONE" +4,"","Reserved","","UC_NONE" +5,"","Reserved","","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" 
+87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for 
local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_1_2.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_1_2.csv new file mode 100644 index 00000000..2accdbb4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_1_2.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Water depth","m","UC_NONE" +1,"","Water temperature","K","UC_NONE" +2,"","Water fraction","Proportion","UC_NONE" +3,"","Sediment thickness","m","UC_NONE" +4,"","Sediment temperature","K","UC_NONE" +5,"","Ice thickness","m","UC_NONE" +6,"","Ice temperature","K","UC_NONE" +7,"","Ice cover","Proportion","UC_NONE" +8,"","Land cover (0 = water, 1 = land)","Proportion","UC_NONE" +9,"","Shape factor with respect to salinity profile","-","UC_NONE" +10,"","Shape factor with respect to temperature profile in thermocline","-","UC_NONE" +11,"","Attenuation coefficient of water with respect to solar radiation","/m","UC_NONE" +12,"","Salinity","kg/kg","UC_NONE" +13,"","Cross-sectional area of flow in channel","m2","UC_NONE" +14,"","Snow temperature","K","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" 
+94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" 
+201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_20_0.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_20_0.csv new file mode 100644 index 00000000..c9c7c8f4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_20_0.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Universal thermal climate index","K","UC_NONE" +1,"","Mean radiant temperature","K","UC_NONE" +2,"","Wet-bulb globe temperature","K","UC_NONE" +3,"","Globe temperature","K","UC_NONE" +4,"","Humidex","K","UC_NONE" +5,"","Effective temperature","K","UC_NONE" +6,"","Normal effective temperature","K","UC_NONE" +7,"","Standard effective temperature","K","UC_NONE" +8,"","Physiological equivalent temperature","K","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" 
+99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local 
use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_20_1.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_20_1.csv new file mode 100644 index 00000000..7979fbe8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_20_1.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Malaria cases","Fraction","UC_NONE" +1,"","Malaria circumsporozoite protein rate","Fraction","UC_NONE" +2,"","Plasmodium falciparum entomological inoculation rate","Bites per day per person","UC_NONE" +3,"","Human bite rate by anopheles vectors","Bites per day per person","UC_NONE" +4,"","Malaria immunity","Fraction","UC_NONE" +5,"","Falciparum parasite rates","Fraction","UC_NONE" +6,"","Detectable falciparum parasite ratio (after day 10)","Fraction","UC_NONE" +7,"","Anopheles vector to host ratio","Fraction","UC_NONE" +8,"","Anopheles vector number","Number m-2","UC_NONE" +9,"","Fraction of malarial vector reproductive habitat","Fraction","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" 
+92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local 
use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_20_2.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_20_2.csv new file mode 100644 index 00000000..0af83c95 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_20_2.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
+-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#"
+-1,"######################################################################################################","#","#","#"
+0,"","Population density","Person m-2","UC_NONE"
+1,"","Reserved","","UC_NONE"
+2,"","Reserved","","UC_NONE"
+3,"","Reserved","","UC_NONE"
+4,"","Reserved","","UC_NONE"
+5,"","Reserved","","UC_NONE"
+6,"","Reserved","","UC_NONE"
+7,"","Reserved","","UC_NONE"
+8,"","Reserved","","UC_NONE"
+9,"","Reserved","","UC_NONE"
+10,"","Reserved","","UC_NONE"
+11,"","Reserved","","UC_NONE"
+12,"","Reserved","","UC_NONE"
+13,"","Reserved","","UC_NONE"
+14,"","Reserved","","UC_NONE"
+15,"","Reserved","","UC_NONE"
+16,"","Reserved","","UC_NONE"
+17,"","Reserved","","UC_NONE"
+18,"","Reserved","","UC_NONE"
+19,"","Reserved","","UC_NONE"
+20,"","Reserved","","UC_NONE"
+21,"","Reserved","","UC_NONE"
+22,"","Reserved","","UC_NONE"
+23,"","Reserved","","UC_NONE"
+24,"","Reserved","","UC_NONE"
+25,"","Reserved","","UC_NONE"
+26,"","Reserved","","UC_NONE"
+27,"","Reserved","","UC_NONE"
+28,"","Reserved","","UC_NONE"
+29,"","Reserved","","UC_NONE"
+30,"","Reserved","","UC_NONE"
+31,"","Reserved","","UC_NONE"
+32,"","Reserved","","UC_NONE"
+33,"","Reserved","","UC_NONE"
+34,"","Reserved","","UC_NONE"
+35,"","Reserved","","UC_NONE"
+36,"","Reserved","","UC_NONE"
+37,"","Reserved","","UC_NONE"
+38,"","Reserved","","UC_NONE"
+39,"","Reserved","","UC_NONE"
+40,"","Reserved","","UC_NONE"
+41,"","Reserved","","UC_NONE"
+42,"","Reserved","","UC_NONE"
+43,"","Reserved","","UC_NONE"
+44,"","Reserved","","UC_NONE"
+45,"","Reserved","","UC_NONE"
+46,"","Reserved","","UC_NONE"
+47,"","Reserved","","UC_NONE"
+48,"","Reserved","","UC_NONE"
+49,"","Reserved","","UC_NONE"
+50,"","Reserved","","UC_NONE"
+51,"","Reserved","","UC_NONE"
+52,"","Reserved","","UC_NONE"
+53,"","Reserved","","UC_NONE"
+54,"","Reserved","","UC_NONE"
+55,"","Reserved","","UC_NONE"
+56,"","Reserved","","UC_NONE"
+57,"","Reserved","","UC_NONE"
+58,"","Reserved","","UC_NONE"
+59,"","Reserved","","UC_NONE"
+60,"","Reserved","","UC_NONE"
+61,"","Reserved","","UC_NONE"
+62,"","Reserved","","UC_NONE"
+63,"","Reserved","","UC_NONE"
+64,"","Reserved","","UC_NONE"
+65,"","Reserved","","UC_NONE"
+66,"","Reserved","","UC_NONE"
+67,"","Reserved","","UC_NONE"
+68,"","Reserved","","UC_NONE"
+69,"","Reserved","","UC_NONE"
+70,"","Reserved","","UC_NONE"
+71,"","Reserved","","UC_NONE"
+72,"","Reserved","","UC_NONE"
+73,"","Reserved","","UC_NONE"
+74,"","Reserved","","UC_NONE"
+75,"","Reserved","","UC_NONE"
+76,"","Reserved","","UC_NONE"
+77,"","Reserved","","UC_NONE"
+78,"","Reserved","","UC_NONE"
+79,"","Reserved","","UC_NONE"
+80,"","Reserved","","UC_NONE"
+81,"","Reserved","","UC_NONE"
+82,"","Reserved","","UC_NONE"
+83,"","Reserved","","UC_NONE"
+84,"","Reserved","","UC_NONE"
+85,"","Reserved","","UC_NONE"
+86,"","Reserved","","UC_NONE"
+87,"","Reserved","","UC_NONE"
+88,"","Reserved","","UC_NONE"
+89,"","Reserved","","UC_NONE"
+90,"","Reserved","","UC_NONE"
+91,"","Reserved","","UC_NONE"
+92,"","Reserved","","UC_NONE"
+93,"","Reserved","","UC_NONE"
+94,"","Reserved","","UC_NONE"
+95,"","Reserved","","UC_NONE"
+96,"","Reserved","","UC_NONE"
+97,"","Reserved","","UC_NONE"
+98,"","Reserved","","UC_NONE"
+99,"","Reserved","","UC_NONE"
+100,"","Reserved","","UC_NONE"
+101,"","Reserved","","UC_NONE"
+102,"","Reserved","","UC_NONE"
+103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" 
+207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_0.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_0.csv new file mode 100644 index 00000000..cdc202bf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_0.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
+-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#"
+-1,"######################################################################################################","#","#","#"
+0,"LAND","Land cover (1=land; 2=sea)","Proportion","UC_NONE"
+1,"SFCR","Surface roughness","m","UC_NONE"
+2,"TSOIL","Soil temperature","K","UC_NONE"
+3,"SOILM","Soil moisture content","kg/(m^2)","UC_NONE"
+4,"VEG","Vegetation","%","UC_NONE"
+5,"WATR","Water runoff","kg/(m^2)","UC_NONE"
+6,"EVAPT","Evapotranspiration","1/(kg^2 s)","UC_NONE"
+7,"MTERH","Model terrain height","m","UC_NONE"
+8,"LANDU","Land use","0=Reserved; 1=Urban land; 2=Agriculture; 3=Range land; 4=Deciduous forest; 5=Coniferous forest; 6=Forest/wetland; 7=Water; 8=Wetlands; 9=Desert; 10=Tundra; 11=Ice; 12=Tropical forest; 13=Savannah; 14-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE"
+9,"SOILW","Volumetric soil moisture content","Proportion","UC_NONE"
+10,"GFLUX","Ground heat flux","W/(m^2)","UC_NONE"
+11,"MSTAV","Moisture availability","%","UC_NONE"
+12,"SFEXC","Exchange coefficient","(kg/(m^3))(m/s)","UC_NONE"
+13,"CNWAT","Plant canopy surface water","kg/(m^2)","UC_NONE"
+14,"BMIXL","Blackadar's mixing length scale","m","UC_NONE"
+15,"CCOND","Canopy conductance","m/s","UC_NONE"
+16,"RSMIN","Minimal stomatal resistance","s/m","UC_NONE"
+17,"WILT","Wilting point","Proportion","UC_NONE"
+18,"RCS","Solar parameter in canopy conductance","Proportion","UC_NONE"
+19,"RCT","Temperature parameter in canopy conductance","Proportion","UC_NONE"
+20,"RCSOL","Soil moisture parameter in canopy conductance","Proportion","UC_NONE"
+21,"RCQ","Humidity parameter in canopy conductance","Proportion","UC_NONE"
+22,"SOILM","Soil moisture","kg/m^3","UC_NONE"
+23,"CISOILW","Column-integrated soil water","kg/m^2","UC_NONE"
+24,"HFLUX","Heat flux","W/m^2","UC_NONE"
+25,"VSOILM","Volumetric soil moisture","m^3/m^3","UC_NONE"
+26,"WILT","Wilting point","kg/m^3","UC_NONE"
+27,"VWILTM","Volumetric wilting moisture","m^3/m^3","UC_NONE"
+28,"LEAINX","Leaf Area Index","Numeric","UC_NONE"
+29,"EVGFC","Evergreen Forest Cover","Proportion","UC_NONE"
+30,"DECFC","Deciduous Forest Cover","Proportion","UC_NONE"
+31,"NDVINX","Normalized Differential Vegetation Index (NDVI)","Numeric","UC_NONE"
+32,"RDVEG","Root Depth of Vegetation","m","UC_NONE"
+33,"WROD","Water Runoff and Drainage","kg/(m^2)","UC_NONE"
+34,"SFCWRO","Surface Water Runoff","kg/(m^2)","UC_NONE"
+35,"TCLASS","Tile Class","0=Reserved; 1=Evergreen broadleaved forest; 2=Deciduous broadleaved closed forest; 3=Deciduous broadleaved open forest; 4=Evergreen needle-leaf forest; 5=Deciduous needle-leaf forest; 6=Mixed leaf trees; 7=Freshwater flooded trees; 8=Saline water flooded trees; 9=Mosaic tree/natural vegetation; 10=Burnt tree cover; 11=Evergreen shrubs closed-open; 12=Deciduous shrubs closed-open; 13=Herbaceous vegetation closed-open; 14=Sparse herbaceous or grass; 15=Flooded shrubs or herbaceous; 16=Cultivated and managed areas; 17=Mosaic crop/tree/natural vegetation; 18=Mosaic crop/shrub/grass; 19=Bare areas; 20=Water; 21=Snow and ice; 22=Artificial surface; 23=Ocean; 24=Irrigated croplands; 25=Rainfed croplands; 26=Mosaic cropland (50-70%) - vegetation (20-50%); 27=Mosaic vegetation (50-70%) - cropland (20-50%); 28=Closed broadleaved evergreen forest; 29=Closed needle-leaved evergreen forest; 30=Open needle-leaved deciduous forest; 31=Mixed broadleaved and needle-leaved forest; 32=Mosaic shrubland (50-70%) - grassland (20-50%); 33=Mosaic grassland (50-70%) - shrubland (20-50%); 34=Closed to open shrubland; 35=Sparse vegetation; 36=Closed to open forest regularly flooded; 37=Closed forest or shrubland permanently flooded; 38=Closed to open grassland regularly flooded; 39=Undefined; 40-32767=Reserved; 32768-=Reserved for local use","UC_NONE"
+36,"TFRCT","Tile Fraction","Proportion","UC_NONE"
+37,"TPERCT","Tile Percentage","%","UC_NONE"
+38,"SOILVIC","Soil Volumetric Ice Content (Water Equivalent) ","m^3/m^3","UC_NONE"
+39,"","Evapotranspiration rate","kg m-2 s-1","UC_NONE"
+40,"","Potential evapotranspiration rate","kg m-2 s-1","UC_NONE"
+41,"","Snow melt rate","kg m-2 s-1","UC_NONE"
+42,"","Water runoff and drainage rate","kg m-2 s-1","UC_NONE"
+43,"","Drainage direction","0=Reserved; 1=South-west; 2=South; 3=South-east; 4=West; 5=No direction; 6=East; 7=North-west; 8=North; 9=North-east; 10-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE"
+44,"","Upstream area","m2","UC_NONE"
+45,"","Wetland cover","Proportion","UC_NONE"
+46,"","Wetland type","0=Reserved; 1=Bog; 2=Drained; 3=Fen; 4=Floodplain; 5=Mangrove; 6=Marsh; 7=Rice; 8=Riverine; 9=Salt marsh; 10=Swamp; 11=Upland; 12=Wet tundra; 13-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE"
+47,"","Irrigation cover","Proportion","UC_NONE"
+48,"","C4 crop cover","Proportion","UC_NONE"
+49,"","C4 grass cover","Proportion","UC_NONE"
+50,"","Reserved","","UC_NONE"
+51,"","Reserved","","UC_NONE"
+52,"","Reserved","","UC_NONE"
+53,"","Reserved","","UC_NONE"
+54,"","Reserved","","UC_NONE"
+55,"","Reserved","","UC_NONE"
+56,"","Reserved","","UC_NONE"
+57,"","Reserved","","UC_NONE"
+58,"","Reserved","","UC_NONE"
+59,"","Reserved","","UC_NONE"
+60,"","Reserved","","UC_NONE"
+61,"","Reserved","","UC_NONE"
+62,"","Reserved","","UC_NONE"
+63,"","Reserved","","UC_NONE"
+64,"","Reserved","","UC_NONE"
+65,"","Reserved","","UC_NONE"
+66,"","Reserved","","UC_NONE"
+67,"","Reserved","","UC_NONE"
+68,"","Reserved","","UC_NONE"
+69,"","Reserved","","UC_NONE"
+70,"","Reserved","","UC_NONE"
+71,"","Reserved","","UC_NONE"
+72,"","Reserved","","UC_NONE"
+73,"","Reserved","","UC_NONE"
+74,"","Reserved","","UC_NONE"
+75,"","Reserved","","UC_NONE"
+76,"","Reserved","","UC_NONE"
+77,"","Reserved","","UC_NONE"
+78,"","Reserved","","UC_NONE"
+79,"","Reserved","","UC_NONE"
+80,"","Reserved","","UC_NONE"
+81,"","Reserved","","UC_NONE"
+82,"","Reserved","","UC_NONE"
+83,"","Reserved","","UC_NONE"
+84,"","Reserved","","UC_NONE"
+85,"","Reserved","","UC_NONE"
+86,"","Reserved","","UC_NONE"
+87,"","Reserved","","UC_NONE"
+88,"","Reserved","","UC_NONE"
+89,"","Reserved","","UC_NONE"
+90,"","Reserved","","UC_NONE"
+91,"","Reserved","","UC_NONE"
+92,"","Reserved","","UC_NONE"
+93,"","Reserved","","UC_NONE"
+94,"","Reserved","","UC_NONE"
+95,"","Reserved","","UC_NONE"
+96,"","Reserved","","UC_NONE"
+97,"","Reserved","","UC_NONE"
+98,"","Reserved","","UC_NONE"
+99,"","Reserved","","UC_NONE"
+100,"","Reserved","","UC_NONE"
+101,"","Reserved","","UC_NONE"
+102,"","Reserved","","UC_NONE"
+103,"","Reserved","","UC_NONE"
+104,"","Reserved","","UC_NONE"
+105,"","Reserved","","UC_NONE"
+106,"","Reserved","","UC_NONE"
+107,"","Reserved","","UC_NONE"
+108,"","Reserved","","UC_NONE"
+109,"","Reserved","","UC_NONE"
+110,"","Reserved","","UC_NONE"
+111,"","Reserved","","UC_NONE"
+112,"","Reserved","","UC_NONE"
+113,"","Reserved","","UC_NONE"
+114,"","Reserved","","UC_NONE"
+115,"","Reserved","","UC_NONE"
+116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved 
+217,"","Reserved for local use","","UC_NONE"
+218,"","Reserved for local use","","UC_NONE"
+219,"","Reserved for local use","","UC_NONE"
+220,"","Reserved for local use","","UC_NONE"
+221,"","Reserved for local use","","UC_NONE"
+222,"","Reserved for local use","","UC_NONE"
+223,"","Reserved for local use","","UC_NONE"
+224,"","Reserved for local use","","UC_NONE"
+225,"","Reserved for local use","","UC_NONE"
+226,"","Reserved for local use","","UC_NONE"
+227,"","Reserved for local use","","UC_NONE"
+228,"","Reserved for local use","","UC_NONE"
+229,"","Reserved for local use","","UC_NONE"
+230,"","Reserved for local use","","UC_NONE"
+231,"","Reserved for local use","","UC_NONE"
+232,"","Reserved for local use","","UC_NONE"
+233,"","Reserved for local use","","UC_NONE"
+234,"","Reserved for local use","","UC_NONE"
+235,"","Reserved for local use","","UC_NONE"
+236,"","Reserved for local use","","UC_NONE"
+237,"","Reserved for local use","","UC_NONE"
+238,"","Reserved for local use","","UC_NONE"
+239,"","Reserved for local use","","UC_NONE"
+240,"","Reserved for local use","","UC_NONE"
+241,"","Reserved for local use","","UC_NONE"
+242,"","Reserved for local use","","UC_NONE"
+243,"","Reserved for local use","","UC_NONE"
+244,"","Reserved for local use","","UC_NONE"
+245,"","Reserved for local use","","UC_NONE"
+246,"","Reserved for local use","","UC_NONE"
+247,"","Reserved for local use","","UC_NONE"
+248,"","Reserved for local use","","UC_NONE"
+249,"","Reserved for local use","","UC_NONE"
+250,"","Reserved for local use","","UC_NONE"
+251,"","Reserved for local use","","UC_NONE"
+252,"","Reserved for local use","","UC_NONE"
+253,"","Reserved for local use","","UC_NONE"
+254,"","Reserved for local use","","UC_NONE"
+255,"","Missing","","UC_NONE"
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_3.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_3.csv
new file mode 100644
index 00000000..bf0a81be
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_3.csv
@@ -0,0 +1,261 @@
+"subcat","short_name","name","unit","unit_conv"
+-4,"######################################################################################################","#","#","#"
+-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#"
+-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#"
+-1,"######################################################################################################","#","#","#"
+0,"SOTYP","Soil type","0=Reserved; 1=Sand; 2=Loamy sand; 3=Sandy loam; 4=Silt loam; 5=Organic (redefined); 6=Sandy clay loam; 7=Silt clay loam; 8=Clay loam; 9=Sandy clay; 10=Silty clay; 11=Clay; 12-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE"
+1,"UPLST","Upper layer soil temperature","K","UC_NONE"
+2,"UPLSM","Upper layer soil moisture","kg/(m^3)","UC_NONE"
+3,"LOWLSM","Lower layer soil moisture","kg/(m^3)","UC_NONE"
+4,"BOTLST","Bottom layer soil temperature","K","UC_NONE"
+5,"SOILL","Liquid volumetric soil moisture (non-frozen)","Proportion","UC_NONE"
+6,"RLYRS","Number of soil layers in root zone","Numeric","UC_NONE"
+7,"SMREF","Transpiration stress-onset (soil moisture)","Proportion","UC_NONE"
+8,"SMDRY","Direct evaporation cease (soil moisture)","Proportion","UC_NONE"
+9,"POROS","Soil porosity","Proportion","UC_NONE"
+10,"LIQVSM","Liquid volumetric soil moisture (non-frozen)","m^3/m^3","UC_NONE"
+11,"VOLTSO","Volumetric transpiration stress-onset (soil moisture)","m^3/m^3","UC_NONE"
+12,"TRANSO","Transpiration stress-onset (soil moisture)","kg/m^3","UC_NONE"
+13,"VOLDEC","Volumetric direct evaporation cease (soil moisture)","m^3/m^3","UC_NONE"
+14,"DIREC","Direct evaporation cease (soil moisture)","kg/m^3","UC_NONE"
+15,"SOILP","Soil porosity","m^3/m^3","UC_NONE"
+16,"VSOSM","Volumetric saturation of soil moisture","m^3/m^3","UC_NONE"
+17,"SATOSM","Saturation of soil moisture","kg/m^3","UC_NONE"
+18,"SOILTMP","Soil Temperature","K","UC_NONE"
+19,"SOILMOI","Soil Moisture","kg/(m^3)","UC_NONE"
+20,"CISOILM","Column-Integrated Soil Moisture","kg/(m^2)","UC_NONE"
+21,"SOILICE","Soil Ice","kg/(m^3)","UC_NONE"
+22,"CISICE","Column-Integrated Soil Ice","kg/(m^2)","UC_NONE"
+23,"LWSNWP","Liquid Water in Snow Pack","kg/(m^2)","UC_NONE"
+24,"FRSTINX","Frost Index","kg/day","UC_NONE"
+25,"SNWDEB","Snow Depth at Elevation Bands","kg/(m^2)","UC_NONE"
+26,"SHFLX","Soil Heat Flux","W/(m^2)","UC_NONE"
+27,"SOILDEP","Soil Depth","m","UC_NONE"
+28,"","Snow temperature","K","UC_NONE"
+29,"","Ice temperature","K","UC_NONE"
+30,"","Reserved","","UC_NONE"
+31,"","Reserved","","UC_NONE"
+32,"","Reserved","","UC_NONE"
+33,"","Reserved","","UC_NONE"
+34,"","Reserved","","UC_NONE"
+35,"","Reserved","","UC_NONE"
+36,"","Reserved","","UC_NONE"
+37,"","Reserved","","UC_NONE"
+38,"","Reserved","","UC_NONE"
+39,"","Reserved","","UC_NONE"
+40,"","Reserved","","UC_NONE"
+41,"","Reserved","","UC_NONE"
+42,"","Reserved","","UC_NONE"
+43,"","Reserved","","UC_NONE"
+44,"","Reserved","","UC_NONE"
+45,"","Reserved","","UC_NONE"
+46,"","Reserved","","UC_NONE"
+47,"","Reserved","","UC_NONE"
+48,"","Reserved","","UC_NONE"
+49,"","Reserved","","UC_NONE"
+50,"","Reserved","","UC_NONE"
+51,"","Reserved","","UC_NONE"
+52,"","Reserved","","UC_NONE"
+53,"","Reserved","","UC_NONE"
+54,"","Reserved","","UC_NONE"
+55,"","Reserved","","UC_NONE"
+56,"","Reserved","","UC_NONE"
+57,"","Reserved","","UC_NONE"
+58,"","Reserved","","UC_NONE"
+59,"","Reserved","","UC_NONE"
+60,"","Reserved","","UC_NONE"
+61,"","Reserved","","UC_NONE"
+62,"","Reserved","","UC_NONE"
+63,"","Reserved","","UC_NONE"
+64,"","Reserved","","UC_NONE"
+65,"","Reserved","","UC_NONE"
+66,"","Reserved","","UC_NONE"
+67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" 
+179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_4.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_4.csv new file mode 100644 index 
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_4.csv
@@ -0,0 +1,261 @@
+"subcat","short_name","name","unit","unit_conv"
+-4,"######################################################################################################","#","#","#"
+-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#"
+-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#"
+-1,"######################################################################################################","#","#","#"
+0,"","Fire outlook","0=No risk area; 1=Reserved; 2=General thunderstorm risk area; 3=Reserved; 4=Slight risk area; 5=Reserved; 6=Moderate risk area; 7=Reserved; 8=High risk area; 9-10=Reserved; 11=Dry thunderstorm (dry lightning) risk area; 12-13=Reserved; 14=Critical risk area; 15-17=Reserved; 18=Extremely critical risk area; 19-254=Reserved; 255=Missing","UC_NONE"
+1,"","Fire outlook due to dry thunderstorm","0=No risk area; 1=Reserved; 2=General thunderstorm risk area; 3=Reserved; 4=Slight risk area; 5=Reserved; 6=Moderate risk area; 7=Reserved; 8=High risk area; 9-10=Reserved; 11=Dry thunderstorm (dry lightning) risk area; 12-13=Reserved; 14=Critical risk area; 15-17=Reserved; 18=Extremely critical risk area; 19-254=Reserved; 255=Missing","UC_NONE"
+2,"","Haines index","Numeric","UC_NONE"
+3,"","Fire burned area","%","UC_NONE"
+4,"","Fosberg index","Numeric","UC_NONE"
+5,"","Forest Fire Weather Index (as defined by the Canadian Forest Service)","Numeric","UC_NONE"
+6,"","Fine Fuel Moisture Code (as defined by the Canadian Forest Service)","Numeric","UC_NONE"
+7,"","Duff Moisture Code (as defined by the Canadian Forest Service)","Numeric","UC_NONE"
+8,"","Drought Code (as defined by the Canadian Forest Service)","Numeric","UC_NONE"
+9,"","Initial Fire Spread Index (as defined by the Canadian Forest Service)","Numeric","UC_NONE"
+10,"","Fire Buildup Index (as defined by the Canadian Forest Service)","Numeric","UC_NONE"
+11,"","Fire Daily Severity Rating (as defined by the Canadian Forest Service)","Numeric","UC_NONE"
+12,"","Keetch-Byram drought index","Numeric","UC_NONE"
+13,"","Drought factor (as defined by the Australian forest service )","Numeric","UC_NONE"
+14,"","Rate of spread (as defined by the Australian forest service )","m/s","UC_NONE"
+15,"","Fire danger index (as defined by the Australian forest service )","Numeric","UC_NONE"
+16,"","Spread component (as defined by the US Forest Service National Fire Danger Rating System)","Numeric","UC_NONE"
+17,"","Burning index (as defined by the US Forest Service National Fire Danger Rating System)","Numeric","UC_NONE"
+18,"","Ignition component (as defined by the US Forest Service National Fire Danger Rating System)","%","UC_NONE"
+19,"","Energy release component (as defined by the US Forest Service National Fire Danger Rating System)","Joule/m2","UC_NONE"
+20,"","Reserved","","UC_NONE"
+21,"","Reserved","","UC_NONE"
+22,"","Reserved","","UC_NONE"
+23,"","Reserved","","UC_NONE"
+24,"","Reserved","","UC_NONE"
+25,"","Reserved","","UC_NONE"
+26,"","Reserved","","UC_NONE"
+27,"","Reserved","","UC_NONE"
+28,"","Reserved","","UC_NONE"
+29,"","Reserved","","UC_NONE"
+30,"","Reserved","","UC_NONE"
+31,"","Reserved","","UC_NONE"
+32,"","Reserved","","UC_NONE"
+33,"","Reserved","","UC_NONE"
+34,"","Reserved","","UC_NONE"
+35,"","Reserved","","UC_NONE"
+36,"","Reserved","","UC_NONE"
+37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" 
+150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" 
+240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_5.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_5.csv new file mode 100644 index 00000000..c6409cf7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_5.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Glacier cover","Proportion","UC_NONE" +1,"","Glacier temperature","K","UC_NONE" +2,"","Reserved","","UC_NONE" +3,"","Reserved","","UC_NONE" +4,"","Reserved","","UC_NONE" +5,"","Reserved","","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" 
+63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" 
+175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git 
a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_6.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_6.csv new file mode 100644 index 00000000..a3fca888 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_2_6.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Urban cover","Proportion","UC_NONE" +1,"","Road cover","Proportion","UC_NONE" +2,"","Building cover","Proportion","UC_NONE" +3,"","Building height","m","UC_NONE" +4,"","Vertical-to-horizontal area fraction","m2 m-2","UC_NONE" +5,"","Standard deviation of building height","m","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" 
+84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for 
local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_0.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_0.csv new file mode 100644 index 00000000..ff829d8a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_0.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"SRAD","Scaled radiance","Numeric","UC_NONE" +1,"SALBEDO","Scaled albedo","Numeric","UC_NONE" +2,"SBTMP","Scaled brightness temperature","Numeric","UC_NONE" +3,"SPWAT","Scaled precipitable water","Numeric","UC_NONE" +4,"SLFTI","Scaled lifted index","Numeric","UC_NONE" +5,"SCTPRES","Scaled cloud top pressure","Numeric","UC_NONE" +6,"SSTMP","Scaled skin temperature","Numeric","UC_NONE" +7,"CLOUDM","Cloud mask","0=Clear over water; 1=Clear over land; 2=Cloud; 3=No data; 4-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +8,"PIXST","Pixel scene type","0=No scene identified; 1=Green needle-leafed forest; 2=Green broad-leafed forest; 3=Deciduous needle-leafed forest; 4=Deciduous broad-leafed forest; 5=Deciduous mixed forest; 6=Closed shrub-land; 7=Open shrub-land; 8=Woody savannah; 9=Savannah; 10=Grassland; 11=Permanent wetland; 12=Cropland; 13=Urban; 14=Vegetation/crops; 15=Permanent snow/ice; 16=Barren desert; 17=Water bodies; 18=Tundra; 19=Warm liquid water cloud; 20=Supercooled liquid water cloud; 21=Mixed-phase cloud; 22=Optically thin ice cloud; 23=Optically thick ice cloud; 24=Multilayered cloud; 25-96=Reserved; 97=Snow/ice on land; 98=Snow/ice on water; 99=Sun-glint; 100=General cloud; 101=Low cloud/fog/stratus; 102=Low cloud/stratocumulus; 103=Low cloud/unknown type; 104=Medium cloud/nimbostratus; 105=Medium cloud/altostratus; 106=Medium cloud/unknown type; 107=High cloud/cumulus; 108=High cloud/cirrus; 109=High cloud/unknown; 110=Unknown cloud type; 111=Single layer water cloud; 112=Single layer ice cloud; 113-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +9,"FIREDI","Fire detection indicator","0=No fire detected; 1=Possible fire detected; 2=Probable fire detected; 3=Missing","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" 
+57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" 
+169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" 
+253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_1.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_1.csv new file mode 100644 index 00000000..0a6d634c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_1.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"ESTP","Estimated precipitation","kg/(m^2)","UC_NONE" +1,"IRRATE","Instantaneous rain rate","kg/(m^2*s)","UC_NONE" +2,"CTOPH","Cloud top height","kg/(m^2*s)","UC_NONE" +3,"CTOPHQI","Cloud top height quality indicator","0=Nominal cloud top height quality; 1=Fog in segment; 2=Poor quality height estimation; 3=Fog in segment and poor quality height estimation; 4-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +4,"ESTUGRD","Estimated u component of wind","m/s","UC_NONE" +5,"ESTVGRD","Estimated v component of wind","m/s","UC_NONE" +6,"NPIXU","Number of pixels used","Numeric","UC_NONE" +7,"SOLZA","Solar zenith angle","Degree","UC_NONE" +8,"RAZA","Relative azimuth angle","Degree","UC_NONE" +9,"RFL06","Reflectance in 0.6 micron channel","%","UC_NONE" +10,"RFL08","Reflectance in 0.8 micron channel","%","UC_NONE" +11,"RFL16","Reflectance in 1.6 micron channel","%","UC_NONE" +12,"RFL39","Reflectance in 3.9 micron channel","%","UC_NONE" +13,"ATMDIV","Atmospheric divergence","1/s","UC_NONE" +14,"CBTMP","Cloudy Brightness Temperature","K","UC_NONE" +15,"CSBTMP","Clear Sky Brightness Temperature","K","UC_NONE" +16,"CLDRAD","Cloudy Radiance (with respect to wave number)","W/(m*sr)","UC_NONE" +17,"CSKYRAD","Clear Sky Radiance (with respect to wave number)","W/(m*sr)","UC_NONE" +18,"","Reserved","-","UC_NONE" +19,"WINDS","Wind Speed","m/s","UC_NONE" +20,"AOT06","Aerosol Optical Thickness at 0.635 µm","","UC_NONE" +21,"AOT08","Aerosol Optical Thickness at 0.810 µm","","UC_NONE" +22,"AOT16","Aerosol Optical Thickness at 1.640 µm","","UC_NONE" +23,"ANGCOE","Angstrom Coefficient","","UC_NONE" +24,"","Reserved","-","UC_NONE" +25,"","Reserved","-","UC_NONE" +26,"","Reserved","-","UC_NONE" +27,"BRFLF","Bidirectional Reflecance Factor","Numeric","UC_NONE" +28,"SPBRT","Brightness Temperature","K","UC_NONE" +29,"SRAD","Scaled Radiance","Numeric","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" 
+52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Correlation coefficient between MPE rain-rates for the co-located IR data and the microwave data rain-rates","Numeric","UC_NONE" +99,"","Standard deviation between MPE rain-rates for the co-located IR data and the microwave data rain-rates","kg m-2 s-1","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" 
+158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local 
use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_2.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_2.csv new file mode 100644 index 00000000..c0c107e0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_2.csv @@ -0,0 +1,28 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Clear sky probability","%","UC_NONE" +1,"","Cloud top temperature","K","UC_NONE" +2,"","Cloud top pressure","Pa","UC_NONE" +3,"","Cloud type","0=No scene identified; 1=Green needle-leafed forest; 2=Green broad-leafed forest; 3=Deciduous needle-leafed forest; 4=Deciduous broad-leafed forest; 5=Deciduous mixed forest; 6=Closed shrub-land; 7=Open shrub-land; 8=Woody savannah; 9=Savannah; 10=Grassland; 11=Permanent wetland; 12=Cropland; 13=Urban; 14=Vegetation/crops; 15=Permanent snow/ice; 16=Barren desert; 17=Water bodies; 18=Tundra; 19=Warm liquid water cloud; 20=Supercooled liquid water cloud; 21=Mixed-phase cloud; 22=Optically thin ice cloud; 23=Optically thick ice cloud; 24=Multilayered cloud; 25-96=Reserved; 97=Snow/ice on land; 98=Snow/ice on water; 99=Sun-glint; 100=General cloud; 101=Low cloud/fog/stratus; 102=Low cloud/stratocumulus; 103=Low cloud/unknown type; 104=Medium cloud/nimbostratus; 105=Medium cloud/altostratus; 106=Medium cloud/unknown type; 107=High cloud/cumulus; 108=High cloud/cirrus; 109=High cloud/unknown; 110=Unknown cloud type; 111=Single layer water cloud; 112=Single layer ice cloud; 113-191=Reserved; 192-254=Reserved for local use; 255=Missing","UC_NONE" +4,"","Cloud phase","0=No scene identified; 1=Green needle-leafed forest; 2=Green broad-leafed forest; 3=Deciduous needle-leafed forest; 4=Deciduous broad-leafed forest; 5=Deciduous mixed forest; 6=Closed shrub-land; 7=Open shrub-land; 8=Woody savannah; 9=Savannah; 10=Grassland; 11=Permanent wetland; 12=Cropland; 13=Urban; 14=Vegetation/crops; 15=Permanent snow/ice; 16=Barren desert; 17=Water bodies; 18=Tundra; 19=Warm liquid water cloud; 20=Supercooled liquid water cloud; 21=Mixed-phase cloud; 22=Optically thin ice cloud; 23=Optically thick ice cloud; 24=Multilayered cloud; 25-96=Reserved; 97=Snow/ice on land; 98=Snow/ice on water; 99=Sun-glint; 100=General cloud; 101=Low cloud/fog/stratus; 102=Low cloud/stratocumulus; 103=Low cloud/unknown type; 104=Medium cloud/nimbostratus; 105=Medium cloud/altostratus; 106=Medium cloud/unknown type; 107=High cloud/cumulus; 108=High cloud/cirrus; 109=High cloud/unknown; 110=Unknown cloud type; 111=Single layer water cloud; 112=Single layer ice cloud; 113-191=Reserved; 192-254=Reserved 
for local use; 255=Missing","UC_NONE" +5,"","Cloud optical depth","Numeric","UC_NONE" +6,"","Cloud particle effective radius","m","UC_NONE" +7,"","Cloud liquid water path","kg m-2","UC_NONE" +8,"","Cloud ice water path","kg m-2","UC_NONE" +9,"","Cloud albedo","Numeric","UC_NONE" +10,"","Cloud emissivity","Numeric","UC_NONE" +11,"","Effective absorption optical depth ratio","Numeric","UC_NONE" +30,"","Measurement cost","Numeric","UC_NONE" +31,"","Upper layer cloud optical depth","Numeric","UC_NONE" +32,"","Upper layer cloud top pressure","Pa","UC_NONE" +33,"","Upper layer cloud effective radius","m","UC_NONE" +34,"","Error in upper layer cloud optical depth","Numeric","UC_NONE" +35,"","Error in upper layer cloud top pressure","Pa","UC_NONE" +36,"","Error in upper layer cloud effective radius","m","UC_NONE" +37,"","Lower layer cloud optical depth","Numeric","UC_NONE" +38,"","Lower layer cloud top pressure","Pa","UC_NONE" +39,"","Error in lower layer cloud optical depth","Numeric","UC_NONE" +40,"","Error in lower layer cloud top pressure","Pa","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_3.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_3.csv new file mode 100644 index 00000000..b8411888 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_3.csv @@ -0,0 +1,8 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Probability of encountering marginal visual flight rule conditions","%","UC_NONE" +1,"","Probability of encountering low instrument flight rule conditions","%","UC_NONE" +2,"","Probability of encountering instrument flight rule conditions","%","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_4.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_4.csv new file mode 100644 index 00000000..d64a3158 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_4.csv @@ -0,0 +1,14 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Volcanic ash probability","%","UC_NONE" +1,"","Volcanic ash cloud top temperature","K","UC_NONE" +2,"","Volcanic ash cloud top pressure","Pa","UC_NONE" +3,"","Volcanic ash cloud top height","m","UC_NONE" +4,"","Volcanic ash cloud emissivity","Numeric","UC_NONE" +5,"","Volcanic ash effective absorption optical depth ratio","Numeric","UC_NONE" +6,"","Volcanic ash cloud optical depth","Numeric","UC_NONE" +7,"","Volcanic ash column density","kg m-2","UC_NONE" +8,"","Volcanic ash particle effective radius","m","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_5.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_5.csv new file mode 100644 index 00000000..82b95c73 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_5.csv @@ -0,0 +1,11 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Interface sea-surface temperature","K","UC_NONE" +1,"","Skin sea-surface temperature","K","UC_NONE" +2,"","Sub-skin sea-surface temperature","K","UC_NONE" +3,"","Foundation sea-surface temperature","K","UC_NONE" +4,"","Estimated bias between sea-surface temperature and standard","K","UC_NONE" +5,"","Estimated standard deviation between sea surface temperature and standard","K","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_6.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_6.csv new file mode 100644 index 00000000..28027d24 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_3_6.csv @@ -0,0 +1,11 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Global solar irradiance","W m-2","UC_NONE" +1,"","Global solar exposure","J m-2","UC_NONE" +2,"","Direct solar irradiance","W m-2","UC_NONE" +3,"","Direct solar exposure","J m-2","UC_NONE" +4,"","Diffuse solar irradiance","W m-2","UC_NONE" +5,"","Diffuse solar exposure","J m-2","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_0.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_0.csv new file mode 100644 index 00000000..10b7db2e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_0.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Temperature","K","UC_NONE" +1,"","Electron temperature","K","UC_NONE" +2,"","Proton temperature","K","UC_NONE" +3,"","Ion temperature","K","UC_NONE" +4,"","Parallel temperature","K","UC_NONE" +5,"","Perpendicular temperature","K","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" 
+65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" 
+177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_1.csv 
b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_1.csv new file mode 100644 index 00000000..fc029574 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_1.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Velocity magnitude (speed)","m s-1","UC_NONE" +1,"","1st vector component of velocity (coordinate system dependent)","m s-1","UC_NONE" +2,"","2nd vector component of velocity (coordinate system dependent)","m s-1","UC_NONE" +3,"","3rd vector component of velocity (coordinate system dependent)","m s-1","UC_NONE" +4,"","Reserved","","UC_NONE" +5,"","Reserved","","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" 
+84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for 
local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_10.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_10.csv new file mode 100644 index 00000000..a10160b7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_10.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Scintillation index (sigma phi)","rad","UC_NONE" +1,"","Scintillation index S4","Numeric","UC_NONE" +2,"","Rate of change of TEC index (ROTI)","TECU/min","UC_NONE" +3,"","Disturbance ionosphere index spatial gradient (DIXSG)","Numeric","UC_NONE" +4,"","Along arc TEC rate (AATR)","TECU/min","UC_NONE" +5,"","Kp","Numeric","UC_NONE" +6,"","Equatorial disturbance storm time index (Dst)","nT","UC_NONE" +7,"","Auroral electrojet (AE)","nT","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" 
+97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" 
+203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_2.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_2.csv new file mode 100644 index 00000000..42629399 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_2.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Particle number density","m-3","UC_NONE" +1,"","Electron density","m-3","UC_NONE" +2,"","Proton density","m-3","UC_NONE" +3,"","Ion density","m-3","UC_NONE" +4,"","Vertical total electron content","TECU","UC_NONE" +5,"","HF absorption frequency","Hz","UC_NONE" +6,"","HF absorption","dB","UC_NONE" +7,"","Spread F","m","UC_NONE" +8,"","h'F","m","UC_NONE" +9,"","Critical frequency","Hz","UC_NONE" +10,"","Maximal usable frequency (MUF)","Hz","UC_NONE" +11,"","Peak height (hm)","m","UC_NONE" +12,"","Peak density (Nm)","m-3","UC_NONE" +13,"","Equivalent slab thickness (tau)","km","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" 
+98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local 
use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_3.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_3.csv new file mode 100644 index 00000000..8a69646b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_3.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Magnetic field magnitude","T","UC_NONE" +1,"","1st vector component of magnetic field","T","UC_NONE" +2,"","2nd vector component of magnetic field","T","UC_NONE" +3,"","3rd vector component of magnetic field","T","UC_NONE" +4,"","Electric field magnitude","V m-1","UC_NONE" +5,"","1st vector component of electric field","V m-1","UC_NONE" +6,"","2nd vector component of electric field","V m-1","UC_NONE" +7,"","3rd vector component of electric field","V m-1","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" 
+96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for 
local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_4.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_4.csv new file mode 100644 index 00000000..d4c9da1c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_4.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Proton flux (differential)","(m2 s sr eV)-1","UC_NONE" +1,"","Proton flux (integral)","(m2 s sr )-1","UC_NONE" +2,"","Electron flux (differential)","(m2 s sr eV)-1","UC_NONE" +3,"","Electron flux (integral)","(m2 s sr)-1","UC_NONE" +4,"","Heavy ion flux (differential)","(m2 s sr eV/nuc)-1","UC_NONE" +5,"","Heavy ion flux (integral)","(m2 s sr)-1","UC_NONE" +6,"","Cosmic ray neutron flux","h-1","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" 
+98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local 
use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_5.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_5.csv new file mode 100644 index 00000000..7d5c1dd9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_5.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Amplitude","dB","UC_NONE" +1,"","Phase","rad","UC_NONE" +2,"","Frequency","Hz","UC_NONE" +3,"","Wavelength","m","UC_NONE" +4,"","Reserved","","UC_NONE" +5,"","Reserved","","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" 
+104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" 
+208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_6.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_6.csv new file mode 100644 index 00000000..1ed55d2c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_6.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Integrated solar irradiance","W m-2","UC_NONE" +1,"","Solar X-ray flux (XRS long)","W m-2","UC_NONE" +2,"","Solar X-ray flux (XRS short)","W m-2","UC_NONE" +3,"","Solar EUV irradiance","W m-2","UC_NONE" +4,"","Solar spectral irradiance","W m-2 nm-1","UC_NONE" +5,"","F10.7","W m-2 Hz-1","UC_NONE" +6,"","Solar radio emissions","W m-2 Hz-1","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" 
+99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local 
use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_7.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_7.csv new file mode 100644 index 00000000..68f7481a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_7.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Limb intensity","J m-2 s-1","UC_NONE" +1,"","Disk intensity","J m-2 s-1","UC_NONE" +2,"","Disk intensity day","J m-2 s-1","UC_NONE" +3,"","Disk intensity night","J m-2 s-1","UC_NONE" +4,"","Reserved","","UC_NONE" +5,"","Reserved","","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" 
+102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local 
use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_8.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_8.csv new file mode 100644 index 00000000..037a6bb8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_8.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","X-ray radiance","W sr-1 m-2","UC_NONE" +1,"","EUV radiance","W sr-1 m-2","UC_NONE" +2,"","H-alpha radiance","W sr-1 m-2","UC_NONE" +3,"","White light radiance","W sr-1 m-2","UC_NONE" +4,"","CaII-K radiance","W sr-1 m-2","UC_NONE" +5,"","White light coronagraph radiance","W sr-1 m-2","UC_NONE" +6,"","Heliospheric radiance","W sr-1 m-2","UC_NONE" +7,"","Thematic mask","Numeric","UC_NONE" +8,"","Solar induced chlorophyll fluorescence","W m-2 sr-1 m-1","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" 
+98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" +102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local 
use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_9.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_9.csv new file mode 100644 index 00000000..09642c26 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_4_9.csv @@ -0,0 +1,261 @@ +"subcat","short_name","name","unit","unit_conv" +-4,"######################################################################################################","#","#","#" +-3,"DO NOT MODIFY THIS FILE. 
It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#","#" +-1,"######################################################################################################","#","#","#" +0,"","Pedersen conductivity","S m-1","UC_NONE" +1,"","Hall conductivity","S m-1","UC_NONE" +2,"","Parallel conductivity","S m-1","UC_NONE" +3,"","Reserved","","UC_NONE" +4,"","Reserved","","UC_NONE" +5,"","Reserved","","UC_NONE" +6,"","Reserved","","UC_NONE" +7,"","Reserved","","UC_NONE" +8,"","Reserved","","UC_NONE" +9,"","Reserved","","UC_NONE" +10,"","Reserved","","UC_NONE" +11,"","Reserved","","UC_NONE" +12,"","Reserved","","UC_NONE" +13,"","Reserved","","UC_NONE" +14,"","Reserved","","UC_NONE" +15,"","Reserved","","UC_NONE" +16,"","Reserved","","UC_NONE" +17,"","Reserved","","UC_NONE" +18,"","Reserved","","UC_NONE" +19,"","Reserved","","UC_NONE" +20,"","Reserved","","UC_NONE" +21,"","Reserved","","UC_NONE" +22,"","Reserved","","UC_NONE" +23,"","Reserved","","UC_NONE" +24,"","Reserved","","UC_NONE" +25,"","Reserved","","UC_NONE" +26,"","Reserved","","UC_NONE" +27,"","Reserved","","UC_NONE" +28,"","Reserved","","UC_NONE" +29,"","Reserved","","UC_NONE" +30,"","Reserved","","UC_NONE" +31,"","Reserved","","UC_NONE" +32,"","Reserved","","UC_NONE" +33,"","Reserved","","UC_NONE" +34,"","Reserved","","UC_NONE" +35,"","Reserved","","UC_NONE" +36,"","Reserved","","UC_NONE" +37,"","Reserved","","UC_NONE" +38,"","Reserved","","UC_NONE" +39,"","Reserved","","UC_NONE" +40,"","Reserved","","UC_NONE" +41,"","Reserved","","UC_NONE" +42,"","Reserved","","UC_NONE" +43,"","Reserved","","UC_NONE" +44,"","Reserved","","UC_NONE" +45,"","Reserved","","UC_NONE" +46,"","Reserved","","UC_NONE" +47,"","Reserved","","UC_NONE" +48,"","Reserved","","UC_NONE" +49,"","Reserved","","UC_NONE" +50,"","Reserved","","UC_NONE" +51,"","Reserved","","UC_NONE" +52,"","Reserved","","UC_NONE" +53,"","Reserved","","UC_NONE" +54,"","Reserved","","UC_NONE" +55,"","Reserved","","UC_NONE" +56,"","Reserved","","UC_NONE" +57,"","Reserved","","UC_NONE" +58,"","Reserved","","UC_NONE" +59,"","Reserved","","UC_NONE" +60,"","Reserved","","UC_NONE" +61,"","Reserved","","UC_NONE" +62,"","Reserved","","UC_NONE" +63,"","Reserved","","UC_NONE" +64,"","Reserved","","UC_NONE" +65,"","Reserved","","UC_NONE" +66,"","Reserved","","UC_NONE" +67,"","Reserved","","UC_NONE" +68,"","Reserved","","UC_NONE" +69,"","Reserved","","UC_NONE" +70,"","Reserved","","UC_NONE" +71,"","Reserved","","UC_NONE" +72,"","Reserved","","UC_NONE" +73,"","Reserved","","UC_NONE" +74,"","Reserved","","UC_NONE" +75,"","Reserved","","UC_NONE" +76,"","Reserved","","UC_NONE" +77,"","Reserved","","UC_NONE" +78,"","Reserved","","UC_NONE" +79,"","Reserved","","UC_NONE" +80,"","Reserved","","UC_NONE" +81,"","Reserved","","UC_NONE" +82,"","Reserved","","UC_NONE" +83,"","Reserved","","UC_NONE" +84,"","Reserved","","UC_NONE" +85,"","Reserved","","UC_NONE" +86,"","Reserved","","UC_NONE" +87,"","Reserved","","UC_NONE" +88,"","Reserved","","UC_NONE" +89,"","Reserved","","UC_NONE" +90,"","Reserved","","UC_NONE" +91,"","Reserved","","UC_NONE" +92,"","Reserved","","UC_NONE" +93,"","Reserved","","UC_NONE" +94,"","Reserved","","UC_NONE" +95,"","Reserved","","UC_NONE" +96,"","Reserved","","UC_NONE" +97,"","Reserved","","UC_NONE" +98,"","Reserved","","UC_NONE" +99,"","Reserved","","UC_NONE" +100,"","Reserved","","UC_NONE" +101,"","Reserved","","UC_NONE" 
+102,"","Reserved","","UC_NONE" +103,"","Reserved","","UC_NONE" +104,"","Reserved","","UC_NONE" +105,"","Reserved","","UC_NONE" +106,"","Reserved","","UC_NONE" +107,"","Reserved","","UC_NONE" +108,"","Reserved","","UC_NONE" +109,"","Reserved","","UC_NONE" +110,"","Reserved","","UC_NONE" +111,"","Reserved","","UC_NONE" +112,"","Reserved","","UC_NONE" +113,"","Reserved","","UC_NONE" +114,"","Reserved","","UC_NONE" +115,"","Reserved","","UC_NONE" +116,"","Reserved","","UC_NONE" +117,"","Reserved","","UC_NONE" +118,"","Reserved","","UC_NONE" +119,"","Reserved","","UC_NONE" +120,"","Reserved","","UC_NONE" +121,"","Reserved","","UC_NONE" +122,"","Reserved","","UC_NONE" +123,"","Reserved","","UC_NONE" +124,"","Reserved","","UC_NONE" +125,"","Reserved","","UC_NONE" +126,"","Reserved","","UC_NONE" +127,"","Reserved","","UC_NONE" +128,"","Reserved","","UC_NONE" +129,"","Reserved","","UC_NONE" +130,"","Reserved","","UC_NONE" +131,"","Reserved","","UC_NONE" +132,"","Reserved","","UC_NONE" +133,"","Reserved","","UC_NONE" +134,"","Reserved","","UC_NONE" +135,"","Reserved","","UC_NONE" +136,"","Reserved","","UC_NONE" +137,"","Reserved","","UC_NONE" +138,"","Reserved","","UC_NONE" +139,"","Reserved","","UC_NONE" +140,"","Reserved","","UC_NONE" +141,"","Reserved","","UC_NONE" +142,"","Reserved","","UC_NONE" +143,"","Reserved","","UC_NONE" +144,"","Reserved","","UC_NONE" +145,"","Reserved","","UC_NONE" +146,"","Reserved","","UC_NONE" +147,"","Reserved","","UC_NONE" +148,"","Reserved","","UC_NONE" +149,"","Reserved","","UC_NONE" +150,"","Reserved","","UC_NONE" +151,"","Reserved","","UC_NONE" +152,"","Reserved","","UC_NONE" +153,"","Reserved","","UC_NONE" +154,"","Reserved","","UC_NONE" +155,"","Reserved","","UC_NONE" +156,"","Reserved","","UC_NONE" +157,"","Reserved","","UC_NONE" +158,"","Reserved","","UC_NONE" +159,"","Reserved","","UC_NONE" +160,"","Reserved","","UC_NONE" +161,"","Reserved","","UC_NONE" +162,"","Reserved","","UC_NONE" +163,"","Reserved","","UC_NONE" +164,"","Reserved","","UC_NONE" +165,"","Reserved","","UC_NONE" +166,"","Reserved","","UC_NONE" +167,"","Reserved","","UC_NONE" +168,"","Reserved","","UC_NONE" +169,"","Reserved","","UC_NONE" +170,"","Reserved","","UC_NONE" +171,"","Reserved","","UC_NONE" +172,"","Reserved","","UC_NONE" +173,"","Reserved","","UC_NONE" +174,"","Reserved","","UC_NONE" +175,"","Reserved","","UC_NONE" +176,"","Reserved","","UC_NONE" +177,"","Reserved","","UC_NONE" +178,"","Reserved","","UC_NONE" +179,"","Reserved","","UC_NONE" +180,"","Reserved","","UC_NONE" +181,"","Reserved","","UC_NONE" +182,"","Reserved","","UC_NONE" +183,"","Reserved","","UC_NONE" +184,"","Reserved","","UC_NONE" +185,"","Reserved","","UC_NONE" +186,"","Reserved","","UC_NONE" +187,"","Reserved","","UC_NONE" +188,"","Reserved","","UC_NONE" +189,"","Reserved","","UC_NONE" +190,"","Reserved","","UC_NONE" +191,"","Reserved","","UC_NONE" +192,"","Reserved for local use","","UC_NONE" +193,"","Reserved for local use","","UC_NONE" +194,"","Reserved for local use","","UC_NONE" +195,"","Reserved for local use","","UC_NONE" +196,"","Reserved for local use","","UC_NONE" +197,"","Reserved for local use","","UC_NONE" +198,"","Reserved for local use","","UC_NONE" +199,"","Reserved for local use","","UC_NONE" +200,"","Reserved for local use","","UC_NONE" +201,"","Reserved for local use","","UC_NONE" +202,"","Reserved for local use","","UC_NONE" +203,"","Reserved for local use","","UC_NONE" +204,"","Reserved for local use","","UC_NONE" +205,"","Reserved for local use","","UC_NONE" +206,"","Reserved for local 
use","","UC_NONE" +207,"","Reserved for local use","","UC_NONE" +208,"","Reserved for local use","","UC_NONE" +209,"","Reserved for local use","","UC_NONE" +210,"","Reserved for local use","","UC_NONE" +211,"","Reserved for local use","","UC_NONE" +212,"","Reserved for local use","","UC_NONE" +213,"","Reserved for local use","","UC_NONE" +214,"","Reserved for local use","","UC_NONE" +215,"","Reserved for local use","","UC_NONE" +216,"","Reserved for local use","","UC_NONE" +217,"","Reserved for local use","","UC_NONE" +218,"","Reserved for local use","","UC_NONE" +219,"","Reserved for local use","","UC_NONE" +220,"","Reserved for local use","","UC_NONE" +221,"","Reserved for local use","","UC_NONE" +222,"","Reserved for local use","","UC_NONE" +223,"","Reserved for local use","","UC_NONE" +224,"","Reserved for local use","","UC_NONE" +225,"","Reserved for local use","","UC_NONE" +226,"","Reserved for local use","","UC_NONE" +227,"","Reserved for local use","","UC_NONE" +228,"","Reserved for local use","","UC_NONE" +229,"","Reserved for local use","","UC_NONE" +230,"","Reserved for local use","","UC_NONE" +231,"","Reserved for local use","","UC_NONE" +232,"","Reserved for local use","","UC_NONE" +233,"","Reserved for local use","","UC_NONE" +234,"","Reserved for local use","","UC_NONE" +235,"","Reserved for local use","","UC_NONE" +236,"","Reserved for local use","","UC_NONE" +237,"","Reserved for local use","","UC_NONE" +238,"","Reserved for local use","","UC_NONE" +239,"","Reserved for local use","","UC_NONE" +240,"","Reserved for local use","","UC_NONE" +241,"","Reserved for local use","","UC_NONE" +242,"","Reserved for local use","","UC_NONE" +243,"","Reserved for local use","","UC_NONE" +244,"","Reserved for local use","","UC_NONE" +245,"","Reserved for local use","","UC_NONE" +246,"","Reserved for local use","","UC_NONE" +247,"","Reserved for local use","","UC_NONE" +248,"","Reserved for local use","","UC_NONE" +249,"","Reserved for local use","","UC_NONE" +250,"","Reserved for local use","","UC_NONE" +251,"","Reserved for local use","","UC_NONE" +252,"","Reserved for local use","","UC_NONE" +253,"","Reserved for local use","","UC_NONE" +254,"","Reserved for local use","","UC_NONE" +255,"","Missing","","UC_NONE" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_Canada.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_Canada.csv new file mode 100644 index 00000000..741b7694 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_Canada.csv @@ -0,0 +1,5 @@ +prod,cat,subcat,short_name,name,unit,unit_conv +0,4,192,"DSWRF_SFC_0","Downward incident solar flux","W/m^2",UC_NONE +0,4,193,"USWRF_SFC_0","Upward short wave radiative flux","W/m^2",UC_NONE +0,5,192,"DLWRF_SFC_0","Downward Long Wave Radiative Flux","W/m^2",UC_NONE +0,5,193,"ULWRF_0","Outgoing Long Wave Radiative Flux","W/m^2",UC_NONE diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_HPC.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_HPC.csv new file mode 100644 index 00000000..1c146001 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_HPC.csv @@ -0,0 +1,2 @@ +prod,cat,subcat,short_name,name,unit,unit_conv +0,1,192,"HPC-Wx","HPC Code","-",UC_NONE diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_MRMS.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_MRMS.csv 
new file mode 100644 index 00000000..ff92a003 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_MRMS.csv @@ -0,0 +1,175 @@ +prod,cat,subcat,short_name,name,unit,unit_conv +209,2,0,"NLDN_CG_001min_AvgDensity","CG Average Lightning Density 1-min - NLDN","flashes/km^2/min",UC_NONE +209,2,1,"NLDN_CG_005min_AvgDensity","CG Average Lightning Density 5-min - NLDN","flashes/km^2/min",UC_NONE +209,2,2,"NLDN_CG_015min_AvgDensity","CG Average Lightning Density 15-min - NLDN","flashes/km^2/min",UC_NONE +209,2,3,"NLDN_CG_030min_AvgDensity","CG Average Lightning Density 30-min - NLDN","flashes/km^2/min",UC_NONE +209,2,4,"LightningProbabilityNext30min","Lightning Probability 0-30 minutes - NLDN","%",UC_NONE +209,2,5,"LightningProbabilityNext30minGrid","Lightning Probability 0-30 minutes - NLDN","%",UC_NONE +209,2,6,"LightningProbabilityNext60minGrid","Lightning Probability 0-60 minutes - NLDN","%",UC_NONE +209,2,7,"LightningJumpGrid","Rapid lightning increases and decreases ","non-dim",UC_NONE +209,2,8,"LightningJumpGrid_Max_005min","Rapid lightning increases and decreases over 5-minutes ","non-dim",UC_NONE +209,3,0,"MergedAzShear0to2kmAGL","Azimuth Shear 0-2km AGL","0.001/s",UC_NONE +209,3,1,"MergedAzShear3to6kmAGL","Azimuth Shear 3-6km AGL","0.001/s",UC_NONE +209,3,2,"RotationTrack30min","Rotation Track 0-2km AGL 30-min","0.001/s",UC_NONE +209,3,3,"RotationTrack60min","Rotation Track 0-2km AGL 60-min","0.001/s",UC_NONE +209,3,4,"RotationTrack120min","Rotation Track 0-2km AGL 120-min","0.001/s",UC_NONE +209,3,5,"RotationTrack240min","Rotation Track 0-2km AGL 240-min","0.001/s",UC_NONE +209,3,6,"RotationTrack360min","Rotation Track 0-2km AGL 360-min","0.001/s",UC_NONE +209,3,7,"RotationTrack1440min","Rotation Track 0-2km AGL 1440-min","0.001/s",UC_NONE +209,3,14,"RotationTrackML30min","Rotation Track 3-6km AGL 30-min","0.001/s",UC_NONE +209,3,15,"RotationTrackML60min","Rotation Track 3-6km AGL 60-min","0.001/s",UC_NONE +209,3,16,"RotationTrackML120min","Rotation Track 3-6km AGL 120-min","0.001/s",UC_NONE +209,3,17,"RotationTrackML240min","Rotation Track 3-6km AGL 240-min","0.001/s",UC_NONE +209,3,18,"RotationTrackML360min","Rotation Track 3-6km AGL 360-min","0.001/s",UC_NONE +209,3,19,"RotationTrackML1440min","Rotation Track 3-6km AGL 1440-min","0.001/s",UC_NONE +209,3,26,"SHI","Severe Hail Index","index",UC_NONE +209,3,27,"POSH","Prob of Severe Hail","%",UC_NONE +209,3,28,"MESH","Maximum Estimated Size of Hail (MESH)","mm",UC_NONE +209,3,29,"MESHMax30min","MESH Hail Swath 30-min","mm",UC_NONE +209,3,30,"MESHMax60min","MESH Hail Swath 60-min","mm",UC_NONE +209,3,31,"MESHMax120min","MESH Hail Swath 120-min","mm",UC_NONE +209,3,32,"MESHMax240min","MESH Hail Swath 240-min","mm",UC_NONE +209,3,33,"MESHMax360min","MESH Hail Swath 360-min","mm",UC_NONE +209,3,34,"MESHMax1440min","MESH Hail Swath 1440-min","mm",UC_NONE +209,3,37,"VIL_Max_120min","VIL Swath 120-min","kg/m^2",UC_NONE +209,3,40,"VIL_Max_1440min","VIL Swath 1440-min","kg/m^2",UC_NONE +209,3,41,"VIL","Vertically Integrated Liquid","kg/m^2",UC_NONE +209,3,42,"VIL_Density","Vertically Integrated Liquid Density","g/m^3",UC_NONE +209,3,43,"VII","Vertically Integrated Ice","kg/m^2",UC_NONE +209,3,44,"EchoTop_18","Echo Top - 18 dBZ","km MSL",UC_NONE +209,3,45,"EchoTop_30","Echo Top - 30 dBZ","km MSL",UC_NONE +209,3,46,"EchoTop_50","Echo Top - 50 dBZ","km MSL",UC_NONE +209,3,47,"EchoTop_60","Echo Top - 60 dBZ","km MSL",UC_NONE +209,3,48,"H50AboveM20C","Thickness [50 dBZ top - (-20C)]","km",UC_NONE
+209,3,49,"H50Above0C","Thickness [50 dBZ top - 0C]","km",UC_NONE +209,3,50,"H60AboveM20C","Thickness [60 dBZ top - (-20C)]","km",UC_NONE +209,3,51,"H60Above0C","Thickness [60 dBZ top - 0C]","km",UC_NONE +209,3,52,"Reflectivity_0C","Isothermal Reflectivity at 0C","dBZ",UC_NONE +209,3,53,"Reflectivity_-5C","Isothermal Reflectivity at -5C","dBZ",UC_NONE +209,3,54,"Reflectivity_-10C","Isothermal Reflectivity at -10C","dBZ",UC_NONE +209,3,55,"Reflectivity_-15C","Isothermal Reflectivity at -15C","dBZ",UC_NONE +209,3,56,"Reflectivity_-20C","Isothermal Reflectivity at -20C","dBZ",UC_NONE +209,3,57,"ReflectivityAtLowestAltitude","ReflectivityAtLowestAltitude","dBZ",UC_NONE +209,3,58,"MergedReflectivityAtLowestAltitude","Non Quality Controlled Reflectivity At Lowest Altitude","dBZ",UC_NONE +209,4,0,"IRband4","Infrared (E/W blend)","K",UC_NONE +209,4,1,"Visible","Visible (E/W blend)","non-dim",UC_NONE +209,4,2,"WaterVapor","Water Vapor (E/W blend)","K",UC_NONE +209,4,3,"CloudCover","Cloud Cover","K",UC_NONE +209,6,0,"PrecipFlag","Surface Precipitation Type","flag",UC_NONE +209,6,1,"PrecipRate","Radar Precipitation Rate","mm/hr",UC_NONE +209,6,2,"RadarOnly_QPE_01H","Radar precipitation accumulation 1-hour","mm",UC_NONE +209,6,3,"RadarOnly_QPE_03H","Radar precipitation accumulation 3-hour","mm",UC_NONE +209,6,4,"RadarOnly_QPE_06H","Radar precipitation accumulation 6-hour","mm",UC_NONE +209,6,5,"RadarOnly_QPE_12H","Radar precipitation accumulation 12-hour","mm",UC_NONE +209,6,6,"RadarOnly_QPE_24H","Radar precipitation accumulation 24-hour","mm",UC_NONE +209,6,7,"RadarOnly_QPE_48H","Radar precipitation accumulation 48-hour","mm",UC_NONE +209,6,8,"RadarOnly_QPE_72H","Radar precipitation accumulation 72-hour","mm",UC_NONE +209,6,9,"GaugeCorrQPE01H","Local Gauge Bias Corrected Radar Precipitation Accumulation 1-hour","mm",UC_NONE +209,6,10,"GaugeCorrQPE03H","Local Gauge Bias Corrected Radar Precipitation Accumulation 3-hour","mm",UC_NONE +209,6,11,"GaugeCorrQPE06H","Local Gauge Bias Corrected Radar Precipitation Accumulation 6-hour","mm",UC_NONE +209,6,12,"GaugeCorrQPE12H","Local Gauge Bias Corrected Radar Precipitation Accumulation 12-hour","mm",UC_NONE +209,6,13,"GaugeCorrQPE24H","Local Gauge Bias Corrected Radar Precipitation Accumulation 24-hour","mm",UC_NONE +209,6,14,"GaugeCorrQPE48H","Local Gauge Bias Corrected Radar Precipitation Accumulation 48-hour","mm",UC_NONE +209,6,15,"GaugeCorrQPE72H","Local Gauge Bias Corrected Radar Precipitation Accumulation 72-hour","mm",UC_NONE +209,6,16,"GaugeOnlyQPE01H","Gauge Only Precipitation Accumulation 1-hour","mm",UC_NONE +209,6,17,"GaugeOnlyQPE03H","Gauge Only Precipitation Accumulation 3-hour","mm",UC_NONE +209,6,18,"GaugeOnlyQPE06H","Gauge Only Precipitation Accumulation 6-hour","mm",UC_NONE +209,6,19,"GaugeOnlyQPE12H","Gauge Only Precipitation Accumulation 12-hour","mm",UC_NONE +209,6,20,"GaugeOnlyQPE24H","Gauge Only Precipitation Accumulation 24-hour","mm",UC_NONE +209,6,21,"GaugeOnlyQPE48H","Gauge Only Precipitation Accumulation 48-hour","mm",UC_NONE +209,6,22,"GaugeOnlyQPE72H","Gauge Only Precipitation Accumulation 72-hour","mm",UC_NONE +209,6,23,"MountainMapperQPE01H","Mountain Mapper Precipitation Accumulation 1-hour","mm",UC_NONE +209,6,24,"MountainMapperQPE03H","Mountain Mapper Precipitation Accumulation 3-hour","mm",UC_NONE +209,6,25,"MountainMapperQPE06H","Mountain Mapper Precipitation Accumulation 6-hour","mm",UC_NONE +209,6,26,"MountainMapperQPE12H","Mountain Mapper Precipitation Accumulation 12-hour","mm",UC_NONE 
+209,6,27,"MountainMapperQPE24H","Mountain Mapper Precipitation Accumulation 24-hour","mm",UC_NONE +209,6,28,"MountainMapperQPE48H","Mountain Mapper Precipitation Accumulation 48-hour","mm",UC_NONE +209,6,29,"MountainMapperQPE72H","Mountain Mapper Precipitation Accumulation 72-hour","mm",UC_NONE +209,6,30,"MultiSensor_QPE_01H_Pass1","Multi-sensor accumulation 1-hour (1-hour latency)","mm",UC_NONE +209,6,31,"MultiSensor_QPE_03H_Pass1","Multi-sensor accumulation 3-hour (1-hour latency)","mm",UC_NONE +209,6,32,"MultiSensor_QPE_06H_Pass1","Multi-sensor accumulation 6-hour (1-hour latency)","mm",UC_NONE +209,6,33,"MultiSensor_QPE_12H_Pass1","Multi-sensor accumulation 12-hour (1-hour latency)","mm",UC_NONE +209,6,34,"MultiSensor_QPE_24H_Pass1","Multi-sensor accumulation 24-hour (1-hour latency)","mm",UC_NONE +209,6,35,"MultiSensor_QPE_48H_Pass1","Multi-sensor accumulation 48-hour (1-hour latency)","mm",UC_NONE +209,6,36,"MultiSensor_QPE_72H_Pass1","Multi-sensor accumulation 72-hour (1-hour latency)","mm",UC_NONE +209,6,37,"MultiSensor_QPE_01H_Pass2","Multi-sensor accumulation 1-hour (2-hour latency)","mm",UC_NONE +209,6,38,"MultiSensor_QPE_03H_Pass2","Multi-sensor accumulation 3-hour (2-hour latency)","mm",UC_NONE +209,6,39,"MultiSensor_QPE_06H_Pass2","Multi-sensor accumulation 6-hour (2-hour latency)","mm",UC_NONE +209,6,40,"MultiSensor_QPE_12H_Pass2","Multi-sensor accumulation 12-hour (2-hour latency)","mm",UC_NONE +209,6,41,"MultiSensor_QPE_24H_Pass2","Multi-sensor accumulation 24-hour (2-hour latency)","mm",UC_NONE +209,6,42,"MultiSensor_QPE_48H_Pass2","Multi-sensor accumulation 48-hour (2-hour latency)","mm",UC_NONE +209,6,43,"MultiSensor_QPE_72H_Pass2","Multi-sensor accumulation 72-hour (2-hour latency)","mm",UC_NONE +209,6,44,"SyntheticPrecipRateID","Method IDs for blended single and dual-pol derived precip rates ","flag",UC_NONE +209,6,45,"RadarOnly_QPE_15M","Radar precipitation accumulation 15-minute","mm",UC_NONE +209,7,0,"Model_SurfaceTemp","Model Surface temperature","C",UC_NONE +209,7,1,"Model_WetBulbTemp","Model Surface wet bulb temperature","C",UC_NONE +209,7,2,"WarmRainProbability","Probability of warm rain","%",UC_NONE +209,7,3,"Model_0degC_Height","Model Freezing Level Height","m MSL",UC_NONE +209,7,4,"BrightBandTopHeight","Brightband Top Height","m AGL",UC_NONE +209,7,5,"BrightBandBottomHeight","Brightband Bottom Height","m AGL",UC_NONE +209,8,0,"RadarQualityIndex","Radar Quality Index","non-dim",UC_NONE +209,8,1,"GaugeInflIndex_01H_Pass1","Gauge Influence Index for 1-hour QPE (1-hour latency)","non-dim",UC_NONE +209,8,2,"GaugeInflIndex_03H_Pass1","Gauge Influence Index for 3-hour QPE (1-hour latency)","non-dim",UC_NONE +209,8,3,"GaugeInflIndex_06H_Pass1","Gauge Influence Index for 6-hour QPE (1-hour latency)","non-dim",UC_NONE +209,8,4,"GaugeInflIndex_12H_Pass1","Gauge Influence Index for 12-hour QPE (1-hour latency)","non-dim",UC_NONE +209,8,5,"GaugeInflIndex_24H_Pass1","Gauge Influence Index for 24-hour QPE (1-hour latency)","non-dim",UC_NONE +209,8,6,"GaugeInflIndex_48H_Pass1","Gauge Influence Index for 48-hour QPE (1-hour latency)","non-dim",UC_NONE +209,8,7,"GaugeInflIndex_72H_Pass1","Gauge Influence Index for 72-hour QPE (1-hour latency)","non-dim",UC_NONE +209,8,8,"SeamlessHSR","Seamless Hybrid Scan Reflectivity with VPR correction","dBZ",UC_NONE +209,8,9,"SeamlessHSRHeight","Height of Seamless Hybrid Scan Reflectivity","km AGL",UC_NONE +209,8,10,"RadarAccumulationQualityIndex_01H","Radar 1-hour QPE Accumulation Quality","non-dim",UC_NONE 
+209,8,11,"RadarAccumulationQualityIndex_03H","Radar 3-hour QPE Accumulation Quality","non-dim",UC_NONE +209,8,12,"RadarAccumulationQualityIndex_06H","Radar 6-hour QPE Accumulation Quality","non-dim",UC_NONE +209,8,13,"RadarAccumulationQualityIndex_12H","Radar 12-hour QPE Accumulation Quality","non-dim",UC_NONE +209,8,14,"RadarAccumulationQualityIndex_24H","Radar 24-hour QPE Accumulation Quality","non-dim",UC_NONE +209,8,15,"RadarAccumulationQualityIndex_48H","Radar 48-hour QPE Accumulation Quality","non-dim",UC_NONE +209,8,16,"RadarAccumulationQualityIndex_72H","Radar 72-hour QPE Accumulation Quality","non-dim",UC_NONE +209,8,17,"GaugeInflIndex_01H_Pass2","Gauge Influence Index for 1-hour QPE (2-hour latency)","non-dim",UC_NONE +209,8,18,"GaugeInflIndex_03H_Pass2","Gauge Influence Index for 3-hour QPE (2-hour latency)","non-dim",UC_NONE +209,8,19,"GaugeInflIndex_06H_Pass2","Gauge Influence Index for 6-hour QPE (2-hour latency)","non-dim",UC_NONE +209,8,20,"GaugeInflIndex_12H_Pass2","Gauge Influence Index for 12-hour QPE (2-hour latency)","non-dim",UC_NONE +209,8,21,"GaugeInflIndex_24H_Pass2","Gauge Influence Index for 24-hour QPE (2-hour latency)","non-dim",UC_NONE +209,8,22,"GaugeInflIndex_48H_Pass2","Gauge Influence Index for 48-hour QPE (2-hour latency)","non-dim",UC_NONE +209,8,23,"GaugeInflIndex_72H_Pass2","Gauge Influence Index for 72-hour QPE (2-hour latency)","non-dim",UC_NONE +209,9,0,"MergedReflectivityQC","3D Reflectivty Mosaic - 33 CAPPIS (500-19000m)","dBZ",UC_NONE +209,9,1,"CONUSPlusMergedReflectivityQC","All Radar 3D Reflectivity Mosaic - 33 CAPPIS (500-19000m)","dBZ",UC_NONE +209,9,3,"MergedRhoHV,5-min","33 levels (one file per level)","-99",UC_NONE +209,9,4,"MergedZdr,5-min","33 levels (one file per level)","-99",UC_NONE +209,10,0,"MergedReflectivityQCComposite","Composite Reflectivity Mosaic (optimal method)","dBZ",UC_NONE +209,10,1,"HeightCompositeReflectivity","Height of Composite Reflectivity Mosaic (optimal method)","m MSL",UC_NONE +209,10,2,"LowLevelCompositeReflectivity","Low-Level Composite Reflectivity Mosaic (0-4km)","dBZ",UC_NONE +209,10,3,"HeightLowLevelCompositeReflectivity","Height of Low-Level Composite Reflectivity Mosaic (0-4km)","m MSL",UC_NONE +209,10,4,"LayerCompositeReflectivity_Low","Layer Composite Reflectivity Mosaic 0-24kft (low altitude)","dBZ",UC_NONE +209,10,5,"LayerCompositeReflectivity_High","Layer Composite Reflectivity Mosaic 24-60 kft (highest altitude)","dBZ",UC_NONE +209,10,6,"LayerCompositeReflectivity_Super","Layer Composite Reflectivity Mosaic 33-60 kft (super high altitude)","dBZ",UC_NONE +209,10,7,"CREF_1HR_MAX","Composite Reflectivity Hourly Maximum","dBZ",UC_NONE +209,10,8,"ReflectivityMaxAboveM10C","Maximum Reflectivity at -10 deg C height and above","dBZ",UC_NONE +209,10,9,"LayerCompositeReflectivity_ANC","Layer Composite Reflectivity Mosaic (2-4.5km) (for ANC)","dBZ",UC_NONE +209,10,10,"BREF_1HR_MAX","Base Reflectivity Hourly Maximum","dBZ",UC_NONE +209,11,0,"MergedBaseReflectivityQC","Base Reflectivity Mosaic (optimal method)","dBZ",UC_NONE +209,11,1,"MergedReflectivityComposite","Raw Composite Reflectivity Mosaic (max ref)","dBZ",UC_NONE +209,11,2,"MergedReflectivityQComposite","Composite Reflectivity Mosaic (max ref)","dBZ",UC_NONE +209,11,3,"MergedBaseReflectivity","Raw Base Reflectivity Mosaic (optimal method)","dBZ",UC_NONE +209,12,0,"FLASH_CREST_MAXUNITSTREAMFLOW","FLASH QPE-CREST Unit Streamflow","m^3/s/km^2",UC_NONE +209,12,1,"FLASH_CREST_MAXSTREAMFLOW","FLASH QPE-CREST Streamflow","m^3/s",UC_NONE 
+209,12,2,"FLASH_CREST_MAXSOILSAT","FLASH QPE-CREST Soil Saturation","%",UC_NONE +209,12,4,"FLASH_SAC_MAXUNITSTREAMFLOW","FLASH QPE-SAC Unit Streamflow","m^3/s/km^2",UC_NONE +209,12,5,"FLASH_SAC_MAXSTREAMFLOW","FLASH QPE-SAC Streamflow","m^3/s",UC_NONE +209,12,6,"FLASH_SAC_MAXSOILSAT","FLASH QPE-SAC Soil Saturation","%",UC_NONE +209,12,14,"FLASH_QPE_ARI30M","FLASH QPE Average Recurrence Interval 30-min","years",UC_NONE +209,12,15,"FLASH_QPE_ARI01H","FLASH QPE Average Recurrence Interval 01H","years",UC_NONE +209,12,16,"FLASH_QPE_ARI03H","FLASH QPE Average Recurrence Interval 03H","years",UC_NONE +209,12,17,"FLASH_QPE_ARI06H","FLASH QPE Average Recurrence Interval 06H","years",UC_NONE +209,12,18,"FLASH_QPE_ARI12H","FLASH QPE Average Recurrence Interval 12H","years",UC_NONE +209,12,19,"FLASH_QPE_ARI24H","FLASH QPE Average Recurrence Interval 24H","years",UC_NONE +209,12,20,"FLASH_QPE_ARIMAX","FLASH QPE Average Recurrence Interval Maximum","years",UC_NONE +209,12,26,"FLASH_QPE_FFG01H","FLASH QPE-to-FFG Ratio 01H","non-dim",UC_NONE +209,12,27,"FLASH_QPE_FFG03H","FLASH QPE-to-FFG Ratio 03H","non-dim",UC_NONE +209,12,28,"FLASH_QPE_FFG06H","FLASH QPE-to-FFG Ratio 06H","non-dim",UC_NONE +209,12,29,"FLASH_QPE_FFGMAX","FLASH QPE-to-FFG Ratio Maximum","non-dim",UC_NONE +209,12,39,"FLASH_HP_MAXUNITSTREAMFLOW","FLASH QPE-Hydrophobic Unit Streamflow","m^3/s/km^2",UC_NONE +209,12,40,"FLASH_HP_MAXSTREAMFLOW","FLASH QPE-Hydrophobic Streamflow","m^3/s",UC_NONE +209,13,0,"ANC_ConvectiveLikelihood","Likelihood of convection over the next 01H","non-dim",UC_NONE +209,13,1,"ANC_FinalForecast","01H reflectivity forecast","dBZ",UC_NONE +209,14,0,"LVL3_HREET","Level III High Resolution Enhanced Echo Top mosaic","kft",UC_NONE +209,14,1,"LVL3_HighResVIL","Level III High Resouion VIL mosaic","kg/m^2",UC_NONE diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_NCEP.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_NCEP.csv new file mode 100644 index 00000000..27a76aa2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_NCEP.csv @@ -0,0 +1,401 @@ +prod,cat,subcat,short_name,name,unit,unit_conv +0,0,192,"SNOHF","Snow Phase Change Heat Flux","W/(m^2)",UC_NONE +0,0,193,"TTRAD","Temperature tendency by all radiation","K/s",UC_NONE +0,0,194,"REV","Relative Error Variance","-",UC_NONE +0,0,195,"LRGHR","Large Scale Condensate Heating rate","K/s",UC_NONE +0,0,196,"CNVHR","Deep Convective Heating rate","K/s",UC_NONE +0,0,197,"THFLX","Total Downward Heat Flux at Surface","W/(m^2)",UC_NONE +0,0,198,"TTDIA","Temperature Tendency By All Physics","K/s",UC_NONE +0,0,199,"TTPHY","Temperature Tendency By Non-radiation Physics","K/s",UC_NONE +0,0,200,"TSD1D","Standard Dev. of IR Temp. over 1x1 deg. 
area","K",UC_NONE +0,0,201,"SHAHR","Shallow Cnvective Heating rate","K/s",UC_NONE +0,0,202,"VDFHR","Vertical Diffusion Heating rate","K/s",UC_NONE +0,0,203,"THZ0","Potential temperature at top of viscus sublayer","K",UC_NONE +0,0,204,"TCHP","Tropical Cyclone Heat Potential","J/(m^2*K)",UC_NONE +0,1,192,"CRAIN","Categorical Rain","0=no; 1=yes",UC_NONE +0,1,193,"CFRZR","Categorical Freezing Rain","0=no; 1=yes",UC_NONE +0,1,194,"CICEP","Categorical Ice Pellets","0=no; 1=yes",UC_NONE +0,1,195,"CSNOW","Categorical Snow","0=no; 1=yes",UC_NONE +0,1,196,"CPRAT","Convective Precipitation Rate","kg/(m^2*s)",UC_NONE +0,1,197,"MCONV","Horizontal Moisture Divergence","kg/(kg*s)",UC_NONE +0,1,198,"MINRH","Minimum Relative Humidity","%",UC_NONE +0,1,199,"PEVAP","Potential Evaporation","kg/(m^2)",UC_NONE +0,1,200,"PEVPR","Potential Evaporation Rate","W/(m^2)",UC_NONE +0,1,201,"SNOWC","Snow Cover","%",UC_NONE +0,1,202,"FRAIN","Rain Fraction of Total Liquid Water","-",UC_NONE +0,1,203,"RIME","Rime Factor","-",UC_NONE +0,1,204,"TCOLR","Total Column Integrated Rain","kg/(m^2)",UC_NONE +0,1,205,"TCOLS","Total Column Integrated Snow","kg/(m^2)",UC_NONE +0,1,206,"TIPD","Total Icing Potential Diagnostic","-",UC_NONE +0,1,207,"NCIP","Number concentration for ice particles","-",UC_NONE +0,1,208,"SNOT","Snow temperature","K",UC_NONE +0,1,209,"TCLSW","Total column-integrated supercooled liquid water","kg/(m^2)",UC_NONE +0,1,210,"TCOLM","Total column-integrated melting ice","kg/(m^2)",UC_NONE +0,1,211,"EMNP","Evaporation - Precipitation","cm/day",UC_NONE +0,1,212,"SBSNO","Sublimination (evaporation from snow)","W/(m^2)",UC_NONE +0,1,213,"CNVMR","Deep Convective Moistening Rate","kg/(kg*s)",UC_NONE +0,1,214,"SHAMR","Shallow Convective Moistening Rate","kg/(kg*s)",UC_NONE +0,1,215,"VDFMR","Vertical Diffusion Moistening Rate","kg/(kg*s)",UC_NONE +0,1,216,"CONDP","Condensation Pressure of Parcali Lifted From Indicate Surface","Pa",UC_NONE +0,1,217,"LRGMR","Large scale moistening rate","kg/(kg/s)",UC_NONE +0,1,218,"QZ0","Specific humidity at top of viscous sublayer","kg/kg",UC_NONE +0,1,219,"QMAX","Maximum specific humidity at 2m","kg/kg",UC_NONE +0,1,220,"QMIN","Minimum specific humidity at 2m","kg/kg",UC_NONE +0,1,221,"ARAIN","Liquid precipitation (rainfall)","kg/(m^2)",UC_NONE +0,1,222,"SNOWT","Snow temperature, depth-avg","K",UC_NONE +0,1,223,"APCPN","Total precipitation (nearest grid point)","kg/(m^2)",UC_NONE +0,1,224,"ACPCPN","Convective precipitation (nearest grid point)","kg/(m^2)",UC_NONE +0,1,225,"FRZR","Freezing rain","kg/(m^2)",UC_NONE +0,1,226,"Wx","Weather String","-",UC_NONE +0,1,227,"FROZR","Frozen Rain","kg/(m^2)",UC_NONE +0,1,228,"FICEAC","Flat Ice Accumulation (FRAM)","kg/(m^2)",UC_NONE +0,1,229,"LICEAC","Line Ice Accumulation (FRAM)","kg/(m^2)",UC_NONE +0,1,230,"SLACC","Sleet Accumulation","kg/(m^2)",UC_NONE +0,1,231,"PPINDX","Precipitation Potential Index","%",UC_NONE +0,1,232,"PROBCIP","Probability Cloud Ice Present","%",UC_NONE +0,1,233,"SNOWLR","Snow Liquid ratio","kg/kg",UC_NONE +0,1,241,"TSNOW","Total Snow","kg/(m^2)",UC_NONE +0,1,242,"RHPW","Relative Humidity with Respect to Precipitable Water","%",UC_NONE +0,2,192,"VWSH","Vertical speed sheer","1/s",UC_NONE +0,2,193,"MFLX","Horizontal Momentum Flux","N/(m^2)",UC_NONE +0,2,194,"USTM","U-Component Storm Motion","m/s",UC_NONE +0,2,195,"VSTM","V-Component Storm Motion","m/s",UC_NONE +0,2,196,"CD","Drag Coefficient","-",UC_NONE +0,2,197,"FRICV","Frictional Velocity","m/s",UC_NONE +0,2,198,"LAUV","Latitude of U Wind Component of 
Velocity","deg",UC_NONE +0,2,199,"LOUV","Longitude of U Wind Component of Velocity","deg",UC_NONE +0,2,200,"LAVV","Latitude of V Wind Component of Velocity","deg",UC_NONE +0,2,201,"LOVV","Longitude of V Wind Component of Velocity","deg",UC_NONE +0,2,202,"LAPP","Latitude of Presure Point","deg",UC_NONE +0,2,203,"LOPP","Longitude of Presure Point","deg",UC_NONE +0,2,204,"VEDH","Vertical Eddy Diffusivity Heat exchange","m^2/s",UC_NONE +0,2,205,"COVMZ","Covariance between Meridional and Zonal Components of the wind","m^2/s^2",UC_NONE +0,2,206,"COVTZ","Covariance between Temperature and Zonal Components of the wind","K*m/s",UC_NONE +0,2,207,"COVTM","Covariance between Temperature and Meridional Components of the wind","K*m/s",UC_NONE +0,2,208,"VDFUA","Vertical Diffusion Zonal Acceleration","m/s^2",UC_NONE +0,2,209,"VDFVA","Vertical Diffusion Meridional Acceleration","m/s^2",UC_NONE +0,2,210,"GWDU","Gravity wave drag zonal acceleration","m/s^2",UC_NONE +0,2,211,"GWDV","Gravity wave drag meridional acceleration","m/s^2",UC_NONE +0,2,212,"CNVU","Convective zonal momentum mixing acceleration","m/s^2",UC_NONE +0,2,213,"CNVV","Convective meridional momentum mixing acceleration","m/s^2",UC_NONE +0,2,214,"WTEND","Tendency of vertical velocity","m/s^2",UC_NONE +0,2,215,"OMGALF","Omega (Dp/Dt) divide by density","K",UC_NONE +0,2,216,"CNGWDU","Convective Gravity wave drag zonal acceleration","m/s^2",UC_NONE +0,2,217,"CNGWDV","Convective Gravity wave drag meridional acceleration","m/s^2",UC_NONE +0,2,218,"LMV","Velocity point model surface","-",UC_NONE +0,2,219,"PVMWW","Potential vorticity (mass-weighted)","1/(s/m)",UC_NONE +0,2,220,"MAXUVV","Hourly Maximum of Downward Vertical Velocity in the lowest 400hPa","m/s",UC_NONE +0,2,221,"MAXDVV","Hourly Maximum of Downward Vertical Velocity","m/s",UC_NONE +0,2,222,"MAXUW","U Component of Hourly Maximum 10m Wind Speed","m/s",UC_NONE +0,2,223,"MAXVW","V Component of Hourly Maximum 10m Wind Speed","m/s",UC_NONE +0,2,224,"VRATE","Ventilation Rate","m^2/s",UC_NONE +0,2,225,"TRWSPD","Transport Wind Speed","m/s",UC_NONE +0,2,226,"TRWDIR","Transport Wind Direction","deg",UC_NONE +0,2,227,"TOA10","Earliest Reasonable Arrival Time (10% exceedance)","s",UC_NONE +0,2,228,"TOA50","Most Likely Arrival Time (50% exceedance)","s",UC_NONE +0,2,229,"TOD50","Most Likely Departure Time (50% exceedance)","s",UC_NONE +0,2,230,"TOD90","Latest Reasonable Departure Time (90% exceedance)","s",UC_NONE +0,3,192,"MSLET","MSLP (Eta model reduction)","Pa",UC_NONE +0,3,193,"5WAVH","5-Wave Geopotential Height","gpm",UC_NONE +0,3,194,"U-GWD","Zonal Flux of Gravity Wave Stress","N/(m^2)",UC_NONE +0,3,195,"V-GWD","Meridional Flux of Gravity Wave Stress","N/(m^2)",UC_NONE +0,3,196,"HPBL","Planetary Boundary Layer Height","m",UC_NONE +0,3,197,"5WAVA","5-Wave Geopotential Height Anomaly","gpm",UC_NONE +0,3,198,"MSLMA","MSLP (MAPS System Reduction)","Pa",UC_NONE +0,3,199,"TSLSA","3-hr pressure tendency (Std. Atmos. 
Reduction)","Pa/s",UC_NONE +0,3,200,"PLPL","Pressure of level from which parcel was lifted","Pa",UC_NONE +0,3,201,"LPSX","X-gradiant of Log Pressure","1/m",UC_NONE +0,3,202,"LPSY","Y-gradiant of Log Pressure","1/m",UC_NONE +0,3,203,"HGTX","X-gradiant of Height","1/m",UC_NONE +0,3,204,"HGTY","Y-gradiant of Height","1/m",UC_NONE +0,3,205,"LAYTH","Layer Thickness","m",UC_NONE +0,3,206,"NLGSP","Natural Log of Surface Pressure","ln(kPa)",UC_NONE +0,3,207,"CNVUMF","Convective updraft mass flux","kg/m^2/s",UC_NONE +0,3,208,"CNVDMF","Convective downdraft mass flux","kg/m^2/s",UC_NONE +0,3,209,"CNVDEMF","Convective detrainment mass flux","kg/m^2/s",UC_NONE +0,3,210,"LMH","Mass point model surface","-",UC_NONE +0,3,211,"HGTN","Geopotential height (nearest grid point)","gpm",UC_NONE +0,3,212,"PRESN","Pressure (nearest grid point)","Pa",UC_NONE +0,3,213,"ORCONV","Orographic Convexity","",UC_NONE +0,3,214,"ORASW","Orographic Asymmetry, W Component","",UC_NONE +0,3,215,"ORASS","Orographic Asymmetry, S Component","",UC_NONE +0,3,216,"ORASSW","Orographic Asymmetry, SW Component","",UC_NONE +0,3,217,"ORASNW","Orographic Asymmetry, NW Component","",UC_NONE +0,3,218,"ORLSW","Orographic Length Scale, W Component","",UC_NONE +0,3,219,"ORLSS","Orographic Length Scale, S Component","",UC_NONE +0,3,220,"ORLSSW","Orographic Length Scale, SW Component","",UC_NONE +0,3,221,"ORLSNW","Orographic Length Scale, NW Component","",UC_NONE +0,4,192,"DSWRF","Downward Short-Wave Rad. Flux","W/(m^2)",UC_NONE +0,4,193,"USWRF","Upward Short-Wave Rad. Flux","W/(m^2)",UC_NONE +0,4,194,"DUVB","UV-B downward solar flux","W/(m^2)",UC_NONE +0,4,195,"CDUVB","Clear sky UV-B downward solar flux","W/(m^2)",UC_NONE +0,4,196,"CSDSF","Clear sky Downward Solar Flux","W/(m^2)",UC_NONE +0,4,197,"SWHR","Solar Radiative Heating Rate","K/s",UC_NONE +0,4,198,"CSUSF","Clear Sky Upward Solar Flux","W/(m^2)",UC_NONE +0,4,199,"CFNSF","Cloud Forcing Net Solar Flux","W/(m^2)",UC_NONE +0,4,200,"VBDSF","Visible Beam Downward Solar Flux","W/(m^2)",UC_NONE +0,4,201,"VDDSF","Visible Diffuse Downward Solar Flux","W/(m^2)",UC_NONE +0,4,202,"NBDSF","Near IR Beam Downward Solar Flux","W/(m^2)",UC_NONE +0,4,203,"NDDSF","Near IR Diffuse Downward Solar Flux","W/(m^2)",UC_NONE +0,4,204,"DTRF","Downward Total radiation Flux","W/(m^2)",UC_NONE +0,4,205,"UTRF","Upward Total radiation Flux","W/(m^2)",UC_NONE +0,5,192,"DLWRF","Downward Long-Wave Rad. Flux","W/(m^2)",UC_NONE +0,5,193,"ULWRF","Upward Long-Wave Rad. 
Flux","W/(m^2)",UC_NONE +0,5,194,"LWHR","Long-Wave Radiative Heating Rate","K/s",UC_NONE +0,5,195,"CSULF","Clear Sky Upward Long Wave Flux","W/(m^2)",UC_NONE +0,5,196,"CSDLF","Clear Sky Downward Long Wave Flux","W/(m^2)",UC_NONE +0,5,197,"CFNLF","Cloud Forcing Net Long Wave Flux","W/(m^2)",UC_NONE +0,6,192,"CDLYR","Non-Convective Cloud Cover","%",UC_NONE +0,6,193,"CWORK","Cloud Work Function","J/kg",UC_NONE +0,6,194,"CUEFI","Convective Cloud Efficiency","-",UC_NONE +0,6,195,"TCOND","Total Condensate","kg/kg",UC_NONE +0,6,196,"TCOLW","Total Column-Integrated Cloud Water","kg/(m^2)",UC_NONE +0,6,197,"TCOLI","Total Column-Integrated Cloud Ice","kg/(m^2)",UC_NONE +0,6,198,"TCOLC","Total Column-Integrated Condensate","kg/(m^2)",UC_NONE +0,6,199,"FICE","Ice fraction of total condensate","-",UC_NONE +0,6,200,"MFLUX","Convective Cloud Mass Flux","Pa/s",UC_NONE +0,6,201,"SUNSD","SunShine duration","s",UC_NONE +0,7,192,"LFTX","Surface Lifted Index","K",UC_NONE +0,7,193,"4LFTX","Best (4 layer) Lifted Index","K",UC_NONE +0,7,194,"RI","Richardson Number","-",UC_NONE +0,7,195,"CWDI","Convective Weather Detection Index","-",UC_NONE +0,7,196,"UVI","Ultra Violet Index","W/(m^2)",UC_UVIndex +0,7,197,"UPHL","Updraft Helicity","m^2/s^2",UC_NONE +0,7,198,"LAI","Leaf area index","-",UC_NONE +0,7,199,"MXUPHL","Hourly Maximum of Updraft Helicity over Layer 2km to 5 km AGL","m^2/s^2",UC_NONE +0,7,200,"MNUPHL","Hourly Minimum of Updraft Helicity","m^2/s^2",UC_NONE +0,7,201,"BNEGLAY","Bourgoiun Negative Energy Layer (surface to freezing level)","J/kg",UC_NONE +0,7,202,"BPOSELAY","Bourgoiun Positive Energy Layer (2k ft AGL to 400 hPa)","J/kg",UC_NONE +0,13,192,"PMTC","Particulate matter (coarse)","10^-6g/m^3",UC_NONE +0,13,193,"PMTF","Particulate matter (fine)","10^-6g/m^3",UC_NONE +0,13,194,"LPMTF","Particulate matter (fine)","log10(10^-6g/m^3)",UC_LOG10 +0,13,195,"LIPMF","Integrated column particulate matter (fine)","log10(10^-6g/m^3)",UC_LOG10 +0,14,192,"O3MR","Ozone Mixing Ratio","kg/kg",UC_NONE +0,14,193,"OZCON","Ozone Concentration","PPB",UC_NONE +0,14,194,"OZCAT","Categorical Ozone Concentration","-",UC_NONE +0,14,195,"VDFOZ","Ozone Vertical Diffusion","kg/kg/s",UC_NONE +0,14,196,"POZ","Ozone Production","kg/kg/s",UC_NONE +0,14,197,"TOZ","Ozone Tendency","kg/kg/s",UC_NONE +0,14,198,"POZT","Ozone Production from Temperature Term","kg/kg/s",UC_NONE +0,14,199,"POZO","Ozone Production from Column Ozone Term","kg/kg/s",UC_NONE +0,14,200,"OZMAX1","Ozone Daily Max from 1-hour Average","ppbV",UC_NONE +0,14,201,"OZMAX8","Ozone Daily Max from 8-hour Average","ppbV",UC_NONE +0,14,202,"PDMAX1","PM 2.5 Daily Max from 1-hour Average","(10^-6g/(m^3)",UC_NONE +0,14,203,"PDMAX24","PM 2.5 Daily Max from 24-hour Average","(10^-6g/(m^3)",UC_NONE +0,16,192,"REFZR","Derived radar reflectivity backscatter from rain","mm^6/m^3",UC_NONE +0,16,193,"REFZI","Derived radar reflectivity backscatter from ice","mm^6/m^3",UC_NONE +0,16,194,"REFZC","Derived radar reflectivity backscatter from parameterized convection","mm^6/m^3",UC_NONE +0,16,195,"REFD","Derived radar reflectivity","dB",UC_NONE +0,16,196,"REFC","Maximum / Composite radar reflectivity","dB",UC_NONE +0,16,197,"RETOP","Radar Echo Top (18.3 DBZ)","m",UC_NONE +0,16,198,"MAXREF","Hourly Maximum of Simulated Reflectivity at 1 km AGL","dB",UC_NONE +0,17,192,"LTNG","Lightning","-",UC_NONE +0,19,192,"MXSALB","Maximum Snow Albedo","%",UC_NONE +0,19,193,"SNFALB","Snow-Free Albedo","%",UC_NONE +0,19,194,"SRCONO","Slight risk convective outlook","categorical",UC_NONE 
+0,19,195,"MRCONO","Moderate risk convective outlook","categorical",UC_NONE +0,19,196,"HRCONO","High risk convective outlook","categorical",UC_NONE +0,19,197,"TORPROB","Tornado probability","%",UC_NONE +0,19,198,"HAILPROB","Hail probability","%",UC_NONE +0,19,199,"WINDPROB","Wind probability","%",UC_NONE +0,19,200,"STORPROB","Significant Tornado probability","%",UC_NONE +0,19,201,"SHAILPRO","Significant Hail probability","%",UC_NONE +0,19,202,"SWINDPRO","Significant Wind probability","%",UC_NONE +0,19,203,"TSTMC","Categorical Thunderstorm","0=no; 1=yes",UC_NONE +0,19,204,"MIXLY","Number of mixed layers next to surface","integer",UC_NONE +0,19,205,"FLGHT","Flight Category","-",UC_NONE +0,19,206,"CICEL","Confidence Ceiling","-",UC_NONE +0,19,207,"CIVIS","Confidence Visibility","-",UC_NONE +0,19,208,"CIFLT","Confidence Flight Category","-",UC_NONE +0,19,209,"LAVNI","Low Level aviation interest","-",UC_NONE +0,19,210,"HAVNI","High Level aviation interest","-",UC_NONE +0,19,211,"SBSALB","Visible; Black Sky Albedo","%",UC_NONE +0,19,212,"SWSALB","Visible; White Sky Albedo","%",UC_NONE +0,19,213,"NBSALB","Near IR; Black Sky Albedo","%",UC_NONE +0,19,214,"NWSALB","Near IR; White Sky Albedo","%",UC_NONE +0,19,215,"PRSVR","Total Probability of Severe Thunderstorms (Days 2,3)","%",UC_NONE +0,19,216,"PRSIGSVR","Total Probability of Extreme Severe Thunderstorms (Days 2,3)","%",UC_NONE +0,19,217,"SIPD","Supercooled Large Droplet Icing","0=None; 1=Light; 2=Moderate; 3=Severe; 4=Trace; 5=Heavy; 255=missing",UC_NONE +0,19,218,"EPSR","Radiative emissivity","",UC_NONE +0,19,219,"TPFI","Turbulence potential forecast index","-",UC_NONE +0,19,220,"SVRTS","Categorical Severe Thunderstorm","0=No; 1=Yes; 2-3=Reserved; 4=Low; 5=Reserved; 6=Medium; 7=Reserved; 8=High; 255=missing",UC_NONE +0,19,221,"PROCON","Probability of Convection","%",UC_NONE +0,19,222,"CONVP","Convection Potential","0=No; 1=Yes; 2-3=Reserved; 4=Low; 5=Reserved; 6=Medium; 7=Reserved; 8=High; 255=missing",UC_NONE +0,19,223,"","Reserved","-",UC_NONE +0,19,224,"","Reserved","-",UC_NONE +0,19,225,"","Reserved","-",UC_NONE +0,19,226,"","Reserved","-",UC_NONE +0,19,227,"","Reserved","-",UC_NONE +0,19,228,"","Reserved","-",UC_NONE +0,19,229,"","Reserved","-",UC_NONE +0,19,230,"","Reserved","-",UC_NONE +0,19,231,"","Reserved","-",UC_NONE +0,19,232,"VAFTD","Volcanic Ash Forecast Transport and Dispersion","log10(kg/m^3)",UC_NONE +0,19,233,"ICPRB","Icing probability","-",UC_NONE +0,19,234,"ICSEV","Icing severity","-",UC_NONE +0,19,235,"JFWPRB","Joint Fire Weather Probability","%",UC_NONE +0,19,236,"SNOWLVL","Snow Level","m",UC_NONE +0,19,237,"DRYTPROB","Dry Thunderstorm Probability","%",UC_NONE +0,191,192,"NLAT","Latitude (-90 to 90)","deg",UC_NONE +0,191,193,"ELON","East Longitude (0 to 360)","deg",UC_NONE +0,191,194,"TSEC","Seconds prior to initial reference time","s",UC_NONE +0,191,195,"MLYNO","Model Layer number (From bottom up)","",UC_NONE +0,191,196,"NLATN","Latitude (nearest neighbor) (-90 to 90)","deg",UC_NONE +0,191,197,"ELONN","East longitude (nearest neighbor) (0 to 360)","deg",UC_NONE +0,192,1,"COVZM","Covariance between zonal and meridonial components of the wind","m^2/s^2",UC_NONE +0,192,2,"COVTZ","Covariance between zonal component of the wind and temperature","K*m/s",UC_NONE +0,192,3,"COVTM","Covariance between meridonial component of the wind and temperature","K*m/s",UC_NONE +0,192,4,"COVTW","Covariance between temperature and vertical component of the wind","K*m/s",UC_NONE +0,192,5,"COVZZ","Covariance between zonal and zonal components of 
the wind","m^2/s^2",UC_NONE +0,192,6,"COVMM","Covariance between meridonial and meridonial components of the wind","m^2/s^2",UC_NONE +0,192,7,"COVQZ","Covariance between specific humidity and zonal components of the wind","kg/kg*m/s",UC_NONE +0,192,8,"COVQM","Covariance between specific humidity and meridonial components of the wind","kg/kg*m/s",UC_NONE +0,192,9,"COVTVV","Covariance between temperature and vertical components of the wind","K*Pa/s",UC_NONE +0,192,10,"COVQVV","Covariance between specific humidity and vertical components of the wind","kg/kg*Pa/s",UC_NONE +0,192,11,"COVPSPS","Covariance between surface pressure and surface pressure","Pa*Pa",UC_NONE +0,192,12,"COVQQ","Covariance between specific humidity and specific humidity","kg/kg*kg/kg",UC_NONE +0,192,13,"COVVVVV","Covariance between vertical and vertical components of the wind","Pa^2/s^2",UC_NONE +0,192,14,"COVTT","Covariance between temperature and temperature","K*K",UC_NONE +1,0,192,"BGRUN","Baseflow-Groundwater Runoff","kg/(m^2)",UC_NONE +1,0,193,"SSRUN","Storm Surface Runoff","kg/(m^2)",UC_NONE +1,1,192,"CPOZP","Probability of Freezing Precipitation","%",UC_NONE +1,1,193,"CPOFP","Probability of Frozen Precipitation","%",UC_NONE +1,1,194,"PPFFG","Probability of precipitation exceeding flash flood guidance values","%",UC_NONE +1,1,195,"CWR","Probability of Wetting Rain; exceeding in 0.1 inch in a given time period","%",UC_NONE +2,0,192,"SOILW","Volumetric Soil Moisture Content","Fraction",UC_NONE +2,0,193,"GFLUX","Ground Heat Flux","W/(m^2)",UC_NONE +2,0,194,"MSTAV","Moisture Availability","%",UC_NONE +2,0,195,"SFEXC","Exchange Coefficient","(kg/(m^3))(m/s)",UC_NONE +2,0,196,"CNWAT","Plant Canopy Surface Water","kg/(m^2)",UC_NONE +2,0,197,"BMIXL","Blackadar's Mixing Length Scale","m",UC_NONE +2,0,198,"VGTYP","Vegetation Type","0..13",UC_NONE +2,0,199,"CCOND","Canopy Conductance","m/s",UC_NONE +2,0,200,"RSMIN","Minimal Stomatal Resistance","s/m",UC_NONE +2,0,201,"WILT","Wilting Point","Fraction",UC_NONE +2,0,202,"RCS","Solar parameter in canopy conductance","Fraction",UC_NONE +2,0,203,"RCT","Temperature parameter in canopy conductance","Fraction",UC_NONE +2,0,204,"RCQ","Humidity parameter in canopy conductance","Fraction",UC_NONE +2,0,205,"RCSOL","Soil moisture parameter in canopy conductance","Fraction",UC_NONE +2,0,206,"RDRIP","Rate of water dropping from canopy to ground","unknown",UC_NONE +2,0,207,"ICWAT","Ice-free water surface","%",UC_NONE +2,0,208,"AKHS","Surface exchange coefficients for T and Q divided by delta z","m/s",UC_NONE +2,0,209,"AKMS","Surface exchange coefficients for U and V divided by delta z","m/s",UC_NONE +2,0,210,"VEGT","Vegetation canopy temperature","K",UC_NONE +2,0,211,"SSTOR","Surface water storage","K g/m^2",UC_NONE +2,0,212,"LSOIL","Liquid soil moisture content (non-frozen)","K g/m^2",UC_NONE +2,0,213,"EWATR","Open water evaporation (standing water)","W/m^2",UC_NONE +2,0,214,"GWREC","Groundwater recharge","kg/m^2",UC_NONE +2,0,215,"QREC","Flood plain recharge","kg/m^2",UC_NONE +2,0,216,"SFCRH","Roughness length for heat","m",UC_NONE +2,0,217,"NDVI","Normalized difference vegetation index","-",UC_NONE +2,0,218,"LANDN","Land-sea coverage (nearest neighbor)","0=sea; 1=land",UC_NONE +2,0,219,"AMIXL","Asymptotic mixing length scale","m",UC_NONE +2,0,220,"WVINC","Water vapor added by precip assimilation","kg/m^2",UC_NONE +2,0,221,"WCINC","Water condensate added by precip assimilation","kg/m^2",UC_NONE +2,0,222,"WVCONV","Water vapor flux convergence (vertical int)","kg/m^2",UC_NONE 
+2,0,223,"WCCONV","Water condensate flux convergence (vertical int)","kg/m^2",UC_NONE +2,0,224,"WVUFLX","Water vapor zonal flux (vertical int)","kg/m^2",UC_NONE +2,0,225,"WVVFLX","Water vapor meridional flux (vertical int)","kg/m^2",UC_NONE +2,0,226,"WCUFLX","Water condensate zonal flux (vertical int)","kg/m^2",UC_NONE +2,0,227,"WCVFLX","Water condensate meridional flux (vertical int)","kg/m^2",UC_NONE +2,0,228,"ACOND","Aerodynamic conductance","m/s",UC_NONE +2,0,229,"EVCW","Canopy water evaporation","W/(m^2)",UC_NONE +2,0,230,"TRANS","Transpiration","W/(m^2)",UC_NONE +2,1,192,"CANL","Cold Advisory for Newborn Livestock","0=none; 2=slight; 4=mild; 6=moderate; 8=severe; 10=extreme",UC_NONE +2,3,192,"SOILL","Liquid Volumetric Soil Moisture (non Frozen)","Proportion",UC_NONE +2,3,193,"RLYRS","Number of Soil Layers in Root Zone","-",UC_NONE +2,3,194,"SLTYP","Surface Slope Type","Index",UC_NONE +2,3,195,"SMREF","Transpiration Stress-onset (soil moisture)","Proportion",UC_NONE +2,3,196,"SMDRY","Direct Evaporation Cease (soil moisture)","Proportion",UC_NONE +2,3,197,"POROS","Soil Porosity","Proportion",UC_NONE +2,3,198,"EVBS","Direct evaporation from bare soil","W/m^2",UC_NONE +2,3,199,"LSPA","Land Surface Precipitation Accumulation","kg/m^2",UC_NONE +2,3,200,"BARET","Bare soil surface skin temperature","K",UC_NONE +2,3,201,"AVSFT","Average surface skin temperature","K",UC_NONE +2,3,202,"RADT","Effective radiative skin temperature","K",UC_NONE +2,3,203,"FLDCP","Field Capacity","fraction",UC_NONE +3,1,192,"USCT","Scatterometer Estimated U Wind","m/s",UC_NONE +3,1,193,"VSCT","Scatterometer Estimated V Wind","m/s",UC_NONE +3,1,194,"SWQI","Scatterometer Wind Quality","",UC_NONE +3,192,0,"SBT122","Simulated Brightness Temperature for GOES 12, Channel 2","K",UC_NONE +3,192,1,"SBT123","Simulated Brightness Temperature for GOES 12, Channel 3","K",UC_NONE +3,192,2,"SBT124","Simulated Brightness Temperature for GOES 12, Channel 4","K",UC_NONE +3,192,3,"SBT125","Simulated Brightness Temperature for GOES 12, Channel 5","K",UC_NONE +3,192,4,"SBC123","Simulated Brightness Counts for GOES 12, Channel 3","numeric",UC_NONE +3,192,5,"SBC124","Simulated Brightness Counts for GOES 12, Channel 4","numeric",UC_NONE +3,192,6,"SBT112","Simulated Brightness Temperature for GOES 11, Channel 2","K",UC_NONE +3,192,7,"SBT113","Simulated Brightness Temperature for GOES 11, Channel 3","K",UC_NONE +3,192,8,"SBT114","Simulated Brightness Temperature for GOES 11, Channel 4","K",UC_NONE +3,192,9,"SBT115","Simulated Brightness Temperature for GOES 11, Channel 5","K",UC_NONE +3,192,10,"AMSRE9","Simulated Brightness Temperature for AMSRE on Aqua, Channel 9","K",UC_NONE +3,192,11,"AMSRE10","Simulated Brightness Temperature for AMSRE on Aqua, Channel 10","K",UC_NONE +3,192,12,"AMSRE11","Simulated Brightness Temperature for AMSRE on Aqua, Channel 11","K",UC_NONE +3,192,13,"AMSRE12","Simulated Brightness Temperature for AMSRE on Aqua, Channel 12","K",UC_NONE +3,192,14,"SRFA161","Simulated Reflectance Factor for ABI GOES-16, Band-1","",UC_NONE +3,192,15,"SRFA162","Simulated Reflectance Factor for ABI GOES-16, Band-2","",UC_NONE +3,192,16,"SRFA163","Simulated Reflectance Factor for ABI GOES-16, Band-3","",UC_NONE +3,192,17,"SRFA164","Simulated Reflectance Factor for ABI GOES-16, Band-4","",UC_NONE +3,192,18,"SRFA165","Simulated Reflectance Factor for ABI GOES-16, Band-5","",UC_NONE +3,192,19,"SRFA166","Simulated Reflectance Factor for ABI GOES-16, Band-6","",UC_NONE +3,192,20,"SBTA167","Simulated Brightness Temperature for ABI GOES-16, 
Band-7","K",UC_NONE +3,192,21,"SBTA168","Simulated Brightness Temperature for ABI GOES-16, Band-8","K",UC_NONE +3,192,22,"SBTA169","Simulated Brightness Temperature for ABI GOES-16, Band-9","K",UC_NONE +3,192,23,"SBTA1610","Simulated Brightness Temperature for ABI GOES-16, Band-10","K",UC_NONE +3,192,24,"SBTA1611","Simulated Brightness Temperature for ABI GOES-16, Band-11","K",UC_NONE +3,192,25,"SBTA1612","Simulated Brightness Temperature for ABI GOES-16, Band-12","K",UC_NONE +3,192,26,"SBTA1613","Simulated Brightness Temperature for ABI GOES-16, Band-13","K",UC_NONE +3,192,27,"SBTA1614","Simulated Brightness Temperature for ABI GOES-16, Band-14","K",UC_NONE +3,192,28,"SBTA1615","Simulated Brightness Temperature for ABI GOES-16, Band-15","K",UC_NONE +3,192,29,"SBTA1616","Simulated Brightness Temperature for ABI GOES-16, Band-16","K",UC_NONE +3,192,30,"SRFA171","Simulated Reflectance Factor for ABI GOES-17, Band-1","",UC_NONE +3,192,31,"SRFA172","Simulated Reflectance Factor for ABI GOES-17, Band-2","",UC_NONE +3,192,32,"SRFA173","Simulated Reflectance Factor for ABI GOES-17, Band-3","",UC_NONE +3,192,33,"SRFA174","Simulated Reflectance Factor for ABI GOES-17, Band-4","",UC_NONE +3,192,34,"SRFA175","Simulated Reflectance Factor for ABI GOES-17, Band-5","",UC_NONE +3,192,35,"SRFA176","Simulated Reflectance Factor for ABI GOES-17, Band-6","",UC_NONE +3,192,36,"SBTA177","Simulated Brightness Temperature for ABI GOES-17, Band-7","K",UC_NONE +3,192,37,"SBTA178","Simulated Brightness Temperature for ABI GOES-17, Band-8","K",UC_NONE +3,192,38,"SBTA179","Simulated Brightness Temperature for ABI GOES-17, Band-9","K",UC_NONE +3,192,39,"SBTA1710","Simulated Brightness Temperature for ABI GOES-17, Band-10","K",UC_NONE +3,192,40,"SBTA1711","Simulated Brightness Temperature for ABI GOES-17, Band-11","K",UC_NONE +3,192,41,"SBTA1712","Simulated Brightness Temperature for ABI GOES-17, Band-12","K",UC_NONE +3,192,42,"SBTA1713","Simulated Brightness Temperature for ABI GOES-17, Band-13","K",UC_NONE +3,192,43,"SBTA1714","Simulated Brightness Temperature for ABI GOES-17, Band-14","K",UC_NONE +3,192,44,"SBTA1715","Simulated Brightness Temperature for ABI GOES-17, Band-15","K",UC_NONE +3,192,45,"SBTA1716","Simulated Brightness Temperature for ABI GOES-17, Band-16","K",UC_NONE +10,0,192,"WSTP","Wave Steepness","0",UC_NONE +10,0,193,"WLENG","Wave Length","0",UC_NONE +10,1,192,"OMLU","Ocean Mixed Layer U Velocity","m/s",UC_NONE +10,1,193,"OMLV","Ocean Mixed Layer V Velocity","m/s",UC_NONE +10,1,194,"UBARO","Barotropic U Velocity","m/s",UC_NONE +10,1,195,"VBARO","Barotropic V Velocity","m/s",UC_NONE +10,3,192,"SURGE","Hurricane Storm Surge","m",UC_M2Feet +10,3,193,"ETSRG","Extra Tropical Storm Surge","m",UC_M2Feet +10,3,194,"ELEV","Ocean Surface Elevation Relative to Geoid","m",UC_NONE +10,3,195,"SSHG","Sea Surface Height Relative to Geoid","m",UC_NONE +10,3,196,"P2OMLT","Ocean Mixed Layer Potential Density (Reference 2000m)","kg/(m^3)",UC_NONE +10,3,197,"AOHFLX","Net Air-Ocean Heat Flux","W/(m^2)",UC_NONE +10,3,198,"ASHFL","Assimilative Heat Flux","W/(m^2)",UC_NONE +10,3,199,"SSTT","Surface Temperature Trend","degree/day",UC_NONE +10,3,200,"SSST","Surface Salinity Trend","psu/day",UC_NONE +10,3,201,"KENG","Kinetic Energy","J/kg",UC_NONE +10,3,202,"SLTFL","Salt Flux","kg/(m^2*s)",UC_NONE +10,3,203,"LCH","Heat Exchange Coefficient","",UC_NONE +10,3,242,"TCSRG20","20% Tropical Cyclone Storm Surge Exceedance","m",UC_M2Feet +10,3,243,"TCSRG30","30% Tropical Cyclone Storm Surge Exceedance","m",UC_M2Feet 
+10,3,244,"TCSRG40","40% Tropical Cyclone Storm Surge Exceedance","m",UC_M2Feet +10,3,245,"TCSRG50","50% Tropical Cyclone Storm Surge Exceedance","m",UC_M2Feet +10,3,246,"TCSRG60","60% Tropical Cyclone Storm Surge Exceedance","m",UC_M2Feet +10,3,247,"TCSRG70","70% Tropical Cyclone Storm Surge Exceedance","m",UC_M2Feet +10,3,248,"TCSRG80","80% Tropical Cyclone Storm Surge Exceedance","m",UC_M2Feet +10,3,249,"TCSRG90","90% Tropical Cyclone Storm Surge Exceedance","m",UC_M2Feet +10,3,250,"ETCWL","Extra Tropical Storm Surge Combined Surge and Tide","m",UC_M2Feet +10,3,251,"TIDE","Tide","m",UC_M2Feet +10,3,252,"EROSNP","Erosion Occurrence Probability","%",UC_NONE +10,3,253,"OWASHP","Overwash Occurrence Probability","%",UC_NONE +10,4,192,"WTMPC","3-D Temperature","deg C",UC_NONE +10,4,193,"SALIN","3-D Salinity","",UC_NONE +10,4,194,"BKENG","Barotropic Kinetic Energy","J/kg",UC_NONE +10,4,195,"DBSS","Geometric Depth Below Sea Surface","m",UC_NONE +10,4,196,"INTFD","Interface Depths","m",UC_NONE +10,4,197,"OHC","Ocean Heat Content","J/m^2",UC_NONE diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_NDFD.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_NDFD.csv new file mode 100644 index 00000000..f59b2d0c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_NDFD.csv @@ -0,0 +1,38 @@ +prod,cat,subcat,short_name,name,unit,unit_conv +0,0,193,"ApparentT","Apparent Temperature","K",UC_K2F +0,0,205,"WBGT","Wet Bulb Globe Temperature","K",UC_K2F +0,1,192,"Wx","Weather string","-",UC_NONE +0,1,193,"PPI","Precipitation Potential Index","%",UC_NONE +0,1,227,"IceAccum","Ice Accumulation","kg/m^2",UC_InchWater +0,10,8,"PoP12","Prob of 0.01 In. of Precip","%",UC_NONE +0,13,194,"smokes","Surface level smoke from fires","log10(10^-6g/m^3)",UC_LOG10 +0,13,195,"smokec","Average vertical column smoke from fires","log10(10^-6g/m^3)",UC_LOG10 +0,14,192,"O3MR","Ozone Mixing Ratio","kg/kg",UC_NONE +0,14,193,"OZCON","Ozone Concentration","PPB",UC_NONE +0,14,200,"OZMAX1","Ozone Daily Max from 1-hour Average","ppbV",UC_NONE +0,14,201,"OZMAX8","Ozone Daily Max from 8-hour Average","ppbV",UC_NONE +0,19,194,"ConvOutlook","Convective Hazard Outlook","0=none; 2=tstm; 4=slight; 6=moderate; 8=high",UC_NONE +0,19,197,"TornadoProb","Tornado Probability","%",UC_NONE +0,19,198,"HailProb","Hail Probability","%",UC_NONE +0,19,199,"WindProb","Damaging Thunderstorm Wind Probability","%",UC_NONE +0,19,200,"XtrmTornProb","Extreme Tornado Probability","%",UC_NONE +0,19,201,"XtrmHailProb","Extreme Hail Probability","%",UC_NONE +0,19,202,"XtrmWindProb","Extreme Thunderstorm Wind Probability","%",UC_NONE +0,19,215,"TotalSvrProb","Total Probability of Severe Thunderstorms","%",UC_NONE +0,19,216,"TotalXtrmProb","Total Probability of Extreme Severe Thunderstorms","%",UC_NONE +0,19,217,"WWA","Watch Warning Advisory","-",UC_NONE +0,19,235,"TCWind","Tropical Cyclone Wind Threat","0=none; 4=low; 6=moderate; 8=high; 10=extreme",UC_NONE +0,19,236,"TCSurge","Tropical Cyclone Storm Surge Threat","0=none; 4=low; 6=moderate; 8=high; 10=extreme",UC_NONE +0,19,238,"TCRain","Tropical Cyclone Flooding Rain Threat","0=none; 4=low; 6=moderate; 8=high; 10=extreme",UC_NONE +0,19,239,"TCTornado","Tropical Cyclone Tornado Threat","0=none; 4=low; 6=moderate; 8=high; 10=extreme",UC_NONE +0,19,246,"SNOWLVL","Snow Level","m",UC_M2Feet +0,19,203,"TotalSvrProb","Total Probability of Severe Thunderstorms","%",UC_NONE +0,19,204,"TotalXtrmProb","Total Probability of 
Extreme Severe Thunderstorms","%",UC_NONE +0,192,192,"FireWx","Critical Fire Weather","%",UC_NONE +0,192,194,"DryLightning","Dry Lightning","%",UC_NONE +2,1,192,"CANL","Cold Advisory for Newborn Livestock","0=none; 2=slight; 4=mild; 6=moderate; 8=severe; 10=extreme",UC_NONE +10,3,192,"Surge","Hurricane Storm Surge","m",UC_M2Feet +10,3,193,"ETSurge","Extra Tropical Storm Surge","m",UC_M2Feet +10,3,250,"StormTide","Storm Surge and Tide","m",UC_M2Feet +10,3,251,"Tide","Tide","m",UC_M2Feet +0,1,198,"MinRH","Minimum Relative Humidity","%",UC_NONE diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_index.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_index.csv new file mode 100644 index 00000000..c96caa9e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_2_local_index.csv @@ -0,0 +1,7 @@ +center_code,subcenter_code,filename +7,5,grib2_table_4_2_local_HPC.csv +7,,grib2_table_4_2_local_NCEP.csv +8,0,grib2_table_4_2_local_NDFD.csv +8,65535,grib2_table_4_2_local_NDFD.csv +54,,grib2_table_4_2_local_Canada.csv +161,,grib2_table_4_2_local_MRMS.csv diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_5.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_5.csv new file mode 100644 index 00000000..a49b73cc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_4_5.csv @@ -0,0 +1,261 @@ +"code","short_name","name","unit" +-4,"######################################################################################################","#","#" +-3,"DO NOT MODIFY THIS FILE. It is generated by frmts/grib/degrib/merge_degrib_and_wmo_tables.py","#","#" +-2,"from tables at version https://github.com/wmo-im/GRIB2/commit/cf3a2a24695f60f64ac9d5eb26a24b26d2a8a816","#","#" +-1,"######################################################################################################","#","#" +0,"RESERVED","Reserved","-" +1,"SFC","Ground or water surface","-" +2,"CBL","Cloud base level","-" +3,"CTL","Level of cloud tops","-" +4,"0DEG","Level of 0 degree C isotherm","-" +5,"ADCL","Level of adiabatic condensation lifted from the surface","-" +6,"MWSL","Maximum wind level","-" +7,"TRO","Tropopause","-" +8,"NTAT","Nominal top of atmosphere","-" +9,"SEAB","Sea bottom","-" +10,"EATM","Entire Atmosphere","-" +11,"CB","Cumulonimbus Base","m" +12,"CT","Cumulonimbus Top","m" +13,"unknown","Lowest level where vertically integrated cloud cover exceeds the specified percentage (cloud base for a given percentage cloud cover)","%" +14,"LFC","Level of free convection","-" +15,"CCL","Convection condensation level","-" +16,"LNB","Level of neutral buoyancy or equilibrium","-" +17,"","Departure level of the most unstable parcel of air (MUDL)","-" +18,"","Departure level of a mixed layer parcel of air with specified layer depth","Pa" +19,"","Reserved","-" +20,"TMPL","Isothermal level","K" +21,"","Lowest level where mass density exceeds the specified value (base for a given threshold of mass density)","kg m-3" +22,"","Highest level where mass density exceeds the specified value (top for a given threshold of mass density)","kg m-3" +23,"","Lowest level where air concentration exceeds the specified value (base for a given threshold of air concentration)","Bq m-3" +24,"","Highest level where air concentration exceeds the specified value (top for a given threshold of air concentration)","Bq m-3" +25,"","Highest level where radar reflectivity exceeds the specified value (echo top for a 
given threshold of reflectivity)","dBZ" +26,"","Convective cloud layer base","m" +27,"","Convective cloud layer top","m" +28,"","Reserved","-" +29,"","Reserved","-" +30,"","Specified radius from the centre of the Sun","m" +31,"","Solar photosphere","-" +32,"","Ionospheric D-region level","-" +33,"","Ionospheric E-region level","-" +34,"","Ionospheric F1-region level","-" +35,"","Ionospheric F2-region level","-" +36,"","Reserved","-" +37,"","Reserved","-" +38,"","Reserved","-" +39,"","Reserved","-" +40,"","Reserved","-" +41,"","Reserved","-" +42,"","Reserved","-" +43,"","Reserved","-" +44,"","Reserved","-" +45,"","Reserved","-" +46,"","Reserved","-" +47,"","Reserved","-" +48,"","Reserved","-" +49,"","Reserved","-" +50,"","Reserved","-" +51,"","Reserved","-" +52,"","Reserved","-" +53,"","Reserved","-" +54,"","Reserved","-" +55,"","Reserved","-" +56,"","Reserved","-" +57,"","Reserved","-" +58,"","Reserved","-" +59,"","Reserved","-" +60,"","Reserved","-" +61,"","Reserved","-" +62,"","Reserved","-" +63,"","Reserved","-" +64,"","Reserved","-" +65,"","Reserved","-" +66,"","Reserved","-" +67,"","Reserved","-" +68,"","Reserved","-" +69,"","Reserved","-" +70,"","Reserved","-" +71,"","Reserved","-" +72,"","Reserved","-" +73,"","Reserved","-" +74,"","Reserved","-" +75,"","Reserved","-" +76,"","Reserved","-" +77,"","Reserved","-" +78,"","Reserved","-" +79,"","Reserved","-" +80,"","Reserved","-" +81,"","Reserved","-" +82,"","Reserved","-" +83,"","Reserved","-" +84,"","Reserved","-" +85,"","Reserved","-" +86,"","Reserved","-" +87,"","Reserved","-" +88,"","Reserved","-" +89,"","Reserved","-" +90,"","Reserved","-" +91,"","Reserved","-" +92,"","Reserved","-" +93,"","Reserved","-" +94,"","Reserved","-" +95,"","Reserved","-" +96,"","Reserved","-" +97,"","Reserved","-" +98,"","Reserved","-" +99,"","Reserved","-" +100,"ISBL","Isobaric surface","Pa" +101,"MSL","Mean sea level","-" +102,"GPML","Specific altitude above mean sea level","m" +103,"HTGL","Specified height level above ground","m" +104,"SIGL","Sigma level","'sigma' value" +105,"HYBL","Hybrid level","-" +106,"DBLL","Depth below land surface","m" +107,"THEL","Isentropic (theta) level","K" +108,"SPDL","Level at specified pressure difference from ground to level","Pa" +109,"PVL","Potential vorticity surface","(K m^2)/(kg s)" +110,"RESERVED","Reserved","-" +111,"EtaL","Eta* level","-" +112,"RESERVED","Reserved","-" +113,"","Logarithmic hybrid level","-" +114,"SNOWLVL","Snow Level","m" +115,"","Sigma height level","-" +116,"","Reserved","-" +117,"unknown","Mixed layer depth","m" +118,"","Hybrid height level","-" +119,"","Hybrid pressure level","-" +120,"","Reserved","-" +121,"","Reserved","-" +122,"","Reserved","-" +123,"","Reserved","-" +124,"","Reserved","-" +125,"","Reserved","-" +126,"","Reserved","-" +127,"","Reserved","-" +128,"","Reserved","-" +129,"","Reserved","-" +130,"","Reserved","-" +131,"","Reserved","-" +132,"","Reserved","-" +133,"","Reserved","-" +134,"","Reserved","-" +135,"","Reserved","-" +136,"","Reserved","-" +137,"","Reserved","-" +138,"","Reserved","-" +139,"","Reserved","-" +140,"","Reserved","-" +141,"","Reserved","-" +142,"","Reserved","-" +143,"","Reserved","-" +144,"","Reserved","-" +145,"","Reserved","-" +146,"","Reserved","-" +147,"","Reserved","-" +148,"","Reserved","-" +149,"","Reserved","-" +150,"GVHC","Generalized Vertical Height Coordinate","-" +151,"","Soil level","Numeric" +152,"","Sea-ice level","Numeric" +153,"","Reserved","-" +154,"","Reserved","-" +155,"","Reserved","-" +156,"","Reserved","-" +157,"","Reserved","-" 
+158,"","Reserved","-" +159,"","Reserved","-" +160,"DBSL","Depth below sea level","m" +161,"","Depth below water surface","m" +162,"","Lake or river bottom","-" +163,"","Bottom of sediment layer","-" +164,"","Bottom of thermally active sediment layer","-" +165,"","Bottom of sediment layer penetrated by thermal wave","-" +166,"","Mixing layer","-" +167,"","Bottom of root zone","-" +168,"","Ocean model level","Numeric" +169,"","Ocean level defined by water density (sigma-theta) difference from near-surface to level","kg m-3" +170,"","Ocean level defined by water potential temperature difference from near-surface to level","K" +171,"","Ocean level defined by vertical eddy diffusivity difference from near-surface to level","m2 s-1" +172,"","Reserved","-" +173,"","Reserved","-" +174,"","Top surface of ice on sea, lake or river","-" +175,"","Top surface of ice, under snow cover, on sea, lake or river","-" +176,"","Bottom surface (underside) ice on sea, lake or river","-" +177,"","Deep soil (of indefinite depth)","-" +178,"","Reserved","-" +179,"","Top surface of glacier ice and inland ice","-" +180,"","Deep inland or glacier ice (of indefinite depth)","-" +181,"","Grid tile land fraction as a model surface","-" +182,"","Grid tile water fraction as a model surface","-" +183,"","Grid tile ice fraction on sea, lake or river as a model surface","-" +184,"","Grid tile glacier ice and inland ice fraction as a model surface","-" +185,"","Reserved","-" +186,"","Reserved","-" +187,"","Reserved","-" +188,"","Reserved","-" +189,"","Reserved","-" +190,"","Reserved","-" +191,"","Reserved","-" +192,"RESERVED","Reserved Local use","-" +193,"","Reserved for local use","-" +194,"","Reserved for local use","-" +195,"","Reserved for local use","-" +196,"","Reserved for local use","-" +197,"","Reserved for local use","-" +198,"","Reserved for local use","-" +199,"","Reserved for local use","-" +200,"EATM","Entire atmosphere (considered as a single layer)","-" +201,"EOCN","Entire ocean (considered as a single layer)","-" +202,"","Reserved for local use","-" +203,"","Reserved for local use","-" +204,"HTFL","Highest tropospheric freezing level","-" +205,"","Reserved for local use","-" +206,"GCBL","Grid scale cloud bottom level","-" +207,"GCTL","Grid scale cloud top level","-" +208,"","Reserved for local use","-" +209,"BCBL","Boundary layer cloud bottom level","-" +210,"BCTL","Boundary layer cloud top level","-" +211,"BCY","Boundary layer cloud level","-" +212,"LCBL","Low cloud bottom level","-" +213,"LCTL","Low cloud top level","-" +214,"LCY","Low cloud level","-" +215,"CEIL","Cloud ceiling","-" +216,"","Reserved for local use","-" +217,"","Reserved for local use","-" +218,"","Reserved for local use","-" +219,"","Reserved for local use","-" +220,"","Reserved for local use","-" +221,"","Reserved for local use","-" +222,"MCBL","Middle cloud bottom level","-" +223,"MCTL","Middle cloud top level","-" +224,"MCY","Middle cloud level","-" +225,"","Reserved for local use","-" +226,"","Reserved for local use","-" +227,"","Reserved for local use","-" +228,"","Reserved for local use","-" +229,"","Reserved for local use","-" +230,"","Reserved for local use","-" +231,"","Reserved for local use","-" +232,"HCBL","High cloud bottom level","-" +233,"HCTL","High cloud top level","-" +234,"HCY","High cloud level","-" +235,"OITL","Ocean Isotherm Level (1/10 deg C)","-" +236,"OLYR","Layer between two depths below ocean surface","-" +237,"OBML","Bottom of Ocean Mixed Layer (m)","-" +238,"OBIL","Bottom of Ocean Isothermal Layer (m)","-" 
+239,"","Reserved for local use","-" +240,"","Reserved for local use","-" +241,"","Reserved for local use","-" +242,"CCBL","Convective cloud bottom level","-" +243,"CCTL","Convective cloud top level","-" +244,"CCY","Convective cloud level","-" +245,"LLTW","Lowest level of the wet bulb zero","-" +246,"MTHE","Maximum equivalent potential temperature level","-" +247,"EHLT","Equilibrium level","-" +248,"SCBL","Shallow convective cloud bottom level","-" +249,"SCTL","Shallow convective cloud top level","-" +250,"","Reserved for local use","-" +251,"DCBL","Deep convective cloud bottom level","-" +252,"DCTL","Deep convective cloud top level","-" +253,"LBLSW","Lowest bottom level of supercooled liquid water layer","-" +254,"HTLSW","Highest top level of supercooled liquid water layer","-" +255,"MISSING","Missing","-" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_versions.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_versions.csv new file mode 100644 index 00000000..18ab667f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/grib2_table_versions.csv @@ -0,0 +1,3 @@ +component,version +wmo,v30 +degrib,2.25 diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/header.dxf b/.venv/lib/python3.12/site-packages/fiona/gdal_data/header.dxf new file mode 100644 index 00000000..3cf13f49 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/header.dxf @@ -0,0 +1,1124 @@ + 0 +SECTION + 2 +HEADER + 9 +$ACADVER + 1 +AC1018 + 9 +$ACADMAINTVER + 70 + 0 + 9 +$DWGCODEPAGE + 3 +ANSI_1252 + 9 +$EXTMIN + 10 +30.0 + 20 +49.75 + 30 +0.0 + 9 +$EXTMAX + 10 +130.5 + 20 +163.1318914119703 + 30 +0.0 + 9 +$LIMMIN + 10 +0.0 + 20 +0.0 + 9 +$LIMMAX + 10 +12.0 + 20 +9.0 + 9 +$ORTHOMODE + 70 + 0 + 9 +$REGENMODE + 70 + 1 + 9 +$FILLMODE + 70 + 1 + 9 +$QTEXTMODE + 70 + 0 + 9 +$MIRRTEXT + 70 + 1 + 9 +$LTSCALE + 40 +1.0 + 9 +$ATTMODE + 70 + 1 + 9 +$TEXTSIZE + 40 +0.2 + 9 +$TRACEWID + 40 +0.05 + 9 +$TEXTSTYLE + 7 +Standard + 9 +$CLAYER + 8 +0 + 9 +$CELTYPE + 6 +ByLayer + 9 +$CECOLOR + 62 + 256 + 9 +$CELTSCALE + 40 +1.0 + 9 +$DISPSILH + 70 + 0 + 9 +$LUNITS + 70 + 2 + 9 +$LUPREC + 70 + 4 + 9 +$SKETCHINC + 40 +0.1 + 9 +$FILLETRAD + 40 +0.5 + 9 +$AUNITS + 70 + 0 + 9 +$AUPREC + 70 + 0 + 9 +$MENU + 1 +. 
+ 9 +$ELEVATION + 40 +0.0 + 9 +$PELEVATION + 40 +0.0 + 9 +$THICKNESS + 40 +0.0 + 9 +$LIMCHECK + 70 + 0 + 9 +$CHAMFERA + 40 +0.5 + 9 +$CHAMFERB + 40 +0.5 + 9 +$CHAMFERC + 40 +1.0 + 9 +$CHAMFERD + 40 +0.0 + 9 +$SKPOLY + 70 + 0 + 9 +$ANGBASE + 50 +0.0 + 9 +$ANGDIR + 70 + 0 + 9 +$PDMODE + 70 + 0 + 9 +$PDSIZE + 40 +0.0 + 9 +$PLINEWID + 40 +0.0 + 9 +$SPLFRAME + 70 + 0 + 9 +$SPLINETYPE + 70 + 6 + 9 +$SPLINESEGS + 70 + 8 + 9 +$HANDSEED + 5 +44 + 9 +$SURFTAB1 + 70 + 6 + 9 +$SURFTAB2 + 70 + 6 + 9 +$SURFTYPE + 70 + 6 + 9 +$SURFU + 70 + 6 + 9 +$SURFV + 70 + 6 + 9 +$UCSBASE + 2 + + 9 +$UCSNAME + 2 + + 9 +$UCSORG + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$UCSXDIR + 10 +1.0 + 20 +0.0 + 30 +0.0 + 9 +$UCSYDIR + 10 +0.0 + 20 +1.0 + 30 +0.0 + 9 +$UCSORTHOREF + 2 + + 9 +$UCSORTHOVIEW + 70 + 0 + 9 +$UCSORGTOP + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$UCSORGBOTTOM + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$UCSORGLEFT + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$UCSORGRIGHT + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$UCSORGFRONT + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$UCSORGBACK + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$PUCSBASE + 2 + + 9 +$PUCSNAME + 2 + + 9 +$PUCSORG + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$PUCSXDIR + 10 +1.0 + 20 +0.0 + 30 +0.0 + 9 +$PUCSYDIR + 10 +0.0 + 20 +1.0 + 30 +0.0 + 9 +$PUCSORTHOREF + 2 + + 9 +$PUCSORTHOVIEW + 70 + 0 + 9 +$PUCSORGTOP + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$PUCSORGBOTTOM + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$PUCSORGLEFT + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$PUCSORGRIGHT + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$PUCSORGFRONT + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$PUCSORGBACK + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$WORLDVIEW + 70 + 1 + 9 +$SHADEDGE + 70 + 3 + 9 +$SHADEDIF + 70 + 70 + 9 +$TILEMODE + 70 + 1 + 9 +$MAXACTVP + 70 + 64 + 9 +$PINSBASE + 10 +0.0 + 20 +0.0 + 30 +0.0 + 9 +$PLIMCHECK + 70 + 0 + 9 +$PEXTMIN + 10 +1.000000000000000E+20 + 20 +1.000000000000000E+20 + 30 +1.000000000000000E+20 + 9 +$PEXTMAX + 10 +-1.000000000000000E+20 + 20 +-1.000000000000000E+20 + 30 +-1.000000000000000E+20 + 9 +$PLIMMIN + 10 +0.0 + 20 +0.0 + 9 +$PLIMMAX + 10 +12.0 + 20 +9.0 + 9 +$UNITMODE + 70 + 0 + 9 +$VISRETAIN + 70 + 1 + 9 +$PLINEGEN + 70 + 0 + 9 +$PSLTSCALE + 70 + 1 + 9 +$TREEDEPTH + 70 + 3020 + 9 +$CMLSTYLE + 2 +Standard + 9 +$CMLJUST + 70 + 0 + 9 +$CMLSCALE + 40 +1.0 + 9 +$PROXYGRAPHICS + 70 + 1 + 9 +$MEASUREMENT + 70 + 0 + 9 +$CELWEIGHT +370 + -1 + 9 +$ENDCAPS +280 + 0 + 9 +$JOINSTYLE +280 + 0 + 9 +$LWDISPLAY +290 + 0 + 9 +$INSUNITS + 70 + 1 + 9 +$HYPERLINKBASE + 1 + + 9 +$STYLESHEET + 1 + + 9 +$XEDIT +290 + 1 + 9 +$CEPSNTYPE +380 + 0 + 9 +$PSTYLEMODE +290 + 1 + 9 +$EXTNAMES +290 + 1 + 9 +$PSVPSCALE + 40 +0.0 + 9 +$OLESTARTUP +290 + 0 + 9 +$SORTENTS +280 + 127 + 9 +$INDEXCTL +280 + 0 + 9 +$HIDETEXT +280 + 1 + 9 +$XCLIPFRAME +290 + 0 + 9 +$HALOGAP +280 + 0 + 9 +$OBSCOLOR + 70 + 257 + 9 +$OBSLTYPE +280 + 0 + 9 +$INTERSECTIONDISPLAY +280 + 0 + 9 +$INTERSECTIONCOLOR + 70 + 257 + 9 +$DIMASSOC +280 + 2 + 9 +$PROJECTNAME + 1 + + 0 +ENDSEC + 0 +SECTION + 2 +CLASSES + 0 +CLASS + 1 +ACDBDICTIONARYWDFLT + 2 +AcDbDictionaryWithDefault + 3 +ObjectDBX Classes + 90 + 0 + 91 + 4 +280 + 0 +281 + 0 + 0 +ENDSEC + 0 +SECTION + 2 +TABLES + 0 +TABLE + 2 +VPORT + 5 +8 +330 +0 +100 +AcDbSymbolTable + 70 + 1 + 0 +VPORT + 5 +29 +330 +8 +100 +AcDbSymbolTableRecord +100 +AcDbViewportTableRecord + 2 +*Active + 70 + 0 + 10 +0.0 + 20 +0.0 + 11 +1.0 + 21 +1.0 + 12 +80.25 + 22 +106.4409457059851 + 13 +0.0 + 23 +0.0 + 14 +0.5 + 24 +0.5 + 15 +0.5 + 25 +0.5 + 16 +0.0 + 26 +0.0 + 36 +1.0 + 17 +0.0 + 27 +0.0 + 37 +0.0 + 40 +113.3818914119703 + 41 +0.8863849310366128 + 42 +50.0 + 43 
+0.0 + 44 +0.0 + 50 +0.0 + 51 +0.0 + 71 + 0 + 72 + 1000 + 73 + 1 + 74 + 3 + 75 + 0 + 76 + 0 + 77 + 0 + 78 + 0 +281 + 0 + 65 + 1 +110 +0.0 +120 +0.0 +130 +0.0 +111 +1.0 +121 +0.0 +131 +0.0 +112 +0.0 +122 +1.0 +132 +0.0 + 79 + 0 +146 +0.0 + 0 +ENDTAB + 0 +TABLE + 2 +LTYPE + 5 +5 +330 +0 +100 +AcDbSymbolTable + 70 + 1 + 0 +LTYPE + 5 +14 +330 +5 +100 +AcDbSymbolTableRecord +100 +AcDbLinetypeTableRecord + 2 +ByBlock + 70 + 0 + 3 + + 72 + 65 + 73 + 0 + 40 +0.0 + 0 +LTYPE + 5 +15 +330 +5 +100 +AcDbSymbolTableRecord +100 +AcDbLinetypeTableRecord + 2 +ByLayer + 70 + 0 + 3 + + 72 + 65 + 73 + 0 + 40 +0.0 + 0 +LTYPE + 5 +16 +330 +5 +100 +AcDbSymbolTableRecord +100 +AcDbLinetypeTableRecord + 2 +Continuous + 70 + 0 + 3 +Solid line + 72 + 65 + 73 + 0 + 40 +0.0 + 0 +ENDTAB + 0 +TABLE + 2 +LAYER + 5 +2 +330 +0 +100 +AcDbSymbolTable + 70 + 1 + 0 +LAYER + 5 +10 +330 +2 +100 +AcDbSymbolTableRecord +100 +AcDbLayerTableRecord + 2 +0 + 70 + 0 + 62 + 7 + 6 +Continuous +370 + -3 +390 +F + 0 +ENDTAB + 0 +TABLE + 2 +STYLE + 5 +3 +330 +0 +100 +AcDbSymbolTable + 70 + 1 + 0 +STYLE + 5 +11 +330 +3 +100 +AcDbSymbolTableRecord +100 +AcDbTextStyleTableRecord + 2 +Standard + 70 + 0 + 40 +0.0 + 41 +1.0 + 50 +0.0 + 71 + 0 + 42 +0.2 + 3 +txt + 4 + + 0 +ENDTAB + 0 +TABLE + 2 +VIEW + 5 +6 +330 +0 +100 +AcDbSymbolTable + 70 + 0 + 0 +ENDTAB + 0 +TABLE + 2 +UCS + 5 +7 +330 +0 +100 +AcDbSymbolTable + 70 + 0 + 0 +ENDTAB + 0 +TABLE + 2 +APPID + 5 +9 +330 +0 +100 +AcDbSymbolTable + 70 + 1 + 0 +APPID + 5 +12 +330 +9 +100 +AcDbSymbolTableRecord +100 +AcDbRegAppTableRecord + 2 +ACAD + 70 + 0 + 0 +ENDTAB + 0 +TABLE + 2 +DIMSTYLE + 5 +A +330 +0 +100 +AcDbSymbolTable + 70 + 1 +100 +AcDbDimStyleTable + 0 +DIMSTYLE +105 +27 +330 +A +100 +AcDbSymbolTableRecord +100 +AcDbDimStyleTableRecord + 2 +Standard + 70 + 0 +340 +11 + 0 +ENDTAB + 0 +TABLE + 2 +BLOCK_RECORD + 5 +1 +330 +0 +100 +AcDbSymbolTable + 70 + 1 + 0 +BLOCK_RECORD + 5 +1F +330 +1 +100 +AcDbSymbolTableRecord +100 +AcDbBlockTableRecord + 2 +*Model_Space +340 +22 + 0 +BLOCK_RECORD + 5 +1B +330 +1 +100 +AcDbSymbolTableRecord +100 +AcDbBlockTableRecord + 2 +*Paper_Space +340 +1E + 0 +ENDTAB + 0 +ENDSEC + 0 +SECTION + 2 +BLOCKS + 0 +BLOCK + 5 +20 +330 +1F +100 +AcDbEntity + 8 +0 +100 +AcDbBlockBegin + 2 +*Model_Space + 70 + 0 + 10 +0.0 + 20 +0.0 + 30 +0.0 + 3 +*Model_Space + 1 + + 0 +ENDBLK + 5 +21 +330 +1F +100 +AcDbEntity + 8 +0 +100 +AcDbBlockEnd + 0 +BLOCK + 5 +1C +330 +1B +100 +AcDbEntity + 67 + 1 + 8 +0 +100 +AcDbBlockBegin + 2 +*Paper_Space + 70 + 0 + 10 +0.0 + 20 +0.0 + 30 +0.0 + 3 +*Paper_Space + 1 + + 0 +ENDBLK + 5 +1D +330 +1B +100 +AcDbEntity + 67 + 1 + 8 +0 +100 +AcDbBlockEnd + 0 +ENDSEC + 0 +SECTION + 2 +ENTITIES diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/inspire_cp_BasicPropertyUnit.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/inspire_cp_BasicPropertyUnit.gfs new file mode 100644 index 00000000..43e0f474 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/inspire_cp_BasicPropertyUnit.gfs @@ -0,0 +1,57 @@ + + + BasicPropertyUnit + BasicPropertyUnit + None + + inspireId_localId + inspireId|Identifier|localId + String + + + inspireId_namespace + inspireId|Identifier|namespace + String + + + nationalCadastralReference + nationalCadastralReference + String + + + areaValue + areaValue + Real + + + areaValue_uom + areaValue@uom + String + + + validFrom + validFrom + String + + + validTo + validTo + String + + + beginLifespanVersion + beginLifespanVersion + String + + + endLifespanVersion + endLifespanVersion + String + + + 
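[Editor's note] The .gfs files that follow are GDAL GML feature-schema templates (their XML markup is collapsed in this diff). In the INSPIRE cadastral templates, each property pairs a flat OGR field name with a nested GML element path, where "|" separates nested elements and "@" marks an attribute, e.g. inspireId|Identifier|localId -> inspireId_localId and areaValue@uom -> areaValue_uom. A heuristic sketch that reproduces those names by dropping the capitalized type elements; this is reverse-engineered from the entries above, not GDAL's code, and the jpfgdgml_* templates later simply restate the element name directly:

```python
def flatten_inspire_path(path):
    """Heuristic: 'inspireId|Identifier|localId' -> 'inspireId_localId',
    'areaValue@uom' -> 'areaValue_uom'. Capitalized type elements
    (Identifier, GeographicalName, ...) are dropped."""
    parts = path.replace("@", "|").split("|")
    kept = [p for p in parts if not p[0].isupper()]
    return "_".join(kept)

print(flatten_inspire_path("inspireId|Identifier|namespace"))  # inspireId_namespace
print(flatten_inspire_path("name|GeographicalName|spelling|SpellingOfName|text"))
# -> name_spelling_text
```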
administrativeUnit_href + administrativeUnit@href + String + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/inspire_cp_CadastralBoundary.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/inspire_cp_CadastralBoundary.gfs new file mode 100644 index 00000000..6b271308 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/inspire_cp_CadastralBoundary.gfs @@ -0,0 +1,60 @@ + + + CadastralBoundary + CadastralBoundary + + geometry + LineString + + + beginLifespanVersion + beginLifespanVersion + String + + + endLifespanVersion + endLifespanVersion + String + + + + + estimatedAccuracy + estimatedAccuracy + Real + + + estimatedAccuracy_uom + estimatedAccuracy@uom + String + + + + inspireId_localId + inspireId|Identifier|localId + String + + + inspireId_namespace + inspireId|Identifier|namespace + String + + + + validFrom + validFrom + String + + + validTo + validTo + String + + + + parcel_href + parcel@href + StringList + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/inspire_cp_CadastralParcel.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/inspire_cp_CadastralParcel.gfs new file mode 100644 index 00000000..129b0e2c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/inspire_cp_CadastralParcel.gfs @@ -0,0 +1,81 @@ + + + CadastralParcel + CadastralParcel + + geometry + geometry + MultiPolygon + + + referencePoint + referencePoint + Point + + + areaValue + areaValue + Real + + + areaValue_uom + areaValue@uom + String + + + beginLifespanVersion + beginLifespanVersion + String + + + endLifespanVersion + endLifespanVersion + String + + + inspireId_localId + inspireId|Identifier|localId + String + + + inspireId_namespace + inspireId|Identifier|namespace + String + + + label + label + String + + + nationalCadastralReference + nationalCadastralReference + String + + + validFrom + validFrom + String + + + validTo + validTo + String + + + basicPropertyUnit_href + basicPropertyUnit@href + StringList + + + administrativeUnit_href + administrativeUnit@href + String + + + zoning_href + zoning@href + String + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/inspire_cp_CadastralZoning.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/inspire_cp_CadastralZoning.gfs new file mode 100644 index 00000000..e564dff6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/inspire_cp_CadastralZoning.gfs @@ -0,0 +1,161 @@ + + + CadastralZoning + CadastralZoning + + + geometry + geometry + MultiPolygon + + + referencePoint + referencePoint + Point + + + + beginLifespanVersion + beginLifespanVersion + String + + + endLifespanVersion + endLifespanVersion + String + + + + estimatedAccuracy + estimatedAccuracy + Real + + + estimatedAccuracy_uom + estimatedAccuracy@uom + String + + + + inspireId_localId + inspireId|Identifier|localId + String + + + inspireId_namespace + inspireId|Identifier|namespace + String + + + + label + label + String + + + + level + level + String + + + + levelName + levelName|LocalisedCharacterString + StringList + + + levelName_locale + levelName|LocalisedCharacterString@locale + StringList + + + + + + name_language + name|GeographicalName|language + StringList + + + name_nativeness + name|GeographicalName|nativeness + StringList + + + name_nameStatus + name|GeographicalName|nameStatus + StringList + + + name_pronunciation + name|GeographicalName|pronunciation + StringList + + + name_spelling_text + name|GeographicalName|spelling|SpellingOfName|text + StringList + + + 
name_spelling_script + name|GeographicalName|spelling|SpellingOfName|script + StringList + + + + nationalCadastalZoningReference + nationalCadastalZoningReference + String + + + + originalMapScaleDenominator + originalMapScaleDenominator + Integer + + + + validFrom + validFrom + String + + + validTo + validTo + String + + + + upperLevelUnit_href + upperLevelUnit@href + String + + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_AdmArea.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_AdmArea.gfs new file mode 100644 index 00000000..1b066551 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_AdmArea.gfs @@ -0,0 +1,59 @@ + + + AdmArea + AdmArea + area + Polygon + urn:ogc:def:crs:EPSG::6668 + + fid + fid + String + + + lfSpanFr + lfSpanFr|timePosition + String + + + lfSpanTo + lfSpanTo|timePosition + String + + + devDate + devDate|timePosition + String + + + orgGILvl + orgGILvl + String + + + orgMDId + orgMDId + String + + + vis + vis + String + + + type + type + String + + + name + name + String + + + admCode + admCode + String + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_AdmBdry.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_AdmBdry.gfs new file mode 100644 index 00000000..0214a6f6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_AdmBdry.gfs @@ -0,0 +1,49 @@ + + + AdmBdry + AdmBdry + loc + LineString + urn:ogc:def:crs:EPSG::6668 + + fid + fid + String + + + lfSpanFr + lfSpanFr|timePosition + String + + + lfSpanTo + lfSpanTo|timePosition + String + + + devDate + devDate|timePosition + String + + + orgGILvl + orgGILvl + String + + + orgMDId + orgMDId + String + + + vis + vis + String + + + type + type + String + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_AdmPt.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_AdmPt.gfs new file mode 100644 index 00000000..5f80403a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_AdmPt.gfs @@ -0,0 +1,59 @@ + + + AdmPt + AdmPt + pos + Point + urn:ogc:def:crs:EPSG::6668 + + fid + fid + String + + + lfSpanFr + lfSpanFr|timePosition + String + + + lfSpanTo + lfSpanTo|timePosition + String + + + devDate + devDate|timePosition + String + + + orgGILvl + orgGILvl + String + + + orgMDId + orgMDId + String + + + vis + vis + String + + + type + type + String + + + name + name + String + + + admCode + admCode + String + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_BldA.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_BldA.gfs new file mode 100644 index 00000000..007b1a33 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_BldA.gfs @@ -0,0 +1,54 @@ + + + BldA + BldA + area + Polygon + urn:ogc:def:crs:EPSG::6668 + + fid + fid + String + + + lfSpanFr + lfSpanFr|timePosition + String + + + lfSpanTo + lfSpanTo|timePosition + String + + + devDate + devDate|timePosition + String + + + orgGILvl + orgGILvl + String + + + orgMDId + orgMDId + String + + + vis + vis + String + + + type + type + String + + + name + name + String + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_BldL.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_BldL.gfs new file mode 100644 index 00000000..f456be87 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_BldL.gfs @@ -0,0 +1,54 @@ + + + BldL + BldL + loc + LineString + 
urn:ogc:def:crs:EPSG::6668 + + fid + fid + String + + + lfSpanFr + lfSpanFr|timePosition + String + + + lfSpanTo + lfSpanTo|timePosition + String + + + devDate + devDate|timePosition + String + + + orgGILvl + orgGILvl + String + + + orgMDId + orgMDId + String + + + vis + vis + String + + + type + type + String + + + name + name + String + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_Cntr.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_Cntr.gfs new file mode 100644 index 00000000..2f5a40e4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_Cntr.gfs @@ -0,0 +1,54 @@ + + + Cntr + Cntr + loc + LineString + urn:ogc:def:crs:EPSG::6668 + + fid + fid + String + + + lfSpanFr + lfSpanFr|timePosition + String + + + lfSpanTo + lfSpanTo|timePosition + String + + + devDate + devDate|timePosition + String + + + orgGILvl + orgGILvl + String + + + orgMDId + orgMDId + String + + + vis + vis + String + + + type + type + String + + + alti + alti + Real + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_CommBdry.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_CommBdry.gfs new file mode 100644 index 00000000..f17b98ba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_CommBdry.gfs @@ -0,0 +1,49 @@ + + + CommBdry + CommBdry + loc + LineString + urn:ogc:def:crs:EPSG::6668 + + fid + fid + String + + + lfSpanFr + lfSpanFr|timePosition + String + + + lfSpanTo + lfSpanTo|timePosition + String + + + devDate + devDate|timePosition + String + + + orgGILvl + orgGILvl + String + + + orgMDId + orgMDId + String + + + vis + vis + String + + + type + type + String + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_CommPt.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_CommPt.gfs new file mode 100644 index 00000000..e6666b1c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_CommPt.gfs @@ -0,0 +1,59 @@ + + + CommPt + CommPt + pos + Point + urn:ogc:def:crs:EPSG::6668 + + fid + fid + String + + + lfSpanFr + lfSpanFr|timePosition + String + + + lfSpanTo + lfSpanTo|timePosition + String + + + devDate + devDate|timePosition + String + + + orgGILvl + orgGILvl + String + + + orgMDId + orgMDId + String + + + vis + vis + String + + + type + type + String + + + name + name + String + + + admCode + admCode + String + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_Cstline.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_Cstline.gfs new file mode 100644 index 00000000..7dbe45ea --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_Cstline.gfs @@ -0,0 +1,54 @@ + + + Cstline + Cstline + loc + LineString + urn:ogc:def:crs:EPSG::6668 + + fid + fid + String + + + lfSpanFr + lfSpanFr|timePosition + String + + + lfSpanTo + lfSpanTo|timePosition + String + + + devDate + devDate|timePosition + String + + + orgGILvl + orgGILvl + String + + + orgMDId + orgMDId + String + + + vis + vis + String + + + type + type + String + + + name + name + String + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_ElevPt.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_ElevPt.gfs new file mode 100644 index 00000000..e86ba792 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_ElevPt.gfs @@ -0,0 +1,54 @@ + + + ElevPt + ElevPt + pos + Point + urn:ogc:def:crs:EPSG::6668 + + fid + fid + String + + + 
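[Editor's note] The jpfgdgml_* templates above describe layers of Japan's Fundamental Geospatial Data: each declares a geometry element, a geometry type, the CRS urn:ogc:def:crs:EPSG::6668 (JGD2011), and flat string/real attribute fields. Since GDAL's GML driver applies these shipped templates, a matching file opened through Fiona should surface that schema directly. A hedged usage sketch; the filename is hypothetical:

```python
import fiona

# Open a (hypothetical) FGD GML file whose layout matches the ElevPt
# template above; GDAL resolves the schema from the bundled .gfs.
with fiona.open("FG-GML-ElevPt-sample.xml") as src:
    print(src.schema["geometry"])             # expected: 'Point'
    print(sorted(src.schema["properties"]))   # ['alti', 'devDate', 'fid', ...]
    print(src.crs)                            # expected: EPSG:6668
```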
[…continuation of the preceding .gfs feature schema: String properties lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, type; Real property alti]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_GCP.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_GCP.gfs
new file mode 100644
index 00000000..b7810cf1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_GCP.gfs
@@ -0,0 +1,94 @@
[GMLFeatureClass GCP: geometry "pos" (Point), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, advNo, orgName, type, gcpClass, gcpCode, name; Real properties: B, L, alti; Integer property: altiAcc]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_LeveeEdge.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_LeveeEdge.gfs
new file mode 100644
index 00000000..c13e3963
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_LeveeEdge.gfs
@@ -0,0 +1,49 @@
[GMLFeatureClass LeveeEdge: geometry "loc" (LineString), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, name]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RailCL.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RailCL.gfs
new file mode 100644
index 00000000..70921813
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RailCL.gfs
@@ -0,0 +1,54 @@
[GMLFeatureClass RailCL: geometry "loc" (LineString), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, type, name]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdASL.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdASL.gfs
new file mode 100644
index 00000000..9c4e1ac9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdASL.gfs
@@ -0,0 +1,44 @@
[GMLFeatureClass RdASL: geometry "loc" (LineString), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdArea.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdArea.gfs
new file mode 100644
index 00000000..7729d2db
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdArea.gfs
@@ -0,0 +1,54 @@
[GMLFeatureClass RdArea: geometry "area" (Polygon), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, name, admOffice]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdCompt.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdCompt.gfs
new file mode 100644
index 00000000..0675e934
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdCompt.gfs
@@ -0,0 +1,59 @@
[GMLFeatureClass RdCompt: geometry "loc" (LineString), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, type, name, admOffice]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdEdg.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdEdg.gfs
new file mode 100644
index 00000000..de9fe09a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdEdg.gfs
@@ -0,0 +1,59 @@
[GMLFeatureClass RdEdg: geometry "loc" (LineString), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, type, name, admOffice]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdMgtBdry.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdMgtBdry.gfs
new file mode 100644
index 00000000..24e2ab3e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdMgtBdry.gfs
@@ -0,0 +1,49 @@
[GMLFeatureClass RdMgtBdry: geometry "loc" (LineString), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, name]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdSgmtA.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdSgmtA.gfs
new file mode 100644
index 00000000..f63b17d6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RdSgmtA.gfs
@@ -0,0 +1,59 @@
[GMLFeatureClass RdSgmtA: geometry "area" (Polygon), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, type, name, admOffice]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RvrMgtBdry.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RvrMgtBdry.gfs
new file mode 100644
index 00000000..26361312
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_RvrMgtBdry.gfs
@@ -0,0 +1,49 @@
[GMLFeatureClass RvrMgtBdry: geometry "loc" (LineString), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, name]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_SBAPt.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_SBAPt.gfs
new file mode 100644
index 00000000..438b7ded
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_SBAPt.gfs
@@ -0,0 +1,49 @@
[GMLFeatureClass SBAPt: geometry "pos" (Point), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, sbaNo]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_SBArea.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_SBArea.gfs
new file mode 100644
index 00000000..9fc59d82
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_SBArea.gfs
@@ -0,0 +1,54 @@
[GMLFeatureClass SBArea: geometry "area" (Polygon), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, type, sbaNo]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_SBBdry.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_SBBdry.gfs
new file mode 100644
index 00000000..b2915837
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_SBBdry.gfs
@@ -0,0 +1,44 @@
[GMLFeatureClass SBBdry: geometry "loc" (LineString), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_WA.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_WA.gfs
new file mode 100644
index 00000000..7eff1d67
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_WA.gfs
@@ -0,0 +1,54 @@
[GMLFeatureClass WA: geometry "area" (Polygon), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, type, name]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_WL.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_WL.gfs
new file mode 100644
index 00000000..7d20ea2c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_WL.gfs
@@ -0,0 +1,54 @@
[GMLFeatureClass WL: geometry "loc" (LineString), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, type, name]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_WStrA.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_WStrA.gfs
new file mode 100644
index 00000000..02652ca8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_WStrA.gfs
@@ -0,0 +1,54 @@
[GMLFeatureClass WStrA: geometry "area" (Polygon), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, type, name]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_WStrL.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_WStrL.gfs
new file mode 100644
index 00000000..8341102c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/jpfgdgml_WStrL.gfs
@@ -0,0 +1,54 @@
[GMLFeatureClass WStrL: geometry "loc" (LineString), SRS urn:ogc:def:crs:EPSG::6668; String properties: fid, lfSpanFr|timePosition, lfSpanTo|timePosition, devDate|timePosition, orgGILvl, orgMDId, vis, type, name]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/ogrinfo_output.schema.json b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ogrinfo_output.schema.json
new file mode 100644
index 00000000..af23826d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ogrinfo_output.schema.json
@@ -0,0 +1,528 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "description": "Schema for ogrinfo -json output",
+  "oneOf": [
+    { "$ref": "#/definitions/dataset" }
+  ],
+  "definitions": {
+    "dataset": {
+      "type": "object",
+      "properties": {
+        "description": { "type": "string" },
+        "driverShortName": { "type": "string" },
+        "driverLongName": { "type": "string" },
+        "layers": { "type": "array", "items": { "$ref": "#/definitions/layer" } },
+        "metadata": { "$ref": "#/definitions/metadata" },
+        "domains": { "$ref": "#/definitions/domains" },
+        "relationships": { "$ref": "#/definitions/relationships" },
+        "rootGroup": { "$ref": "#/definitions/group" }
+      },
+      "required": [ "layers", "metadata", "domains" ],
+      "additionalProperties": false
+    },
+    "layer": {
+      "type": "object",
+      "properties": {
+        "name": { "type": "string" },
+        "metadata": { "$ref": "#/definitions/metadata" },
+        "fidColumnName": { "type": "string" },
+        "featureCount": { "type": "number" },
+        "features": { "type": "array", "items": { "$ref": "https://geojson.org/schema/Feature.json" } },
+        "fields": { "type": "array", "items": { "$ref": "#/definitions/field" } },
+        "geometryFields": { "type": "array", "items": { "$ref": "#/definitions/geometryField" } }
+      },
+      "required": [ "name", "metadata", "geometryFields", "fields" ],
+      "additionalProperties": false
+    },
+    "metadata": {
+      "type": "object",
+      "$comment": "Object whose keys are metadata domain names. The empty string is a valid metadata domain name, and is used for the default domain.",
+      "patternProperties": {
+        "^.*$": { "$ref": "#/definitions/metadataDomain" }
+      }
+    },
+    "metadataDomain": {
+      "$comment": "The values of a metadata domain are key: string pairs, or arbitrary JSON objects for metadata domain names starting with the \"json:\" prefix.",
+      "any": [
+        { "type": "object" },
+        { "$ref": "#/definitions/keyValueDict" }
+      ]
+    },
+    "field": {
+      "type": "object",
+      "properties": {
+        "name": { "type": "string" },
+        "type": { "$ref": "#/definitions/fieldType" },
+        "subType": { "$ref": "#/definitions/fieldSubType" },
+        "width": { "type": "integer" },
+        "precision": { "type": "integer" },
+        "nullable": { "type": "boolean" },
+        "uniqueConstraint": { "type": "boolean" },
+        "defaultValue": { "type": "string" },
+        "alias": { "type": "string" },
+        "domainName": { "type": "string" },
+        "comment": { "type": "string" },
+        "timezone": {
+          "type": "string",
+          "pattern": "^(localtime|(mixed timezones)|UTC|((\\+|-)[0-9][0-9]:[0-9][0-9]))$"
+        }
+      },
+      "required": [ "name", "type", "nullable", "uniqueConstraint" ],
+      "additionalProperties": false
+    },
+    "fieldType": {
+      "enum": [ "Integer", "Integer64", "Real", "String", "Binary", "IntegerList", "Integer64List", "RealList", "StringList", "Date", "Time", "DateTime" ]
+    },
+    "fieldSubType": {
+      "enum": [ "None", "Boolean", "Int16", "Float32", "JSON", "UUID" ]
+    },
+    "geometryField": {
+      "type": "object",
+      "properties": {
+        "name": { "type": "string" },
+        "type": {
+          "pattern": "^(Geometry|((Multi)?Point)|((Multi)?LineString)|((Multi)?Polygon)|GeometryCollection|((Multi)?Curve)|((Multi)?Surface)|CircularString|CompoundCurve|CurvePolygon|Tin|PolyhedralSurface|Triangle)[Z]?[M]?$"
+        },
+        "nullable": { "type": "boolean" },
+        "extent": {
+          "type": "array",
+          "items": { "type": "number", "minItems": 4, "maxItems": 4 }
+        },
+        "extent3D": {
+          "type": "array",
+          "items": { "type": [ "null", "number" ], "minItems": 6, "maxItems": 6 }
+        },
+        "coordinateSystem": {
+          "oneOf": [
+            { "type": "null" },
+            { "$ref": "#/definitions/coordinateSystem" }
+          ]
+        },
+        "supportedSRSList": {
+          "type": "array",
+          "items": {
+            "oneOf": [
+              {
+                "type": "object",
+                "properties": {
+                  "id": {
+                    "type": "object",
+                    "properties": {
+                      "authority": { "type": "string" },
+                      "code": { "type": "string" }
+                    }
+                  }
+                },
+                "additionalProperties": false
+              },
+              {
+                "type": "object",
+                "properties": { "wkt": { "type": "string" } },
+                "additionalProperties": false
+              }
+            ]
+          }
+        },
+        "xyCoordinateResolution": { "type": "number" },
+        "zCoordinateResolution": { "type": "number" },
+        "mCoordinateResolution": { "type": "number" },
+        "coordinatePrecisionFormatSpecificOptions": { "type": "object" }
+      },
+      "required": [ "name", "type" ],
+      "additionalProperties": false
+    },
+    "coordinateSystem": {
+      "type": "object",
+      "properties": {
+        "wkt": { "type": "string" },
+        "projjson": { "$ref": "https://proj.org/schemas/v0.5/projjson.schema.json" },
+        "dataAxisToSRSAxisMapping": {
+          "type": "array",
+          "items": { "type": "number", "minItems": 2, "maxItems": 3 }
+        },
+        "coordinateEpoch": { "type": "number" }
+      },
+      "required": [ "wkt", "dataAxisToSRSAxisMapping" ],
+      "additionalProperties": false
+    },
+    "domains": {
+      "type": "object",
+      "patternProperties": {
+        "^.*$": { "$ref": "#/definitions/domain" }
+      },
+      "additionalProperties": false
+    },
+    "keyValueDict": {
+      "type": "object",
+      "patternProperties": { "^.*$": {} }
+    },
+    "domain": {
+      "type": "object",
+      "properties": {
+        "description": { "type": "string" },
+        "type": { "enum": [ "coded", "range", "glob" ] },
+        "fieldType": { "$ref": "#/definitions/fieldType" },
+        "fieldSubType": { "$ref": "#/definitions/fieldSubType" },
+        "glob": {
+          "$comment": "only present when type=glob",
+          "type": "string"
+        },
+        "codedValues": {
+          "$comment": "only present when type=coded",
+          "$ref": "#/definitions/keyValueDict"
+        },
+        "splitPolicy": { "enum": [ "default value", "duplicate", "geometry ratio" ] },
+        "mergePolicy": { "enum": [ "default value", "sum", "geometry weighted" ] },
+        "minValue": {
+          "$comment": "only present when type=range",
+          "any": [
+            { "type": "string" },
+            { "type": "number" }
+          ]
+        },
+        "minValueIncluded": {
+          "$comment": "only present when type=range",
+          "type": "boolean"
+        },
+        "maxValue": {
+          "$comment": "only present when type=range",
+          "any": [
+            { "type": "string" },
+            { "type": "number" }
+          ]
+        },
+        "maxValueIncluded": {
+          "$comment": "only present when type=range",
+          "type": "boolean"
+        }
+      },
+      "required": [ "type", "fieldType", "splitPolicy", "mergePolicy" ],
+      "additionalProperties": false
+    },
+    "group": {
+      "type": "object",
+      "properties": {
+        "name": { "type": "string" },
+        "layerNames": { "type": "array", "items": { "type": "string" } },
+        "groups": { "type": "array", "items": { "$ref": "#/definitions/group" } }
+      },
+      "required": [ "layerNames", "groups" ],
+      "additionalProperties": false
+    },
+    "relationships": {
+      "type": "object",
+      "patternProperties": {
+        "^.*$": { "$ref": "#/definitions/relationship" }
+      },
+      "additionalProperties": false
+    },
+    "relationship": {
+      "type": "object",
+      "properties": {
+        "type": { "type": "string", "enum": [ "Composite", "Association", "Aggregation" ] },
+        "related_table_type": { "type": "string" },
+        "cardinality": { "type": "string", "enum": [ "OneToOne", "OneToMany", "ManyToOne", "ManyToMany" ] },
+        "left_table_name": { "type": "string" },
+        "right_table_name": { "type": "string" },
+        "left_table_fields": { "type": "array", "items": { "type": "string" } },
+        "right_table_fields": { "type": "array", "items": { "type": "string" } },
+        "mapping_table_name": { "type": "string" },
+        "left_mapping_table_fields": { "type": "array", "items": { "type": "string" } },
+        "right_mapping_table_fields": { "type": "array", "items": { "type": "string" } },
+        "forward_path_label": { "type": "string" },
+        "backward_path_label": { "type": "string" }
+      },
+      "required": [ "type", "related_table_type", "cardinality", "left_table_name", "right_table_name", "left_table_fields", "right_table_fields", "forward_path_label", "backward_path_label" ],
+      "additionalProperties": false
+    }
+  }
+}
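The schema above can be used to sanity-check `ogrinfo -json` reports. A minimal sketch, assuming the third-party `jsonschema` package is installed and using `example.gpkg` as a stand-in for any OGR-readable dataset (both names are illustrative, not part of this repository):

import json
import subprocess
from pathlib import Path

import jsonschema  # third-party: pip install jsonschema

SCHEMA_PATH = Path(".venv/lib/python3.12/site-packages/fiona/gdal_data/ogrinfo_output.schema.json")

# Run ogrinfo in JSON mode and parse its report.
report = json.loads(
    subprocess.run(
        ["ogrinfo", "-json", "example.gpkg"],
        check=True, capture_output=True, text=True,
    ).stdout
)

# Draft-07 validation. The remote $refs (GeoJSON Feature, PROJJSON) are only
# dereferenced if the report actually contains "features" or "projjson" members.
schema = json.loads(SCHEMA_PATH.read_text())
jsonschema.Draft7Validator(schema).validate(report)
print("ogrinfo report matches the bundled schema")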
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/ogrvrt.xsd b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ogrvrt.xsd
new file mode 100644
index 00000000..d7458319
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ogrvrt.xsd
@@ -0,0 +1,546 @@
[XML Schema (xs:schema) for OGR VRT files; the element and attribute markup did not survive this capture, and only the xs:annotation strings remain, e.g. "Required element", "Optional element", "May be repeated", "SrcLayer or (exclusive) SrcSQL are required elements", "Use GeometryField.GeometryType / GeometryField.SRS / GeometryField.SrcRegion / GeometryField.ExtentXMin, etc. for multi-geometry field support", "User-facing name of the FID column", "Defaults to the value of 'name' if not specified", "Defaults to Direct", "Name of the geometry field", "Only used if encoding = 'PointFromColumns'", "Used if encoding = 'WKT', 'WKB' or 'Shape' to find the attribute field of the source layer; used also in the multiple geometry fields scenario to retrieve the source geometry field matching the target VRT geometry field", "A valid WKT for a POLYGON", "Defaults to Union if no Field or GeometryField element is specified", "Name of fields in which to place the name of the source layer of each feature", and assorted "Defaults to FALSE." / "Defaults to TRUE." notes]
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/osmconf.ini b/.venv/lib/python3.12/site-packages/fiona/gdal_data/osmconf.ini
new file mode 100644
index 00000000..b34c6831
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/osmconf.ini
@@ -0,0 +1,132 @@
+#
+# Configuration file for OSM import
+#
+
+# put here the name of keys, or key=value, for ways that are assumed to be polygons if they are closed
+# see http://wiki.openstreetmap.org/wiki/Map_Features
+closed_ways_are_polygons=aeroway,amenity,boundary,building,craft,geological,historic,landuse,leisure,military,natural,office,place,shop,sport,tourism,highway=platform,public_transport=platform
+
+# Uncomment to avoid laundering of keys ( ':' turned into '_' )
+#attribute_name_laundering=no
+
+# Some tags, set on ways and when building multipolygons, multilinestrings or other_relations,
+# are normally filtered out early, independent of the 'ignore' configuration below.
+# Uncomment to disable early filtering. The 'ignore' lines below remain active.
+#report_all_tags=yes
+
+# uncomment to report all nodes, including the ones without any (significant) tag
+#report_all_nodes=yes
+
+# uncomment to report all ways, including the ones without any (significant) tag
+#report_all_ways=yes
+
+# uncomment to specify that the format for the all_tags/other_tags field should be JSON
+# instead of the default HSTORE formatting.
+# Valid values for tags_format are "hstore" and "json"
+#tags_format=json
+
+[points]
+# common attributes
+osm_id=yes
+osm_version=no
+osm_timestamp=no
+osm_uid=no
+osm_user=no
+osm_changeset=no
+
+# keys to report as OGR fields
+attributes=name,barrier,highway,ref,address,is_in,place,man_made
+# keys that, alone, are not significant enough to report a node as an OGR point
+unsignificant=created_by,converted_by,source,time,ele,attribution
+# keys that should NOT be reported in the "other_tags" field
+ignore=created_by,converted_by,source,time,ele,note,todo,openGeoDB:,fixme,FIXME
+# uncomment to avoid creation of "other_tags" field
+#other_tags=no
+# uncomment to create "all_tags" field. "all_tags" and "other_tags" are exclusive
+#all_tags=yes
+
+[lines]
+# common attributes
+osm_id=yes
+osm_version=no
+osm_timestamp=no
+osm_uid=no
+osm_user=no
+osm_changeset=no
+
+# keys to report as OGR fields
+attributes=name,highway,waterway,aerialway,barrier,man_made,railway
+
+# type of attribute 'foo' can be changed with something like
+#foo_type=Integer/Real/String/DateTime
+
+# keys that should NOT be reported in the "other_tags" field
+ignore=created_by,converted_by,source,time,ele,note,todo,openGeoDB:,fixme,FIXME
+# uncomment to avoid creation of "other_tags" field
+#other_tags=no
+# uncomment to create "all_tags" field. "all_tags" and "other_tags" are exclusive
+#all_tags=yes
+
+#computed_attributes must appear before the keywords _type and _sql
+computed_attributes=z_order
+z_order_type=Integer
+# Formula based on https://github.com/openstreetmap/osm2pgsql/blob/master/style.lua#L13
+# [foo] is substituted by value of tag foo. When substitution is not wished, the [ character can be escaped with \[ in literals
+# Note for GDAL developers: if we change the below formula, make sure to edit ogrosmlayer.cpp since it has a hardcoded optimization for this very precise formula
+z_order_sql="SELECT (CASE [highway] WHEN 'minor' THEN 3 WHEN 'road' THEN 3 WHEN 'unclassified' THEN 3 WHEN 'residential' THEN 3 WHEN 'tertiary_link' THEN 4 WHEN 'tertiary' THEN 4 WHEN 'secondary_link' THEN 6 WHEN 'secondary' THEN 6 WHEN 'primary_link' THEN 7 WHEN 'primary' THEN 7 WHEN 'trunk_link' THEN 8 WHEN 'trunk' THEN 8 WHEN 'motorway_link' THEN 9 WHEN 'motorway' THEN 9 ELSE 0 END) + (CASE WHEN [bridge] IN ('yes', 'true', '1') THEN 10 ELSE 0 END) + (CASE WHEN [tunnel] IN ('yes', 'true', '1') THEN -10 ELSE 0 END) + (CASE WHEN [railway] IS NOT NULL THEN 5 ELSE 0 END) + (CASE WHEN [layer] IS NOT NULL THEN 10 * CAST([layer] AS INTEGER) ELSE 0 END)"
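The [points] and [lines] sections above control which OSM keys become OGR fields, and the computed_attributes/z_order_sql machinery adds a derived z_order field to the lines layer. A minimal sketch, assuming fiona is installed and that custom_osmconf.ini (an edited copy of this file) and extract.osm.pbf are illustrative names, of pointing GDAL's OSM driver at a customized configuration via the OSM_CONFIG_FILE option:

import fiona

# OSM_CONFIG_FILE is a GDAL configuration option read by the OSM driver;
# fiona.Env forwards keyword arguments as GDAL config options.
with fiona.Env(OSM_CONFIG_FILE="custom_osmconf.ini"):
    # The OSM driver exposes the layers configured in this file:
    # points, lines, multipolygons, multilinestrings, other_relations.
    with fiona.open("extract.osm.pbf", layer="lines") as src:
        for feat in src:
            props = feat["properties"]
            # z_order comes from the computed_attributes/z_order_sql formula above.
            print(props.get("name"), props.get("highway"), props.get("z_order"))
            break

Editing a copy rather than the file installed in the virtualenv keeps the packaged defaults intact.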
+
+[multipolygons]
+# common attributes
+# note: for multipolygons, osm_id=yes instantiates an osm_id field for the id of relations
+# and an osm_way_id field for the id of closed ways. Both fields are exclusively set.
+osm_id=yes
+osm_version=no
+osm_timestamp=no
+osm_uid=no
+osm_user=no
+osm_changeset=no
+
+# keys to report as OGR fields
+attributes=name,type,aeroway,amenity,admin_level,barrier,boundary,building,craft,geological,historic,land_area,landuse,leisure,man_made,military,natural,office,place,shop,sport,tourism
+# keys that should NOT be reported in the "other_tags" field
+ignore=area,created_by,converted_by,source,time,ele,note,todo,openGeoDB:,fixme,FIXME
+# uncomment to avoid creation of "other_tags" field
+#other_tags=no
+# uncomment to create "all_tags" field. "all_tags" and "other_tags" are exclusive
+#all_tags=yes
+
+[multilinestrings]
+# common attributes
+osm_id=yes
+osm_version=no
+osm_timestamp=no
+osm_uid=no
+osm_user=no
+osm_changeset=no
+
+# keys to report as OGR fields
+attributes=name,type
+# keys that should NOT be reported in the "other_tags" field
+ignore=area,created_by,converted_by,source,time,ele,note,todo,openGeoDB:,fixme,FIXME
+# uncomment to avoid creation of "other_tags" field
+#other_tags=no
+# uncomment to create "all_tags" field. "all_tags" and "other_tags" are exclusive
+#all_tags=yes
+
+[other_relations]
+# common attributes
+osm_id=yes
+osm_version=no
+osm_timestamp=no
+osm_uid=no
+osm_user=no
+osm_changeset=no
+
+# keys to report as OGR fields
+attributes=name,type
+# keys that should NOT be reported in the "other_tags" field
+ignore=area,created_by,converted_by,source,time,ele,note,todo,openGeoDB:,fixme,FIXME
+# uncomment to avoid creation of "other_tags" field
+#other_tags=no
+# uncomment to create "all_tags" field. "all_tags" and "other_tags" are exclusive
+#all_tags=yes
diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/ozi_datum.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ozi_datum.csv
new file mode 100644
index 00000000..13676d16
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ozi_datum.csv
@@ -0,0 +1,131 @@
+NAME,EPSG_DATUM_CODE,ELLIPSOID_CODE,DELTAX,DELTAY,DELTAZ
+#
+# Note : We have permission from Des Newman on behalf of OziExplorer to use this list.
+# See : http://trac.osgeo.org/gdal/ticket/3929#comment:2
+# Note 2: EPSG_DATUM_CODE is used in preference to retrieve ellipsoid and datum shift values
+# from the CSV files imported from the EPSG database, which are more up-to-date. It
+# overrides the values found in this file and in ozi_ellips.csv. See #3929 for more details.
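Each data row below maps an OziExplorer datum name to an EPSG datum code (possibly empty when no EPSG equivalent exists), an ozi_ellips.csv ellipsoid code, and DX/DY/DZ shifts to WGS 84; the trailing "# region" note shares the DELTAZ field. A minimal parsing sketch (the path is illustrative; fiona/GDAL consume this file directly, so this is only for inspecting the table):

import csv

def load_ozi_datums(path="ozi_datum.csv"):  # illustrative path
    datums = {}
    with open(path, newline="") as f:
        for row in csv.reader(f):
            # Skip blank lines, '#' comment lines and the header row.
            if not row or row[0].startswith("#") or row[0] == "NAME":
                continue
            name, epsg, ellipsoid, dx, dy, dz = row[:6]
            dz = dz.split("#")[0]  # the trailing '# region' comment shares this field
            datums[name] = {
                "epsg_datum_code": int(epsg) if epsg else None,
                "ellipsoid_code": int(ellipsoid),
                "delta_xyz": (float(dx), float(dy), float(dz)),
            }
    return datums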
+# +Adindan,4201,5,-162,-12,206 # Africa - Eritrea, Ethiopia and Sudan +Afgooye,4205,15,-43,-163,45 # Somalia +Ain el Abd 1970,4204,14,-150,-251,-2 # Asia - Middle East - Bahrain, Kuwait and Saudi Arabia +Anna 1 Astro 1965,4708,2,-491,-22,435 # Cocos (Keeling) Islands +Arc 1950,4209,5,-143,-90,-294 # Africa - Botswana, Malawi, Zambia, Zimbabwe +Arc 1960,4210,5,-160,-8,-300 # Africa - Kenya, Tanzania and Uganda +Ascension Island 1958,4712,14,-207,107,52 # St Helena - Ascension Island +Astro B4 Sorol Atoll,4707,14,114,-116,-333 # USA - Hawaii - Tern Island and Sorel Atoll +Astro Beacon 1945,4709,14,145,75,-272 # Japan - Iwo Jima +Astro DOS 71/4,4710,14,-320,550,-494 # St Helena - St Helena Island +Astronomic Stn 1952,4711,14,124,-234,-25 # Japan - Minamitori-shima (Marcus Island) +Australian Geodetic 1966,4202,2,-133,-48,148 # Australasia - Australia and PNG - AGD66 +Australian Geodetic 1984,4203,2,-134,-48,149 # Australia - AGD84 +Australian Geocentric 1994 (GDA94),4283,11,0,0,0 # Australia - GDA94 +Austrian,4312,3,594,84,471 # MGI - Europe, Austria and former Yugoslavia +Bellevue (IGN),4714,14,-127,-769,472 # Vanuatu - southern islands +Bermuda 1957,4216,4,-73,213,296 # Bermuda +Bogota Observatory,4218,14,307,304,-318 # Colombia +Campo Inchauspe,4221,14,-148,136,90 # Argentina +Canton Astro 1966,4716,14,298,-304,-375 # Kiribati - Phoenix Islands +Cape,4222,5,-136,-108,-292 # Africa - Botswana and South Africa +Cape Canaveral,4717,4,-2,150,181 # North America - Bahamas and USA - Florida +Carthage,4223,5,-263,6,431 # Tunisia +CH-1903,4149,3,674,15,405 # Europe - Liechtenstein and Switzerland +Chatham 1971,4672,14,175,-38,113 # New Zealand - Chatham Islands +Chua Astro,4224,14,-134,229,-29 # South America - Brazil ; N Paraguay +Corrego Alegre,4225,14,-206,172,-6 # Brazil - Corrego Alegre +Djakarta (Batavia),4211,3,-377,681,-50 # Indonesia - Java +DOS 1968,,14,230,-199,-752 # Solomon Islands - Gizo Island : EPSG:4718 + EPSG:15805 (gcs.csv uses EPSG:15807) +Easter Island 1967,4719,14,211,147,111 # Chile - Easter Island +Egypt,,14,-130,-117,-151 # Egypt - EPSG code is 4199, but transformation parameters are missing in gcs.csv +European 1950,4230,14,-87,-98,-121 # Europe +European 1950 (Mean France),,14,-87,-96,-120 # Europe -France +European 1950 (Spain and Portugal),,14,-84,-107,-120 # Europe - Spain and Portugal +European 1979,4668,14,-86,-98,-119 # Europe - west +Finland Hayford,4123,14,-78,-231,-97 # Finland (KKJ) +Gandajika Base,4233,14,-133,-321,50 # Maldives +Geodetic Datum 1949,4272,14,84,-22,209 # New Zealand (NZGD49) +GGRS 87,4121,11,-199.87,74.79,246.62 # Greece +Guam 1963,4675,4,-100,-248,259 # Guam +GUX 1 Astro,4718,14,252,-209,-751 # Solomon Islands - Guadalcanal Island +Hartebeeshoek94,4148,20,0,0,0 # South Africa +Hermannskogel,3906,3,653,-212,449 # Boznia and Herzegovina; Croatia; FYR Macedonia; Montenegro; Serbia; Slovenia (MGI 1901) +Hjorsey 1955,4658,14,-73,46,-86 # Iceland +Hong Kong 1963,4739,14,-156,-271,-189 # China - Hong Kong +Hu-Tzu-Shan,4236,14,-634,-549,-201 # Taiwan +Indian Bangladesh,4682,6,289,734,257 # Bangladesh (Gulshan 303) +Indian Thailand,4240,6,214,836,303 # Thailand +Israeli,4281,23,-235,-85,264 # Asia - Middle East - Israel, Jordan and Palestine Territory (Palestine 1923) +Ireland 1965,4299,1,506,-122,611 # Europe - Ireland (Republic and Ulster) +ISTS 073 Astro 1969,4724,14,208,-435,-229 # British Indian Ocean Territory - Diego Garcia +Johnston Island,4725,14,191,-77,-204 # Johnston Island +Kandawala,4244,6,-97,787,86 # Sri Lanka +Kerguelen 
Island,4698,14,145,-187,103 # French Southern Territories - Kerguelen +Kertau 1948,4245,7,-11,851,5 # Asia - Malaysia (west) and Singapore +L.C. 5 Astro,4726,4,42,124,147 # Cayman Islands - Little Cayman and Cayman Brac +Liberia 1964,4251,5,-90,40,88 # Liberia +Luzon Mindanao,,4,-133,-79,-72 # Philippines - Mindanao (EPSG:4253 + EPSG:1162 Coordinate Transformation) +Luzon Philippines,4253,4,-133,-77,-51 # Philippines - excluding Mindanao +Mahe 1971,4256,5,41,-220,-134 # Seychelles +Marco Astro,4616,14,-289,-124,60 # Portugal - Selvagens islands (Madeira) +Massawa,4262,3,639,405,60 # Eritrea +Merchich,4261,5,31,146,47 # Morocco +Midway Astro 1961,4727,14,912,-58,1227 # Midway Islands - Sand and Eastern Islands +Minna,4263,5,-92,-93,122 # Nigeria +NAD27 Alaska,,4,-5,135,172 # Alaska (EPSG:4269 + EPSG:1176 Coordinate Transformation) +NAD27 Bahamas,,4,-4,154,178 # Bahamas (EPSG:4269 + EPSG:1177 Coordinate Transformation) +NAD27 Canada,,4,-10,158,187 # Canada (EPSG:4269 + EPSG:1172 Coordinate Transformation) +NAD27 Canal Zone,,4,0,125,201 # Panama (EPSG:4269 + EPSG:1184 Coordinate Transformation) +NAD27 Caribbean,,4,-7,152,178 # Caribbean +NAD27 Central,,4,0,125,194 # Central America (EPSG:4269 + EPSG:1171 Coordinate Transformation) +NAD27 CONUS,,4,-8,160,176 # Continental US (EPSG:4269 + EPSG:1173 Coordinate Transformation) +NAD27 Cuba,,4,-9,152,178 # Cuba (EPSG:4269 + EPSG:1185 Coordinate Transformation) +NAD27 Greenland,,4,11,114,195 # Greenland - Hayes Peninsula (EPSG:4269 + EPSG:1186 Coordinate Transformation) +NAD27 Mexico,,4,-12,130,190 # Mexico (EPSG:4269 + EPSG:1187 Coordinate Transformation) +NAD27 San Salvador,,4,1,140,165 # San Salvador (EPSG:4269 + EPSG:1178 Coordinate Transformation) +NAD83,4269,11,0,0,0 # North America +Nahrwn Masirah Ilnd,,5,-247,-148,369 # Oman - Masirah Island (EPSG:4270 + EPSG:1189) +Nahrwn Saudi Arbia,,5,-231,-196,482 # Saudi Arabia (EPSG:4270 + EPSG:1190) +Nahrwn United Arab,,5,-249,-156,381 # United Arab Emirates (UAE) (EPSG:4270 + EPSG:1191) +Naparima BWI,4271,14,-2,374,172 # Trinidad and Tobago - Tobago +NGO1948,4273,27,315,-217,528 # Norway +NTF France,4275,24,-168,-60,320 # France +Norsk,4817,27,278,93,474 # Norway (NGO 1948) +NZGD1949,4272,14,84,-22,209 # New Zealand +NZGD2000,4167,20,0,0,0 # New Zealand +Observatorio 1966,4182,14,-425,-169,81 # Portugal - western Azores +Old Egyptian,4229,12,-130,110,-13 # Egypt (1907) +Old Hawaiian,4135,4,61,-285,-181 # USA - Hawaii +Oman,4232,5,-346,-1,224 # Oman +Ord Srvy Grt Britn,4277,0,375,-111,431 # UK - Great Britain; Isle of Man +Pico De Las Nieves,4728,14,-307,-92,127 # Spain - Canary Islands +Pitcairn Astro 1967,4729,14,185,165,42 # Pitcairn Island +Potsdam Rauenberg DHDN,4314,3,606,23,413 # Germany +Prov So Amrican 1956,4248,14,-288,175,-376 # South America - PSAD56 +Prov So Chilean 1963,4254,14,16,196,93 # South America - Tierra del Fuego +Puerto Rico,4139,4,11,72,-101 # Caribbean - Puerto Rico and the Virgin Islands +Pulkovo 1942 (1),4284,15,28,-130,-95 # Europe - FSU +Pulkovo 1942 (2),4284,15,28,-130,-95 # Europe - FSU +Qatar National,4285,14,-128,-283,22 # Qatar +Qornoq,4287,14,164,138,-189 # Greenland +Reunion,4626,14,94,-948,-1262 # France - Reunion Island +Rijksdriehoeksmeting,4289,3,593,26,478 # Netherlands +Rome 1940,4806,14,-225,-65,9 # Italy - including San Marino and Vatican +RT 90,4124,3,498,-36,568 # Sweden +S42,4179,15,28,-121,-77 # Europe - eastern - S-42 +Santo (DOS),4730,14,170,42,84 # Vanuatu - northern islands +Sao Braz,4184,14,-203,141,53 # Portugal - eastern Azores +Sapper Hill 
1943,4292,14,-355,16,74 # Falkland Islands +Schwarzeck,4293,21,616,97,-251 # Namibia +South American 1969,4291,16,-57,1,-41 # South America - SAD69 +South Asia,,8,7,-10,-26 # Singapore (unknown EPSG code) +Southeast Base,4615,14,-499,-249,314 # Porto Santo and Madeira Islands +Southwest Base,4183,14,-104,167,-38 # Faial, Graciosa, Pico, Sao Jorge and Terceira +Timbalai 1948,4298,6,-689,691,-46 # Asia - Brunei and East Malaysia +Tokyo,4301,3,-128,481,664 # Asia - Japan and Korea +Tristan Astro 1968,4734,14,-632,438,-609 # St Helena - Tristan da Cunha +Viti Levu 1916,4731,5,51,391,-36 # Fiji - Viti Levu +Wake-Eniwetok 1960,4732,13,101,52,-39 # Marshall Islands - Eniwetok, Kwajalein and Wake islands +WGS 72,4322,19,0,0,5 # World +WGS 84,4326,20,0,0,0 # World +Yacare,4309,14,-155,171,37 # Uruguay +Zanderij,4311,14,-265,120,-358 # Suriname diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/ozi_ellips.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ozi_ellips.csv new file mode 100644 index 00000000..071e39e5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ozi_ellips.csv @@ -0,0 +1,35 @@ +ELLIPSOID_CODE,NAME,A,INVF +# +# Note : We have permission from Des Newman on behalf of OziExplorer to use this list. +# See : http://trac.osgeo.org/gdal/ticket/3929#comment:2 +# +0,Airy 1830,6377563.396,299.3249646 +1,Modified Airy,6377340.189,299.3249646 +2,Australian National,6378160.0,298.25 +3,Bessel 1841,6377397.155,299.1528128 +4,Clarke 1866,6378206.4,294.9786982 +5,Clarke 1880,6378249.145,293.465 +6,Everest (India 1830),6377276.345,300.8017 +7,Everest (1948),6377304.063,300.8017 +8,Modified Fischer 1960,6378155.0,298.3 +9,Everest (Pakistan),6377309.613,300.8017 +10,Indonesian 1974,6378160.0,298.247 +11,GRS 80,6378137.0,298.257222101 +12,Helmert 1906,6378200.0,298.3 +13,Hough 1960,6378270.0,297.0 +14,International 1924,6378388.0,297.0 +15,Krassovsky 1940,6378245.0,298.3 +16,South American 1969,6378160.0,298.25 +17,Everest (Malaysia 1969),6377295.664,300.8017 +18,Everest (Sabah Sarawak),6377298.556,300.8017 +19,WGS 72,6378135.0,298.26 +20,WGS 84,6378137.0,298.257223563 +21,Bessel 1841 (Namibia),6377483.865,299.1528128 +22,Everest (India 1956),6377301.243,300.8017 +23,Clarke 1880 Palestine,6378300.789,293.466 +24,Clarke 1880 IGN,6378249.2,293.466021 +25,Hayford 1909,6378388.0,296.959263 +26,Clarke 1858,6378350.87,294.26 +27,Bessel 1841 (Norway),6377492.0176,299.1528 +28,Plessis 1817 (France),6376523.0,308.6409971 +29,Hayford 1924,6378388.0,297.0 diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/pci_datum.txt b/.venv/lib/python3.12/site-packages/fiona/gdal_data/pci_datum.txt new file mode 100644 index 00000000..68507ea0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/pci_datum.txt @@ -0,0 +1,530 @@ +! +! From https://github.com/OSGeo/gdal/issues/8034, June 30, 2023 +! +! I, Michael Goldberg, on behalf of PCI Geomatics agree to allow the ellips.txt +! and datum.txt file to be distributed under the GDAL open source license. +! +! Michael Goldberg +! Development Manager +! PCI Geomatics +! +! +! NOTE: The range of "D950" to "D998" is set aside for +! the use of local customer development. +! +! And the range of "D-90" to "D-98" is set aside for +! the use of local customer development. +! 
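The '!' comments just below document two record layouts: grid-shift datums (a quoted location, a GridShiftTo datum, and one or more shift-grid filenames) and parameter-based datums (numeric offsets, optionally followed by rotations and a scale). A minimal sketch (illustrative path, untested) that tells the two apart by whether the fourth field parses as a number:

import csv
import re

def classify_pci_datums(path="pci_datum.txt"):  # illustrative path
    grid_shift, parametric = {}, {}
    with open(path) as f:
        for row in csv.reader(f):
            # Skip '!' comments, blank lines and the quoted title lines;
            # datum codes look like D000, D-01, D122, ...
            if not row or not re.fullmatch(r"D-?\d+", row[0]):
                continue
            try:
                float(row[3])              # XOffset present: parameter-based entry
                parametric[row[0]] = row
            except (ValueError, IndexError):
                grid_shift[row[0]] = row   # fourth field is a location string
    return grid_shift, parametric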
+!For datums using a grid shift file entries are: +!DatumNumber,DatumName,EllipsoidNumber,Location,GridShiftTo,GridShiftFile,GridShiftFile +!If GridShiftTo is negative the shift is reversed +!For datums not using a grid shift file converting to WGS84 using coordinate frame rotation +! (EPSG:9607 which is opposite rotation to EPSG TOWGS84) entries are: +!DatumNumber,DatumName,EllipsoidNumber,XOffset,YOffset,ZOffset,Location,XSigma,YSigma,ZSigma,Doppler,XRotate,YRotate,ZRotate,Scale +"DoD World Geodetic System 1984, DMA TR 8350.2" +"4 JUL 1997, Third Printing, Includes 3 JAN 2000 Updates" +"D-01","NAD27 (USA, NADCON)","E000","Conterminous U.S.","D122","conus.los","conus.las" +"D-02","NAD83 (Deprecated - use D122)","E008",0,0,0,"Conterminous U.S.",2,2,2,354 +"D-03","NAD27 (Canada, NTv1)","E000","Canada","D122","grid.dac" +"D-04","NAD83 (Deprecated - use D122)","E008",0,0,0,"Canada",2,2,2,354 +"D-07","NAD27 (USA, NADCON)","E000","Alaska","D122","alaska.los","alaska.las" +"D-08","NAD83 (Deprecated - use D122)","E008",0,0,0,"Alaska",2,2,2,354 +"D-09","NAD27 (USA, NADCON)","E000","St. George","D122","stgeorge.los","stgeorge.las" +"D-10","NAD83 (Deprecated - use D122)","E008",0,0,0,"St. George",2,2,2,354 +"D-11","NAD27 (USA, NADCON)","E000","St. Lawrence","D122","stlrnc.los","stlrnc.las" +"D-12","NAD83 (Deprecated - use D122)","E008",0,0,0,"St. Lawrence",2,2,2,354 +"D-13","NAD27 (USA, NADCON)","E000","St. Paul","D122","stpaul.los","stpaul.las" +"D-14","NAD83 (Deprecated - use D122)","E008",0,0,0,"St. Paul",2,2,2,354 +"D-15","Old Hawaiian (USA, NADCON)","E000","Hawaii","D122","hawaii.los","hawaii.las" +"D-16","NAD83 (Deprecated - use D122)","E008",0,0,0,"Hawaii",2,2,2,354 +"D-17","NAD27 (USA, NADCON)","E000","Puerto Rico Virgin Islands","D122","prvi.los","prvi.las" +"D-18","NAD83 (Deprecated - use D122)","E008",0,0,0,"Puerto Rico Virgin Islands",2,2,2,354 +"D-21","GDA94 (from AGD66, NTv2)","E008","Australia","D029","A66_National_13_09_01_.gsb" +"D-22","GDA94 (from AGD84, NTv2)","E008","Australia","D030","National_84_02.07.01.gsb" +"D-24","NZGD2000 (NTv2)","E008","New Zealand","D510","nzgd2kgrid0005.gsb" +"D-25","GDA2020 (conformal, from GDA94, NTv2)","E008","Australia","D536","GDA94_GDA2020_conformal.gsb" +"D-26","GDA2020 (conformal and distortion, from GDA94, NTv2)","E008","Australia","D536","GDA94_GDA2020_conformal_and_distortion.gsb" +"D-27","GDA2020 (conformal, from GDA94, NTv2)","E008","Australia (Christmas Island)","D536","GDA94_GDA2020_conformal_christmas_island.gsb" +"D-28","GDA2020 (conformal, from GDA94, NTv2)","E008","Australia (Cocos Islands)","D536","GDA94_GDA2020_conformal_cocos_island.gsb" +"D-55","NAD83 (CSRS 2002) (NTv2)","E008","British Columbia","D122","BC_93_05.gsb" +"D-56","NAD27 (NTv2)","E000","British Columbia","-D-55","BC_27_05.gsb" +"D-57","NAD83 (CSRS) (NTv2)","E008","BC (CRD)","D122","CRD93_00.gsb" +"D-58","NAD27 (NTv2)","E000","BC (CRD)","-D-57","CRD27_00.gsb" +"D-59","NAD83 (CSRS) (NTv2)","E008","BC (Vancouver Island)","D122","NVI93_05.gsb" +"D-62","NAD27 (NTv2)","E000","Ontario (Toronto)","-D-65","TO27CSv1.gsb" +"D-63","NAD27 (NTv2)","E000","Ontario","-D-65","ON27CSv1.gsb" +"D-64","NAD27 (1976) (NTv2)","E000","Ontario","-D-65","ON76CSv1.gsb" +"D-65","NAD83 (CSRS98) (NTv2)","E008","Ontario","D122","ON83CSv1.gsb" +"D-67","NAD83 (SCRS) (NTv2)","E008","Quebec","D-68","na27scrs.gsb" +"D-68","NAD27 (NTv2)","E000","Quebec","-D122","na27na83.gsb" +"D-71","NAD83 (SCRS) (NTv2)","E008","Quebec","D-72","cq77scrs.gsb" +"D-72","NAD27 (CGQ77) 
(NTv2)","E000","Quebec","D122","cq77na83.gsb" +"D-75","NAD83 (SCRS) (NTv2)","E008","Quebec","D122","na83scrs.gsb" +"D-76","NAD27 (NTv2)","E000","Saskatchewan","-D-79","sk27-98.gsb" +"D-77","NAD27 (NTv2)","E000","Saskatchewan","-D122","sk27-83.gsb" +"D-79","NAD83 (CSRS98) (NTv2)","E008","Saskatchewan","-D122","sk83-98.gsb" +"D-81","NAD83 (CSRS98) (NTv2)","E008","Nova Scotia","D895","ns778301.gsb" +"D-82","ATS77 (NTv2)","E910","Nova Scotia","-D122","GS7783.GSB" +"D-83","NAD83 (CSRS98) (NTv2)","E008","Prince Edward Island","D895","pe7783v2.gsb" +"D-84","NAD83 (CSRS98) (NTv2)","E008","New Brunswick","D122","nb2783v2.gsb" +"D-85","NAD83 (CSRS98) (NTv2)","E008","New Brunswick","D895","nb7783v2.gsb" +"D-86","NAD27 (NTv2)","E000","Canada","-D122","ntv2_0.gsb" +"D-87","NAD83 (CSRS98) (NTv2)","E008","Alberta","D122","ABCSRSV4.DAC" +"D-88","NAD27 (1976) (NTv2)","E000","Ontario","D122","may76v20.gsb" +"D800","Normal Sphere","E019",0,0,0,"",0,0,0,0 +"D000","WGS 1984","E012",0,0,0,"Global Definition",0,0,0,0 +"D001","WGS 1972","E005",0,0,0,"Global Definition",3,3,3,1 +"D002","Adindan","E001",-166,-15,204,"MEAN FOR Ethiopia, Sudan",5,5,3,22 +"D003","Adindan","E001",-118,-14,218,"Burkina Faso",25,25,25,1 +"D004","Adindan","E001",-134,-2,210,"Cameroon",25,25,25,1 +"D005","Adindan","E001",-165,-11,206,"Ethiopia",3,3,3,8 +"D006","Adindan","E001",-123,-20,220,"Mali",25,25,25,1 +"D007","Adindan","E001",-128,-18,224,"Senegal",25,25,25,2 +"D008","Adindan","E001",-161,-14,205,"Sudan",3,5,3,14 +"D009","Afgooye","E015",-43,-163,45,"Somalia",25,25,25,1 +"D010","Ain el Abd 1970","E004",-150,-250,-1,"Bahrain",25,25,25,2 +"D011","Ain el Abd 1970","E004",-143,-236,7,"Saudi Arabia",10,10,10,9 +"D012","Anna 1 Astro 1965","E014",-491,-22,435,"Cocos Islands",25,25,25,1 +"D013","Antigua Island Astro 1943","E001",-270,13,62,"Antigua (Leeward Islands)",25,25,25,1 +"D014","Arc 1950","E001",-143,-90,-294,"MEAN Solution",20,33,20,41 +"D015","Arc 1950","E001",-138,-105,-289,"Botswana",3,5,3,9 +"D016","Arc 1950","E001",-153,-5,-292,"Burundi",20,20,20,3 +"D017","Arc 1950","E001",-125,-108,-295,"Lesotho",3,3,8,5 +"D018","Arc 1950","E001",-161,-73,-317,"Malawi",9,24,8,6 +"D019","Arc 1950","E001",-134,-105,-295,"Swaziland",15,15,15,4 +"D020","Arc 1950","E001",-169,-19,-278,"Zaire",25,25,25,2 +"D021","Arc 1950","E001",-147,-74,-283,"Zambia",21,21,27,5 +"D022","Arc 1950","E001",-142,-96,-293,"Zimbabwe",5,8,11,10 +"D023","Arc 1960","E001",-160,-6,-302,"MEAN FOR Kenya, Tanzania",20,20,20,25 +"D024","Ascension Island 1958","E004",-205,107,53,"Ascension Island",25,25,25,2 +"D025","Astro Beacon E 1945","E004",145,75,-272,"Iwo Jima",25,25,25,1 +"D026","Astro DOS 71/4","E004",-320,550,-494,"St Helena Island",25,25,25,1 +"D027","Astro Tern Island (FRIG) 1961","E004",114,-116,-333,"Tern Island",25,25,25,1 +"D028","Astronomical Station 1952","E004",124,-234,-25,"Marcus Island",25,25,25,1 +"D029","Australian Geodetic 1966","E014",-133,-48,148,"Australia & Tasmania",3,3,3,105 +"D030","Australian Geodetic 1984","E014",-134,-48,149,"Australia & Tasmania",2,2,2,90 +"D031","Ayabelle Lighthouse","E001",-79,-129,145,"Djibouti",25,25,25,1 +"D032","Bellevue (IGN)","E004",-127,-769,472,"Efate & Erromango Islands",20,20,20,3 +"D033","Bermuda 1957","E000",-73,213,296,"Bermuda",20,20,20,3 +"D034","Bissau","E004",-173,253,27,"Guinea-Bissau",25,25,25,2 +"D035","Bogota Observatory","E004",307,304,-318,"Colombia",6,5,6,7 +"D036","Bukit Rimpah","E002",-384,664,-48,"Indonesia (Bangka & Belitung Islands)",-1,-1,-1,0 +"D037","Camp Area 
Astro","E004",-104,-129,239,"Antarctica (McMurdo Camp Area)",-1,-1,-1,0 +"D038","Campo Inchauspe 1969","E004",-148,136,90,"Argentina",5,5,5,20 +"D039","Canton Astro 1966","E004",298,-304,-375,"Phoenix Islands",15,15,15,4 +"D040","Cape (Superceded by D517)","E001",-136,-108,-292,"South Africa",3,6,6,5 +"D041","Cape Canaveral","E000",-2,151,181,"MEAN FOR Florida,Bahamas",3,3,3,19 +"D042","Carthage","E001",-263,6,431,"Tunisia",6,9,8,5 +"D043","Chatham Island Astro 1971","E004",175,-38,113,"New Zealand (Chatham Island)",15,15,15,4 +"D044","Chua Astro","E004",-134,229,-29,"Paraguay",6,9,5,6 +"D045","Corrego Alegre","E004",-206,172,-6,"Brazil",5,3,5,17 +"D046","Dabola","E001",-83,37,124,"Guinea",15,15,15,4 +"D047","Djakarta (Batavia)","E002",-377,681,-50,"Indonesia (Sumatra)",3,3,3,5 +"D048","DOS 1968","E004",230,-199,-752,"New Georgia Islands (Gizo Island)",25,25,25,1 +"D049","Easter Island 1967","E004",211,147,111,"Easter Island",25,25,25,1 +"D050","European 1950","E004",-87,-98,-121,"MEAN FOR Europe,",3,8,5,85 +"D051","European 1950","E004",-87,-96,-120,"MEAN FOR Western Europe,",3,3,3,52 +"D052","European 1950","E004",-103,-106,-141,"MEAN FOR Iraq, Israel, Jordan, Lebanon",-1,-1,-1,0 +"D053","European 1950","E004",-104,-101,-140,"Cyprus",15,15,15,4 +"D054","European 1950","E004",-130,-117,-151,"Egypt",6,8,8,14 +"D055","European 1950","E004",-86,-96,-120,"MEAN FOR England, Channel Islands, Ireland",3,3,3,40 +"D056","European 1950","E004",-87,-95,-120,"Finland, Norway",3,5,3,20 +"D057","European 1950","E004",-84,-95,-130,"Greece",25,25,25,2 +"D058","European 1950","E004",-117,-132,-164,"Iran",9,12,11,27 +"D059","European 1950","E004",-97,-103,-120,"Italy (Sardinia)",25,25,25,2 +"D060","European 1950","E004",-97,-88,-135,"Italy (Sicily)",20,20,20,3 +"D061","European 1950","E004",-107,-88,-149,"Malta",25,25,25,1 +"D062","European 1950","E004",-84,-107,-120,"Portugal, Spain",5,6,3,18 +"D063","European 1979","E004",-86,-98,-119,"MEAN Solution",3,3,3,22 +"D064","Fort Thomas 1955","E001",-7,215,225,"Nevis, St. 
Kitts (Leeward Islands)",25,25,25,2 +"D065","Gan 1970","E004",-133,-321,50,"Republic of Maldives",25,25,25,1 +"D066","Geodetic Datum 1949","E004",84,-22,209,"New Zealand",5,3,5,14 +"D067","Graciosa Base SW 1948","E004",-104,167,-38,"Azores (Faial,Graciosa,Pico)",3,3,3,5 +"D068","Guam 1963","E000",-100,-248,259,"Guam",3,3,3,5 +"D069","Gunung Segara","E002",-403,684,41,"Indonesia (Kalimantan)",-1,-1,-1,0 +"D070","GUX 1 Astro","E004",252,-209,-751,"Guadalcanal Island",25,25,25,1 +"D071","Herat North","E004",-333,-222,114,"Afghanistan",-1,-1,-1,0 +"D072","Hjorsey 1955","E004",-73,46,-86,"Iceland",3,3,6,6 +"D073","Hong Kong 1963","E004",-156,-271,-189,"Hong Kong",25,25,25,2 +"D074","Hu-Tzu-Shan","E004",-637,-549,-203,"Taiwan",15,15,15,4 +"D075","Indian","E006",282,726,254,"Bangladesh",10,8,12,6 +"D076","Indian","E901",295,736,257,"India, Nepal",12,10,15,7 +"D077","Indian 1954","E006",217,823,299,"Thailand",15,6,12,11 +"D078","Indian 1975 (Cycle 1)","E006",210,814,289,"Thailand",3,2,3,62 +"D079","Ireland 1965","E011",506,-122,611,"Ireland",3,3,3,7 +"D080","ISTS 061 Astro 1968","E004",-794,119,-298,"South Georgia Islands",25,25,25,1 +"D081","ISTS 073 Astro 1969","E004",208,-435,-229,"Diego Garcia",25,25,25,2 +"D082","Johnston Island 1961","E004",189,-79,-202,"Johnston Island",25,25,25,1 +"D083","Kandawala","E006",-97,787,86,"Sri Lanka",20,20,20,3 +"D084","Kerguelen Island 1949","E004",145,-187,103,"Kerguelen Island",25,25,25,1 +"D085","Kertau 1948","E010",-11,851,5,"West Malaysia & Singapore",10,8,6,6 +"D086","Kusaie Astro 1951","E004",647,1777,-1124,"Caroline Islands",25,25,25,1 +"D087","L. C. 5 Astro 1961","E000",42,124,147,"Cayman Brac Island",25,25,25,1 +"D088","Leigon","E001",-130,29,364,"Ghana",2,3,2,8 +"D089","Liberia 1964","E001",-90,40,88,"Liberia",15,15,15,4 +"D090","Luzon","E000",-133,-77,-51,"Philippines (Excluding Mindanao)",8,11,9,6 +"D091","Luzon","E000",-133,-79,-72,"Philippines (Mindanao)",25,25,25,1 +"D092","Mahe 1971","E001",41,-220,-134,"Mahe Island",25,25,25,1 +"D093","Massawa","E002",639,405,60,"Ethiopia (Eritrea)",25,25,25,1 +"D094","Merchich","E001",31,146,47,"Morocco",5,3,3,9 +"D095","Midway Astro 1961","E004",912,-58,1227,"Midway Islands",25,25,25,1 +"D096","Minna","E001",-81,-84,115,"Cameroon",25,25,25,2 +"D097","Minna","E001",-92,-93,122,"Nigeria",3,6,5,6 +"D098","Montserrat Island Astro 1958","E001",174,359,365,"Montserrat (Leeward Islands)",25,25,25,1 +"D099","M'Poraloko","E001",-74,-130,42,"Gabon",25,25,25,1 +"D100","Nahrwan","E001",-247,-148,369,"Oman (Masirah Island)",25,25,25,2 +"D101","Nahrwan","E001",-243,-192,477,"Saudi Arabia",20,20,20,3 +"D102","Nahrwan","E001",-249,-156,381,"United Arab Emirates",25,25,25,2 +"D103","Naparima BWI","E004",-10,375,165,"Trinidad & Tobago",15,15,15,4 +"D104","North American 1927","E000",-3,142,183,"MEAN FOR Caribbean",3,9,12,15 +"D105","North American 1927","E000",0,125,194,"MEAN FOR Central America",8,3,5,19 +"D106","North American 1927","E000",-10,158,187,"MEAN FOR Canada",15,11,6,112 +"D107","North American 1927","E000",-8,160,176,"MEAN FOR CONUS",5,5,6,405 +"D108","North American 1927","E000",-9,161,179,"MEAN FOR CONUS (East of Mississippi River)",5,5,8,129 +"D109","North American 1927","E000",-8,159,175,"MEAN FOR CONUS (West of Mississippi River)",5,3,3,276 +"D110","North American 1927","E000",-5,135,172,"Alaska",5,9,5,47 +"D111","North American 1927","E000",-4,154,178,"Bahamas (Except San Salvador Island)",5,3,5,11 +"D112","North American 1927","E000",1,140,165,"Bahamas (San Salvador Island)",25,25,25,1 +"D113","North 
American 1927","E000",-7,162,188,"Canada (Alberta, British Columbia)",8,8,6,25 +"D114","North American 1927","E000",-9,157,184,"Canada (Manitoba, Ontario)",9,5,5,25 +"D115","North American 1927","E000",-22,160,190,"Canada (Atlantic Provinces)",6,6,3,37 +"D116","North American 1927","E000",4,159,188,"Canada (Northwest Territories, Saskatchewan)",5,5,3,17 +"D117","North American 1927","E000",-7,139,181,"Canada (Yukon)",5,8,3,8 +"D118","North American 1927","E000",0,125,201,"Canal Zone",20,20,20,3 +"D119","North American 1927","E000",-9,152,178,"Cuba",25,25,25,1 +"D120","North American 1927","E000",11,114,195,"Greenland (Hayes Peninsula)",25,25,25,2 +"D121","North American 1927","E000",-12,130,190,"Mexico",8,6,6,22 +"D122","North American 1983","E008",0,0,0,"Alaska, Canada, CONUS, Central America, Mexico",2,2,2,354 +"D123","Observatorio Metereo. 1939","E004",-425,-169,81,"Azores (Corvo & Flores Islands)",20,20,20,3 +"D124","Old Egyptian 1907","E904",-130,110,-13,"Egypt",3,6,8,14 +"D125","Old Hawaiian (Clarke 1866)","E000",61,-285,-181,"MEAN FOR Hawaii, Kauai, Maui, Oahu",25,20,20,15 +"D126","Old Hawaiian (Clarke 1866)","E000",89,-279,-183,"Hawaii",25,25,25,2 +"D127","Old Hawaiian (Clarke 1866)","E000",45,-290,-172,"Kauai",20,20,20,3 +"D128","Old Hawaiian (Clarke 1866)","E000",65,-290,-190,"Maui",25,25,25,2 +"D129","Old Hawaiian (Clarke 1866)","E000",58,-283,-182,"Oahu",10,6,6,8 +"D130","Oman","E001",-346,-1,224,"Oman",3,3,9,7 +"D131","Ord. Survey G. Britain 1936","E009",375,-111,431,"MEAN Solution",10,10,15,38 +"D132","Ord. Survey G. Britain 1936","E009",371,-112,434,"England",5,5,6,21 +"D133","Ord. Survey G. Britain 1936","E009",371,-111,434,"England, Isle of Man, Wales",10,10,15,25 +"D134","Ord. Survey G. Britain 1936","E009",384,-111,425,"Scotland, Shetland Islands",10,10,10,13 +"D135","Ord. Survey G. Britain 1936","E009",370,-108,434,"Wales",20,20,20,3 +"D136","Pico de las Nieves","E004",-307,-92,127,"Canary Islands",25,25,25,1 +"D137","Pitcairn Astro 1967","E004",185,165,42,"Pitcairn Island",25,25,25,1 +"D138","Point 58","E001",-106,-129,165,"MEAN FOR Burkina Faso & Niger",25,25,25,2 +"D139","Pointe Noire 1948","E001",-148,51,-291,"Congo",25,25,25,1 +"D140","Porto Santo 1936","E004",-499,-249,314,"Porto Santo, Madeira Islands",25,25,25,2 +"D141","Provisional S. American 1956","E004",-288,175,-376,"MEAN Solution",17,27,27,63 +"D142","Provisional S. American 1956","E004",-270,188,-388,"Bolivia",5,11,14,5 +"D143","Provisional S. American 1956","E004",-270,183,-390,"Chile (Northern, Near 19dS)",25,25,25,1 +"D144","Provisional S. American 1956","E004",-305,243,-442,"Chile (Southern, Near 43dS)",20,20,20,3 +"D145","Provisional S. American 1956","E004",-282,169,-371,"Colombia",15,15,15,4 +"D146","Provisional S. American 1956","E004",-278,171,-367,"Ecuador",3,5,3,11 +"D147","Provisional S. American 1956","E004",-298,159,-369,"Guyana",6,14,5,9 +"D148","Provisional S. American 1956","E004",-279,175,-379,"Peru",6,8,12,6 +"D149","Provisional S. American 1956","E004",-295,173,-371,"Venezuela",9,14,15,24 +"D150","Provisional S. 
Chilean 1963","E004",16,196,93,"Chile (South, Near 53dS) (Hito XVIII)",25,25,25,2 +"D151","Puerto Rico","E000",11,72,-101,"Puerto Rico, Virgin Islands",3,3,3,11 +"D152","Qatar National Datum 1995","E004",-127.78098,-283.37477,21.24081,"Qatar",20,20,20,3 +"D153","Qornoq","E004",164,138,-189,"Greenland (South)",25,25,32,2 +"D154","Reunion","E004",94,-948,-1262,"Mascarene Islands",25,25,25,1 +"D155","Rome 1940","E004",-225,-65,9,"Italy (Sardinia)",25,25,25,1 +"D156","Santo (DOS) 1965","E004",170,42,84,"Espirito Santo Island",25,25,25,1 +"D157","Sao Braz","E004",-203,141,53,"Azores (Sao Miguel, Santa Maria Islands)",25,25,25,2 +"D158","Sapper Hill 1943","E004",-355,21,72,"East Falkland Island",1,1,1,5 +"D159","Schwarzeck","E900",616,97,-251,"Namibia",20,20,20,3 +"D160","Selvagem Grande 1938","E004",-289,-124,60,"Salvage Islands",25,25,25,1 +"D161","SGS 85","E905",3,9,-9,"Soviet Geodetic System 1985",10,10,10,1 +"D162","South American 1969 (SAD69)","E907",-57,1,-41,"MEAN Solution,",15,6,9,84 +"D163","South American 1969","E907",-62,-1,-37,"Argentina",5,5,5,10 +"D164","South American 1969","E907",-61,2,-48,"Bolivia",15,15,15,4 +"D165","South American 1969 (old)","E907",-60,-2,-41,"Brazil",3,5,5,22 +"D166","South American 1969","E907",-75,-1,-44,"Chile",15,8,11,9 +"D167","South American 1969","E907",-44,6,-36,"Colombia",6,6,5,7 +"D168","South American 1969","E907",-48,3,-44,"Ecuador",3,3,3,11 +"D169","South American 1969","E907",-47,26,-42,"Ecuador (Baltra, Galapagos)",25,25,25,1 +"D170","South American 1969","E907",-53,3,-47,"Guyana",9,5,5,5 +"D171","South American 1969","E907",-61,2,-33,"Paraguay",15,15,15,4 +"D172","South American 1969","E907",-58,0,-44,"Peru",5,5,5,6 +"D173","South American 1969","E907",-45,12,-33,"Trinidad & Tobago",25,25,25,1 +"D174","South American 1969","E907",-45,8,-33,"Venezuela",3,6,3,5 +"D175","South Asia","E013",7,-10,-26,"Singapore",25,25,25,1 +"D176","Tananarive Observatory 1925","E004",-189,-242,-91,"Madagascar",-1,-1,-1,0 +"D177","Timbalai 1948","E903",-679,669,-48,"Brunei, East Malaysia (Sabah, Sarawak)",10,10,12,8 +"D178","Tokyo","E002",-148,507,685,"MEAN FOR Japan, Korea, Okinawa",20,5,20,31 +"D179","Tokyo","E002",-148,507,685,"Japan",8,5,8,16 +"D180","Tokyo (Cycle 1)","E002",-147,506,687,"South Korea",2,2,2,29 +"D181","Tokyo","E002",-158,507,676,"Okinawa",20,5,20,3 +"D182","Tristan Astro 1968","E004",-632,438,-609,"Tristan da Cunha",25,25,25,1 +"D183","Viti Levu 1916","E001",51,391,-36,"Fiji (Viti Levu Island)",25,25,25,1 +"D184","Wake-Eniwetok 1960","E016",102,52,-38,"Marshall Islands",3,3,3,10 +"D185","Wake Island Astro 1952","E004",276,-57,149,"Wake Atoll",25,25,25,2 +"D186","WGS 1972","E005",0,0,0,"Global Definition",3,3,3,1 +"D187","Yacare","E004",-155,171,37,"Uruguay",-1,-1,-1,0 +"D188","Zanderij","E004",-265,120,-358,"Suriname",5,5,8,5 +"D189","American Samoa 1962","E000",-115,118,426,"American Samoa Is",25,25,25,2 +"D190","Arc 1960","E001",-157,-2,-299,"Kenya",4,3,3,24 +"D191","Arc 1960","E001",-175,-23,-303,"Tanzania",6,9,10,12 +"D192","Coordinate System 1937 of Estonia","E002",374,150,588,"Estonia",2,2,3,19 +"D193","Deception Island","E001",260,12,-147,"Deception Is., Antarctica",20,20,20,3 +"D194","European 1950","E004",-112,-77,-145,"Tunisia",25,25,25,4 +"D195","Hermannskogel","E002",682,-203,480,"Yugoslavia (Pre 1990)",-1,-1,-1,0 +"D196","Indian","E201",283,682,231,"Pakistan",-1,-1,-1,0 +"D197","Indian 1960","E006",198,881,317,"Vietnam (near 16dN)",25,25,25,2 +"D198","Indian 1960","E006",182,915,344,"Con Son Island (Vietnam)",25,25,25,1 
+"D199","Indonesian 1974","E200",-24,-15,5,"Indonesia",25,25,25,1 +"D200","North American 1927","E000",-2,152,149,"Aleutian Is (E of 180dW)",6,8,10,6 +"D201","North American 1927","E000",2,204,105,"Aleutian Is (W of 180dW)",10,8,10,5 +"D202","North Sahara 1959","E001",-186,-93,310,"Algeria",25,25,25,3 +"D203","Pulkovo 1942","E015",28,-130,-95,"Russia",-1,-1,-1,0 +"D204","S-42 (Pulkovo 1942)","E015",28,-121,-77,"Hungary",2,2,2,5 +"D205","S-42 (Pulkovo 1942)","E015",23,-124,-82,"Poland",4,2,4,11 +"D206","S-42 (Pulkovo 1942)","E015",26,-121,-78,"Czechoslovakia (Prior 1 Jan 1993)",3,3,2,6 +"D207","S-42 (Pulkovo 1942)","E015",24,-124,-82,"Latvia",2,2,2,5 +"D208","S-42 (Pulkovo 1942)","E015",15,-130,-84,"Kazakhstan",25,25,25,2 +"D209","S-42 (Pulkovo 1942)","E015",24,-130,-92,"Albania",3,3,3,7 +"D210","S-42 (Pulkovo 1942)","E015",28,-121,-77,"Romania",3,5,3,4 +"D211","S-JTSK","E002",589,76,480,"Czechoslovakia (1 Jan 1993 on)",4,2,3,6 +"D212","Sierra Leone 1960","E001",-88,4,101,"Sierra Leone",15,15,15,8 +"D213","Voirol 1874","E001",-73,-247,227,"Tunisia, Algeria",-1,-1,-1,0 +"D214","Voirol 1960","E001",-123,-206,219,"Algeria",25,25,25,2 +"D215","Indian 1975 (Cycle 0)","E006",209,818,290,"Thailand",12,10,12,6 +"D216","Korean Geodetic System 1995","E012",0,0,0,"South Korea",1,1,1,29 +"D217","Tokyo (Cycle 0)","E002",-146,507,687,"South Korea",8,5,8,12 +"D218","South American Geocentric Reference System (SIRGAS)","E008",0,0,0,"South America",1,1,1,66 +"D219","Old Hawaiian (Int 1924)","E004",201,-228,-346,"MEAN FOR Hawaii, Kauai, Maui, Oahu",25,20,20,15 +"D220","Old Hawaiian (Int 1924)","E004",229,-222,-348,"Hawaii",25,25,25,2 +"D221","Old Hawaiian (Int 1924)","E004",185,-233,-337,"Kauai",20,20,20,3 +"D222","Old Hawaiian (Int 1924)","E004",205,-233,-355,"Maui",25,25,25,2 +"D223","Old Hawaiian (Int 1924)","E004",198,-226,-347,"Oahu",10,6,6,8 +"D333","Tokyo Datum (Japan By Law)","E333",-147.54,507.26,680.47,"Japan",0,0,0,0 +"D334","Japanese Geodetic Datum 2000 (JGD2000)","E008",0.0,0.0,0.0,"Japan",0,0,0,0 +"D340","WGS 1972BE","E005",0,0,1.9,"Global Definition",3,3,3,1,-0,-0,-0.814,-0.38 +"D350","GRS 1980","E008",0.0,0.0,0.0,"Global Definition",0,0,0,0,0.0,0.0,0.0,1.0 +"D360","Pulkovo 1942","E015",27,-135,-84.5,"Russia",-1,-1,-1,0,-0.0,-0.0,-0.554,-0.2263 +"D400","Greece 1987","E008",-199.695,74.815,246.045,"Greece",0,0,0,0 +"D401","RT90 (Superceded by D403)","E002",-424,80,-613,"Sweden",0,0,0,0,-4.40,1.99,-5.18,1.0 +"D402","Indian 1960","E209",198,881,317,"India",0,0,0,0 +"D403","RT90 (Supercedes D401)","E002",414.1055246174,41.3265500042,603.0582474221,"Sweden",0,0,0,0,0.8551163377,-2.1413174055,7.0227298286,1.0 +"D450","ETRS89 (European Terrestrial Reference System 1989)","E008",0.0,0.0,0.0,"Europe",0,0,0,0 +"D500","Deutsches Hauptdreiecksnetz (DHDN), Potsdam (Rauenburg)","E002",580.0,80.9,395.3,"Germany",0,0,0,0,0.35,-0.10,3.58,1.00001112 +"D501","MGI (Militar-Geographische Institut) (Hermannskogel)","E002",575.0,93.0,466.0,"Austria",0,0,0,0,-5.1,-1.6,-5.2,1.0000025 +"D502","CH1903 (Superceded by D514)","E002",660.08,13.55,369.34,"Switzerland",0,0,0,0,0.805,0.578,0.952,1.00000566 +"D503","Belgian 72","E004",-99.059,53.322,-112.486,"Belgium",0,0,0,0,-0.419,0.830,-1.885,0.999999 +"D504","NTF (Nouvelle Triangulation Francaise)","E202",-166.817,-59.821,318.753,"France",0,0,0,0 +"D505","South American 1969 (new)","E907",-66.87,4.37,-38.52,"Brazil",0.43,0.44,0.40,0 +"D506","Rijksdriehoeks Datum","E002",565.04,49.91,465.84,"Netherlands",0,0,0,0,0.4094,-0.3597,1.8685,1.0000040772 +"D507","KKJ 
(Kartastokoordinaattijarjestelma)","E004",93.477,103.453,123.431,"Finland",0,0,0,0,4.801,0.345,-1.376,0.999998503 +"D508","Aratu (Brasil)","E004",-158,315,-148,"Brazil",2,3,2,0 +"D509","Hungarian Datum 1972 (HD-72)","E203",56.0,-75.77,-15.31,"Hungary",0,0,0,0,-0.37,-0.20,-0.21,1.00000101 +"D510","NZGD 1949 (7 terms)","E004",59.47,-5.04,187.44,"New Zealand",0,0,0,0,-0.47,0.10,-1.024,0.9999954007 +"D511","NZGD 1949 (3 terms)","E004",54.4,-20.1,183.1,"New Zealand",0,0,0,0 +"D512","NZGD 2000 (7 terms)","E008",0.0,0.0,0.0,"New Zealand",0,0,0,0,0.0,0.0,0.0,1.0 +"D513","NGO 1948","E206", 278.2932, 93.0497, 474.4745,"Norway",0,0,0,0, -7.8885, -0.0499, 6.6098, 6.2050 +"D514","CH1903+ (Supercedes D502)","E002",674.374,15.056,405.346,"Switzerland",0,0,0,0 +"D515","SL datum 95","E006",-2.0553,763.5581,87.6682,"Sri Lanka",0,0,0,0,-0.198003,-1.706361,-3.466120,-0.0315 +"D516","SL datum 1999","E006",-0.2933,766.9499,87.7131,"Sri Lanka",0,0,0,0,-0.1957040,-1.6950677,-3.4730161,-0.0393 +"D517","Cape (Supercedes D040)","E205",-134.73,-110.92,-292.66,"South Africa",0,0,0,0 +"D518","Hartebeesthoek94","E012",0,0,0,"South Africa",0,0,0,0 +"D519","Abidjan 1987","E001",-124.76,53,466.79,"Cote d'Ivoire",0,0,0,0 +"D520","Accra","E204",-199,32,322,"Ghana",0,0,0,0 +"D521","Azores Central 1948","E004",-104,167,-38,"Azores",0,0,0,0 +"D522","Azores Oriental 1940","E004",-203,141,53,"Azores",0,0,0,0 +"D523","Azores Occidental 1939","E004",-422.651,-172.995,84.02,"Azores",0,0,0,0 +"D524","Barbados 1938","E001",31.95,300.99,419.19,"Barbados",0,0,0,0 +"D525","Camacupa","E001",-50.9,-347.6,-231,"Angola",0,0,0,0 +"D526","Chos Malal 1914","E004",5.5,176.7,141.4,"Argentina",0,0,0,0 +"D527","Conakry 1905","E202",-23.0,259.0,-9.0,"Guinea",0,0,0,0 +"D528","Dealul Piscului 1933","E004",103.25,-100.40,-307.19,"Romania",0,0,0,0 +"D529","Dealul Piscului 1970","E015",44.107,-116.147,-54.648,"Romania",0,0,0,0 +"D530","Deir ez Zor","E202",-190.421,8.532,238.69,"Syria",0,0,0,0 +"D531","Dominica 1945","E001",725,685,536,"Dominica",0,0,0,0 +"D532","Kalianpur 1937","E209",214,804,268,"India",0,0,0,0 +"D533","Kalianpur 1962","E210",275.57,676.78,229.6,"Pakistan",0,0,0,0 +"D534","Kalianpur 1975","E216",295,736,257,"India",0,0,0,0 +"D535","SWEREF99","E008",0.0,0.0,0.0,"Sweden",0,0,0,0 +"D536","GDA94 (Geocentric Datum of Australia 1994)","E008",0.0,0.0,0.0,"Australia",0,0,0,0 +"D537","ETRF89 (European Terrestrial Reference Frame 1989)","E012",0.0,0.0,0.0,"Europe",0,0,0,0 +"D538","Bermuda 2000","E012",0.0,0.0,0.0,"Bermuda",0,0,0,0 +"D539","Samboja","E002",-404.78,-685.68,-45.47,"Indonesia",0,0,0,0 +"D540","Australian Antarctic 1998","E008",0.0,0.0,0.0,"Australian Antarctic Territory",0,0,0,0 +"D541","Everest (India and Nepal)","E226",295,736,257,"India",0,0,0,0 +"D542","Korea Datum 1985","E002",-323,309,653,"South Korea",0,0,0,0 +"D543","Israel","E008",-48,55,52,"Israel",0,0,0,0 +"D544","Lao National Datum 1997","E015",46.012,-127.108,-38.131,"Laos",0,0,0,0 +"D545","Hong Kong 1980 Datum","E004",-162.619,-276.959,-161.764,"Hong Kong",0,0,0,0,-0.067753,2.243649,1.158827,-1.094246 +"D546","HITO XVIII","E004",18.38,192.45,96.82,"Argentina",0,0,0,0,-0.056,0.142,0.200,-0.0013 +"D547","GDM 2000MRSO","E008",1.69276,-1.92994,2.07108,"West Malaysia",0,0,0,0,0.03515,-0.02858,-0.00617,0.24859 +"D548","GDM 2000BRSO","E008",-1.04278,-0.30902,0.57544,"East Malaysia",0,0,0,0,0.01102,-0.03471,0.02865,-0.01934 +"D549","Gulshan 303","E209",283.729,735.942,261.143,"Bangladesh",0,0,0,0 +"D551","CHTRF95 (Swiss Terrestrial Reference Frame 
1995)","E008",0.0,0.0,0.0,"Switzerland",0,0,0,0 +"D600","D-PAF (Orbits)","E600",0.082,-0.502,-0.224,"Satellite Orbits",0,0,0,0,0.30444,0.04424,0.00609,0.9999999937 +"D601","Test Data Set 1","E601",0.071,-0.509,-0.166,"Test 1",0,0,0,0,0.0179,-0.0005,0.0067,0.999999983 +"D602","Test Data Set 2","E602",580.0,80.9,399.8,"Test 2",0,0,0,0,0.35,0.1,3.026,1.0000113470025 +"D610","US Standard Datum (USA, NADCON5)","E000","Conterminous U.S.","D611","nadcon5.ussd.nad27.conus.lon.trn.20160901.b","nadcon5.ussd.nad27.conus.lat.trn.20160901.b" +"D611","NAD27 (USA, NADCON5)","E000","Conterminous U.S.","D122","nadcon5.nad27.nad83_1986.conus.lon.trn.20160901.b","nadcon5.nad27.nad83_1986.conus.lat.trn.20160901.b" +"D612","NAD83 (HARN) (USA, NADCON5)","E008","Conterminous U.S.","-D122","nadcon5.nad83_1986.nad83_harn.conus.lon.trn.20160901.b","nadcon5.nad83_1986.nad83_harn.conus.lat.trn.20160901.b" +"D613","NAD83 (FBN) (USA, NADCON5)","E008","Conterminous U.S.","-D612","nadcon5.nad83_harn.nad83_fbn.conus.lon.trn.20160901.b","nadcon5.nad83_harn.nad83_fbn.conus.lat.trn.20160901.b","nadcon5.nad83_harn.nad83_fbn.conus.eht.trn.20160901.b" +"D614","NAD83 (NSRS 2007) (USA, NADCON5)","E008","Conterminous U.S.","-D613","nadcon5.nad83_fbn.nad83_2007.conus.lon.trn.20160901.b","nadcon5.nad83_fbn.nad83_2007.conus.lat.trn.20160901.b","nadcon5.nad83_fbn.nad83_2007.conus.eht.trn.20160901.b" +"D615","NAD83 (2011) (USA, NADCON5)","E008","Conterminous U.S.","-D614","nadcon5.nad83_2007.nad83_2011.conus.lon.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.conus.lat.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.conus.eht.trn.20160901.b" +"D620","Puerto Rico Datum, adjustment of 1940 (USA, NADCON5)","E000","Puerto Rico, Virgin Islands","D122","nadcon5.pr40.nad83_1986.prvi.lon.trn.20160901.b","nadcon5.pr40.nad83_1986.prvi.lat.trn.20160901.b" +"D621","NAD83 (1993) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D122","nadcon5.nad83_1986.nad83_1993.prvi.lon.trn.20160901.b","nadcon5.nad83_1986.nad83_1993.prvi.lat.trn.20160901.b" +"D622","NAD83 (1997) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D621","nadcon5.nad83_1993.nad83_1997.prvi.lon.trn.20160901.b","nadcon5.nad83_1993.nad83_1997.prvi.lat.trn.20160901.b","nadcon5.nad83_1993.nad83_1997.prvi.eht.trn.20160901.b" +"D623","NAD83 (2002) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D622","nadcon5.nad83_1997.nad83_2002.prvi.lon.trn.20160901.b","nadcon5.nad83_1997.nad83_2002.prvi.lat.trn.20160901.b","nadcon5.nad83_1997.nad83_2002.prvi.eht.trn.20160901.b" +"D624","NAD83 (NSRS 2007) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D623","nadcon5.nad83_2002.nad83_2007.prvi.lon.trn.20160901.b","nadcon5.nad83_2002.nad83_2007.prvi.lat.trn.20160901.b","nadcon5.nad83_2002.nad83_2007.prvi.eht.trn.20160901.b" +"D625","NAD83 (2011) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D624","nadcon5.nad83_2007.nad83_2011.prvi.lon.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.prvi.lat.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.prvi.eht.trn.20160901.b" +"D630","Old Hawaiian Datum (USA, NADCON5)","E000","Hawaii","D122","nadcon5.ohd.nad83_1986.hawaii.lon.trn.20160901.b","nadcon5.ohd.nad83_1986.hawaii.lat.trn.20160901.b" +"D631","NAD83 (1993) (USA, NADCON5)","E008","Hawaii","-D122","nadcon5.nad83_1986.nad83_1993.hawaii.lon.trn.20160901.b","nadcon5.nad83_1986.nad83_1993.hawaii.lat.trn.20160901.b" +"D632","NAD83 (PA11) (USA, 
NADCON5)","E008","Hawaii","-D631","nadcon5.nad83_1993.nad83_pa11.hawaii.lon.trn.20160901.b","nadcon5.nad83_1993.nad83_pa11.hawaii.lat.trn.20160901.b","nadcon5.nad83_1993.nad83_pa11.hawaii.eht.trn.20160901.b" +"D640","NAD27 (USA, NADCON5)","E000","Alaska","D122","nadcon5.nad27.nad83_1986.alaska.lon.trn.20160901.b","nadcon5.nad27.nad83_1986.alaska.lat.trn.20160901.b" +"D641","NAD83 (1992) (USA, NADCON5)","E008","Alaska","-D122","nadcon5.nad83_1986.nad83_1992.alaska.lon.trn.20160901.b","nadcon5.nad83_1986.nad83_1992.alaska.lat.trn.20160901.b" +"D642","NAD83 (NSRS 2007) (USA, NADCON5)","E008","Alaska","-D641","nadcon5.nad83_1992.nad83_2007.alaska.lon.trn.20160901.b","nadcon5.nad83_1992.nad83_2007.alaska.lat.trn.20160901.b","nadcon5.nad83_1992.nad83_2007.alaska.eht.trn.20160901.b" +"D643","NAD83 (2011) (USA, NADCON5)","E008","Alaska","-D642","nadcon5.nad83_2007.nad83_2011.alaska.lon.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.alaska.lat.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.alaska.eht.trn.20160901.b" +"D650","St. Paul 1897 (USA, NADCON5)","E000","St. Paul, Alaska","D651","nadcon5.sp1897.sp1952.stpaul.lon.trn.20160901.b","nadcon5.sp1897.sp1952.stpaul.lat.trn.20160901.b" +"D651","St. Paul 1952 (USA, NADCON5)","E000","St. Paul, Alaska","D122","nadcon5.sp1952.nad83_1986.stpaul.lon.trn.20160901.b","nadcon5.sp1952.nad83_1986.stpaul.lat.trn.20160901.b" +"D652","St. George 1897 (USA, NADCON5)","E000","St. George, Alaska","D653","nadcon5.sg1897.sg1952.stgeorge.lon.trn.20160901.b","nadcon5.sg1897.sg1952.stgeorge.lat.trn.20160901.b" +"D653","St. George 1952 (USA, NADCON5)","E000","St. George, Alaska","D122","nadcon5.sg1952.nad83_1986.stgeorge.lon.trn.20160901.b","nadcon5.sg1952.nad83_1986.stgeorge.lat.trn.20160901.b" +"D654","St. Lawrence 1952 (USA, NADCON5)","E000","St. 
Lawrence, Alaska","D122","nadcon5.sl1952.nad83_1986.stlawrence.lon.trn.20160901.b","nadcon5.sl1952.nad83_1986.stlawrence.lat.trn.20160901.b" +"D660","American Samoa 1962 (USA, NADCON5)","E000","American Samoa","D122","nadcon5.as62.nad83_1993.as.lon.trn.20160901.b","nadcon5.as62.nad83_1993.as.lat.trn.20160901.b" +"D661","NAD83 (2002) (USA, NADCON5)","E008","American Samoa","-D122","nadcon5.nad83_1993.nad83_2002.as.lon.trn.20160901.b","nadcon5.nad83_1993.nad83_2002.as.lat.trn.20160901.b","nadcon5.nad83_1993.nad83_2002.as.eht.trn.20160901.b" +"D662","NAD83 (PA11) (USA, NADCON5)","E008","American Samoa","-D661","nadcon5.nad83_2002.nad83_pa11.as.lon.trn.20160901.b","nadcon5.nad83_2002.nad83_pa11.as.lat.trn.20160901.b","nadcon5.nad83_2002.nad83_pa11.as.eht.trn.20160901.b" +"D670","Guam 1963 (USA, NADCON5)","E000","Guam and the Commonwealth of the Northern Mariana Islands","D122","nadcon5.gu63.nad83_1993.guamcnmi.lon.trn.20160901.b","nadcon5.gu63.nad83_1993.guamcnmi.lat.trn.20160901.b" +"D671","NAD83 (2002) (USA, NADCON5)","E008","Guam and the Commonwealth of the Northern Mariana Islands","-D122","nadcon5.nad83_1993.nad83_2002.guamcnmi.lon.trn.20160901.b","nadcon5.nad83_1993.nad83_2002.guamcnmi.lat.trn.20160901.b","nadcon5.nad83_1993.nad83_2002.guamcnmi.eht.trn.20160901.b" +"D672","NAD83 (MA11) (USA, NADCON5)","E008","Guam and the Commonwealth of the Northern Mariana Islands","-D671","nadcon5.nad83_2002.nad83_ma11.guamcnmi.lon.trn.20160901.b","nadcon5.nad83_2002.nad83_ma11.guamcnmi.lat.trn.20160901.b","nadcon5.nad83_2002.nad83_ma11.guamcnmi.eht.trn.20160901.b" +"D700","MODIS","E700",0,0,0,"Global Definition",0,0,0,0 +"D701","NAD83 (Deprecated - use D122)","E008",0,0,0,"Alabama",2,2,2,354 +"D702","NAD83 HARN (USA, NADCON)","E008","Alabama","D122","alhpgn.los","alhpgn.las" +"D703","NAD83 (Deprecated - use D122)","E008",0,0,0,"Arkansas",2,2,2,354 +"D704","NAD83 HARN (USA, NADCON)","E008","Arkansas","D122","arhpgn.los","arhpgn.las" +"D705","NAD83 (Deprecated - use D122)","E008",0,0,0,"Arizona",2,2,2,354 +"D706","NAD83 HARN (USA, NADCON)","E008","Arizona","D122","azhpgn.los","azhpgn.las" +"D707","NAD83 (Deprecated - use D122)","E008",0,0,0,"California (North of 37dN)",2,2,2,354 +"D708","NAD83 HARN (USA, NADCON)","E008","California (North of 37dN)","D122","cnhpgn.los","cnhpgn.las" +"D709","NAD83 (Deprecated - use D122)","E008",0,0,0,"California (South of 37dN)",2,2,2,354 +"D710","NAD83 HARN (USA, NADCON)","E008","California (South of 37dN)","D122","cshpgn.los","cshpgn.las" +"D711","NAD83 (Deprecated - use D122)","E008",0,0,0,"Colorado",2,2,2,354 +"D712","NAD83 HARN (USA, NADCON)","E008","Colorado","D122","cohpgn.los","cohpgn.las" +"D713","NAD83 (Deprecated - use D122)","E008",0,0,0,"Florida",2,2,2,354 +"D714","NAD83 HARN (USA, NADCON)","E008","Florida","D122","flhpgn.los","flhpgn.las" +"D715","NAD83 (Deprecated - use D122)","E008",0,0,0,"Georgia",2,2,2,354 +"D716","NAD83 HARN (USA, NADCON)","E008","Georgia","D122","gahpgn.los","gahpgn.las" +"D717","Guam 1963 (Deprecated - use D068)","E000",-100,-248,259,"Guam",3,3,3,5 +"D718","NAD83 HARN (USA, NADCON)","E008","Guam","D068","guhpgn.los","guhpgn.las" +"D719","NAD83 (Deprecated - use D122)","E008",0,0,0,"Hawaii",2,2,2,354 +"D720","NAD83 HARN (USA, NADCON)","E008","Hawaii","D122","hihpgn.los","hihpgn.las" +"D721","NAD83 (Deprecated - use D122)","E008",0,0,0,"Idaho-Montana (East of 113dW)",2,2,2,354 +"D722","NAD83 HARN (USA, NADCON)","E008","Idaho-Montana (East of 113dW)","D122","emhpgn.los","emhpgn.las" +"D723","NAD83 (Deprecated - use 
D122)","E008",0,0,0,"Idaho-Montana (West of 113dW)",2,2,2,354 +"D724","NAD83 HARN (USA, NADCON)","E008","Idaho-Montana (West of 113dW)","D122","wmhpgn.los","wmhpgn.las" +"D725","NAD83 (Deprecated - use D122)","E008",0,0,0,"Iowa",2,2,2,354 +"D726","NAD83 HARN (USA, NADCON)","E008","Iowa","D122","iahpgn.los","iahpgn.las" +"D727","NAD83 (Deprecated - use D122)","E008",0,0,0,"Illinois",2,2,2,354 +"D728","NAD83 HARN (USA, NADCON)","E008","Illinois","D122","ilhpgn.los","ilhpgn.las" +"D729","NAD83 (Deprecated - use D122)","E008",0,0,0,"Indiana",2,2,2,354 +"D730","NAD83 HARN (USA, NADCON)","E008","Indiana","D122","inhpgn.los","inhpgn.las" +"D731","NAD83 (Deprecated - use D122)","E008",0,0,0,"Kansas",2,2,2,354 +"D732","NAD83 HARN (USA, NADCON)","E008","Kansas","D122","kshpgn.los","kshpgn.las" +"D733","NAD83 (Deprecated - use D122)","E008",0,0,0,"Kentucky",2,2,2,354 +"D734","NAD83 HARN (USA, NADCON)","E008","Kentucky","D122","kyhpgn.los","kyhpgn.las" +"D735","NAD83 (Deprecated - use D122)","E008",0,0,0,"Louisiana",2,2,2,354 +"D736","NAD83 HARN (USA, NADCON)","E008","Louisiana","D122","lahpgn.los","lahpgn.las" +"D737","NAD83 (Deprecated - use D122)","E008",0,0,0,"Maryland-Delaware",2,2,2,354 +"D738","NAD83 HARN (USA, NADCON)","E008","Maryland-Delaware","D122","mdhpgn.los","mdhpgn.las" +"D739","NAD83 (Deprecated - use D122)","E008",0,0,0,"Maine",2,2,2,354 +"D740","NAD83 HARN (USA, NADCON)","E008","Maine","D122","mehpgn.los","mehpgn.las" +"D741","NAD83 (Deprecated - use D122)","E008",0,0,0,"Michigan",2,2,2,354 +"D742","NAD83 HARN (USA, NADCON)","E008","Michigan","D122","mihpgn.los","mihpgn.las" +"D743","NAD83 (Deprecated - use D122)","E008",0,0,0,"Minnesota",2,2,2,354 +"D744","NAD83 HARN (USA, NADCON)","E008","Minnesota","D122","mnhpgn.los","mnhpgn.las" +"D745","NAD83 (Deprecated - use D122)","E008",0,0,0,"Mississippi",2,2,2,354 +"D746","NAD83 HARN (USA, NADCON)","E008","Mississippi","D122","mshpgn.los","mshpgn.las" +"D747","NAD83 (Deprecated - use D122)","E008",0,0,0,"Missouri",2,2,2,354 +"D748","NAD83 HARN (USA, NADCON)","E008","Missouri","D122","mohpgn.los","mohpgn.las" +"D749","NAD83 (Deprecated - use D122)","E008",0,0,0,"Nebraska",2,2,2,354 +"D750","NAD83 HARN (USA, NADCON)","E008","Nebraska","D122","nbhpgn.los","nbhpgn.las" +"D751","NAD83 (Deprecated - use D122)","E008",0,0,0,"Nevada",2,2,2,354 +"D752","NAD83 HARN (USA, NADCON)","E008","Nevada","D122","nvhpgn.los","nvhpgn.las" +"D753","NAD83 (Deprecated - use D122)","E008",0,0,0,"New England (CT,MA,NH,RI,VT",2,2,2,354 +"D754","NAD83 HARN (USA, NADCON)","E008","New England (CT,MA,NH,RI,VT)","D122","nehpgn.los","nehpgn.las" +"D755","NAD83 (Deprecated - use D122)","E008",0,0,0,"New Jersey",2,2,2,354 +"D756","NAD83 HARN (USA, NADCON)","E008","New Jersey","D122","njhpgn.los","njhpgn.las" +"D757","NAD83 (Deprecated - use D122)","E008",0,0,0,"New Mexico",2,2,2,354 +"D758","NAD83 HARN (USA, NADCON)","E008","New Mexico","D122","nmhpgn.los","nmhpgn.las" +"D759","NAD83 (Deprecated - use D122)","E008",0,0,0,"New York",2,2,2,354 +"D760","NAD83 HARN (USA, NADCON)","E008","New York","D122","nyhpgn.los","nyhpgn.las" +"D761","NAD83 (Deprecated - use D122)","E008",0,0,0,"North Carolina",2,2,2,354 +"D762","NAD83 HARN (USA, NADCON)","E008","North Carolina","D122","nchpgn.los","nchpgn.las" +"D763","NAD83 (Deprecated - use D122)","E008",0,0,0,"North Dakota",2,2,2,354 +"D764","NAD83 HARN (USA, NADCON)","E008","North Dakota","D122","ndhpgn.los","ndhpgn.las" +"D765","NAD83 (Deprecated - use D122)","E008",0,0,0,"Ohio",2,2,2,354 +"D766","NAD83 HARN (USA, 
NADCON)","E008","Ohio","D122","ohhpgn.los","ohhpgn.las" +"D767","NAD83 (Deprecated - use D122)","E008",0,0,0,"Oklahoma",2,2,2,354 +"D768","NAD83 HARN (USA, NADCON)","E008","Oklahoma","D122","okhpgn.los","okhpgn.las" +"D769","NAD83 (Deprecated - use D122)","E008",0,0,0,"Pennsylvania",2,2,2,354 +"D770","NAD83 HARN (USA, NADCON)","E008","Pennsylvania","D122","pahpgn.los","pahpgn.las" +"D771","NAD83 (Deprecated - use D122)","E008",0,0,0,"Puerto Rico-Virgin Is",2,2,2,354 +"D772","NAD83 HARN (USA, NADCON)","E008","Puerto Rico-Virgin Is","D122","pvhpgn.los","pvhpgn.las" +"D773","American Samoa 1962 (Deprecated - use D189)","E000",-115,118,426,"Samoa (Eastern Islands)",25,25,25,2 +"D774","NAD83 HARN (USA, NADCON)","E008","Samoa (Eastern Islands)","D189","eshpgn.los","eshpgn.las" +"D775","American Samoa 1962 (Deprecated - use D189)","E000",-115,118,426,"Samoa (Western Islands)",25,25,25,2 +"D776","NAD83 HARN (USA, NADCON)","E008","Samoa (Western Islands)","D189","wshpgn.los","wshpgn.las" +"D777","NAD83 (Deprecated - use D122)","E008",0,0,0,"South Carolina",2,2,2,354 +"D778","NAD83 HARN (USA, NADCON)","E008","South Carolina","D122","schpgn.los","schpgn.las" +"D779","NAD83 (Deprecated - use D122)","E008",0,0,0,"South Dakota",2,2,2,354 +"D780","NAD83 HARN (USA, NADCON)","E008","South Dakota","D122","sdhpgn.los","sdhpgn.las" +"D781","NAD83 (Deprecated - use D122)","E008",0,0,0,"Tennessee",2,2,2,354 +"D782","NAD83 HARN (USA, NADCON)","E008","Tennessee","D122","tnhpgn.los","tnhpgn.las" +"D783","NAD83 (Deprecated - use D122)","E008",0,0,0,"Texas (East of 100dW)",2,2,2,354 +"D784","NAD83 HARN (USA, NADCON)","E008","Texas (East of 100dW)","D122","ethpgn.los","ethpgn.las" +"D785","NAD83 (Deprecated - use D122)","E008",0,0,0,"Texas (West of 100dW)",2,2,2,354 +"D786","NAD83 HARN (USA, NADCON)","E008","Texas (West of 100dW)","D122","wthpgn.los","wthpgn.las" +"D787","NAD83 (Deprecated - use D122)","E008",0,0,0,"Utah",2,2,2,354 +"D788","NAD83 HARN (USA, NADCON)","E008","Utah","D122","uthpgn.los","uthpgn.las" +"D789","NAD83 (Deprecated - use D122)","E008",0,0,0,"Virginia",2,2,2,354 +"D790","NAD83 HARN (USA, NADCON)","E008","Virginia","D122","vahpgn.los","vahpgn.las" +"D791","NAD83 (Deprecated - use D122)","E008",0,0,0,"Washington-Oregon",2,2,2,354 +"D792","NAD83 HARN (USA, NADCON)","E008","Washington-Oregon","D122","wohpgn.los","wohpgn.las" +"D793","NAD83 (Deprecated - use D122)","E008",0,0,0,"West Virginia",2,2,2,354 +"D794","NAD83 HARN (USA, NADCON)","E008","West Virginia","D122","wvhpgn.los","wvhpgn.las" +"D795","NAD83 (Deprecated - use D122)","E008",0,0,0,"Wisconsin",2,2,2,354 +"D796","NAD83 HARN (USA, NADCON)","E008","Wisconsin","D122","wihpgn.los","wihpgn.las" +"D797","NAD83 (Deprecated - use D122)","E008",0,0,0,"Wyoming",2,2,2,354 +"D798","NAD83 HARN (USA, NADCON)","E008","Wyoming","D122","wyhpgn.los","wyhpgn.las" +"D888","Lebanon Stereographic","E012",154.2668777,107.2190767,-263.01161212,"Lebanon",0,0,0,0,0.310716,0.218736,0.191232,0.99999913 +"D889","Lebanon Lambert","E202",190.9999,133.32473,-232.8391,"Lebanon",0,0,0,0,0.307836,0.216756,0.189036,0.9995341 +"D890","Luxembourg (LUREF)","E004",-192.986,13.673,-39.309,"Luxembourg",0,0,0,0,0.409900,2.933200,-2.688100,1.00000043 +"D891","Datum 73","E004",-223.237,110.193,36.649,"Portugal",0,0,0,0 +"D892","Datum Lisboa","E004",-304.046,-60.576,103.640,"Portugal",0,0,0,0 +"D893","PDO Survey Datum 1993","E001",-180.624,-225.516,173.919,"Oman",0,0,0,0,0.80970,1.89755,-8.33604,16.71006 +"D898","TWD97","E008",0,0,0,"Taiwan",0,0,0,0,0.0,0.0,0.0,0.0 
+"D899","TWD67","E899",-752,-358,-179,"Taiwan",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329 +"D886","Reseau Geodesique Francais 1993","E899",-752,-358,-179,"France",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329 +"D887","Reseau National Belge 1972","E899",-752,-358,-179,"Belgium",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329 +"D819","Xian 1980","E224",0,0,0,"China",0,0,0,0,0,0,0,0 +"D820","Korea 2000","E008",0.0,0.0,0.0,"South Korea",0,0,0,0 +"D821","Pulkovo 1995","E015",24.47,-130.89,-81.56,"Russian Federation",0,0,0,0,0,0,-0.13,-0.22 +"D822","Beijing 1954","E015",15.8,-154.4,-82.3,"China",0,0,0,0 +"D823","Stockholm 1938 (RT38)","E002",0.0,0.0,0.0,"Sweden",0,0,0,0 +"D824","Greenland 1996 (GR96)","E008",0.0,0.0,0.0,"Greenland",0,0,0,0 +"D825","Libyan Geodetic Datum 2006 (LGD2006)","E004",-208.406,-109.878,-2.5764,"Libya",0,0,0,0 +"D826","Reseau Geodesique de la Polynesie Francaise (RGPF)","E008",0.072,-0.507,-0.245,"French Polynesia",0,0,0,0,0.0183,-0.0003,0.007,-0.0093 +"D827","IGC 1962 6th Parallel South","E001",0.0,0.0,0.0,"Democratic Republic of the Congo - adjacent to 6th parallel south",0,0,0,0 +"D828","Geodetic Datum of Malaysia (GDM)","E008",0.0,0.0,0.0,"Malaysia",0,0,0,0 +"D829","New Beijing","E015",0.0,0.0,0.0,"China",0,0,0,0 +"D830","Turkish National Reference Frame (TUKREF)","E008",0.0,0.0,0.0,"Turkey",0,0,0,0 +"D831","Bhutan National Geodetic Datum (DRUKREF)","E008",0.0,0.0,0.0,"Bhutan",0,0,0,0 +"D832","Ukraine 2000","E015",0.0,0.0,0.0,"Ukraine",0,0,0,0 +"D833","Japanese Geodetic Datum 2011 (JGD2011)","E008",0.0,0.0,0.0,"Japan",0,0,0,0 +"D834","Posiciones Geodesicas Argentinas 1998 (POSGAR 98)","E008",0.0,0.0,0.0,"Argentina",0,0,0,0 +"D835","Posiciones Geodesicas Argentinas 1994 (POSGAR 94)","E012",0.0,0.0,0.0,"Argentina",0,0,0,0 +"D836","Posiciones Geodesicas Argentinas 2007 (POSGAR 07)","E008",0.0,0.0,0.0,"Argentina",0,0,0,0 +"D837","Datum Geodesi Nasional 1995 (DGN95)","E012",0.0,0.0,0.0,"Indonesia",0,0,0,0 +"D838","Korea 1995","E012",0.0,0.0,0.0,"South Korea",0,0,0,0 +"D839","Institut Geographique du Congo Belge (IGCB) 1955","E001",-79.9,-158,-168.9,"The Democratic Republic of the Congo (Zaire) - Lower Congo",0,0,0,0 +"D894","WGS 1984 semi-major","E020",0,0,0,"WGS 1984 Auxiliary Sphere semi-major axis",0,0,0,0 +"D895","ATS77","E910",-95.323,166.098,-69.942,"Maritime Provinces",0,0,0,0,0.215,1.031,-0.047,1.922 +"D896","GosatCAIL1B+ EarthRadius","E025",0,0,0,"GosatCAIL1B+ EarthRadius",0,0,0,0 +"D897","Myanmar","E227",247,785,277,"Myanmar",0,0,0,0 +"D900","China 2000","E231",0,0,0,"China 2000",0,0,0,0 +"D901","Nouvelle Triangulation Francaise (grid shift)","E202","France","-D350","ntf_r93.gsb" +"D902","PRS92","E000",-127.62153,-67.24339,-47.04738,"Philippines Reference System 1992",0,0,0,0,3.06803,-4.90297,-1.57807,-1.06002 +"D903","North American 1983 2011","E008",0,0,0,"Alaska, Canada, CONUS, Central America, Mexico",2,2,2,354 diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/pci_ellips.txt b/.venv/lib/python3.12/site-packages/fiona/gdal_data/pci_ellips.txt new file mode 100644 index 00000000..ff6ae298 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/pci_ellips.txt @@ -0,0 +1,129 @@ +! +! From https://github.com/OSGeo/gdal/issues/8034, June 30, 2023 +! +! I, Michael Goldberg, on behalf of PCI Geomatics agree to allow the ellips.txt +! and datum.txt file to be distributed under the GDAL open source license. +! +! Michael Goldberg +! Development Manager +! PCI Geomatics +! +! PCI Ellipsoid Database +! 
---------------------- +! This file lists the different reference ellipsoids that may +! be used by PCI coordinate systems. Ellipsoid entries in datum.txt +! refer to entries in this file. +! +! Each ellipsoid is listed on a single line. The format of each record +! is as follows: +! +! Ellipsoid_code, Description_string, Semimajor_axis_m, Semiminor_axis_m [,extra comments] +! +! Ellipsoid_code is the code that uniquely identifies the ellipsoid +! within PCI software +! Description_string is a short description that helps users to identify +! the ellipsoid. It may be listed, for example, in a dropdown list in +! a PCI dialog box. +! Semimajor_axis_m is the ellipsoid semi-major (equatorial) axis length in metres. +! Semiminor_axis_m is the ellipsoid semi-minor (polar) axis length in metres. +! +! Any extra fields may be added after these four elements if desired; they will +! not be read by PCI software but may be helpful for the user. +! +! NOTE: The range of "E908" to "E998" is set aside for +! the use of local customer development. +! +"E000","Clarke 1866",6378206.4,6356583.8 +"E001","Clarke 1880 (RGS)",6378249.145,6356514.86955 +"E002","Bessel 1841",6377397.155,6356078.96284 +"E003","New International 1967",6378157.5,6356772.2 +"E004","International 1924",6378388.,6356911.94613 +"E005","WGS 72",6378135.,6356750.519915 +"E006","Everest (India 1830)",6377276.3452,6356075.4133 +"E007","WGS 66",6378145.,6356759.769356 +"E008","GRS 1980",6378137.,6356752.31414 +"E009","Airy 1830",6377563.396,6356256.91 +"E010","Everest (W. Malaysia and Singapore 1948)",6377304.063,6356103.039 +"E011","Modified Airy",6377340.189,6356034.448 +"E012","WGS 84",6378137.,6356752.314245 +"E013","Modified Fischer 1960",6378155.,6356773.3205 +"E014","Australian National 1965",6378160.,6356774.719 +"E015","Krassovsky 1940",6378245.,6356863.0188 +"E016","Hough 1960",6378270.,6356794.343479 +"E017","Fischer 1960",6378166.,6356784.283666 +"E018","Fischer 1968",6378150.,6356768.337303 +"E019","Normal Sphere",6370997.,6370997. +"E020","WGS 84 semimajor axis",6378137.,6378137. 
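+!
+! Illustrative note (assumes comment lines are skipped wherever they appear,
+! as in the header above): from the stored semi-major axis a and semi-minor
+! axis b, the flattening of an ellipsoid follows as f = (a - b) / a.
+! For "E008","GRS 1980",6378137.,6356752.31414 this gives
+! 1/f = 6378137 / (6378137 - 6356752.31414) ~= 298.257222, matching the
+! GRS 1980 defining value of 1/f = 298.257222101.
+!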
+"E021","WGS 84 semiminor axis",6356752.314245,6356752.314245 +"E022","Clarke 1866 Authalic Sphere", 6370997.000000, 6370997.000000 +"E023","GRS 1980 Authalic Sphere", 6371007.000000, 6371007.000000 +"E024","International 1924 Authalic Sphere", 6371228.000000, 6371228.000000 +"E025","GosatCAIL1B+ EarthRadius",6371008.77138,6371008.77138 +"E200","Indonesian 1974",6378160.,6356774.504086 +"E201","Everest (Pakistan)",6377309.613,6356108.570542 +"E202","Clarke 1880 (IGN, France)",6378249.2,6356515.0 +"E203","IUGG 67",6378160.,6356774.516090714 +"E204","War Office",6378300.000,6356751.689189 +"E205","Clarke 1880 Arc",6378249.145,6356514.966 +"E206","Bessel Modified",6377492.018,6356173.5087 +"E207","Clarke 1858",6378293.639,6356617.98149 +"E208","Clarke 1880",6378249.138,6356514.95942 +"E209","Everest (1937 Adjustment)",6377276.345,6356075.413 +"E210","Everest (1962 Definition)",6377301.243,6356100.23 +"E211","Everest Modified",6377304.063,6356103.039 +"E212","Modified Everest 1969",6377295.664,6356094.668 +"E213","Everest (1967 Definition)",6377298.556,6356097.550 +"E214","Clarke 1880 (Benoit)",6378300.789000,6356566.435000 +"E215","Clarke 1880 (SGA)",6378249.2,6356515.0 +"E216","Everest (1975 Definition)",6377299.151,6356098.1451 +"E217","GEM 10C",6378137,6356752.31414 +"E218","OSU 86F",6378136.2,6356751.516672 +"E219","OSU 91A",6378136.3,6356751.6163367 +"E220","Sphere",6371000,6371000 +"E221","Struve 1860",6378298.300000,6356657.142670 +"E222","Walbeck",6376896,6355834.847 +"E223","Plessis 1817",6376523,6355862.933 +"E224","Xian 1980",6378140.0,6356755.288 +"E225","EMEP Sphere",6370000,6370000 +"E226","Everest (India and Nepal)",6377301.243,6356100.228368 +"E227","Everest (1830 Definition)", 6377299.365595, 6356098.359005,"EPSG:7042" +"E228","Danish 1876", 6377019.270000, 6355762.539100 +"E229","Bessel Namibia (GLM)", 6377483.865280, 6356165.383246 +"E230","PZ-90", 6378136.000000, 6356751.361746 +"E231","CGCS2000", 6378137.000000, 6356752.314140 +"E232","IAG 1975", 6378140.000000, 6356755.288158 +"E233","NWL 9D", 6378145.000000, 6356759.769489 +"E234","Hughes 1980", 6378273.000000, 6356889.449000 +"E235","Clarke 1880 (international foot)", 6378306.369600, 6356571.996000 +"E236","Clarke 1866 Michigan", 6378450.047549, 6356826.621488 +"E237","APL 4.5 (1968)", 6378144.000000, 6356757.338698 +"E238","Airy (War Office)", 6377542.178, 6356235.764 +"E239","Clarke 1858 (DIGEST)", 6378235.600, 6356560.140 +"E240","Clarke 1880 (Palestine)", 6378300.782, 6356566.427 +"E241","Clarke 1880 (Syria)", 6378247.842, 6356513.671 +"E242","Clarke 1880 (Fiji)", 6378301.000, 6356566.548 +"E243","Andrae", 6377104.430, 6355847.415 +"E244","Delambre 1810", 6376985.228, 6356323.664 +"E245","Delambre (Carte de France)", 6376985.000, 6356323.436 +"E246","Germaine (Djibouti)", 6378284.000, 6356589.156 +"E247","Hayford 1909", 6378388.000, 6356909.000 +"E248","Krayenhoff 1827", 6376950.400, 6356356.341 +"E249","Plessis Reconstituted", 6376523.994, 6355862.907 +"E250","GRS 1967", 6378160.000, 6356774.516 +"E251","Svanberg", 6376797.000, 6355837.971 +"E252","Walbeck 1819 (Planheft 1942)", 6376895.000, 6355834.000 +"E333","Bessel 1841 (Japan By Law)",6377397.155,6356078.963 +"E600","D-PAF (Orbits)",6378144.0,6356759.0 +"E601","Test Data Set 1",6378144.0,6356759.0 +"E602","Test Data Set 2",6377397.2,6356079.0 +"E700","MODIS (Sphere from WGS84)",6371007.181,6371007.181 +"E899","GRS 1967 Modified",6378160.,6356774.719195306 +"E900","Bessel 1841 (Namibia)",6377483.865,6356165.382966 +"E901","Everest (India 
1956)",6377301.243,6356100.228368 +"E902","Everest (W. Malaysia 1969)",6377295.664,6356094.667915 +"E903","Everest (E. Malaysia and Brunei)",6377298.556,6356097.550301 +"E904","Helmert 1906",6378200.,6356818.169628 +"E905","SGS 85",6378136.,6356751.301569 +"E906","WGS 60",6378165.,6356783.286959 +"E907","South American 1969",6378160.,6356774.719 +"E910","ATS77",6378135.0,6356750.304922 diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/plscenesconf.json b/.venv/lib/python3.12/site-packages/fiona/gdal_data/plscenesconf.json new file mode 100644 index 00000000..9258fbd6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/plscenesconf.json @@ -0,0 +1,1985 @@ +{ + "v1_data": { + "PSOrthoTile": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "black_fill", + "type": "double" + }, + { + "name": "clear_confidence_percent", + "type": "double" + }, + { + "name": "clear_percent", + "type": "double" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "cloud_percent", + "type": "double" + }, + { + "name": "columns", + "type": "int" + }, + { + "name": "epsg_code", + "type": "int" + }, + { + "name": "grid_cell", + "type": "string" + }, + { + "name": "ground_control", + "type": "boolean" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "heavy_haze_percent", + "type": "double" + }, + { + "name": "instrument", + "type": "string" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "light_haze_percent", + "type": "double" + }, + { + "name": "origin_x", + "type": "double" + }, + { + "name": "origin_y", + "type": "double" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "publishing_stage", + "type": "string" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "rows", + "type": "int" + }, + { + "name": "satellite_azimuth", + "type": "double" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "shadow_percent", + "type": "double" + }, + { + "name": "snow_ice_percent", + "type": "double" + }, + { + "name": "strip_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "usable_data", + "type": "double" + }, + { + "name": "view_angle", + "type": "double" + }, + { + "name": "visible_confidence_percent", + "type": "double" + }, + { + "name": "visible_percent", + "type": "double" + } + ], + "assets": [ + "analytic", + "analytic_5b", + "analytic_5b_xml", + "analytic_dn", + "analytic_dn_xml", + "analytic_sr", + "analytic_xml", + "udm", + "udm2", + "visual", + "visual_xml" + ] + }, + "REOrthoTile": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "black_fill", + "type": "double" + }, + { + "name": "catalog_id", + "type": "string" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "columns", + "type": "int" + }, + { + "name": "epsg_code", + "type": "int" + }, + { + "name": "grid_cell", + "type": "string" + }, + { + "name": "ground_control", + "type": "boolean" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "origin_x", + "type": "double" + }, + { + "name": 
"origin_y", + "type": "double" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "rows", + "type": "int" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "strip_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "usable_data", + "type": "double" + }, + { + "name": "view_angle", + "type": "double" + } + ], + "assets": [ + "analytic", + "analytic_sr", + "analytic_xml", + "udm", + "visual", + "visual_xml" + ] + }, + "PSScene": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "clear_confidence_percent", + "type": "double" + }, + { + "name": "clear_percent", + "type": "double" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "cloud_percent", + "type": "double" + }, + { + "name": "epsg_code", + "type": "int" + }, + { + "name": "ground_control", + "type": "boolean" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "heavy_haze_percent", + "type": "double" + }, + { + "name": "instrument", + "type": "string" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "light_haze_percent", + "type": "double" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "publishing_stage", + "type": "string" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "rows", + "type": "int" + }, + { + "name": "satellite_azimuth", + "type": "double" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "shadow_percent", + "type": "double" + }, + { + "name": "snow_ice_percent", + "type": "double" + }, + { + "name": "strip_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "usable_data", + "type": "double" + }, + { + "name": "view_angle", + "type": "double" + }, + { + "name": "visible_confidence_percent", + "type": "double" + }, + { + "name": "visible_percent", + "type": "double" + } + ], + "assets": [ + "ortho_analytic_4b", + "ortho_analytic_8b", + "ortho_analytic_8b_sr", + "ortho_analytic_8b_xml", + "ortho_analytic_4b_sr", + "ortho_analytic_4b_xml", + "basic_analytic_4b", + "basic_analytic_8b", + "basic_analytic_8b_xml", + "basic_analytic_4b_rpc", + "basic_analytic_4b_xml", + "basic_udm2", + "ortho_udm2", + "ortho_visual" + ] + }, + "PSScene3Band": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "columns", + "type": "int" + }, + { + "name": "epsg_code", + "type": "int" + }, + { + "name": "ground_control", + "type": "boolean" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "instrument", + "type": "string" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "origin_x", + "type": "double" + }, + { + "name": "origin_y", + "type": "double" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" 
+ }, + { + "name": "publishing_stage", + "type": "string" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "rows", + "type": "int" + }, + { + "name": "satellite_azimuth", + "type": "double" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "strip_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "usable_data", + "type": "double" + }, + { + "name": "view_angle", + "type": "double" + } + ], + "assets": [ + "analytic", + "analytic_dn", + "analytic_dn_xml", + "analytic_xml", + "basic_analytic", + "basic_analytic_dn", + "basic_analytic_dn_rpc", + "basic_analytic_dn_xml", + "basic_analytic_rpc", + "basic_analytic_xml", + "basic_udm", + "basic_udm2", + "udm", + "udm2", + "visual", + "visual_xml" + ] + }, + "PSScene4Band": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "clear_confidence_percent", + "type": "int" + }, + { + "name": "clear_percent", + "type": "int" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "cloud_percent", + "type": "int" + }, + { + "name": "columns", + "type": "int" + }, + { + "name": "epsg_code", + "type": "int" + }, + { + "name": "ground_control", + "type": "boolean" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "heavy_haze_percent", + "type": "int" + }, + { + "name": "instrument", + "type": "string" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "light_haze_percent", + "type": "int" + }, + { + "name": "origin_x", + "type": "double" + }, + { + "name": "origin_y", + "type": "double" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "publishing_stage", + "type": "string" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "rows", + "type": "int" + }, + { + "name": "satellite_azimuth", + "type": "double" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "shadow_percent", + "type": "int" + }, + { + "name": "snow_ice_percent", + "type": "int" + }, + { + "name": "strip_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "usable_data", + "type": "double" + }, + { + "name": "view_angle", + "type": "double" + }, + { + "name": "visible_confidence_percent", + "type": "int" + }, + { + "name": "visible_percent", + "type": "int" + } + ], + "assets": [ + "analytic", + "analytic_dn", + "analytic_dn_xml", + "analytic_xml", + "basic_analytic", + "basic_analytic_dn", + "basic_analytic_dn_nitf", + "basic_analytic_dn_rpc", + "basic_analytic_dn_rpc_nitf", + "basic_analytic_dn_xml", + "basic_analytic_dn_xml_nitf", + "basic_analytic_nitf", + "basic_analytic_rpc", + "basic_analytic_rpc_nitf", + "basic_analytic_xml", + "basic_analytic_xml_nitf", + "basic_udm", + "basic_udm2", + "udm", + "udm2" + ] + }, + "REScene": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "black_fill", + "type": "double" + }, + { + "name": "catalog_id", + "type": "string" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "columns", + 
"type": "int" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "rows", + "type": "int" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "strip_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "usable_data", + "type": "double" + }, + { + "name": "view_angle", + "type": "double" + } + ], + "assets": [ + "basic_analytic_b1", + "basic_analytic_b1_nitf", + "basic_analytic_b2", + "basic_analytic_b2_nitf", + "basic_analytic_b3", + "basic_analytic_b3_nitf", + "basic_analytic_b4", + "basic_analytic_b4_nitf", + "basic_analytic_b5", + "basic_analytic_b5_nitf", + "basic_analytic_rpc", + "basic_analytic_sci", + "basic_analytic_xml", + "basic_analytic_xml_nitf", + "basic_udm", + "browse" + ] + }, + "Landsat8L1G": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "collection", + "type": "int" + }, + { + "name": "columns", + "type": "int" + }, + { + "name": "data_type", + "type": "string" + }, + { + "name": "epsg_code", + "type": "int" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "instrument", + "type": "string" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "origin_x", + "type": "double" + }, + { + "name": "origin_y", + "type": "double" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "processed", + "type": "datetime" + }, + { + "name": "product_id", + "type": "string" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "rows", + "type": "int" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "usable_data", + "type": "double" + }, + { + "name": "view_angle", + "type": "double" + }, + { + "name": "wrs_path", + "type": "int" + }, + { + "name": "wrs_row", + "type": "int" + } + ], + "assets": [ + "analytic_b1", + "analytic_b2", + "analytic_b3", + "analytic_b4", + "analytic_b5", + "analytic_b6", + "analytic_b7", + "analytic_b8", + "analytic_b9", + "analytic_b10", + "analytic_b11", + "analytic_bqa", + "metadata_txt", + "visual" + ] + }, + "Sentinel2L1C": { + "fields": [ + { + "name": "abs_orbit_number", + "type": "int" + }, + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "black_fill", + "type": "double" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "columns", + "type": "int" + }, + { + "name": "data_type", + "type": "string" + }, + { + "name": "datatake_id", + "type": "string" + }, + { + "name": "epsg_code", + "type": "int" + }, + { + "name": "granule_id", + "type": "string" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "instrument", + "type": "string" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "mgrs_grid_id", + "type": "string" + }, + { + "name": "origin_x", + "type": "double" + }, + { + "name": "origin_y", + 
"type": "double" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "product_generation_time", + "type": "datetime" + }, + { + "name": "product_id", + "type": "string" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "rel_orbit_number", + "type": "int" + }, + { + "name": "rows", + "type": "int" + }, + { + "name": "s2_processor_version", + "type": "string" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "usable_data", + "type": "double" + }, + { + "name": "view_angle", + "type": "double" + } + ], + "assets": [ + "analytic_b1", + "analytic_b2", + "analytic_b3", + "analytic_b4", + "analytic_b5", + "analytic_b6", + "analytic_b7", + "analytic_b8", + "analytic_b8a", + "analytic_b9", + "analytic_b10", + "analytic_b11", + "analytic_b12", + "metadata_aux", + "visual" + ] + }, + "SkySatScene": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "camera_id", + "type": "string" + }, + { + "name": "clear_confidence_percent", + "type": "int" + }, + { + "name": "clear_percent", + "type": "int" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "cloud_percent", + "type": "int" + }, + { + "name": "ground_control", + "type": "boolean" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "heavy_haze_percent", + "type": "int" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "light_haze_percent", + "type": "int" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "publishing_stage", + "type": "string" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "satellite_azimuth", + "type": "double" + }, + { + "name": "shadow_percent", + "type": "int" + }, + { + "name": "snow_ice_percent", + "type": "int" + }, + { + "name": "strip_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "view_angle", + "type": "double" + }, + { + "name": "visible_confidence_percent", + "type": "int" + }, + { + "name": "visible_percent", + "type": "int" + } + ], + "assets": [ + "basic_analytic", + "basic_analytic_dn", + "basic_analytic_dn_rpc", + "basic_analytic_rpc", + "basic_analytic_udm", + "basic_analytic_udm2", + "basic_l1a_panchromatic_dn", + "basic_l1a_panchromatic_dn_rpc", + "basic_panchromatic", + "basic_panchromatic_dn", + "basic_panchromatic_dn_rpc", + "basic_panchromatic_rpc", + "basic_panchromatic_udm2", + "ortho_analytic", + "ortho_analytic_sr", + "ortho_analytic_dn", + "ortho_analytic_udm", + "ortho_analytic_udm2", + "ortho_panchromatic", + "ortho_panchromatic_dn", + "ortho_panchromatic_udm", + "ortho_panchromatic_udm2", + "ortho_pansharpened", + "ortho_pansharpened_udm", + "ortho_pansharpened_udm2", + "ortho_visual" + ] + }, + "SkySatCollect": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "clear_confidence_percent", + "type": "int" + }, + { + "name": "clear_percent", 
+ "type": "int" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "cloud_percent", + "type": "int" + }, + { + "name": "ground_control_ratio", + "type": "double" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "heavy_haze_percent", + "type": "int" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "light_haze_percent", + "type": "int" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "publishing_stage", + "type": "string" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "satellite_azimuth", + "type": "double" + }, + { + "name": "shadow_percent", + "type": "int" + }, + { + "name": "snow_ice_percent", + "type": "int" + }, + { + "name": "strip_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "view_angle", + "type": "double" + }, + { + "name": "visible_confidence_percent", + "type": "int" + }, + { + "name": "visible_percent", + "type": "int" + } + ], + "assets": [ + "basic_l1a_all_frames", + "ortho_analytic", + "ortho_analytic_sr", + "ortho_analytic_dn", + "ortho_analytic_udm", + "ortho_analytic_udm2", + "ortho_panchromatic", + "ortho_panchromatic_dn", + "ortho_panchromatic_udm", + "ortho_panchromatic_udm2", + "ortho_pansharpened", + "ortho_pansharpened_udm", + "ortho_pansharpened_udm2", + "ortho_visual" + ] + }, + "SkySatVideo": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "camera_id", + "type": "string" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "publishing_stage", + "type": "string" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "satellite_azimuth", + "type": "double" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "strip_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "view_angle", + "type": "double" + } + ], + "assets": [ + "video_file", + "video_frames", + "video_metadata" + ] + }, + "Sentinel1": { + "fields": [ + { + "name": "abs_orbit_number", + "type": "int" + }, + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "antenna_look_direction", + "type": "string" + }, + { + "name": "black_fill", + "type": "double" + }, + { + "name": "clear_percent", + "type": "double" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "cloud_percent", + "type": "double" + }, + { + "name": "columns", + "type": "int" + }, + { + "name": "datatake_id", + "type": "string" + }, + { + "name": "epsg_code", + "type": "int" + }, + { + "name": "granule_id", + "type": "string" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "incidence_far", + "type": "double" + }, + { + "name": "incidence_near", + "type": "double" + }, + { + "name": "instrument", + "type": "string" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "orbit_direction", + "type": "string" + }, + { + "name": "origin_x", + "type": "double" + }, + { 
+ "name": "origin_y", + "type": "double" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "polarisation_channels", + "type": "string" + }, + { + "name": "polarisation_mode", + "type": "string" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "rel_orbit_number", + "type": "int" + }, + { + "name": "rows", + "type": "int" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "sensor_mode", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "usable_data", + "type": "double" + }, + { + "name": "view_angle", + "type": "double" + } + ], + "assets": [ + "ortho_analytic_vh", + "ortho_analytic_vv" + ] + }, + "MOD09GA": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "black_fill", + "type": "double" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "data_type", + "type": "string" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "instrument", + "type": "string" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "product_generation_time", + "type": "datetime" + }, + { + "name": "product_version", + "type": "string" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "sgrid_tile_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "usable_data", + "type": "double" + }, + { + "name": "view_angle", + "type": "double" + } + ], + "assets": [ + "analytic_gflags", + "analytic_granule_pnt", + "analytic_iobs_res", + "analytic_num_observations_1km", + "analytic_num_observations_500m", + "analytic_obscov_500m", + "analytic_orbit_pnt", + "analytic_q_scan", + "analytic_qc_500m", + "analytic_range", + "analytic_sensor_azimuth", + "analytic_sensor_zenith", + "analytic_solar_azimuth", + "analytic_solar_zenith", + "analytic_state_1km", + "analytic_sur_refl_b01", + "analytic_sur_refl_b02", + "analytic_sur_refl_b03", + "analytic_sur_refl_b04", + "analytic_sur_refl_b05", + "analytic_sur_refl_b06", + "analytic_sur_refl_b07" + ] + }, + "MYD09GA": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "black_fill", + "type": "double" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "data_type", + "type": "string" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "instrument", + "type": "string" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "product_generation_time", + "type": "datetime" + }, + { + "name": "product_version", + "type": "string" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": 
"sgrid_tile_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "usable_data", + "type": "double" + }, + { + "name": "view_angle", + "type": "double" + } + ], + "assets": [ + "analytic_gflags", + "analytic_granule_pnt", + "analytic_iobs_res", + "analytic_num_observations_1km", + "analytic_num_observations_500m", + "analytic_obscov_500m", + "analytic_orbit_pnt", + "analytic_q_scan", + "analytic_qc_500m", + "analytic_range", + "analytic_sensor_azimuth", + "analytic_sensor_zenith", + "analytic_solar_azimuth", + "analytic_solar_zenith", + "analytic_state_1km", + "analytic_sur_refl_b01", + "analytic_sur_refl_b02", + "analytic_sur_refl_b03", + "analytic_sur_refl_b04", + "analytic_sur_refl_b05", + "analytic_sur_refl_b06", + "analytic_sur_refl_b07" + ] + }, + "MOD09GQ": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "black_fill", + "type": "double" + }, + { + "name": "clear_percent", + "type": "double" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "cloud_percent", + "type": "double" + }, + { + "name": "data_type", + "type": "string" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "instrument", + "type": "string" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "product_generation_time", + "type": "datetime" + }, + { + "name": "product_version", + "type": "string" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "sgrid_tile_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": "datetime" + }, + { + "name": "usable_data", + "type": "double" + }, + { + "name": "view_angle", + "type": "double" + } + ], + "assets": [ + "analytic_granule_pnt", + "analytic_iobs_res", + "analytic_num_observations", + "analytic_obscov", + "analytic_orbit_pnt", + "analytic_qc_250m", + "analytic_sur_refl_b01", + "analytic_sur_refl_b02" + ] + }, + "MYD09GQ": { + "fields": [ + { + "name": "acquired", + "type": "datetime" + }, + { + "name": "anomalous_pixels", + "type": "double" + }, + { + "name": "black_fill", + "type": "double" + }, + { + "name": "clear_percent", + "type": "double" + }, + { + "name": "cloud_cover", + "type": "double" + }, + { + "name": "cloud_percent", + "type": "double" + }, + { + "name": "data_type", + "type": "string" + }, + { + "name": "gsd", + "type": "double" + }, + { + "name": "instrument", + "type": "string" + }, + { + "name": "item_type", + "type": "string" + }, + { + "name": "pixel_resolution", + "type": "double" + }, + { + "name": "product_generation_time", + "type": "datetime" + }, + { + "name": "product_version", + "type": "string" + }, + { + "name": "provider", + "type": "string" + }, + { + "name": "published", + "type": "datetime" + }, + { + "name": "quality_category", + "type": "string" + }, + { + "name": "satellite_id", + "type": "string" + }, + { + "name": "sgrid_tile_id", + "type": "string" + }, + { + "name": "sun_azimuth", + "type": "double" + }, + { + "name": "sun_elevation", + "type": "double" + }, + { + "name": "updated", + "type": 
"datetime" + }, + { + "name": "usable_data", + "type": "double" + }, + { + "name": "view_angle", + "type": "double" + } + ], + "assets": [ + "analytic_granule_pnt", + "analytic_iobs_res", + "analytic_num_observations", + "analytic_obscov", + "analytic_orbit_pnt", + "analytic_qc_250m", + "analytic_sur_refl_b01", + "analytic_sur_refl_b02" + ] + } + } +} diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/ruian_vf_ob_v1.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ruian_vf_ob_v1.gfs new file mode 100644 index 00000000..516f2882 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ruian_vf_ob_v1.gfs @@ -0,0 +1,1455 @@ + + + + Obce + Data|Obce|Obec + + + DefinicniBod + Geometrie|DefinicniBod + MultiPoint + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice3 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + StatusKod + StatusKod + Integer + + + + OkresKod + Okres|Kod + Integer + + + + PouKod + Pou|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + VlajkaText + VlajkaText + String + 4000 + + + + VlajkaObrazek + VlajkaObrazek + Complex + + + + ZnakText + ZnakText + String + 4000 + + + + ZnakObrazek + ZnakObrazek + Complex + + + + CleneniSMRozsahKod + CleneniSMRozsahKod + Integer + + + + CleneniSMTypKod + CleneniSMTypKod + Integer + + + + NutsLau + NutsLau + String + 12 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + SpravniObvody + Data|SpravniObvody|SpravniObvod + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + SpravniMomcKod + SpravniMomcKod + Integer + + + + ObecKod + Obec|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Mop + Data|Mop|Mop + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + ObecKod + Obec|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Momc + Data|Momc|Momc + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + 
MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + MopKod + Mop|Kod + Integer + + + + ObecKod + Obec|Kod + Integer + + + + SpravniObvodKod + SpravniObvod|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + VlajkaText + VlajkaText + String + 4000 + + + + VlajkaObrazek + VlajkaObrazek + Complex + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + ZnakText + ZnakText + String + 4000 + + + + ZnakObrazek + ZnakObrazek + Complex + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + CastiObci + Data|CastiObci|CastObce + + + DefinicniBod + Geometrie|DefinicniBod + Point + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + ObecKod + Obec|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + KatastralniUzemi + Data|KatastralniUzemi|KatastralniUzemi + + + DefinicniBod + Geometrie|DefinicniBod + MultiPoint + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice2 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + ExistujeDigitalniMapa + ExistujeDigitalniMapa + String + 5 + + + + ObecKod + Obec|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + RizeniId + RizeniId + Integer + Integer64 + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Zsj + Data|Zsj|Zsj + + + DefinicniBod + Geometrie|DefinicniBod + MultiPoint + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + 
Nespravny + Nespravny + String + 5 + + + + KatastralniUzemiKod + KatastralniUzemi|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + Vymera + Vymera + Integer + Integer64 + + + + CharakterZsjKod + CharakterZsjKod + Integer + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Ulice + Data|Ulice|Ulice + + + DefinicniCara + Geometrie|DefinicniCara + MultiLineString + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + ObecKod + Obec|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + + Parcely + Data|Parcely|Parcela + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + Polygon + + + + OriginalniHraniceOmpv + Geometrie|OriginalniHraniceOmpv + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Id + Id + Integer + Integer64 + + + + Nespravny + Nespravny + String + 5 + + + + KmenoveCislo + KmenoveCislo + Integer + + + + PododdeleniCisla + PododdeleniCisla + Integer + + + + VymeraParcely + VymeraParcely + Integer + Integer64 + + + + ZpusobyVyuzitiPozemku + ZpusobyVyuzitiPozemku + Integer + + + + DruhCislovaniKod + DruhCislovaniKod + Integer + + + + DruhPozemkuKod + DruhPozemkuKod + Integer + + + + KatastralniUzemiKod + KatastralniUzemi|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + RizeniId + RizeniId + Integer + Integer64 + + + + BonitovanyDilVymera + BonitovaneDily|BonitovanyDil|Vymera + IntegerList + + + + BonitovanyDilBonitovanaJednotkaKod + BonitovaneDily|BonitovanyDil|BonitovanaJednotkaKod + IntegerList + + + + BonitovanyDilIdTranskace + BonitovaneDily|BonitovanyDil|IdTranskace + IntegerList + Integer64 + + + + BonitovanyDilRizeniId + BonitovaneDily|BonitovanyDil|RizeniId + IntegerList + Integer64 + + + + ZpusobOchranyKod + ZpusobyOchranyPozemku|ZpusobOchrany|Kod + IntegerList + + + + ZpusobOchranyTypOchranyKod + ZpusobyOchranyPozemku|ZpusobOchrany|TypOchranyKod + IntegerList + + + + ZpusobOchranyIdTransakce + ZpusobyOchranyPozemku|ZpusobOchrany|IdTransakce + IntegerList + + + + ZpusobOchranyRizeniId + ZpusobyOchranyPozemku|ZpusobOchrany|RizeniId + IntegerList + Integer64 + + + + + StavebniObjekty + Data|StavebniObjekty|StavebniObjekt + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + OriginalniHraniceOmpv + Geometrie|OriginalniHraniceOmpv + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nespravny + Nespravny + String + 5 + + + + CisloDomovni + CislaDomovni|CisloDomovni + IntegerList + + + + IdentifikacniParcelaId + IdentifikacniParcela|Id + Integer + Integer64 + + + + 
TypStavebnihoObjektuKod + TypStavebnihoObjektuKod + Integer + + + + ZpusobVyuzitiKod + ZpusobVyuzitiKod + Integer + + + + CastObceKod + CastObce|Kod + Integer + + + + MomcKod + Momc|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + IsknBudovaId + IsknBudovaId + Integer + Integer64 + + + + Dokonceni + Dokonceni + String + 19 + + + + DruhKonstrukceKod + DruhKonstrukceKod + Integer + + + + ObestavenyProstor + ObestavenyProstor + Integer + + + + PocetBytu + PocetBytu + Integer + + + + PocetPodlazi + PocetPodlazi + Integer + + + + PodlahovaPlocha + PodlahovaPlocha + Integer + + + + PripojeniKanalizaceKod + PripojeniKanalizaceKod + Integer + + + + PripojeniPlynKod + PripojeniPlynKod + Integer + + + + PripojeniVodovodKod + PripojeniVodovodKod + Integer + + + + VybaveniVytahemKod + VybaveniVytahemKod + Integer + + + + ZastavenaPlocha + ZastavenaPlocha + Integer + + + + ZpusobVytapeniKod + ZpusobVytapeniKod + Integer + + + + ZpusobOchranyKod + ZpusobyOchrany|ZpusobOchrany|Kod + IntegerList + + + + ZpusobOchranyTypOchranyKod + ZpusobyOchrany|ZpusobOchrany|TypOchranyKod + IntegerList + + + + ZpusobOchranyIdTransakce + ZpusobyOchrany|ZpusobOchrany|IdTransakce + IntegerList + + + + ZpusobOchranyRizeniId + ZpusobyOchrany|ZpusobOchrany|RizeniId + IntegerList + Integer64 + + + + DetailniTEAKod + DetailniTEA|DetailniTEA|Kod + IntegerList + + + + DetailniTEAPlatiOd + DetailniTEA|DetailniTEA|PlatiOd + StringList + 19 + + + + DetailniTEAGlobalniIdNavrhuZmeny + DetailniTEA|DetailniTEA|GlobalniIdNavrhuZmeny + IntegerList + Integer64 + + + + DetailniTEADruhKonstrukceKod + DetailniTEA|DetailniTEA|DruhKonstrukceKod + IntegerList + + + + DetailniTEAPocetBytu + DetailniTEA|DetailniTEA|PocetBytu + IntegerList + + + + DetailniTEAPocetPodlazi + DetailniTEA|DetailniTEA|PocetPodlazi + IntegerList + + + + DetailniTEAPripojeniKanalizaceKod + DetailniTEA|DetailniTEA|PripojeniKanalizaceKod + IntegerList + + + + DetailniTEAPripojeniPlynKod + DetailniTEA|DetailniTEA|PripojeniPlynKod + IntegerList + + + + DetailniTEAPripojeniVodovodKod + DetailniTEA|DetailniTEA|PripojeniVodovodKod + IntegerList + + + + DetailniTEAZpusobVytapeniKod + DetailniTEA|DetailniTEA|ZpusobVytapeniKod + IntegerList + + + + DetailniTEAAdresniMistoKod + DetailniTEA|DetailniTEA|AdresniMistoKod|Kod + IntegerList + + + + + AdresniMista + Data|AdresniMista|AdresniMisto + + + AdresniBod + Geometrie|DefinicniBod|AdresniBod + Point + + + + Zachranka + Geometrie|DefinicniBod|Zachranka + Point + + + + Hasici + Geometrie|DefinicniBod|Hasici + Point + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nespravny + Nespravny + String + 5 + + + + CisloDomovni + CisloDomovni + Integer + + + + CisloOrientacni + CisloOrientacni + Integer + + + + CisloOrientacniPismeno + CisloOrientacniPismeno + String + 1 + + + + Psc + Psc + Integer + + + + StavebniObjektKod + StavebniObjekt|Kod + Integer + + + + UliceKod + Ulice|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + IsknBudovaId + IsknBudovaId + Integer + Integer64 + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/ruian_vf_st_uvoh_v1.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ruian_vf_st_uvoh_v1.gfs new file mode 100644 
index 00000000..c8b532fe --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ruian_vf_st_uvoh_v1.gfs @@ -0,0 +1,86 @@ + + + + VolebniOkrsek + Data|VolebniOkrsek|VO + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Cislo + Cislo + Integer + + + + Nespravny + Nespravny + String + 5 + + + + ObecKod + Obec|Kod + Integer + + + + MomcKod + Momc|Kod + Integer + + + + Poznamka + Poznamka + String + 60 + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/ruian_vf_st_v1.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ruian_vf_st_v1.gfs new file mode 100644 index 00000000..e585f1bd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ruian_vf_st_v1.gfs @@ -0,0 +1,1489 @@ + + + + Staty + Data|Staty|Stat + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice5 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + NutsLau + NutsLau + String + 2 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + RegionySoudrznosti + Data|RegionySoudrznosti|RegionSoudrznosti + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice5 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + StatKod + Stat|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + NutsLau + NutsLau + String + 4 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Kraje + Data|Kraje|Kraj + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice5 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + StatKod + Stat|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + NutsLau + NutsLau + String + 4 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Vusc + Data|Vusc|Vusc + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice5 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + 6 + + + + Nazev + Nazev + String + 32 + + + + 
Nespravny + Nespravny + String + 5 + + + + RegionSoudrznostiKod + RegionSoudrznosti|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + NutsLau + NutsLau + String + 5 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Okresy + Data|Okresy|Okres + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice4 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + KrajKod + Kraj|Kod + Integer + + + + VuscKod + Vusc|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + NutsLau + NutsLau + String + 6 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Orp + Data|Orp|Orp + + + DefinicniBod + Geometrie|DefinicniBod + MultiPoint + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice4 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + SpravniObecKod + SpravniObecKod + Integer + + + + VuscKod + Vusc|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Pou + Data|Pou|Pou + + + DefinicniBod + Geometrie|DefinicniBod + MultiPoint + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice4 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + SpravniObecKod + SpravniObecKod + Integer + 6 + + + + OrpKod + Orp|Kod + Integer + 6 + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Obce + Data|Obce|Obec + + + DefinicniBod + Geometrie|DefinicniBod + MultiPoint + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice3 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + StatusKod + StatusKod + Integer + + + + OkresKod + Okres|Kod + Integer + + + + PouKod + Pou|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + 
MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + VlajkaText + VlajkaText + String + 4000 + + + + VlajkaObrazek + VlajkaObrazek + Complex + + + + ZnakText + ZnakText + String + 4000 + + + + ZnakObrazek + ZnakObrazek + Complex + + + + CleneniSMRozsahKod + CleneniSMRozsahKod + Integer + + + + CleneniSMTypKod + CleneniSMTypKod + Integer + + + + NutsLau + NutsLau + String + 12 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + SpravniObvody + Data|SpravniObvody|SpravniObvod + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + SpravniMomcKod + SpravniMomcKod + Integer + + + + ObecKod + Obec|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Mop + Data|Mop|Mop + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + ObecKod + Obec|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Momc + Data|Momc|Momc + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + MopKod + Mop|Kod + Integer + + + + ObecKod + Obec|Kod + Integer + + + + SpravniObvodKod + SpravniObvod|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + VlajkaText + VlajkaText + String + 4000 + + + + VlajkaObrazek + VlajkaObrazek + Complex + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + ZnakText + ZnakText + String + 4000 + + + + ZnakObrazek + ZnakObrazek + Complex + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + CastiObci + Data|CastiObci|CastObce + + + DefinicniBod + Geometrie|DefinicniBod + Point + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + ObecKod + Obec|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + 
MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + KatastralniUzemi + Data|KatastralniUzemi|KatastralniUzemi + + + DefinicniBod + Geometrie|DefinicniBod + MultiPoint + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice2 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + ExistujeDigitalniMapa + ExistujeDigitalniMapa + String + 5 + + + + ObecKod + Obec|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + RizeniId + RizeniId + Integer + Integer64 + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Zsj + Data|Zsj|Zsj + + + DefinicniBod + Geometrie|DefinicniBod + MultiPoint + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + KatastralniUzemiKod + KatastralniUzemi|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + Vymera + Vymera + Integer + Integer64 + + + + CharakterZsjKod + CharakterZsjKod + Integer + + + + DatumVzniku + DatumVzniku + String + 19 + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/ruian_vf_v1.gfs b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ruian_vf_v1.gfs new file mode 100644 index 00000000..9d62bcf4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/ruian_vf_v1.gfs @@ -0,0 +1,2126 @@ + + + + Staty + Data|Staty|Stat + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice5 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 
19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + NutsLau + NutsLau + String + 2 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + RegionySoudrznosti + Data|RegionySoudrznosti|RegionSoudrznosti + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice5 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + StatKod + Stat|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + NutsLau + NutsLau + String + 4 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Kraje + Data|Kraje|Kraj + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice5 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + StatKod + Stat|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + NutsLau + NutsLau + String + 4 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Vusc + Data|Vusc|Vusc + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice5 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + 6 + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + RegionSoudrznostiKod + RegionSoudrznosti|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + NutsLau + NutsLau + String + 5 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Okresy + Data|Okresy|Okres + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice4 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + KrajKod + Kraj|Kod + Integer + + + + VuscKod + Vusc|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + NutsLau + NutsLau + String + 6 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Orp + Data|Orp|Orp + + + DefinicniBod + Geometrie|DefinicniBod + MultiPoint + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice4 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + 
SpravniObecKod + SpravniObecKod + Integer + + + + VuscKod + Vusc|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Pou + Data|Pou|Pou + + + DefinicniBod + Geometrie|DefinicniBod + MultiPoint + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice4 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + SpravniObecKod + SpravniObecKod + Integer + 6 + + + + OrpKod + Orp|Kod + Integer + 6 + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Obce + Data|Obce|Obec + + + DefinicniBod + Geometrie|DefinicniBod + MultiPoint + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice3 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + StatusKod + StatusKod + Integer + + + + OkresKod + Okres|Kod + Integer + + + + PouKod + Pou|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + VlajkaText + VlajkaText + String + 4000 + + + + VlajkaObrazek + VlajkaObrazek + Complex + + + + ZnakText + ZnakText + String + 4000 + + + + ZnakObrazek + ZnakObrazek + Complex + + + + CleneniSMRozsahKod + CleneniSMRozsahKod + Integer + + + + CleneniSMTypKod + CleneniSMTypKod + Integer + + + + NutsLau + NutsLau + String + 12 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + SpravniObvody + Data|SpravniObvody|SpravniObvod + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + SpravniMomcKod + SpravniMomcKod + Integer + + + + ObecKod + Obec|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Mop + Data|Mop|Mop + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 32 + + + + Nespravny + Nespravny + String + 5 + + + + ObecKod + Obec|Kod + Integer + + + + 
PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Momc + Data|Momc|Momc + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + MopKod + Mop|Kod + Integer + + + + ObecKod + Obec|Kod + Integer + + + + SpravniObvodKod + SpravniObvod|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + VlajkaText + VlajkaText + String + 4000 + + + + VlajkaObrazek + VlajkaObrazek + Complex + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + ZnakText + ZnakText + String + 4000 + + + + ZnakObrazek + ZnakObrazek + Complex + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + CastiObci + Data|CastiObci|CastObce + + + DefinicniBod + Geometrie|DefinicniBod + Point + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + ObecKod + Obec|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + KatastralniUzemi + Data|KatastralniUzemi|KatastralniUzemi + + + DefinicniBod + Geometrie|DefinicniBod + MultiPoint + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + GeneralizovaneHranice + Geometrie|GeneralizovaneHranice2 + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + ExistujeDigitalniMapa + ExistujeDigitalniMapa + String + 5 + + + + ObecKod + Obec|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + RizeniId + RizeniId + Integer + Integer64 + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + 
String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Zsj + Data|Zsj|Zsj + + + DefinicniBod + Geometrie|DefinicniBod + MultiPoint + + + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + KatastralniUzemiKod + KatastralniUzemi|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + MluvnickeCharakteristikyPad2 + MluvnickeCharakteristiky|Pad2 + String + 48 + + + MluvnickeCharakteristikyPad3 + MluvnickeCharakteristiky|Pad3 + String + 48 + + + MluvnickeCharakteristikyPad4 + MluvnickeCharakteristiky|Pad4 + String + 48 + + + MluvnickeCharakteristikyPad6 + MluvnickeCharakteristiky|Pad6 + String + 48 + + + MluvnickeCharakteristikyPad7 + MluvnickeCharakteristiky|Pad7 + String + 48 + + + + Vymera + Vymera + Integer + Integer64 + + + + CharakterZsjKod + CharakterZsjKod + Integer + + + + DatumVzniku + DatumVzniku + String + 19 + + + + + Ulice + Data|Ulice|Ulice + + + DefinicniCara + Geometrie|DefinicniCara + MultiLineString + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nazev + Nazev + String + 48 + + + + Nespravny + Nespravny + String + 5 + + + + ObecKod + Obec|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + + Parcely + Data|Parcely|Parcela + + + DefinicniBod + Geometrie|DefinicniBod + Point + + + + OriginalniHranice + Geometrie|OriginalniHranice + Polygon + + + + OriginalniHraniceOmpv + Geometrie|OriginalniHraniceOmpv + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Id + Id + Integer + Integer64 + + + + Nespravny + Nespravny + String + 5 + + + + KmenoveCislo + KmenoveCislo + Integer + + + + PododdeleniCisla + PododdeleniCisla + Integer + + + + VymeraParcely + VymeraParcely + Integer + Integer64 + + + + ZpusobyVyuzitiPozemku + ZpusobyVyuzitiPozemku + Integer + + + + DruhCislovaniKod + DruhCislovaniKod + Integer + + + + DruhPozemkuKod + DruhPozemkuKod + Integer + + + + KatastralniUzemiKod + KatastralniUzemi|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + RizeniId + RizeniId + Integer + Integer64 + + + + BonitovanyDilVymera + BonitovaneDily|BonitovanyDil|Vymera + IntegerList + + + + BonitovanyDilBonitovanaJednotkaKod + BonitovaneDily|BonitovanyDil|BonitovanaJednotkaKod + IntegerList + + + + BonitovanyDilIdTranskace + BonitovaneDily|BonitovanyDil|IdTranskace + IntegerList + Integer64 + + + + BonitovanyDilRizeniId + BonitovaneDily|BonitovanyDil|RizeniId + IntegerList + Integer64 + + + + ZpusobOchranyKod + ZpusobyOchranyPozemku|ZpusobOchrany|Kod + IntegerList + + + + ZpusobOchranyTypOchranyKod + ZpusobyOchranyPozemku|ZpusobOchrany|TypOchranyKod + IntegerList + + + + ZpusobOchranyIdTransakce + ZpusobyOchranyPozemku|ZpusobOchrany|IdTransakce + IntegerList + + + + ZpusobOchranyRizeniId + ZpusobyOchranyPozemku|ZpusobOchrany|RizeniId + IntegerList + Integer64 + + + + + StavebniObjekty + Data|StavebniObjekty|StavebniObjekt + + + DefinicniBod + Geometrie|DefinicniBod + Point + 
+ + + OriginalniHranice + Geometrie|OriginalniHranice + MultiPolygon + + + + OriginalniHraniceOmpv + Geometrie|OriginalniHraniceOmpv + MultiPolygon + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nespravny + Nespravny + String + 5 + + + + CisloDomovni + CislaDomovni|CisloDomovni + IntegerList + + + + IdentifikacniParcelaId + IdentifikacniParcela|Id + Integer + Integer64 + + + + TypStavebnihoObjektuKod + TypStavebnihoObjektuKod + Integer + + + + ZpusobVyuzitiKod + ZpusobVyuzitiKod + Integer + + + + CastObceKod + CastObce|Kod + Integer + + + + MomcKod + Momc|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo + String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + IsknBudovaId + IsknBudovaId + Integer + Integer64 + + + + Dokonceni + Dokonceni + String + 19 + + + + DruhKonstrukceKod + DruhKonstrukceKod + Integer + + + + ObestavenyProstor + ObestavenyProstor + Integer + + + + PocetBytu + PocetBytu + Integer + + + + PocetPodlazi + PocetPodlazi + Integer + + + + PodlahovaPlocha + PodlahovaPlocha + Integer + + + + PripojeniKanalizaceKod + PripojeniKanalizaceKod + Integer + + + + PripojeniPlynKod + PripojeniPlynKod + Integer + + + + PripojeniVodovodKod + PripojeniVodovodKod + Integer + + + + VybaveniVytahemKod + VybaveniVytahemKod + Integer + + + + ZastavenaPlocha + ZastavenaPlocha + Integer + + + + ZpusobVytapeniKod + ZpusobVytapeniKod + Integer + + + + ZpusobOchranyKod + ZpusobyOchrany|ZpusobOchrany|Kod + IntegerList + + + + ZpusobOchranyTypOchranyKod + ZpusobyOchrany|ZpusobOchrany|TypOchranyKod + IntegerList + + + + ZpusobOchranyIdTransakce + ZpusobyOchrany|ZpusobOchrany|IdTransakce + IntegerList + + + + ZpusobOchranyRizeniId + ZpusobyOchrany|ZpusobOchrany|RizeniId + IntegerList + Integer64 + + + + DetailniTEAKod + DetailniTEA|DetailniTEA|Kod + IntegerList + + + + DetailniTEAPlatiOd + DetailniTEA|DetailniTEA|PlatiOd + StringList + 19 + + + + DetailniTEAGlobalniIdNavrhuZmeny + DetailniTEA|DetailniTEA|GlobalniIdNavrhuZmeny + IntegerList + Integer64 + + + + DetailniTEADruhKonstrukceKod + DetailniTEA|DetailniTEA|DruhKonstrukceKod + IntegerList + + + + DetailniTEAPocetBytu + DetailniTEA|DetailniTEA|PocetBytu + IntegerList + + + + DetailniTEAPocetPodlazi + DetailniTEA|DetailniTEA|PocetPodlazi + IntegerList + + + + DetailniTEAPripojeniKanalizaceKod + DetailniTEA|DetailniTEA|PripojeniKanalizaceKod + IntegerList + + + + DetailniTEAPripojeniPlynKod + DetailniTEA|DetailniTEA|PripojeniPlynKod + IntegerList + + + + DetailniTEAPripojeniVodovodKod + DetailniTEA|DetailniTEA|PripojeniVodovodKod + IntegerList + + + + DetailniTEAZpusobVytapeniKod + DetailniTEA|DetailniTEA|ZpusobVytapeniKod + IntegerList + + + + DetailniTEAAdresniMistoKod + DetailniTEA|DetailniTEA|AdresniMistoKod|Kod + IntegerList + + + + + AdresniMista + Data|AdresniMista|AdresniMisto + + + AdresniBod + Geometrie|DefinicniBod|AdresniBod + Point + + + + Zachranka + Geometrie|DefinicniBod|Zachranka + Point + + + + Hasici + Geometrie|DefinicniBod|Hasici + Point + + urn:ogc:def:crs:EPSG::5514 + + + Kod + Kod + Integer + + + + Nespravny + Nespravny + String + 5 + + + + CisloDomovni + CisloDomovni + Integer + + + + CisloOrientacni + CisloOrientacni + Integer + + + + CisloOrientacniPismeno + CisloOrientacniPismeno + String + 1 + + + + Psc + Psc + Integer + + + + StavebniObjektKod + StavebniObjekt|Kod + Integer + + + + UliceKod + Ulice|Kod + Integer + + + + PlatiOd + PlatiOd + String + 19 + + + + PlatiDo + PlatiDo 
+ String + 19 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + + GlobalniIdNavrhuZmeny + GlobalniIdNavrhuZmeny + Integer + Integer64 + + + + IsknBudovaId + IsknBudovaId + Integer + Integer64 + + + + + ZaniklePrvky + Data|ZaniklePrvky|ZaniklyPrvek + + + TypPrvkuKod + TypPrvkuKod + String + 2 + + + + PrvekId + PrvekId + Integer + Integer64 + + + + IdTransakce + IdTransakce + Integer + Integer64 + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/s57agencies.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/s57agencies.csv new file mode 100644 index 00000000..b60016d3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/s57agencies.csv @@ -0,0 +1,249 @@ +#AgencyID,Token,Code,AgencyName +1,AE,530,Ministry of Communications, United Arab Emirates +2,AR,1,Servicio de Hidrografia Naval, Argentina +3,AU,10,Hydrographic Service, Royal Australian Navy, Australia +4,BH,20,Hydrographic Section, Survey Directorate, Bahrain +5,BE,30,Antwerpse Zeediensten Hydrografie, Belgium +6,B1,31,Dienst der Kust Hydrografie, Belgium +7,BR,40,Diretoria de Hidrografia e Navegacao, Brazil +8,CA,50,Canadian Hydrographic Service, Canada +9,CL,60,Servicio Hidrogr fico y Oceanogr fico de la Armada, Chile +10,CN,70,Maritime Safety Administration, China +11,C1,71,Navigation Guarantee Department, China +12,C2,72,Hong Kong Hydrographic Service +13,HR,80,Drzavni Hidrografski Institut, Croatia +14,CU,90,Instituto Cubano de Hidrografia, Cuba +15,CY,100,Department of Lands & Surveys, Hydrographic Unit, Cyprus +16,DK,110,Kort-Og Matrikelstyrelsen, Denmark +17,D1,111,Farvandsvaesenet, Denmark +18,DO,120,Departamento Hidrografico, Marina de Guerra, Dominican Rep. +19,DZ,610,Service Hydrographique des forces navales, Algeria +20,EC,130,Instituto Oceanografico de la Armada, Ecuador +21,EG,140,Shobat al Misaha al Baharia, Egypt +22,FJ,150,Fiji Hydrographic Service, Fiji +23,FI,160,Merenkulkuhallitus, Merikarttaosasto, Finland +24,FR,170,Service Hydrographique et Oceanographique de la Marine, France +25,DE,180,Bundesamt fuer Seeschiffahrt und Hydrographie, Germany +26,GR,190,Hellenic Navy Hydrographic Service, Greece +27,GT,200,Departamento de Sistemas Hidraulicos, Guatemala +28,G1,201,Instituto Geogr fico Militar, Guatemala +29,IS,210,Sjomaelingar Islands, Iceland +30,IN,220,Naval Hydrographic Office, India +31,ID,230,Dinas Hidro-Oseanografi (Dishidros), Indonesia +32,IR,240,Ports and Shipping Organization, Iran +33,IT,250,Istituto Idrografico della Marina, Italy +34,JP,260,Japan Hydrographic Department, Japan +35,KR,270,Hydrographic Department of the DPRK, Korea (DPR of) +36,KP,280,Office of Hydrographic Affairs, Korea (Rep. 
of) +37,MY,290,Royal Malaysian Navy Hydrographic Department, Malaysia +38,MC,300,Departement des Travaux Publics et des Affaires Sociales, Monaco +39,NL,310,Dienst der Hydrografie Koninklijke Marine, Netherlands +40,NZ,320,Royal New Zealand Navy Hydrographic Office, New Zealand +41,NG,330,Nigerian Navy Hydrographic Office, Nigeria +42,NO,340,Norwegian Hydrographic Service, Norway +43,N1,341,Electronic Chart Centre, Norway +44,OM,350,National Hydrographic Organization, Oman +45,PK,360,Pakistan Hydrographic Department, Pakistan +46,PG,370,Department of Transport, Maritime Division, Papua New Guinea +47,PE,380,Direccion de Hidrografia y Navegacion de la Marina, Peru +48,PH,390,Coast & Geodetic Survey Dept., Philippines +49,PL,400,Biuro Hydrograficzne Marynarki Wojennej, Poland +50,PT,410,Instituto Hidrografico, Portugal +51,RU,420,Head Department of Navigation & Oceanography, Russian Federation +52,SG,430,Hydrographic Department, Singapore +53,ZA,440,South African Navy Hydrographic Office, South Afrika (Rep. of) +54,ES,450,Instituto Hidrogr fico de la Marina, Spain +55,LK,460,National Aquatic Resources Agency, Sri Lanka +56,SR,470,Ministry of Transports, Maritime Affairs, Suriname +57,SE,480,Sjoekarteavdelningen, Sweden +58,SY,490,General Directorate of Ports, Syria +59,TH,500,Krom Utoksastr, Thailand +60,TT,510,Trinidad & Tobago Hydrographic Unit, Trinidad & Tobago +61,TR,520,Seyir, Hidrografi ve Osinografi Dairesi Baskanligi, Turkey +62,GB,540,Hydrographic Office, UK +63,US,550,Office of Coast Survey, USA +64,U1,551,National Imagery and Mapping Agency, USA +65,U2,552,Naval Oceanography Command, USA +66,U3,553,US Army Corps of Engineers +67,UY,560,Servicio de Oceanografia, Hidrografia y Meteorologia de la Armada, Uruguay +68,VE,570,Direccion de Hidrografia y Navegacion, Venezuela +69,YU,580,Hydrographic Institute of the Navy, Yugoslavia +70,ZR,590,Direction de la Marine et des Voies Navigables, Zaire +71,AL,600,Sherbimi Hidrografik Shqiptar, Albania +72,AO,620,Not known, Angola +73,AG,630,Department of Marine Services and Merchant Shipping, Antigua and Barbuda +74,AW,640,Not known, Aruba +75,BS,650,Department of Lands and Surveys, Bahamas +76,BD,660,Department of Hydrography, Bangladesh +77,BB,670,Barbados Port Authority, Barbados +78,BZ,680,Not known, Belize +79,BJ,690,Direction Generale du Port Autonome de Cotonou, Benin +80,BO,700,Servicio de Hidrografia Naval, Bolivia +81,BN,710,Department of Marine, Brunei Darussalam +82,BG,720,Hidrografska Sluzhba Pri Ministerstvo Na Otbranata, Bulgaria +83,KH,730,Service de l'Hydraulique et des Voies Navigables, Cambodia +84,CM,740,Office National des Ports du Cameroun, Cameroon +85,CV,750,Direccao Geral da Marinha Mercante, Cape Verde +86,CO,760,Ministerio de Defensa Nacional, Armada Nacional, Direccion General Maritima, Colombia +87,KM,770,Not known, Comoros +88,CG,780,Direction du Port de Pointe-Noire, Congo +89,CK,790,Department of Trade Labour and Transport, Cook Islands +90,CR,800,Ministerio de Obras Publicas y Transportes, Costa Rica +91,CI,810,Direction G_n_rale du Port Autonome d'Abidjan, Cote-d'Ivoire +92,DJ,820,Ministere du Port et des Affaires Maritimes, Djibuti +93,DM,830,Not known, Dominica +94,SV,840,Instituto Geografico Nacional, El Salvador +95,GQ,850,Not known, Equatorial Guinea +96,ER,860,Port and Maritime Transport Authority, Eritrea +97,EE,870,Tuletorni - Huedrograafiatalitus, Estonia +98,ET,880,Ministry of Transport and Communications, Ethiopia +99,GA,890,Service de la Signalisation Maritime, Gabon +100,GM,900,Gambia Ports 
Authority, Gambia +101,GH,910,Ghana Ports and Harbours Authority, Ghana +102,GD,920,Grenada Ports Authority, Grenada +103,GN,930,Minist_re des Transports et Travaux Publics, Guinea +104,GW,940,Servicos da Marinha, Guinea-Bissau +105,GY,950,Transport and Harbours Department, Guyana +106,HT,960,Service Maritime et de Navigation d'Haiti, Haiti +107,HN,970,Departamento de Geologia e Hidrografia, Honduras +108,IQ,980,Marine Department, Iraq +109,IE,990,Department of the Marine, Ireland +110,IL,1000,Administration of Shipping and Ports, Israel +111,JM,1010,Harbour Master's Department, Jamaica +112,JO,1020,The Ports Corporation, Jordan +113,KE,1030,Survey of Kenya, Kenya +114,KI,1040,Ministry of Transport and Communications, Kiribati +115,KW,1050,Ministry of Communications, Kuwait +116,LV,1060,Latvijas Hidrografijas Dienests, Latvia +117,LB,1070,Service du Transport Maritime, Lebanon +118,LR,1080,Ministry of Lands, Mines and Energy, Liberia +119,LY,1090,Not known, Libyan Arab Jamahiriya +120,LT,1100,Klaipeda State Seaport Authority, Lithuania +121,MG,1110,Foiben-Taosarintanin'i Madagasikara, Madagascar +122,MW,1120,Hydrographic Survey Unit, Malawi +123,MV,1130,Department of Information and Broadcasting, Maldives +124,MT,1140,Malta Maritime Authority Ports Directorate, Malta +125,MH,1150,Ministry of Resources and Development, Marshall Islands +126,MR,1160,Ministere de la Defense Nationale, Mauritania +127,MU,1170,Ministry of Housing, Lands and Town and Country Planning, Mauritius +128,MX,1180,Direccion General de Oceanografia Naval, Mexiko +129,FM,1190,Not known, Micronesia (Federated State of) +130,MA,1200,Service Hydrographique et Oceanographique de la Marine Royale, Morocco +131,MZ,1210,Instituto Nacional de Hidrografia e Navegacao, Mozambique +132,MM,1220,Naval Hydrographic Office, Myanmar +133,NA,1230,Not known, Namibia +134,NR,1240,Nauru Phosphate Corporation, Nauru +135,NI,1250,Secretaria de Planificacion y Presupuesto de la Presidencia de la Republica, Instituto Nicaraguense de Estudios Territoriales, Nicaragua +136,PW,1260,Bureau of Domestic Affairs, Palau +137,PA,1270,Instituto Geografico Nacional, Panama +138,PY,1280,Direccion de Hidrografia y Navegacion, Paraguay +139,QA,1290,Ministry of Municipal Affairs and Agriculture, Qatar +140,RO,1300,Directia Hidrografica Maritima, Romania +141,KN,1310,St. 
Christopher Air and Sea Ports Authority, Hydrographic Service, Saint Kitts and Nevis +142,LC,1320,Ministry of Planning, Personnel Establishment and Training, Saint Lucia +143,VC,1330,Ministry of Communications and Works, Saint Vincent and Grenadines +144,WS,1340,Ministry of Transport, Marine and Shipping Division, Samoa +145,ST,1350,Not known, Sao Tombe and Principe +146,SA,1360,Military Survey Department, Hydrographic Section, Saudi Arabia +147,SN,1370,Ministere de l'Equipement, des Transports et de la Mer, Senegal +148,SC,1380,Hydrographic and Topographic Brigade, Seychelles +149,SL,1390,Department of Transport and Communications, Sierra Leone +150,SI,1400,Not known, Slovenia +151,SB,1410,Solomon Islands Hydrographic Unit, Solomon Islands +152,SO,1420,Somali Hydrographic Office, Marine Department, Ministry of Marine Transports and Ports, Somalia +153,SD,1430,Survey Department, Sudan +154,TZ,1440,Tanzania Harbours Authority, Tanzania +155,TG,1450,University of Benin, Togo +156,TK,1460,Not known, Tokelau +157,TN,1470,Service Hydrographique et Oceanographique, Armee de Mer, Ministere de la Defense Nationale, Tunisia +158,TV,1480,Ministry of Labour, Works and Communications, Tuvalu +159,UA,1490,National Agency of Marine Research and Technology, Ukraine +160,VU,1500,Vanuatu Hydrographic Unit, Vanuata +161,VN,1510,Not known, Vietnam +162,YE,1520,Ministry of Communications, Yemen Ports and Shipping Corporation, Yemen Ports Authority, Yemen +163,QM,1600,Antarctic Treaty Consultative Committee +164,QN,1610,International Radio Consultative Committee +165,QO,1620,Comite International Radio-Maritime +166,QP,1630,IHO Data Centre for Digital Bathymetry +167,QQ,1640,Digital Geographic Information Working Group +168,QR,1650,European Communities Commission +169,QS,1660,European Harbour Masters Association +170,QT,1670,Food and Agriculture Organization +171,QU,1680,Federation Internationale des Geometres +172,QV,1690,International Atomic Energy Agency +173,QW,1700,International Association of Geodesy +174,QX,1710,International Association of Institutes of Navigation +175,QY,1720,International Association of Lighthouse Authorities +176,QZ,1730,International Association of Ports and Harbours +177,XA,1740,International Cartographic Association +178,XB,1750,International Cable Protection Committee +179,XC,1760,International Chamber of Shipping +180,XD,1770,International Commission for the Scientific Exploration of the Mediterranean +181,XE,1780,International Council of Scientific Unions +182,XF,1790,International Electrotechnical Commission +183,XG,1800,International Geographical Union +184,AA,1810,International Hydrographic Organization +185,XH,1820,International Maritime Academy +186,XI,1830,International Maritime Organization +187,XJ,1840,International Maritime Satellite Organization +188,XK,1850,Intergovernmental Oceanographic Commission +189,XL,1860,International Organization for Standardization +190,XM,1870,International Society for Photogrammetry and Remote Sensing +191,XN,1880,International Telecommunication Union +192,XO,1890,International Union of Geodesy and Geophysics +193,XP,1900,International Union of Surveying and Mapping +194,XQ,1910,Oil Companies International Marine Forum +195,XR,1920,Pan American Institute of Geography and History +196,XS,1930,Radio Technical Commission for Maritime Services +197,XT,1940,Scientific Commission on Antarctic Research +198,XU,1950,The Hydrographic Society +199,XV,1960,World Meteorological Organization +200,XW,1970,United Nations, Office for Ocean Affairs and 
Law of the Sea +201,PM,2020,PRIMAR - European ENC Coordinating Centre +202,1A,6682,ARAMCO +203,1B,0,UKHO test and sample datasets +204,1C,7196,CARIS +205,1D,7453,Amt fuer Geoinformationswesen der Bundeswehr +206,1E,7710,TerraNautical Data, Inc. +207,1F,7967,Force Technology, Danish Maritime Institute +208,1G,7968,_sterreichische Donau-Technik-GmbH +209,1H,7969,Vituki Water Resources Research Centre Hungary +210,1I,7970,Navionics S.p.A. +211,1K,7972,Kingway Technology Co +212,1L,7973,Laser-Scan Ltd +213,1M,7974,Channel of Moscow +214,1N,7975,Nautical Data International, Inc. +215,1O,7976,Offshore Charts Ltd. +216,1P,7977,Port Of London +217,1Q,7978,Quality Positioning Services +218,1R,7979,Rijkswaterstaat +219,1S,7980,Austrian Supreme Shippig Authority +220,1T,7981,UKHO - private production +221,1U,7982,ENC Center, National Taiwan Ocean University +222,1V,7983,The Volga-Baltic State Territorial Department for Waterways Management and Navigation +223,1W,7984,Wasser- und Schiffahrtsverwaltung des Bundes - Wasser- und Schiffahrtsdirektion S_d-West +224,1X,7985,Noorderzon Software +225,2A,10794,Azienda Regionale Navigazione Interna (ARNI) +226,2C,11308,IIC Technologies +227,2I,12056,Innovative Navigation GmbH +228,2M,12060,MARIN (Maritime Research Institute Netherlands) +229,2P,12063,PLOVPUT Beograd +230,2R,12065,Port of Rotterdam +231,2S,12079,Ssangyong Information & Communications Corp. +232,2T,12093,Transas Marine +233,2W,12096,Austrian Waterways Authority +234,3R,16203,A.F.D.J. R.A. Galati +235,3S,16204,Science Applications International Corp. +236,4R,20315,MD Atlantic Technologies +237,3T,16205,Tresco Navigation Systems +238,5M,24422,Hydrographic Office of Sarawak Marine Department +239,5T,24455,TEC Asociados +240,6C,27756,Guoy Consultancy Sdn Bhd +241,7C,31868,SevenCs AG & Co KG +242,7R,32651,The Federal Service of Geodesy and Cartography of Russia +243,7S,32652,Centre Sevzapgeoinform (SZGI) +244,7T,32653,Terra Corp +245,8A,35466,HSA Systems Pty Ltd +246,9A,39578,CherSoft Ltd +247,9T,40877,Tresco Engineering bvba +248,0_,65534,unknown producer diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/s57attributes.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/s57attributes.csv new file mode 100644 index 00000000..3b2c04e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/s57attributes.csv @@ -0,0 +1,484 @@ +"Code","Attribute","Acronym","Attributetype","Class" +1,Agency responsible for production,AGENCY,A,F +2,Beacon shape,BCNSHP,E,F +3,Building shape,BUISHP,E,F +4,Buoy shape,BOYSHP,E,F +5,Buried depth,BURDEP,F,F +6,Call sign,CALSGN,S,F +7,Category of airport/airfield,CATAIR,L,F +8,Category of anchorage,CATACH,L,F +9,Category of bridge,CATBRG,L,F +10,Category of built-up area,CATBUA,E,F +11,Category of cable,CATCBL,E,F +12,Category of canal,CATCAN,E,F +13,Category of cardinal mark,CATCAM,E,F +14,Category of checkpoint,CATCHP,E,F +15,Category of coastline,CATCOA,E,F +16,Category of control point,CATCTR,E,F +17,Category of conveyor,CATCON,E,F +18,Category of coverage,CATCOV,E,F +19,Category of crane,CATCRN,E,F +20,Category of dam,CATDAM,E,F +21,Category of distance mark,CATDIS,E,F +22,Category of dock,CATDOC,E,F +23,Category of dumping ground,CATDPG,L,F +24,Category of fence/wall,CATFNC,E,F +25,Category of ferry,CATFRY,E,F +26,Category of fishing facility,CATFIF,E,F +27,Category of fog signal,CATFOG,E,F +28,Category of fortified structure,CATFOR,E,F +29,Category of gate,CATGAT,E,F +30,Category of harbour facility,CATHAF,L,F +31,Category of 
hulk,CATHLK,L,F +32,Category of ice,CATICE,E,F +33,Category of installation buoy,CATINB,E,F +34,Category of land region,CATLND,L,F +35,Category of landmark,CATLMK,L,F +36,Category of lateral mark,CATLAM,E,F +37,Category of light,CATLIT,L,F +38,Category of marine farm/culture,CATMFA,E,F +39,Category of military practice area,CATMPA,L,F +40,Category of mooring/warping facility,CATMOR,E,F +41,Category of navigation line,CATNAV,E,F +42,Category of obstruction,CATOBS,E,F +43,Category of offshore platform,CATOFP,L,F +44,Category of oil barrier,CATOLB,E,F +45,Category of pile,CATPLE,E,F +46,Category of pilot boarding place,CATPIL,E,F +47,Category of pipeline / pipe,CATPIP,L,F +48,Category of production area,CATPRA,E,F +49,Category of pylon,CATPYL,E,F +50,Category of quality of data,CATQUA,E,F +51,Category of radar station,CATRAS,E,F +52,Category of radar transponder beacon,CATRTB,E,F +53,Category of radio station,CATROS,L,F +54,Category of recommended track,CATTRK,E,F +55,Category of rescue station,CATRSC,L,F +56,Category of restricted area,CATREA,L,F +57,Category of road,CATROD,E,F +58,Category of runway,CATRUN,E,F +59,Category of sea area,CATSEA,E,F +60,Category of shoreline construction,CATSLC,E,F +61,"Category of signal station, traffic",CATSIT,L,F +62,"Category of signal station, warning",CATSIW,L,F +63,Category of silo/tank,CATSIL,E,F +64,Category of slope,CATSLO,E,F +65,Category of small craft facility,CATSCF,L,F +66,Category of special purpose mark,CATSPM,L,F +67,Category of Traffic Separation Scheme,CATTSS,E,F +68,Category of vegetation,CATVEG,L,F +69,Category of water turbulence,CATWAT,E,F +70,Category of weed/kelp,CATWED,E,F +71,Category of wreck,CATWRK,E,F +72,Category of zone of confidence data,CATZOC,E,F +73,Character spacing,$SPACE,E,$ +74,Character specification,$CHARS,A,$ +75,Colour,COLOUR,L,F +76,Colour pattern,COLPAT,L,F +77,Communication channel,COMCHA,A,F +78,Compass size,$CSIZE,F,$ +79,Compilation date,CPDATE,A,F +80,Compilation scale,CSCALE,I,F +81,Condition,CONDTN,E,F +82,"Conspicuous, Radar",CONRAD,E,F +83,"Conspicuous, visual",CONVIS,E,F +84,Current velocity,CURVEL,F,F +85,Date end,DATEND,A,F +86,Date start,DATSTA,A,F +87,Depth range value 1,DRVAL1,F,F +88,Depth range value 2,DRVAL2,F,F +89,Depth units,DUNITS,E,F +90,Elevation,ELEVAT,F,F +91,Estimated range of transmission,ESTRNG,F,F +92,Exhibition condition of light,EXCLIT,E,F +93,Exposition of sounding,EXPSOU,E,F +94,Function,FUNCTN,L,F +95,Height,HEIGHT,F,F +96,Height/length units,HUNITS,E,F +97,Horizontal accuracy,HORACC,F,F +98,Horizontal clearance,HORCLR,F,F +99,Horizontal length,HORLEN,F,F +100,Horizontal width,HORWID,F,F +101,Ice factor,ICEFAC,F,F +102,Information,INFORM,S,F +103,Jurisdiction,JRSDTN,E,F +104,Justification - horizontal,$JUSTH,E,$ +105,Justification - vertical,$JUSTV,E,$ +106,Lifting capacity,LIFCAP,F,F +107,Light characteristic,LITCHR,E,F +108,Light visibility,LITVIS,L,F +109,Marks navigational - System of,MARSYS,E,F +110,Multiplicity of lights,MLTYLT,I,F +111,Nationality,NATION,A,F +112,Nature of construction,NATCON,L,F +113,Nature of surface,NATSUR,L,F +114,Nature of surface - qualifying terms,NATQUA,L,F +115,Notice to Mariners date,NMDATE,A,F +116,Object name,OBJNAM,S,F +117,Orientation,ORIENT,F,F +118,Periodic date end,PEREND,A,F +119,Periodic date start,PERSTA,A,F +120,Pictorial representation,PICREP,S,F +121,Pilot district,PILDST,S,F +122,Producing country,PRCTRY,A,F +123,Product,PRODCT,L,F +124,Publication reference,PUBREF,S,F +125,Quality of sounding measurement,QUASOU,L,F +126,Radar wave 
length,RADWAL,A,F +127,Radius,RADIUS,F,F +128,Recording date,RECDAT,A,F +129,Recording indication,RECIND,A,F +130,Reference year for magnetic variation,RYRMGV,A,F +131,Restriction,RESTRN,L,F +132,Scale maximum,SCAMAX,I,F +133,Scale minimum,SCAMIN,I,F +134,Scale value one,SCVAL1,I,F +135,Scale value two,SCVAL2,I,F +136,Sector limit one,SECTR1,F,F +137,Sector limit two,SECTR2,F,F +138,Shift parameters,SHIPAM,A,F +139,Signal frequency,SIGFRQ,I,F +140,Signal generation,SIGGEN,E,F +141,Signal group,SIGGRP,A,F +142,Signal period,SIGPER,F,F +143,Signal sequence,SIGSEQ,A,F +144,Sounding accuracy,SOUACC,F,F +145,Sounding distance - maximum,SDISMX,I,F +146,Sounding distance - minimum,SDISMN,I,F +147,Source date,SORDAT,A,F +148,Source indication,SORIND,A,F +149,Status,STATUS,L,F +150,Survey authority,SURATH,S,F +151,Survey date - end,SUREND,A,F +152,Survey date - start,SURSTA,A,F +153,Survey type,SURTYP,L,F +154,Symbol scaling factor,$SCALE,F,$ +155,Symbolization code,$SCODE,A,$ +156,Technique of sounding measurement,TECSOU,L,F +157,Text string,$TXSTR,S,$ +158,Textual description,TXTDSC,S,F +159,Tidal stream - panel values,TS_TSP,A,F +160,"Tidal stream, current - time series values",TS_TSV,A,F +161,Tide - accuracy of water level,T_ACWL,E,F +162,Tide - high and low water values,T_HWLW,A,F +163,Tide - method of tidal prediction,T_MTOD,E,F +164,Tide - time and height differences,T_THDF,A,F +165,"Tide, current - time interval of values",T_TINT,I,F +166,Tide - time series values,T_TSVL,A,F +167,Tide - value of harmonic constituents,T_VAHC,A,F +168,Time end,TIMEND,A,F +169,Time start,TIMSTA,A,F +170,Tint,$TINTS,E,$ +171,Topmark/daymark shape,TOPSHP,E,F +172,Traffic flow,TRAFIC,E,F +173,Value of annual change in magnetic variation,VALACM,F,F +174,Value of depth contour,VALDCO,F,F +175,Value of local magnetic anomaly,VALLMA,F,F +176,Value of magnetic variation,VALMAG,F,F +177,Value of maximum range,VALMXR,F,F +178,Value of nominal range,VALNMR,F,F +179,Value of sounding,VALSOU,F,F +180,Vertical accuracy,VERACC,F,F +181,Vertical clearance,VERCLR,F,F +182,"Vertical clearance, closed",VERCCL,F,F +183,"Vertical clearance, open",VERCOP,F,F +184,"Vertical clearance, safe",VERCSA,F,F +185,Vertical datum,VERDAT,E,F +186,Vertical length,VERLEN,F,F +187,Water level effect,WATLEV,E,F +188,Category of Tidal stream,CAT_TS,E,F +189,Positional accuracy units,PUNITS,E,F +190,Object class definition,CLSDEF,S,F +191,Object class name,CLSNAM,S,F +192,Symbol instruction,SYMINS,S,F +300,Information in national language,NINFOM,S,N +301,Object name in national language,NOBJNM,S,N +302,Pilot district in national language,NPLDST,S,N +303,Text string in national language,$NTXST,S,N +304,Textual description in national language,NTXTDS,S,N +400,Horizontal datum,HORDAT,E,S +401,Positional Accuracy,POSACC,F,S +402,Quality of position,QUAPOS,E,S +0,"###Codes in the 17xxx range come from past s57attributes_iw.csv (Inland Waterways)",###,S,F +17000,Category of Anchorage area,catach,L,F +17001,Category of distance mark,catdis,E,F +17002,Category of signal station traffic,catsit,L,F +17003,Category of signal station warning,catsiw,L,F +17004,Restriction,restrn,L,F +17005,Vertical datum,verdat,E,F +17006,Category of bridge,catbrg,L,F +17007,Category of ferry,catfry,L,F +17008,Category of harbour facilities,cathaf,L,F +17009,"Marks navigational – System of",marsys,E,F +17050,Additional mark,addmrk,L,F +17051,Category of bank,catbnk,E,F +17052,Category of notice mark,catnmk,E,F +17055,Class of dangerous cargo,clsdng,E,F +17056,Direction of
impact,dirimp,L,F +17057,Distance from bank,disbk1,F,F +17058,Distance from bank,disbk2,F,F +17059,"Distance of impact, upstream",disipu,F,F +17060,"Distance of impact, downstream",disipd,F,F +17061,Elevation 1,eleva1,F,F +17062,Elevation 2,eleva2,F,F +17063,Function of notice mark,fnctnm,E,F +17064,Waterway distance,wtwdis,F,F +17065,Bunker vessel,bunves,E,F +17066,Category of berth,catbrt,L,F +17067,Category of bunker,catbun,L,F +17068,Category of CEMT class,catccl,L,F +17069,Category of communication,catcom,L,F +17070,Category of harbour area,cathbr,L,F +17071,Category of refuse dump,catrfd,L,F +17072,Category of terminal,cattml,L,F +17073,Communication,comctn,S,F +17074,"Horizontal clearance, length",horcll,F,F +17075,"Horizontal clearance, width",horclw,F,F +17076,Transshipping goods,trshgd,L,F +17077,UN Location Code,unlocd,S,F +17112,Category of waterway mark,catwwm,E,F +0,"###Codes in the 20xxx and 22xxx range come from past s57attributes_aml.csv (Additional_Military_Layers)",###,S,F +20484,"Abandonment Date","databa","A","?" +20485,"Attenuation","attutn","F","?" +20486,"Beam of Vessel","vesbem","F","?" +20487,"Bearing","bearng","F","?" +20488,"Blind Zone","blndzn","A","?" +20489,"Breaker Type","brktyp","E","?" +20490,"Density","bulkdn","F","?" +20491,"Burial Mechanism","brmchm","E","?" +20492,"Burial Percentage","brpctg","I","?" +20493,"Burial Period","brperd","I","?" +20494,"Burial Probability","brprob","E","?" +20495,"Cardinal Point Orientation","orcard","E","?" +20496,"Category of administration area","catadm","E","?" +20497,"Category of airspace restriction","catasr","E","?" +20498,"Category of bedrock","N/A","N/A","?" +20499,"Bottom Feature Classification","catbot","E","?" +20500,"Category of coastguard station","catcgs","E","?" +20501,"Category of controlled airspace","catcas","E","?" +20502,"Fishing Activity","catfsh","E","?" +20503,"Type of Imagery","catimg","L","?" +20504,"Category of marine management area","catmma","E","?" +20505,"Category of maritime safety information","catmsi","E","?" +20506,"Category of military exercise airspace ","catmea","E","?" +20507,"Category of patrol area","catpat","E","?" +20508,"Category of reporting/radio calling-in point","catrep","E","?" +20509,"Category of regulated airspace","N/A","N/A","?" +20510,"Category of territorial sea baseline","catsbl","E","?" +20511,"Trafficability","cattrf","E","?" +20512,"Command System","comsys","S","?" +20515,"Controlled airspace class designation","caircd","E","?" +20516,"Controlling authority","authty","S","?" +20517,"Current Scour Dimensions","scrdim","A","?" +20518,"Dangerous Marine and Land Life","dgmrlf","L","?" +20519,"Date Sunk","datsnk","A","?" +20520,"Debris Field","debfld","A","?" +20521,"Depth of Activity","depact","F","?" +20522,"Depth of Layer","deplyr","F","?" +20523,"Distance from Small Bottom Object","discon","F","?" +20524,"Diver’s Thrust Test Depth","dttdep","E","?" +20525,"Diver’s Thrust Test Number","dttnum","I","?" +20526,"Diving Activity","divact","E","?" +20527,"Draught of Vessel","vesdgh","F","?" +20528,"Exit Usability","exitus","E","?" +20529,"Field Name","fldnam","S","?" +20530,"First Detection Year","datfir","A","?" +20531,"First Sensor","senfir","E","?" +20532,"First Source","sorfir","E","?" +20533,"Foliar Index","folinx","F","?" +20534,"Gas Content","gascon","I","?" +20535,"General Water Depth","gendep","I","?" +20536,"Gradient","gradnt","E","?" +20537,"Grain Size","grnsiz","F","?" +20538,"Inclination","incltn","F","?" 
+20539,"Internal Data Record Identification Number","N/A","N/A","?" +20540,"Last Detection Year","datlst","A","?" +20541,"Last Sensor","senlst","E","?" +20542,"Last Source","sorlst","E","?" +20543,"Lay Platform","layptm","E","?" +20544,"Lay Reference Number","layrfn","S","?" +20545,"Lay Time","laytim","A","?" +20546,"Layer Number","laynum","I","?" +20547,"Legal Status","legsta","S","?" +20548,"Length of Vessel","veslen","F","?" +20549,"Magnetic Anomaly Detector (MAD) Signature","madsig","E","?" +20550,"Magnetic Intensity","magint","I","?" +20551,"Mean Shear Strength","msstrg","F","?" +20552,"Migration Direction","migdir","I","?" +20553,"Migration Speed","migspd","F","?" +20554,"Milec Density","milden","E","?" +20555,"Mine Index Mine Case","mnimnc","E","?" +20556,"Mine Index Mine Type","mnimnt","L","?" +20557,"Mine Reference Number","minern","S","?" +20558,"Mine-Hunting Classification","mhclas","E","?" +20559,"Minehunting System","mnhsys","S","?" +20560,"Minesweeping System","mnssys","S","?" +20561,"Mission Classification","miscls","E","?" +20562,"Mission Comments","miscom","S","?" +20563,"Mission Date","misdat","A","?" +20564,"Mission Name","misnme","S","?" +20565,"MWDC Reference Number","mwdcrn","S","?" +20566,"Nature of Geological Layer","natsed","E","?" +20567,"Navigation System","navsys","S","?" +20568,"NOMBO Density","nomden","E","?" +20569,"Not Found","notfnd","S","?" +20570,"Number of Previous Observations","nmprob","I","?" +20571,"Operator","oprtor","S","?" +20572,"Orientation of Best Observation","orbobn","F","?" +20573,"Origin of Data","orgdat","E","?" +20574,"Originator","orgntr","S","?" +20575,"Porosity","porsty","I","?" +20576,"Quality of Beach Data","quabch","A","?" +20577,"Re-entered Date","datren","A","?" +20578,"Re-suspended Date","datres","A","?" +20579,"Reverberation","revebn","E","?" +20580,"Safety Zone","N/A","N/A","?" +20581,"Sample Retained","samret","S","?" +20582,"Seabed Coverage","sbdcov","I","?" +20583,"Ships Speed","shpspd","F","?" +20584,"Sonar Frequency","snrfrq","E","?" +20585,"Sonar Range Scale","snrrsc","F","?" +20586,"Sonar Reflectivity","snrflc","E","?" +20587,"Sonar Signal Strength","sonsig","E","?" +20588,"Sound Velocity","sndvel","F","?" +20589,"Sounding Datum","soudat","E","?" +20590,"Spudded Date","datspd","A","?" +20592,"Steepest Face Orientation","stfotn","F","?" +20593,"Strength According to Richter Scale","ricsca","I","?" +20594,"Strength of Magnetic Anomaly","magany","E","?" +20595,"Suitability for ACV Use","stbacv","E","?" +20596,"Surf Height","srfhgt","F","?" +20597,"Surf Zone","srfzne","I","?" +20598,"Survey Date and Time","surdat","A","?" +20599,"Suspension Date","datsus","A","?" +20600,"Swell Height","swlhgt","F","?" +20601,"Tidal Range","tdlrng","F","?" +20602,"Time of Year","timeyr","L","?" +20603,"Tonnage","tonage","I","?" +20604,"Towed Body Depth","twdbdp","F","?" +20605,"Type of military activity","milact","L","?" +20606,"Type of Tonnage","typton","E","?" +20607,"Type of Wreck","typewk","E","?" +20608,"Underwater Reference Mark","unwrfm","E","?" +20609,"Unique ID from a Navigational Product","N/A","N/A","?" +20610,"Water Clarity","watclr","F","?" +20611,"Wavelength","wavlen","F","?" +20612,"Weight Bearing Capability","wbrcap","I","?" +20613,"Width (left)","lftwid","F","?" +20614,"Width (right)","rgtwid","F","?" +20615,"Contour Type","hypcat","E","?" +20616,"Sounding Velocity","souvel","E","?" +20617,"Access Restriction","accres","S","?" +20618,"Approach","apprch","S","?" +20619,"Category of Beach","catbch","E","?" 
+20620,"Clearance Percentage","clperc","I","?" +20621,"Communications","commns","L","?" +20622,"Confidence Level","conlev","F","?" +20624,"Exit Description","extdes","S","?" +20625,"Industry","indtry","S","?" +20626,"Landing Conditions","lndcon","S","?" +20627,"Leisure Activity","lsract","S","?" +20628,"Logistics","logtcs","L","?" +20629,"Manoeuvring","manvrg","S","?" +20630,"Mine Threat Density","mntden","I","?" +20631,"Multiple Contacts","mulcon","I","?" +20632,"Navigational Description","navdes","S","?" +20633,"Navigational Difficulty","navdif","E","?" +20634,"Number of Remaining Mines","numrmn","I","?" +20635,"Pier Contact Details","pierod","S","?" +20636,"Pier Description","pierdn","S","?" +20637,"Prairies Density","prsden","I","?" +20638,"Probability for Remaining Mines","prbrmn","F","?" +20639,"Remaining Mines Likely, Maximum Number","rmnlmn","I","?" +20640,"Self Protection (Air)","sfptna","E","?" +20641,"Self Protection (Near Defence)","sptnnd","E","?" +20642,"Self Protection (Surface)","sfptns","E","?" +20643,"Sensor Coverage","sencov","S","?" +20644,"Simple Initial Threat","sminth","F","?" +20645,"Target Reference Weight","tgrfwt","E","?" +20646,"Tidal Type","tdltyp","E","?" +20647,"Type of Resource Location","typres","E","?" +20648,"Undetectable Mines Ratio","undmnr","F","?" +20649,"Undetectable Mines Ratio with Burial","umnrwb","F","?" +20650,"Undetectable Mines Ratio without Burial","umrwob","F","?" +20651,"Weapon Coverage","wpncov","S","?" +20652,"On Sonar","onsonr","E","?" +20653,"HF Bottom Loss","hfbmls","F","?" +20654,"LF Bottom Loss","lfbmls","F","?" +20655,"Detection Probability","dtprob","F","?" +20656,"Disposal Probability","dsprob","F","?" +20657,"Classification Probability","clprob","F","?" +20658,"Characteristic Detection Width (A)","cswidt","I","?" +20659,"Characteristic Detection Probability (B)","csprob","F","?" +20660,"Zone Colour","znecol","E","?" +20661,"Reverberation Frequency","revfqy","F","?" +20662,"Reverberation Grazing Angle","revgan","F","?" +20663,"International Defence Organisation (IDO) status","secido","E","?" +20664,"Protective Marking","secpmk","E","?" +20665,"Owner Authority","secown","S","?" +20666,"Caveat ","seccvt","S","?" +20667,"Species","spcies","S","?" +20668,"Swept date","swpdat","A","?" +20669,"Runway length","rwylen","I","?" +20670,"Active period","actper","S","?" +20671,"Maximum altitude","maxalt","I","?" +20672,"Minimum altitude","minalt","I","?" +20673,"Maximum Flight Level","maxftl","I","?" +20674,"Minimum Flight Level","minftl","I","?" +20675,"Bottom Vertical Safety Separation","bverss","I","?" +20676,"Minimum Safe Depth","mindep","I","?" +20677,"Interpolated line characteristic","linech","E","?" +20678,"Identification","identy","S","?" +20679,"Route Classification","rclass","E","?" +20680,"Population","popltn","I","?" +20681,"Surface Threat","surtht","E","?" +20682,"Heading-Up Bearing","upbear","F","?" +20683,"Heading-Down Bearing","dnbear","F","?" +20684,"Ice Concentration","icencn","I","?" +20685,"Danger height","dgrhgt","I","?" +20686,"Depth Restriction","depres","S","?" +20687,"Area Category","arecat","E","?" +20688,"Existence of Restricted Area","exzres","E","?" +20689,"Target Strength","tarstg","I","?" +20690,"Qualification of Radar Coverage","quarad","I","?" +20691,"Contact Details","condet","S","?" +20692,"Limit of Anchors and Chains","limanc","F","?" +20693,"CCM Index","ccmidx","I","?" +20694,"Military Load Classification","mlclas","E","?" +20695,"MGS Type","mgstyp","E","?" 
+20696,"Ice Attribute Concentration Total","iceact","E","?" +20697,"Ice Stage of Development","icesod","E","?" +20698,"Ice Advisory Code","iceadc","S","?" +20699,"Number of Icebergs in Area","icebnm","I","?" +20700,"Ice Line Category","icelnc","E","?" +20701,"Ice Polynya Type","icepty","E","?" +20702,"Ice Polynya Status","icepst","E","?" +20703,"Ice Lead Type","icelty","E","?" +20704,"Ice Lead Status","icelst","E","?" +20705,"Iceberg Size","icebsz","E","?" +20706,"Iceberg Shape","icebsh","E","?" +20707,"Icedrift or Iceberg Direction","icebdr","E","?" +20708,"Icedrift or Iceberg Speed","icebsp","F","?" +20709,"Maximum Ice Thickness","icemax","F","?" +20710,"Minimum Ice Thickness","icemin","F","?" +20711,"Ice Ridge Development","icerdv","E","?" +20712,"Land Ice","icelnd","E","?" +20713,"Sea Direction","seadir","E","?" +20714,"Traffic density","traden","S","?" +20715,"Type of shipping","typshp","L","?" +20716,"Ice Coverage Type","icecvt","E","?" +20718,"Status of Small Bottom Object","staobj","L","?" +20719,"ICAO code","icaocd","S","?" +20720,"textual description","txtdes","S","?" +20721,"Object Reference Number","objtrn","S","?" +20722,"Object Shape","objshp","S","?" +22484,"Category of completeness","catcnf","E","?" +22485,"Error Ellipse","errell","A","?" +22486,"Object classes","N/A","N/A","?" +22487,"Security classification","N/A","N/A","?" +22488,"Vertical Datum Shift Parameter","vershf","F","?" +22489,"Absolute Vertical Accuracy","elvacc","F","?" +22490,"Reflection Coefficient","reflco","F","?" +22491,"Copyright statement","cpyrit","S","?" +0,"###40000 comes from past s57attributes_iw.csv (Inland Waterways)",###,S,F +40000,Update message,updmsg,S,F diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/s57expectedinput.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/s57expectedinput.csv new file mode 100644 index 00000000..e71249f3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/s57expectedinput.csv @@ -0,0 +1,1008 @@ +"Code","ID","Meaning" +2,1,"stake, pole, perch, post" +2,2,whity +2,3,beacon tower +2,4,lattice beacon +2,5,pile beacon +2,6,cairn +2,7,buoyant beacon +3,5,high-rise building +3,6,pyramid +3,7,cylindrical +3,8,spherical +3,9,cubic +4,1,"conical (nun, ogival)" +4,2,can (cylindrical) +4,3,spherical +4,4,pillar +4,5,spar (spindle) +4,6,barrel (tun) +4,7,super-buoy +4,8,ice buoy +7,1,military aeroplane airport +7,2,civil aeroplane airport +7,3,military heliport +7,4,civil heliport +7,5,glider airfield +7,6,small planes airfield +7,8,emergency airfield +8,1,unrestricted anchorage +8,2,deep water anchorage +8,3,tanker anchorage +8,4,explosives anchorage +8,5,quarantine anchorage +8,6,sea-plane anchorage +8,7,small craft anchorage +8,8,small craft mooring area +8,9,anchorage for periods up to 24 hours +9,1,fixed bridge +9,2,opening bridge +9,3,swing bridge +9,4,lifting bridge +9,5,bascule bridge +9,6,pontoon bridge +9,7,draw bridge +9,8,transporter bridge +9,9,footbridge +9,10,viaduct +9,11,aqueduct +9,12,suspension bridge +10,1,urban area +10,2,settlement +10,3,village +10,4,town +10,5,city +10,6,holiday village +11,1,power line +11,3,transmission line +11,4,telephone +11,5,telegraph +11,6,mooring cable/chain +12,1,transportation +12,2,drainage +12,3,irrigation +13,1,north cardinal mark +13,2,east cardinal mark +13,3,south cardinal mark +13,4,west cardinal mark +14,1,custom +15,1,steep coast +15,2,flat coast +15,3,sandy shore +15,4,stony shore +15,5,shingly shore +15,6,glacier (seaward end) +15,7,mangrove +15,8,marshy shore +15,9,coral 
reef +15,10,ice coast +16,1,triangulation point +16,2,observation spot +16,3,fixed point +16,4,bench-mark +16,5,boundary mark +16,6,"horizontal control, main station" +16,7,"horizontal control, secondary station" +17,1,aerial cableway (telepheric) +17,2,belt conveyor +18,1,coverage available +18,2,no coverage available +19,2,container crane/gantry +19,3,sheerlegs +19,4,travelling crane +19,5,A-frame +20,1,weir +20,2,dam +20,3,flood barrage +21,1,distance mark not physically installed +21,2,"visible mark, pole" +21,3,"visible mark, board" +21,4,"visible mark, unknown shape" +22,1,tidal +22,2,non-tidal (wet dock) +23,2,chemical waste dumping ground +23,3,nuclear waste dumping ground +23,4,explosives dumping ground +23,5,spoil ground +23,6,vessel dumping ground +24,1,fence +24,3,hedge +24,4,wall +25,1,'free-moving' ferry +25,2,cable ferry +25,3,ice ferry +26,1,fishing stake +26,2,fish trap +26,3,fish weir +26,4,tunny net +27,1,explosive +27,2,diaphone +27,3,siren +27,4,nautophone +27,5,reed +27,6,tyfon +27,7,bell +27,8,whistle +27,9,gong +27,10,horn +28,1,castle +28,2,fort +28,3,battery +28,4,blockhouse +28,5,Martello tower +29,2,flood barrage gate +29,3,caisson +29,4,lock gate +29,5,dyke gate +30,1,RoRo-terminal +30,3,ferry terminal +30,4,fishing harbour +30,5,yacht harbour/marina +30,6,naval base +30,7,tanker terminal +30,8,passenger terminal +30,9,shipyard +30,10,container terminal +30,11,bulk terminal +31,1,floating restaurant +31,2,historic ship +31,3,museum +31,4,accommodation +31,5,floating breakwater +32,1,fast ice +32,5,glacier +32,8,polar ice +33,1,catenary anchor leg mooring (CALM) +33,2,single buoy mooring (SBM or SPM) +34,1,fen +34,2,marsh +34,3,moor/bog +34,4,heathland +34,5,mountain range +34,6,lowlands +34,7,canyon lands +34,8,paddy field +34,9,agricultural land +34,10,savanna/grassland +34,11,parkland +34,12,swamp +34,13,landslide +34,14,lava flow +34,15,salt pan +34,16,moraine +34,17,crater +34,18,cave +34,19,rock column or pinnacle +35,1,cairn +35,2,cemetery +35,3,chimney +35,4,dish aerial +35,5,flagstaff (flagpole) +35,6,flare stack +35,7,mast +35,8,windsock +35,9,monument +35,10,column (pillar) +35,11,memorial plaque +35,12,obelisk +35,13,statue +35,14,cross +35,15,dome +35,16,radar scanner +35,17,tower +35,18,windmill +35,19,windmotor +35,20,spire/minaret +36,1,port-hand lateral mark +36,2,starboard-hand lateral mark +36,3,preferred channel to starboard lateral mark +36,4,preferred channel to port lateral mark +37,1,directional function +37,4,leading light +37,5,aero light +37,6,air obstruction light +37,7,fog detector light +37,8,flood light +37,9,strip light +37,10,subsidiary light +37,11,spotlight +37,12,front +37,13,rear +37,14,lower +37,15,upper +37,16,moiré effect +37,17,emergency +37,18,bearing light +37,19,horizontally disposed +37,20,vertically disposed +38,1,crustaceans +38,2,oyster/mussels +38,3,fish +38,4,seaweed +39,2,torpedo exercise area +39,3,submarine exercise area +39,4,firing danger area +39,5,mine-laying practice area +39,6,small arms firing range +40,1,dolphin +40,2,deviation dolphin +40,3,bollard +40,4,tie-up wall +40,5,post or pile +40,6,chain/wire/cable +40,7,mooring buoy +41,1,clearing line +41,2,transit line +41,3,leading line bearing a recommended track +42,1,snag / stump +42,2,wellhead +42,3,diffuser +42,4,crib +42,5,fish haven +42,6,foul area +42,7,foul ground +42,8,ice boom +42,9,ground tackle +43,1,oil derrick / rig +43,2,production platform +43,3,observation / research platform +43,4,articulated loading platform (ALP) +43,5,single anchor
leg mooring (SALM) +43,6,mooring tower +43,7,artificial island +43,8,"floating production, storage and off-loading vessel (FPSO)" +43,9,accommodation platform +43,10,"navigation, communication and control buoy (NCCB)" +44,1,oil retention (high pressure pipe) +44,2,floating oil barrier +45,1,stake +45,3,post +45,4,tripodal +46,1,boarding by pilot-cruising vessel +46,2,boarding by helicopter +46,3,pilot comes out from shore +47,2,outfall pipe +47,3,intake pipe +47,4,sewer +47,5,bubbler system +47,6,supply pipe +48,1,quarry +48,2,mine +48,3,stockpile +48,4,power station area +48,5,refinery area +48,6,timber yard +48,7,factory area +48,8,tank farm +48,9,wind farm +49,1,power transmission pylon/pole +49,2,telephone/telegraph pylon/pole +49,3,aerial cableway/sky pylon +49,4,bridge pylon/tower +49,5,bridge pier +50,1,data quality A +50,2,data quality B +50,3,data quality C +50,4,data quality D +50,5,data quality E +50,6,quality not evaluated +51,1,radar surveillance station +51,2,coast radar station +52,1,"ramark, radar beacon transmitting continuously" +52,2,"racon, radar transponder beacon" +52,3,leading racon/radar transponder beacon +53,1,circular (non-directional) marine or aero-marine radiobeacon +53,2,directional radiobeacon +53,3,rotating-pattern radiobeacon +53,4,Consol beacon +53,5,radio direction-finding station +53,6,coast radio station providing QTG service +53,7,aeronautical radiobeacon +53,8,Decca +53,9,Loran C +53,10,Differential GPS +53,11,Toran +53,12,Omega +53,13,Syledis +53,14,Chaika (Chayka) +54,1,based on a system of fixed marks +54,2,not based on a system of fixed marks +55,1,rescue station with lifeboat +55,2,rescue station with rocket +55,4,refuge for shipwrecked mariners +55,5,refuge for intertidal area walkers +55,6,lifeboat lying at a mooring +56,1,offshore safety zone +56,4,nature reserve +56,5,bird sanctuary +56,6,game preserve +56,7,seal sanctuary +56,8,degaussing range +56,9,military area +56,10,historic wreck area +56,12,navigational aid safety zone +56,14,minefield +56,18,swimming area +56,19,waiting area +56,20,research area +56,21,dredging area +56,22,fish sanctuary +56,23,ecological reserve +56,24,no wake area +56,25,swinging area +57,1,motorway +57,2,major road +57,3,minor road +57,4,track / path +57,5,major street +57,6,minor street +57,7,crossing +58,1,aeroplane +58,2,helicopter landing pad +59,2,gat +59,3,bank +59,4,deep +59,5,bay +59,6,trench +59,7,basin +59,8,mud flats +59,9,reef +59,10,ledge +59,11,canyon +59,12,narrows +59,13,shoal +59,14,knoll +59,15,ridge +59,16,seamount +59,17,pinnacle +59,18,abyssal plain +59,19,plateau +59,20,spur +59,21,shelf +59,22,trough +59,23,saddle +59,24,abyssal hills +59,25,apron +59,26,archipelagic apron +59,27,borderland +59,28,continental margin +59,29,continental rise +59,30,escarpment +59,31,fan +59,32,fracture zone +59,33,gap +59,34,guyot +59,35,hill +59,36,hole +59,37,levee +59,38,median valley +59,39,moat +59,40,mountains +59,41,peak +59,42,province +59,43,rise +59,44,seachannel +59,45,seamount chain +59,46,shelf edge +59,47,sill +59,48,slope +59,49,terrace +59,50,valley +59,51,canal +59,52,lake +59,53,river +60,1,breakwater +60,2,groyne (groin) +60,3,mole +60,4,pier (jetty) +60,5,promenade pier +60,6,wharf (quay) +60,7,training wall +60,8,rip rap +60,9,revetment +60,10,sea wall +60,11,landing steps +60,12,ramp +60,13,slipway +60,14,fender +60,15,solid face wharf +60,16,open face wharf +61,1,port control +61,2,port entry and departure +61,3,International Port Traffic +61,4,berthing +61,5,dock +61,6,lock +61,7,flood
barrage +61,8,bridge passage +61,9,dredging +62,1,danger +62,2,maritime obstruction +62,3,cable +62,4,military practice +62,5,distress +62,6,weather +62,7,storm +62,8,ice +62,9,time +62,10,tide +62,11,tidal stream +62,12,tide gauge +62,13,tide scale +62,14,diving +63,1,silo in general +63,2,tank in general +63,3,grain elevator +63,4,water tower +64,1,cutting +64,2,embankment +64,3,dune +64,4,hill +64,5,pingo +64,6,cliff +64,7,scree +65,1,visitor's berth +65,2,nautical club +65,3,boat hoist +65,4,sailmaker +65,5,boatyard +65,6,public inn +65,7,restaurant +65,8,chandler +65,9,provisions +65,10,doctor +65,11,pharmacy +65,12,water tap +65,13,fuel station +65,14,electricity +65,15,bottle gas +65,16,showers +65,17,launderette +65,18,public toilets +65,19,post box +65,20,public telephone +65,21,refuse bin +65,22,car park +65,23,parking for boats and trailers +65,24,caravan site +65,25,camping site +65,26,sewerage pump-out station +65,27,emergency telephone +65,28,landing / launching place for boats +65,29,visitors mooring +65,30,scrubbing berth +65,31,picnic area +66,1,firing danger area mark +66,2,target mark +66,3,marker ship mark +66,4,degaussing range mark +66,5,barge mark +66,6,cable mark +66,7,spoil ground mark +66,8,outfall mark +66,9,ODAS (Ocean-Data-Acquisition-System) +66,10,recording mark +66,11,seaplane anchorage mark +66,12,recreation zone mark +66,13,private mark +66,14,mooring mark +66,15,LANBY (Large Automatic Navigational Buoy) +66,16,leading mark +66,17,measured distance mark +66,18,notice mark +66,19,TSS mark (Traffic Separation Scheme) +66,20,anchoring prohibited mark +66,21,berthing prohibited mark +66,22,overtaking prohibited mark +66,23,two-way traffic prohibited mark +66,24,'reduced wake' mark +66,25,speed limit mark +66,26,stop mark +66,27,general warning mark +66,28,'sound ship's siren' mark +66,29,restricted vertical clearance mark +66,30,maximum vessel's draught mark +66,31,restricted horizontal clearance mark +66,32,strong current warning mark +66,33,berthing permitted mark +66,34,overhead power cable mark +66,35,'channel edge gradient' mark +66,36,telephone mark +66,37,ferry crossing mark +66,39,pipeline mark +66,40,anchorage mark +66,41,clearing mark +66,42,control mark +66,43,diving mark +66,44,refuge beacon +66,45,foul ground mark +66,46,yachting mark +66,47,heliport mark +66,48,GPS mark +66,49,seaplane landing mark +66,50,entry prohibited mark +66,51,work in progress mark +66,52,mark with unknown purpose +67,1,IMO - adopted +67,2,not IMO - adopted +68,1,grassland +68,3,bush +68,4,deciduous wood +68,5,coniferous wood +68,6,wood in general (inc mixed wood) +68,7,mangroves +68,10,mixed crops +68,11,reed +68,12,moss +68,13,tree in general +68,14,evergreen tree +68,15,coniferous tree +68,16,palm tree +68,17,nipa palm tree +68,18,casuarina tree +68,19,eucalypt tree +68,20,deciduous tree +68,21,mangrove tree +68,22,filao tree +69,1,breakers +69,2,eddies +69,3,overfalls +69,4,tide rips +69,5,bombora +70,1,kelp +70,2,sea weed +70,3,sea grass +70,4,sargasso +71,1,non-dangerous wreck +71,2,dangerous wreck +71,3,distributed remains of wreck +71,4,wreck showing mast/masts +71,5,wreck showing any portion of hull or superstructure +72,1,zone of confidence A1 +72,2,zone of confidence A2 +72,3,zone of confidence B +72,4,zone of confidence C +72,5,zone of confidence D +72,6,zone of confidence U (data not assessed) +73,1,expanded/condensed +73,2,standard +75,1,white +75,2,black +75,3,red +75,4,green +75,5,blue +75,6,yellow +75,7,grey +75,8,brown +75,9,amber +75,10,violet
+75,11,orange +75,12,magenta +75,13,pink +76,1,horizontal stripes +76,2,vertical stripes +76,3,diagonal stripes +76,4,squared +76,5,stripes (direction unknown) +76,6,border stripes +81,1,under construction +81,2,ruined +81,3,under reclamation +81,4,wingless +81,5,planned construction +82,1,radar conspicuous +82,2,not radar conspicuous +82,3,radar conspicuous (has radar reflector) +83,1,visual conspicuous +83,2,not visual conspicuous +89,1,metres +89,2,fathoms and feet +89,3,feet +89,4,fathoms and fractions +92,1,light shown without change of character +92,2,daytime light +92,3,fog light +92,4,night light +93,1,within the range of depth of the surrounding depth area +93,2,shoaler than range of depth of the surrounding depth area +93,3,deeper than range of depth of the surrounding depth area +94,2,harbour-master's office +94,3,custom office +94,4,health office +94,5,hospital +94,6,post office +94,7,hotel +94,8,railway station +94,9,police station +94,10,water-police station +94,11,pilot office +94,12,pilot lookout +94,13,bank office +94,14,headquarters for district control +94,15,transit shed/warehouse +94,16,factory +94,17,power station +94,18,administrative +94,19,educational facility +94,20,church +94,21,chapel +94,22,temple +94,23,pagoda +94,24,shinto shrine +94,25,buddhist temple +94,26,mosque +94,27,marabout +94,28,lookout +94,29,communication +94,30,television +94,31,radio +94,32,radar +94,33,light support +94,34,microwave +94,35,cooling +94,36,observation +94,37,timeball +94,38,clock +94,39,control +94,40,airship mooring +94,41,stadium +94,42,bus station +96,1,metres +96,2,feet +103,1,international +103,2,national +103,3,national sub-division +104,1,centre justified +104,2,right justified +104,3,left justified +105,1,bottom justified +105,2,centre justified +105,3,top justified +107,1,fixed +107,2,flashing +107,3,long-flashing +107,4,quick-flashing +107,5,very quick-flashing +107,6,ultra quick-flashing +107,7,isophased +107,8,occulting +107,9,interrupted quick-flashing +107,10,interrupted very quick-flashing +107,11,interrupted ultra quick-flashing +107,12,morse +107,13,fixed / flash +107,14,flash / long-flash +107,15,occulting / flash +107,16,fixed / long-flash +107,17,occulting alternating +107,18,long-flash alternating +107,19,flash alternating +107,20,group alternating +107,25,quick-flash plus long-flash +107,26,very quick-flash plus long-flash +107,27,ultra quick-flash plus long-flash +107,28,alternating +107,29,fixed and alternating flashing +108,1,high intensity +108,2,low intensity +108,3,faint +108,4,intensified +108,5,unintensified +108,6,visibility deliberately restricted +108,7,obscured +108,8,partially obscured +109,1,IALA A +109,2,IALA B +109,9,no system +109,10,other system +112,1,masonry +112,2,concreted +112,3,loose boulders +112,4,hard surfaced +112,5,unsurfaced +112,6,wooden +112,7,metal +112,8,glass reinforced plastic (GRP) +112,9,painted +113,1,mud +113,2,clay +113,3,silt +113,4,sand +113,5,stone +113,6,gravel +113,7,pebbles +113,8,cobbles +113,9,rock +113,11,lava +113,14,coral +113,17,shells +113,18,boulder +114,1,fine +114,2,medium +114,3,coarse +114,4,broken +114,5,sticky +114,6,soft +114,7,stiff +114,8,volcanic +114,9,calcareous +114,10,hard +123,1,oil +123,2,gas +123,3,water +123,4,stone +123,5,coal +123,6,ore +123,7,chemicals +123,8,drinking water +123,9,milk +123,10,bauxite +123,11,coke +123,12,iron ingots +123,13,salt +123,14,sand +123,15,timber +123,16,sawdust / wood chips +123,17,scrap metal +123,18,liquefied natural gas (LNG) +123,19,liquefied petroleum
gas (LPG) +123,20,wine +123,21,cement +123,22,grain +125,1,depth known +125,2,depth unknown +125,3,doubtful sounding +125,4,unreliable sounding +125,5,no bottom found at value shown +125,6,least depth known +125,7,"least depth unknown, safe clearance at value shown" +125,8,value reported (not surveyed) +125,9,value reported (not confirmed) +125,10,maintained depth +125,11,not regularly maintained +131,1,anchoring prohibited +131,2,anchoring restricted +131,3,fishing prohibited +131,4,fishing restricted +131,5,trawling prohibited +131,6,trawling restricted +131,7,entry prohibited +131,8,entry restricted +131,9,dredging prohibited +131,10,dredging restricted +131,11,diving prohibited +131,12,diving restricted +131,13,no wake +131,14,area to be avoided +131,15,construction prohibited +140,1,automatically +140,2,by wave action +140,3,by hand +140,4,by wind +149,1,permanent +149,2,occasional +149,3,recommended +149,4,disused +149,5,periodically/intermittent +149,6,reserved +149,7,temporary +149,8,private +149,9,mandatory +149,11,extinguished +149,12,illuminated +149,13,historic +149,14,public +149,15,synchronized +149,16,watched +149,17,un-watched +149,18,existence doubtful +153,1,reconnaissance/sketch survey +153,2,controlled survey +153,4,examination survey +153,5,passage survey +153,6,remotely sensed +156,1,found by echo-sounder +156,2,found by side scan sonar +156,3,found by multi-beam +156,4,found by diver +156,5,found by lead-line +156,6,swept by wire-drag +156,7,found by laser +156,8,swept by vertical acoustic system +156,9,found by electromagnetic sensor +156,10,photogrammetry +156,11,satellite imagery +156,12,found by levelling +156,13,swept by side-scan sonar +156,14,computer generated +161,1,better than 0.1m and 10 minutes +161,2,worse than 0.1m or 10 minutes +163,1,simplified harmonic method of tidal prediction +163,2,full harmonic method of tidal prediction +163,3,height and time difference non-harmonic method +170,1,darkest blue +170,2,medium blue +170,3,lightest blue +171,1,"cone, point up" +171,2,"cone, point down" +171,3,sphere +171,4,2 spheres +171,5,cylinder (can) +171,6,board +171,7,x-shape (St. Andrew's cross) +171,8,upright cross (St.
George cross) +171,9,"cube, point up" +171,10,"2 cones, point to point" +171,11,"2 cones, base to base" +171,12,rhombus (diamond) +171,13,2 cones (points upward) +171,14,2 cones (points downward) +171,15,"besom, point up (broom or perch)" +171,16,"besom, point down (broom or perch)" +171,17,flag +171,18,sphere over rhombus +171,19,square +171,20,"rectangle, horizontal" +171,21,"rectangle, vertical" +171,22,"trapezium, up" +171,23,"trapezium, down" +171,24,"triangle, point up" +171,25,"triangle, point down" +171,26,circle +171,27,two upright crosses (one over the other) +171,28,T-shape +171,29,triangle pointing up over a circle +171,30,upright cross over a circle +171,31,rhombus over a circle +171,32,circle over a triangle pointing up +171,33,other shape (see INFORM) +172,1,inbound +172,2,outbound +172,3,one-way +172,4,two-way +185,1,Mean low water springs +185,2,Mean lower low water springs +185,3,Mean sea level +185,4,Lowest low water +185,5,Mean low water +185,6,Lowest low water springs +185,7,Approximate mean low water springs +185,8,Indian spring low water +185,9,Low water springs +185,10,Approximate lowest astronomical tide +185,11,Nearly lowest low water +185,12,Mean lower low water +185,13,Low water +185,14,Approximate mean low water +185,15,Approximate mean lower low water +185,16,Mean high water +185,17,Mean high water springs +185,18,High water +185,19,Approximate mean sea level +185,20,High water springs +185,21,Mean higher high water +185,22,Equinoctial spring low water +185,23,Lowest astronomical tide +185,24,Local datum +185,25,International Great Lakes Datum 1985 +185,26,Mean water level +185,27,Lower low water large tide +185,28,Higher high water large tide +185,29,Nearly highest high water +187,1,partly submerged at high water +187,2,always dry +187,3,always under water/submerged +187,4,covers and uncovers +187,5,awash +187,6,subject to inundation or flooding +400,1,WGS 72 +400,2,WGS 84 +400,3,European 1950 +400,4,Potsdam Datum +400,5,Adindan +400,6,Afgooye +400,7,Ain el Abd 1970 +400,8,Anna 1 Astro 1965 +400,9,Antigua Island Astro 1943 +400,10,Arc 1950 +400,11,Arc 1960 +400,12,Ascension Island 1958 +400,13,"Astro beacon \"E\" 1945" +400,14,Astro DOS 71/4 +400,15,Astro Tern Island (FRIG) 1961 +400,16,Astronomical Station 1952 +400,17,Australian Geodetic 1966 +400,18,Australian Geodetic 1984 +400,19,Ayabelle Lighthouse +400,20,Bellevue (IGN) +400,21,Bermuda 1957 +400,22,Bissau +400,23,Bogota Observatory +400,24,Bukit Rimpah +400,25,Camp Area Astro +400,26,Campo Inchauspe 1969 +400,27,Canton Astro 1966 +400,28,Cape +400,29,Cape Canaveral +400,30,Carthage +400,31,Chatham Island Astro 1971 +400,32,Chua Astro +400,33,Corrego Alegre +400,34,Dabola +400,35,Djakarta (Batavia) +400,36,DOS 1968 +400,37,Easter Island 1967 +400,38,European 1979 +400,39,Fort Thomas 1955 +400,40,Gan 1970 +400,41,Geodetic Datum 1949 +400,42,Graciosa Base SW 1948 +400,43,Guam 1963 +400,44,Gunung Segara +400,45,GUX 1 Astro +400,46,Herat North +400,47,Hjorsey 1955 +400,48,Hong Kong 1963 +400,49,Hu-Tzu-Shan +400,50,Indian +400,51,Indian 1954 +400,52,Indian 1975 +400,53,Ireland 1965 +400,54,ISTS 061 Astro 1968 +400,55,ISTS 073 Astro 1969 +400,56,Johnston Island 1961 +400,57,Kandawala +400,58,Kerguelen Island 1949 +400,59,Kertau 1948 +400,60,Kusaie Astro 1951 +400,61, +400,62, +400,63, +400,64, +400,65, +400,66, +400,67, +400,68, +400,69, +400,70, +400,71, +400,72, +400,73, +400,74, +400,75, +400,76, +400,77, +400,78, +400,79, +400,80, +400,81, +400,82, +400,83, +400,84, +400,85, +400,86, +400,87, +400,88,
+400,89, +400,90, +400,91, +400,92, +400,93, +400,94, +400,95, +400,96, +400,97, +400,98, +400,99,South Asia +400,100,Tananarive Observatory 1925 +402,1,surveyed +402,2,unsurveyed +402,3,inadequately surveyed +402,4,approximated +402,5,position doubtful +402,6,unreliable +402,7,reported (not surveyed) +402,8,reported (not confirmed) +402,9,estimated +402,10,precisely known +402,11,calculated diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/s57objectclasses.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/s57objectclasses.csv new file mode 100644 index 00000000..ae3628fc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/s57objectclasses.csv @@ -0,0 +1,287 @@ +"Code","ObjectClass","Acronym","Attribute_A","Attribute_B","Attribute_C","Class","Primitives" +1,Administration area (Named),ADMARE,JRSDTN;NATION;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +2,Airport / airfield,AIRARE,CATAIR;CONDTN;CONVIS;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +3,Anchor berth,ACHBRT,CATACH;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;RADIUS;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +4,Anchorage area,ACHARE,CATACH;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;RESTRN;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +5,"Beacon, cardinal",BCNCAR,BCNSHP;CATCAM;COLOUR;COLPAT;CONDTN;CONVIS;CONRAD;DATEND;DATSTA;ELEVAT;HEIGHT;MARSYS;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +6,"Beacon, isolated danger",BCNISD,BCNSHP;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;ELEVAT;HEIGHT;MARSYS;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +7,"Beacon, lateral",BCNLAT,BCNSHP;CATLAM;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;ELEVAT;HEIGHT;MARSYS;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +8,"Beacon, safe water",BCNSAW,BCNSHP;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;ELEVAT;HEIGHT;MARSYS;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +9,"Beacon, special purpose/general",BCNSPP,BCNSHP;CATSPM;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;ELEVAT;HEIGHT;MARSYS;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +10,Berth,BERTHS,DATEND;DATSTA;DRVAL1;NOBJNM;OBJNAM;PEREND;PERSTA;QUASOU;SOUACC;STATUS;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +11,Bridge,BRIDGE,CATBRG;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;HORACC;HORCLR;NATCON;NOBJNM;OBJNAM;VERACC;VERCCL;VERCLR;VERCOP;VERDAT;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +12,"Building, single",BUISGL,BUISHP;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;ELEVAT;FUNCTN;HEIGHT;NATCON;NOBJNM;OBJNAM;STATUS;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +13,Built-up 
area,BUAARE,CATBUA;CONDTN;CONRAD;CONVIS;HEIGHT;NOBJNM;OBJNAM;VERACC;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +14,"Buoy, cardinal",BOYCAR,BOYSHP;CATCAM;COLOUR;COLPAT;CONRAD;DATEND;DATSTA;MARSYS;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +15,"Buoy, installation",BOYINB,BOYSHP;CATINB;COLOUR;COLPAT;CONRAD;DATEND;DATSTA;MARSYS;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;PRODCT;STATUS;VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +16,"Buoy, isolated danger",BOYISD,BOYSHP;COLOUR;COLPAT;CONRAD;DATEND;DATSTA;MARSYS;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +17,"Buoy, lateral",BOYLAT,BOYSHP;CATLAM;COLOUR;COLPAT;CONRAD;DATEND;DATSTA;MARSYS;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +18,"Buoy, safe water",BOYSAW,BOYSHP;COLOUR;COLPAT;CONRAD;DATEND;DATSTA;MARSYS;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +19,"Buoy, special purpose/general",BOYSPP,BOYSHP;CATSPM;COLOUR;COLPAT;CONRAD;DATEND;DATSTA;MARSYS;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +20,Cable area,CBLARE,CATCBL;DATEND;DATSTA;NOBJNM;OBJNAM;RESTRN;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +21,"Cable, overhead",CBLOHD,CATCBL;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;ICEFAC;NOBJNM;OBJNAM;STATUS;VERACC;VERCLR;VERCSA;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +22,"Cable, submarine",CBLSUB,BURDEP;CATCBL;CONDTN;DATEND;DATSTA;DRVAL1;DRVAL2;NOBJNM;OBJNAM;STATUS;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +23,Canal,CANALS,CATCAN;CONDTN;DATEND;DATSTA;HORACC;HORCLR;HORWID;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +24,Canal bank,CANBNK,CONDTN;DATEND;DATSTA;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +25,Cargo transshipment area,CTSARE,DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +26,Causeway,CAUSWY,CONDTN;NATCON;NOBJNM;OBJNAM;STATUS;WATLEV;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +27,Caution area,CTNARE,DATEND;DATSTA;PEREND;PERSTA;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +28,Checkpoint,CHKPNT,CATCHP;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +29,Coastguard station,CGUSTA,DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +30,Coastline,COALNE,CATCOA;COLOUR;CONRAD;CONVIS;ELEVAT;NOBJNM;OBJNAM;VERACC;VERDAT;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +31,Contiguous zone,CONZNE,DATEND;DATSTA;NATION;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +32,Continental shelf 
area,COSARE,NATION;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +33,Control point,CTRPNT,CATCTR;DATEND;DATSTA;ELEVAT;NOBJNM;OBJNAM;VERACC;VERDAT;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +34,Conveyor,CONVYR,CATCON;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;HEIGHT;LIFCAP;NOBJNM;OBJNAM;PRODCT;STATUS;VERACC;VERCLR;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +35,Crane,CRANES,CATCRN;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;HEIGHT;LIFCAP;NOBJNM;OBJNAM;ORIENT;RADIUS;STATUS;VERACC;VERCLR;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +36,Current - non - gravitational,CURENT,CURVEL;DATEND;DATSTA;NOBJNM;OBJNAM;ORIENT;PEREND;PERSTA;,INFORM;NINFOM;SCAMAX;SCAMIN;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +37,Custom zone,CUSZNE,NATION;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +38,Dam,DAMCON,CATDAM;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;HEIGHT;NATCON;NOBJNM;OBJNAM;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +39,Daymark,DAYMAR,CATSPM;COLOUR;COLPAT;DATEND;DATSTA;ELEVAT;HEIGHT;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;TOPSHP;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +40,Deep water route centerline,DWRTCL,CATTRK;DATEND;DATSTA;DRVAL1;DRVAL2;NOBJNM;OBJNAM;ORIENT;QUASOU;SOUACC;STATUS;TECSOU;TRAFIC;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +41,Deep water route part,DWRTPT,DATEND;DATSTA;DRVAL1;DRVAL2;NOBJNM;OBJNAM;ORIENT;QUASOU;SOUACC;STATUS;TECSOU;TRAFIC;VERDAT;RESTRN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +42,Depth area,DEPARE,DRVAL1;DRVAL2;QUASOU;SOUACC;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +43,Depth contour,DEPCNT,VALDCO;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;hypcat;,G,Line; +44,Distance mark,DISMAR,CATDIS;DATEND;DATSTA;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +45,Dock area,DOCARE,CATDOC;CONDTN;DATEND;DATSTA;HORACC;HORCLR;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +46,Dredged area,DRGARE,DRVAL1;DRVAL2;NOBJNM;OBJNAM;QUASOU;RESTRN;SOUACC;TECSOU;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +47,Dry dock,DRYDOC,CONDTN;HORACC;HORCLR;HORLEN;HORWID;NOBJNM;OBJNAM;STATUS;DRVAL1;QUASOU;SOUACC;VERDAT;,INFORM;NINFOM;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +48,Dumping ground,DMPGRD,CATDPG;NOBJNM;OBJNAM;RESTRN;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +49,Dyke,DYKCON,CONDTN;CONRAD;DATEND;DATSTA;HEIGHT;NATCON;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +50,Exclusive Economic Zone,EXEZNE,NATION;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +51,Fairway,FAIRWY,DATEND;DATSTA;DRVAL1;NOBJNM;OBJNAM;ORIENT;QUASOU;RESTRN;SOUACC;STATUS;TRAFIC;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; 
+52,Fence/wall,FNCLNE,CATFNC;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;ELEVAT;HEIGHT;NATCON;NOBJNM;OBJNAM;STATUS;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +53,Ferry route,FERYRT,CATFRY;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +54,Fishery zone,FSHZNE,NATION;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +55,Fishing facility,FSHFAC,CATFIF;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +56,Fishing ground,FSHGRD,NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +57,Floating dock,FLODOC,COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;DRVAL1;HORACC;HORCLR;HORLEN;HORWID;LIFCAP;NOBJNM;OBJNAM;STATUS;VERACC;VERLEN;VERDAT;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +58,Fog signal,FOGSIG,CATFOG;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;SIGFRQ;SIGGEN;SIGGRP;SIGPER;SIGSEQ;STATUS;VALMXR;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +59,Fortified structure,FORSTC,CATFOR;CONDTN;CONRAD;CONVIS;HEIGHT;NATCON;NOBJNM;OBJNAM;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +60,Free port area,FRPARE,NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +61,Gate,GATCON,CATGAT;CONDTN;DRVAL1;HORACC;HORCLR;NATCON;NOBJNM;OBJNAM;QUASOU;SOUACC;STATUS;VERACC;VERCLR;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +62,Gridiron,GRIDRN,HORACC;HORLEN;HORWID;NATCON;NOBJNM;OBJNAM;STATUS;VERACC;VERLEN;WATLEV;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +63,Harbour area (administrative),HRBARE,NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +64,Harbour facility,HRBFAC,CATHAF;CONDTN;DATEND;DATSTA;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +65,Hulk,HULKES,CATHLK;COLOUR;COLPAT;CONRAD;CONVIS;HORACC;HORLEN;HORWID;NOBJNM;OBJNAM;VERACC;VERLEN;CONDTN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +66,Ice area,ICEARE,CATICE;CONVIS;ELEVAT;HEIGHT;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +67,Incineration area,ICNARE,NOBJNM;OBJNAM;PEREND;PERSTA;RESTRN;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +68,Inshore traffic zone,ISTZNE,CATTSS;DATEND;DATSTA;RESTRN;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +69,Lake,LAKARE,ELEVAT;NOBJNM;OBJNAM;VERACC;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +70,Lake shore,LAKSHR,NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +71,Land area,LNDARE,CONDTN;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +72,Land 
elevation,LNDELV,CONVIS;ELEVAT;NOBJNM;OBJNAM;VERACC;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line; +73,Land region,LNDRGN,CATLND;NATQUA;NATSUR;NOBJNM;OBJNAM;WATLEV;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +74,Landmark,LNDMRK,CATLMK;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;ELEVAT;FUNCTN;HEIGHT;NATCON;NOBJNM;OBJNAM;STATUS;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +75,Light,LIGHTS,CATLIT;COLOUR;DATEND;DATSTA;EXCLIT;HEIGHT;LITCHR;LITVIS;MARSYS;MLTYLT;NOBJNM;OBJNAM;ORIENT;PEREND;PERSTA;SECTR1;SECTR2;SIGGRP;SIGPER;SIGSEQ;STATUS;VERACC;VALNMR;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +76,Light float,LITFLT,COLOUR;COLPAT;CONRAD;CONVIS;DATEND;DATSTA;HORACC;HORLEN;HORWID;MARSYS;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +77,Light vessel,LITVES,COLOUR;COLPAT;CONRAD;CONVIS;DATEND;DATSTA;HORACC;HORLEN;HORWID;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +78,Local magnetic anomaly,LOCMAG,NOBJNM;OBJNAM;VALLMA;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +79,Lock basin,LOKBSN,DATEND;DATSTA;HORACC;HORCLR;HORLEN;HORWID;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +80,Log pond,LOGPON,NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +81,Magnetic variation,MAGVAR,DATEND;DATSTA;RYRMGV;VALACM;VALMAG;,INFORM;NINFOM;SCAMAX;SCAMIN;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +82,Marine farm/culture,MARCUL,CATMFA;DATEND;DATSTA;EXPSOU;NOBJNM;OBJNAM;PEREND;PERSTA;QUASOU;RESTRN;SOUACC;STATUS;VALSOU;VERACC;VERDAT;VERLEN;WATLEV;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +83,Military practice area,MIPARE,CATMPA;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;RESTRN;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +84,Mooring/warping facility,MORFAC,BOYSHP;CATMOR;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;HEIGHT;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERDAT;VERLEN;WATLEV;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +85,Navigation line,NAVLNE,CATNAV;DATEND;DATSTA;ORIENT;PEREND;PERSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +86,Obstruction,OBSTRN,CATOBS;CONDTN;EXPSOU;HEIGHT;NATCON;NATQUA;NOBJNM;OBJNAM;PRODCT;QUASOU;SOUACC;STATUS;TECSOU;VALSOU;VERACC;VERDAT;VERLEN;WATLEV;NATSUR;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +87,Offshore platform,OFSPLF,CATOFP;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;HEIGHT;NATCON;NOBJNM;OBJNAM;PRODCT;STATUS;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +88,Offshore production area,OSPARE,CATPRA;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;HEIGHT;NOBJNM;OBJNAM;PRODCT;RESTRN;STATUS;VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +89,Oil 
barrier,OILBAR,CATOLB;CONDTN;DATEND;DATSTA;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +90,Pile,PILPNT,CATPLE;COLOUR;COLPAT;CONDTN;CONVIS;DATEND;DATSTA;HEIGHT;NOBJNM;OBJNAM;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +91,Pilot boarding place,PILBOP,CATPIL;COMCHA;DATEND;DATSTA;NOBJNM;NPLDST;OBJNAM;PEREND;PERSTA;PILDST;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +92,Pipeline area,PIPARE,CONDTN;DATEND;DATSTA;NOBJNM;OBJNAM;PRODCT;RESTRN;STATUS;CATPIP;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +93,"Pipeline, overhead",PIPOHD,CATPIP;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;NOBJNM;OBJNAM;PRODCT;STATUS;VERACC;VERCLR;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +94,"Pipeline, submarine/on land",PIPSOL,BURDEP;CATPIP;CONDTN;DATEND;DATSTA;DRVAL1;DRVAL2;NOBJNM;OBJNAM;PRODCT;STATUS;VERACC;VERLEN;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line; +95,Pontoon,PONTON,CONDTN;CONRAD;CONVIS;DATEND;DATSTA;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +96,Precautionary area,PRCARE,DATEND;DATSTA;RESTRN;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +97,Production / storage area,PRDARE,CATPRA;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;ELEVAT;HEIGHT;NOBJNM;OBJNAM;PRODCT;STATUS;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +98,Pylon/bridge support,PYLONS,CATPYL;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;HEIGHT;NATCON;NOBJNM;OBJNAM;VERACC;VERDAT;VERLEN;WATLEV;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +99,Radar line,RADLNE,NOBJNM;OBJNAM;ORIENT;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +100,Radar range,RADRNG,COMCHA;DATEND;DATSTA;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +101,Radar reflector,RADRFL,HEIGHT;STATUS;VERACC;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +102,Radar station,RADSTA,CATRAS;DATEND;DATSTA;HEIGHT;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;VALMXR;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +103,Radar transponder beacon,RTPBCN,CATRTB;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;RADWAL;SECTR1;SECTR2;SIGGRP;SIGSEQ;STATUS;VALMXR;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +104,Radio calling-in point,RDOCAL,COMCHA;DATEND;DATSTA;NOBJNM;OBJNAM;ORIENT;PEREND;PERSTA;STATUS;TRAFIC;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line; +105,Radio station,RDOSTA,CALSGN;CATROS;COMCHA;DATEND;DATSTA;ESTRNG;NOBJNM;OBJNAM;ORIENT;PEREND;PERSTA;SIGFRQ;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +106,Railway,RAILWY,CONDTN;HEIGHT;NOBJNM;OBJNAM;STATUS;VERACC;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +107,Rapids,RAPIDS,NOBJNM;OBJNAM;VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +108,Recommended route 
centerline,RCRTCL,CATTRK;DATEND;DATSTA;DRVAL1;DRVAL2;NOBJNM;OBJNAM;ORIENT;PEREND;PERSTA;QUASOU;SOUACC;STATUS;TECSOU;TRAFIC;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +109,Recommended track,RECTRC,CATTRK;DATEND;DATSTA;DRVAL1;DRVAL2;NOBJNM;OBJNAM;ORIENT;PEREND;PERSTA;QUASOU;SOUACC;STATUS;TECSOU;TRAFIC;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +110,Recommended Traffic Lane Part,RCTLPT,DATEND;DATSTA;ORIENT;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +111,Rescue station,RSCSTA,CATRSC;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;,INFORM;NINFOM;SCAMAX;SCAMIN;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +112,Restricted area,RESARE,CATREA;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;RESTRN;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +113,Retro-reflector,RETRFL,COLOUR;COLPAT;DATEND;DATSTA;HEIGHT;MARSYS;PEREND;PERSTA;STATUS;VERACC;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +114,River,RIVERS,NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +115,River bank,RIVBNK,NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +116,Road,ROADWY,CATROD;CONDTN;NATCON;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +117,Runway,RUNWAY,CATRUN;CONDTN;CONVIS;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +118,Sand waves,SNDWAV,VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +119,Sea area / named water area,SEAARE,CATSEA;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +120,Sea-plane landing area,SPLARE,NOBJNM;OBJNAM;PEREND;PERSTA;RESTRN;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +121,Seabed area,SBDARE,COLOUR;NATQUA;NATSUR;WATLEV;OBJNAM;NOBJNM;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +122,Shoreline Construction,SLCONS,CATSLC;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;HEIGHT;HORACC;HORCLR;HORLEN;HORWID;NATCON;NOBJNM;OBJNAM;STATUS;VERACC;VERDAT;VERLEN;WATLEV;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +123,"Signal station, traffic",SISTAT,CATSIT;COMCHA;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +124,"Signal station, warning",SISTAW,CATSIW;COMCHA;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +125,Silo / tank,SILTNK,BUISHP;CATSIL;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;ELEVAT;HEIGHT;NATCON;NOBJNM;OBJNAM;PRODCT;STATUS;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +126,Slope topline,SLOTOP,CATSLO;COLOUR;CONRAD;CONVIS;ELEVAT;NATCON;NATQUA;NATSUR;NOBJNM;OBJNAM;VERACC;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +127,Sloping ground,SLOGRD,CATSLO;COLOUR;CONRAD;CONVIS;NATCON;NATQUA;NATSUR;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +128,Small craft 
facility,SMCFAC,CATSCF;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +129,Sounding,SOUNDG,EXPSOU;NOBJNM;OBJNAM;QUASOU;SOUACC;TECSOU;VERDAT;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +130,Spring,SPRING,NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +131,Square,SQUARE,CONDTN;NATCON;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +132,Straight territorial sea baseline,STSLNE,NATION;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +133,Submarine transit lane,SUBTLN,NOBJNM;OBJNAM;RESTRN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +134,Swept Area,SWPARE,DRVAL1;QUASOU;SOUACC;TECSOU;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +135,Territorial sea area,TESARE,NATION;RESTRN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +136,Tidal stream - harmonic prediction,TS_PRH,NOBJNM;OBJNAM;T_MTOD;T_VAHC;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +137,Tidal stream - non-harmonic prediction,TS_PNH,NOBJNM;OBJNAM;T_MTOD;T_THDF;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +138,Tidal stream panel data,TS_PAD,NOBJNM;OBJNAM;TS_TSP;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +139,Tidal stream - time series,TS_TIS,NOBJNM;OBJNAM;STATUS;TIMEND;TIMSTA;T_TINT;TS_TSV;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +140,Tide - harmonic prediction,T_HMON,NOBJNM;OBJNAM;T_ACWL;T_MTOD;T_VAHC;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +141,Tide - non-harmonic prediction,T_NHMN,NOBJNM;OBJNAM;T_ACWL;T_MTOD;T_THDF;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +142,Tide - time series,T_TIMS,NOBJNM;OBJNAM;T_HWLW;T_TINT;T_TSVL;TIMEND;TIMSTA;STATUS;T_ACWL;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +143,Tideway,TIDEWY,NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area; +144,Top mark,TOPMAR,COLOUR;COLPAT;DATEND;DATSTA;HEIGHT;MARSYS;PEREND;PERSTA;STATUS;TOPSHP;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +145,Traffic Separation Line,TSELNE,CATTSS;DATEND;DATSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +146,Traffic Separation Scheme Boundary,TSSBND,CATTSS;DATEND;DATSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +147,Traffic Separation Scheme Crossing,TSSCRS,CATTSS;DATEND;DATSTA;RESTRN;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +148,Traffic Separation Scheme Lane part,TSSLPT,CATTSS;DATEND;DATSTA;ORIENT;RESTRN;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +149,Traffic Separation Scheme Roundabout,TSSRON,CATTSS;DATEND;DATSTA;RESTRN;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +150,Traffic Separation 
Zone,TSEZNE,CATTSS;DATEND;DATSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +151,Tunnel,TUNNEL,BURDEP;CONDTN;HORACC;HORCLR;NOBJNM;OBJNAM;STATUS;VERACC;VERCLR;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +152,Two-way route part,TWRTPT,CATTRK;DATEND;DATSTA;DRVAL1;DRVAL2;ORIENT;QUASOU;SOUACC;STATUS;TECSOU;TRAFIC;VERDAT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +153,Underwater rock / awash rock,UWTROC,EXPSOU;NATSUR;NATQUA;NOBJNM;OBJNAM;QUASOU;SOUACC;STATUS;TECSOU;VALSOU;VERDAT;WATLEV;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point; +154,Unsurveyed area,UNSARE,,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +155,Vegetation,VEGATN,CATVEG;CONVIS;ELEVAT;HEIGHT;NOBJNM;OBJNAM;VERACC;VERDAT;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +156,Water turbulence,WATTUR,CATWAT;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line;Area; +157,Waterfall,WATFAL,CONVIS;NOBJNM;OBJNAM;VERACC;VERLEN;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Line; +158,Weed/Kelp,WEDKLP,CATWED;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +159,Wreck,WRECKS,CATWRK;CONRAD;CONVIS;EXPSOU;HEIGHT;NOBJNM;OBJNAM;QUASOU;SOUACC;STATUS;TECSOU;VALSOU;VERACC;VERDAT;VERLEN;WATLEV;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +160,Tidal stream - flood/ebb,TS_FEB,CAT_TS;CURVEL;DATEND;DATSTA;NOBJNM;OBJNAM;ORIENT;PEREND;PERSTA;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Point;Area; +161,Archipelagic Sea Lane,ARCSLN,DATEND;DATSTA;NATION;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Area; +162,Archipelagic Sea Lane axis,ASLXIS,DATEND;DATSTA;NATION;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line; +163,New object,NEWOBJ,CLSDEF;CLSNAM;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;NATION;NOBJNM;OBJNAM;PEREND;PERSTA;RESTRN;STATUS;WATLEV;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;SYMINS;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,G,Line;Area;Point; +300,Accuracy of data,M_ACCY,HORACC;POSACC;SOUACC;VERACC;,INFORM;NINFOM;NTXTDS;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,M,Area; +301,Compilation scale of data,M_CSCL,CSCALE;,INFORM;NINFOM;NTXTDS;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,M,Area; +302,Coverage,M_COVR,CATCOV;,INFORM;NINFOM;,RECDAT;RECIND;SORDAT;SORIND;,M,Area; +303,Horizontal datum of data,M_HDAT,HORDAT;,INFORM;NINFOM;NTXTDS;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,M,Area; +304,Horizontal datum shift parameters,M_HOPA,HORDAT;SHIPAM;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,M,Area; +305,Nautical publication information,M_NPUB,,INFORM;NINFOM;NTXTDS;PICREP;PUBREF;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,M,Area; +306,Navigational system of marks,M_NSYS,MARSYS;ORIENT;,INFORM;NINFOM;NTXTDS;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,M,Area; +307,Production information,M_PROD,AGENCY;CPDATE;NATION;NMDATE;PRCTRY;,INFORM;NINFOM;NTXTDS;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,M,Area; +308,Quality of data,M_QUAL,CATQUA;CATZOC;DRVAL1;DRVAL2;POSACC;SOUACC;SUREND;SURSTA;TECSOU;VERDAT;,INFORM;NINFOM;NTXTDS;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,M,Area; +309,Sounding 
datum,M_SDAT,VERDAT;,INFORM;NINFOM;NTXTDS;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,M,Area; +310,Survey reliability,M_SREL,QUAPOS;QUASOU;SCVAL1;SCVAL2;SDISMN;SDISMX;SURATH;SUREND;SURSTA;SURTYP;TECSOU;,INFORM;NINFOM;NTXTDS;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,M,Area; +311,Units of measurement of data,M_UNIT,DUNITS;HUNITS;PUNITS;,INFORM;NINFOM;NTXTDS;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,M,Area; +312,Vertical datum of data,M_VDAT,VERDAT;,INFORM;NINFOM;NTXTDS;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,M,Area; +400,Aggregation,C_AGGR,NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,C, +401,Association,C_ASSO,NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,C, +402,Stacked on/stacked under,C_STAC,,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,C, +500,Cartographic area,$AREAS,COLOUR;ORIENT;$SCODE;$TINTS;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,$, +501,Cartographic line,$LINES,$SCODE;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,$, +502,Cartographic symbol,$CSYMB,ORIENT;$SCALE;$SCODE;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,$, +503,Compass,$COMPS,$CSIZE;RYRMGV;VALACM;VALMAG;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,$, +504,Text,$TEXTS,$CHARS;COLOUR;$JUSTH;$JUSTV;$NTXST;$SPACE;$TXSTR;,INFORM;NINFOM;NTXTDS;PICREP;SCAMAX;SCAMIN;TXTDSC;,RECDAT;RECIND;SORDAT;SORIND;,$, +0,"###Codes in the 17xxx range come from past s57objectclasses_iw.csv (Inland Waterways)",,,,,, +17000,Anchor berth,achbrt,catach;clsdng;comctn;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;RADIUS;restrn;STATUS;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point;Area; +17001,Anchorage area,achare,catach;clsdng;comctn;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;restrn;STATUS;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point;Area; +17002,Canal bank,canbnk,catbnk;CONRAD;DATEND;DATSTA;NATSUR;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Line; +17003,Depth area,depare,DRVAL1;DRVAL2;eleva1;eleva2;wtwdis;QUASOU;SOUACC;verdat;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Line;Area; +17004,Distance mark,dismar,catdis;wtwdis;unlocd;DATEND;DATSTA;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point; +17005,Restricted area,resare,CATREA;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;restrn;STATUS;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Area; +17006,River bank,rivbnk,catbnk;CONRAD;NATSUR;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Line; +17007,Signal station traffic,sistat,catsit;COMCHA;DATEND;DATSTA;dirimp;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point; +17008,Signal station warning,sistaw,catsiw;COMCHA;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point; +17009,Top Mark,topmar,COLOUR;COLPAT;HEIGHT;marsys;STATUS;TOPSHP;VERACC;verdat;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point; +17010,Berth berths,berths,catbrt;clsdng;comctn;DATEND;DATSTA;DRVAL1;NOBJNM;OBJNAM;PEREND;PERSTA;QUASOU;SOUACC;STATUS;trshgd;verdat;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point;Line;Area; 
+17011,"Bridge","bridge",catbrg;comctn;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;HORACC;HORCLR;NATCON;NOBJNM;OBJNAM;TIMEND;TIMSTA;VERACC;VERCCL;VERCLR;VERCOP;verdat;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point;Line;Area; +17012,Cable overhead,cblohd,CATCBL;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;ICEFAC;NOBJNM;OBJNAM;STATUS;VERACC;VERCLR;VERCSA;verdat;,INFORM;NINFOM;NTXTDS;SCAMIN;TXTDSC;updmsg;RECDAT;RECIND;,SORDAT;SORIND;,G,Line; +17013,Ferry route,feryrt,catfry;comctn;DATEND;DATSTA;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;TIMEND;TIMSTA;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Line;Area; +17014,Harbour Area,hrbare,cathbr;comctn;NOBJNM;OBJNAM;STATUS;unlocd;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Area; +17015,Harbour Facilities,hrbfac,cathaf;CONDTN;DATEND;DATSTA;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;,INFORM;NINFOM;NTXTDS;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point;Area; +17016,Lock Basin,lokbsn,HORACC;horcll;horclw;HORLEN;HORWID;NOBJNM;OBJNAM;STATUS;TIMEND;TIMSTA;,INFORM;NINFOM;NTXTDS;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Area; +17017,Radio calling-in point,rdocal,catcom;comctn;COMCHA;DATEND;DATSTA;NOBJNM;OBJNAM;ORIENT;PEREND;PERSTA;STATUS;TRAFIC;dirimp;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point;Line; +17018,Navigational system of marks,m_nsys,marsys;ORIENT;,INFORM;NINFOM;NTXTDS;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Area; +17050,Notice mark,notmrk,catnmk;fnctnm;dirimp;disipd;disipu;disbk1;disbk2;addmrk;marsys;ORIENT;CONDTN;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point; +17051,Waterway axis,wtwaxs,catccl;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Line; +17052,Waterway profile,wtwprf,wtwdis;HEIGHT;verdat;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point;Line; +17053,Bridge area,brgare,comctn;NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Area; +17054,Bunker station,bunsta,bunves;catbun;comctn;NOBJNM;OBJNAM;TIMEND;TIMSTA;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point; +17055,Communication Area,comare,catcom;COMCHA;DATEND;DATSTA;NOBJNM;OBJNAM;STATUS;TIMEND;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Area; +17056,Harbour Basin,hrbbsn,HORACC;HORLEN;HORWID;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Area; +17057,Lock area,lokare,comctn;NOBJNM;OBJNAM;STATUS;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Area; +17058,Lock basin part,lkbspt,HORACC;horcll;horclw;HORLEN;HORWID;NOBJNM;OBJNAM;STATUS;TIMEND;TIMSTA;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Area; +17059,Port Area,prtare,comctn;NOBJNM;OBJNAM;STATUS;unlocd;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Area; +17060,Beacon water-way,bcnwtw,BCNSHP;catwwm;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;dirimp;ELEVAT;HEIGHT;marsys;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERACC;verdat;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point; +17061,Buoy water-way,boywtw,BOYSHP;catwwm;COLOUR;COLPAT;CONDTN;CONRAD;CONVIS;DATEND;DATSTA;marsys;NATCON;NOBJNM;OBJNAM;PEREND;PERSTA;STATUS;VERLEN;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point; +17062,Refuse dump,refdmp,catrfd;comctn;NOBJNM;OBJNAM;STATUS;TIMEND;TIMSTA;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point; 
+17063,Route planning point,rtplpt,NOBJNM;OBJNAM;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point; +17064,Terminal,termnl,cattml;comctn;NOBJNM;OBJNAM;STATUS;TIMEND;TIMSTA;trshgd;unlocd;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point;Area; +17065,Turning basin,trnbsn,HORCLR;NOBJNM;STATUS;OBJNAM;,INFORM;NINFOM;NTXTDS;PICREP;SCAMIN;TXTDSC;updmsg;,SORDAT;SORIND;,G,Point;Area; +0,"###Codes in the 20xxx and 21xxx range come from past s57objectclasses_aml.csv (Additional_Military_Layers)",,,,,, +20484,"ATS Route Centreline","atsctl","authty;linech;NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","L" +20485,"Airspace Restriction","airres + catasr","authty;catasr;linech;maxalt;maxftl;minalt;minftl;NOBJNM;OBJNAM;HUNITS;VERDAT","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20486,"Area of Imagery Coverage","imgare","bearng;catimg;ELEVAT;HUNITS;orgntr;SUREND;VERDAT","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20487,"Beach Exit","bchext","ccmidx;exitus;gradnt;HORCLR;HORLEN;HORWID;HUNITS;VERCSA;wbrcap","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P;L" +20488,"Beach Profile","bchprf","bearng;gradnt;SUREND","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","L" +20489,"Beach Survey","bchare","accres;brktyp;ccmidx;dgmrlf;HORLEN;HORWID;HUNITS;quabch;orgntr;srfhgt;srfzne;stbacv;SUREND;SURSTA;swlhgt;tdlrng;tdltyp","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P A " +20490,"Bedrock area","bedare","N/A","N/A",,"G","A" +20491,"Bottom Feature","botmft + catbot","catbot;DUNITS;gradnt;HORLEN;HORWID;HUNITS;migspd;migdir;NOBJNM;OBJNAM;ORIENT;soudat;stfotn;VALSOU;VERLEN;WATLEV;wavlen","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P;L;A" +20492,"Centre Line","centre","N/A","N/A",,"G","L" +20494,"Contact History","histob","orgntr;surdat;SUREND","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P" +20495,"Controlled airspace","ctlasp + catcas","authty;catcas;caircd;linech;maxalt;maxftl;minalt;minftl;NOBJNM;OBJNAM;HUNITS;VERDAT","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","L;A" +20496,"Diving Location","divloc","depact;divact;DUNITS;OBJNAM;NOBJNM;timeyr;watclr","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P;A" +20497,"Drinking Water Location","watloc","N/A","N/A",,"G","P" +20498,"Drop 
Zone","drpzne","apprch;extdes;lndcon;OBJNAM;NOBJNM;STATUS","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P;A" +20499,"Environmentally Sensitive Area","envare","authty;legsta;OBJNAM;NOBJNM;PEREND;PERSTA","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A;P" +20500,"Fishing Activity Area","fshare","catfsh;STATUS;timeyr","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20501,"Impact Scour","iscour","datfir;datlst;depwat;DUNITS;gendep;HORLEN;HORWID;HUNITS;NATQUA;NATSUR;NOBJNM;OBJNAM;orcard;ORIENT;QUASOU;senfir;senlst;sonsig;sorfir;sorlst;SOUACC;soudat;STATUS;TECSOU;VALSOU;VERLEN;WATLEV","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P" +20502,"Landing Area","lngare","apprch;extdes;lndcon;OBJNAM;NOBJNM;STATUS","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20503,"Landing Place","lndplc","gradnt;STATUS;wbrcap","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P" +20504,"Landing Point","lndpnt","apprch;extdes;lndcon;OBJNAM;NOBJNM;STATUS","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P" +20505,"Landing Site","lndste","apprch;extdes;lndcon;OBJNAM;NOBJNM;STATUS","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20506,"Landing Strip","lndstp","apprch;extdes;lndcon;OBJNAM;NOBJNM;STATUS","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20507,"Landing Zone","lndzne","apprch;extdes;lndcon;OBJNAM;NOBJNM;STATUS","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20508,"Marine management area","marman + catmma","actper;authty;catmma;identy;linech;NOBJNM;OBJNAM;NATION;spcies;STATUS","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20509,"Maritime Safety Information area","msiare","catmsi;condet;NATION;NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20510,"MCM Area","mcmare","mhclas;milden;nomden","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20511,"Military exercise airspace","mexasp + 
catmea","actper;authty;catmea;linech;maxalt;maxftl;minalt;minftl;NOBJNM;OBJNAM;HUNITS;VERDAT","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20513,"Patrol area","patare + catpat","authty;catpat;identy;linech;NOBJNM;OBJNAM;NATION;STATUS","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20514,"Q-Route Leg","qroute","actper;dnbear;lftwid;NATION;NOBJNM;OBJNAM;rclass;rgtwid;STATUS;TRAFIC;HUNITS;upbear","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","L" +20515,"Radio broadcast area","rdoare","NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20516,"Regulated airspace","regasp","N/A","N/A",,"G","A" +20517,"Geological Layer","sedlay","attutn;bulkdn;COLOUR;deplyr;dttdep;dttnum;DUNITS;gascon;grnsiz;hfbmls;laynum;lfbmls;mgstyp;reflco;migspd;migdir;msstrg;natsed;NATQUA;porsty;revebn;revfqy;revgan;samret;sndvel;snrflc;soudat;WATLEV;wbrcap","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P;A" +20518,"Seismic Activity Area","seiare","bearng;ricsca","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20519,"Sensor Anomaly","senanm","datfir;datlst;DUNITS;gendep;HUNITS;madsig;magany;magint;NOBJNM;OBJNAM;orcard;ORIENT;QUASOU;scrdim;senfir;senlst;sonsig;sorfir;sorlst;soudat;SOUACC;STATUS;TECSOU;VALSOU;WATLEV","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P" +20520,"Shelter Location","shlloc","OBJNAM;NOBJNM;STATUS","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P" +20521,"Superficial Sediment Deposits","seddep","N/A","N/A",,"G","A" +20522,"Trafficability Area","trfare","cattrf","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20523,"Trawl Scours","twlscr","HUNITS;HORWID;ORIENT","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","L;A" +20524,"Turning point","turnpt","NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P" +20525,"Viewpoint","viewpt","bearng;discon;DUNITS;shpspd;snrfrq;snrrsc;twdbdp","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P" +20526,"Bottom Tactical Data Area","btdare","mntden;undmnr;umnrwb;umrwob","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20527,"Burial Probability 
Area","bprare","brmchm;brperd;brprob;tgrfwt","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20528,"Leisure Activity Area","lsrare","lsract;timeyr","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20529,"Performance Data Area","pfdare","clperc;clprob;csprob;cswidt;dsprob;dtprob","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20530,"Resource Location","resloc","typres;STATUS","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P;A" +20531,"Risk Data Area","rkdare","conlev;numrmn;prbrmn;rmnlmn;sminth;znecol","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20532,"Navigation system (NAVAID)","navaid + CATROS","actper;CALSGN;CATROS;COMCHA;NOBJNM;OBJNAM;SIGFRQ","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P" +20533,"Internal Waters Area ","intwtr","linech;NATION;RESTRN;STATUS","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20534,"Sea Ice","seaice","iceact;icecvt;icesod;icemax;icemin;icerdv;NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20535,"Ice Advisory Area","iceadv","iceadc;NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20536,"Iceberg Area","brgare","icebnm;NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20537,"Land Ice","lndice","icelnd;NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20538,"Ice Line","icelin","icelnc;NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","L" +20539,"Ice Route","icerte","NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","L" +20540,"Ice Polynya","icepol","icepst;icepty;NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","A" +20541,"Ice Lead","icelea","icelty;icelst;NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","L;A" 
+20542,"Iceberg","icebrg","icebsz;icebsh;icebdr;icebsp;NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P;A" +20543,"Ice Movement","icemov","icebsp;icebdr;NOBJNM;OBJNAM","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P;A" +20544,"Traffic route","tfcrte","linech;NOBJNM;OBJNAM;PEREND;PERSTA;traden;TRAFIC;typshp","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","L" +20717,"User Defined","u_defd","txtdes","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P;L;A" +20718,"Small Bottom Object","smalbo","blndzn;brmchm;brpctg;COLOUR;comsys;datfir;datlst;depwat;DUNITS;gendep;HORLEN;HORWID;HUNITS;incltn;layptm;layrfn;laytim;madsig;magany;magint;minern;miscls;miscom;misdat;misnme;mnhsys;mnimnc;mnimnt;mnssys;mulcon;mwdcrn;NATCON;navsys;notfnd;nmprob;objtrn;objshp;onsonr;orbobn;orgdat;orgntr;ORIENT;QUASOU;scrdim;senfir;senlst;snrflc;soudat;stacon;surdat;SUREND;tarstg;TECSOU;unwrfm;VERLEN","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"G","P" +21484,"Completeness for the product specification","m_conf + catcnf","catcnf","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"M","A" +21485,"Security Classification Information","m_clas","","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"M","A" +21486,"Vertical Datum Shift Area","m_vers","vershf","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"M","P;A" +21487,"Defined Straight Lines","m_line","linech","AGENCY;CSCALE;elvacc;errell;HORACC;INFORM;NINFOM;NTXTDS;PICREP;POSACC;PRCTRY;PUBREF;RECDAT;QUAPOS;seccvt;secido;secown;secpmk;SORDAT;SORIND;TXTDSC;VERACC",,"M","N/A" diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/seed_2d.dgn b/.venv/lib/python3.12/site-packages/fiona/gdal_data/seed_2d.dgn new file mode 100644 index 00000000..b99cad81 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/gdal_data/seed_2d.dgn differ diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/seed_3d.dgn b/.venv/lib/python3.12/site-packages/fiona/gdal_data/seed_3d.dgn new file mode 100644 index 00000000..9e11c938 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/gdal_data/seed_3d.dgn differ diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/stateplane.csv b/.venv/lib/python3.12/site-packages/fiona/gdal_data/stateplane.csv new file mode 100644 index 00000000..38089e71 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/stateplane.csv @@ -0,0 +1,259 @@ +"ID","STATE","ZONE","PROJ_METHOD","DATUM","USGS_CODE","EPSG_PCS_CODE" +101,ALABAMA,EAST,1,NAD83,101,26929 +102,ALABAMA,WEST,1,NAD83,102,26930 +201,ARIZONA,EAST,1,NAD83,201,26948 +202,ARIZONA,CENTRAL,1,NAD83,202,26949 +203,ARIZONA,WEST,1,NAD83,203,26950 
+301,ARKANSAS,NORTH,2,NAD83,301,26951 +302,ARKANSAS,SOUTH,2,NAD83,302,26952 +401,CALIFORNIA,I,2,NAD83,401,26941 +402,CALIFORNIA,II,2,NAD83,402,26942 +403,CALIFORNIA,III,2,NAD83,403,26943 +404,CALIFORNIA,IV,2,NAD83,404,26944 +405,CALIFORNIA,V,2,NAD83,405,26945 +406,CALIFORNIA,VI,2,NAD83,406,26946 +501,COLORADO,NORTH,2,NAD83,501,26953 +502,COLORADO,CENTRAL,2,NAD83,502,26954 +503,COLORADO,SOUTH,2,NAD83,503,26955 +600,CONNECTICUT,,2,NAD83,600,26956 +700,DELAWARE,,1,NAD83,700,26957 +901,FLORIDA,EAST,1,NAD83,901,26958 +902,FLORIDA,WEST,1,NAD83,902,26959 +903,FLORIDA,NORTH,2,NAD83,903,26960 +1001,GEORGIA,EAST,1,NAD83,1001,26966 +1002,GEORGIA,WEST,1,NAD83,1002,26967 +1101,IDAHO,EAST,1,NAD83,1101,26968 +1102,IDAHO,CENTRAL,1,NAD83,1102,26969 +1103,IDAHO,WEST,1,NAD83,1103,26970 +1201,ILLINOIS,EAST,1,NAD83,1201,26971 +1202,ILLINOIS,WEST,1,NAD83,1202,26972 +1301,INDIANA,EAST,1,NAD83,1301,26973 +1302,INDIANA,WEST,1,NAD83,1302,26974 +1401,IOWA,NORTH,2,NAD83,1401,26975 +1402,IOWA,SOUTH,2,NAD83,1402,26976 +1501,KANSAS,NORTH,2,NAD83,1501,26977 +1502,KANSAS,SOUTH,2,NAD83,1502,26978 +1600,KENTUCKY,SINGLE ZONE,2,NAD83,1600,3088 +1601,KENTUCKY,NORTH,2,NAD83,1601,2205 +1602,KENTUCKY,SOUTH,2,NAD83,1602,26980 +1701,LOUISIANA,NORTH,2,NAD83,1701,26981 +1702,LOUISIANA,SOUTH,2,NAD83,1702,26982 +1703,LOUISIANA,OFFSHORE,2,NAD83,1703, +1801,MAINE,EAST,1,NAD83,1801,26983 +1802,MAINE,WEST,1,NAD83,1802,26984 +1900,MARYLAND,,2,NAD83,1900,26985 +2001,MASSACHUSETTS,MAINLAND,2,NAD83,2001,26986 +2002,MASSACHUSETTS,ISLAND,2,NAD83,2002,26987 +2111,MICHIGAN,NORTH,2,NAD83,2111,26988 +2112,MICHIGAN,CENTRAL,2,NAD83,2112,26989 +2113,MICHIGAN,SOUTH,2,NAD83,2113,26990 +2201,MINNESOTA,NORTH,2,NAD83,2201,26991 +2202,MINNESOTA,CENTRAL,2,NAD83,2202,26992 +2203,MINNESOTA,SOUTH,2,NAD83,2203,26993 +2301,MISSISSIPPI,EAST,1,NAD83,2301,26994 +2302,MISSISSIPPI,WEST,1,NAD83,2302,26995 +2401,MISSOURI,EAST,1,NAD83,2401,26996 +2402,MISSOURI,CENTRAL,1,NAD83,2402,26997 +2403,MISSOURI,WEST,1,NAD83,2403,26998 +2500,MONTANA,,2,NAD83,2500,32100 +2600,NEBRASKA,,2,NAD83,2600,32104 +2701,NEVADA,EAST,1,NAD83,2701,32107 +2702,NEVADA,CENTRAL,1,NAD83,2702,32108 +2703,NEVADA,WEST,1,NAD83,2703,32109 +2800,"NEW HAMPSHIRE",,1,NAD83,2800,32110 +2900,"NEW JERSEY",,1,NAD83,2900,32111 +3001,"NEW MEXICO",EAST,1,NAD83,3001,32112 +3002,"NEW MEXICO",CENTRAL,1,NAD83,3002,32113 +3003,"NEW MEXICO",WEST,1,NAD83,3003,32114 +3101,"NEW YORK",EAST,1,NAD83,3101,32115 +3102,"NEW YORK",CENTRAL,1,NAD83,3102,32116 +3103,"NEW YORK",WEST,1,NAD83,3103,32117 +3104,"NEW YORK","LONG ISLAND",2,NAD83,3104,32118 +3200,"NORTH CAROLINA",,2,NAD83,3200,32119 +3301,"NORTH DAKOTA",NORTH,2,NAD83,3301,32120 +3302,"NORTH DAKOTA",SOUTH,2,NAD83,3302,32121 +3401,OHIO,NORTH,2,NAD83,3401,32122 +3402,OHIO,SOUTH,2,NAD83,3402,32123 +3501,OKLAHOMA,NORTH,2,NAD83,3501,32124 +3502,OKLAHOMA,SOUTH,2,NAD83,3502,32125 +3601,OREGON,NORTH,2,NAD83,3601,32126 +3602,OREGON,SOUTH,2,NAD83,3602,32127 +3701,PENNSYLVANIA,NORTH,2,NAD83,3701,32128 +3702,PENNSYLVANIA,SOUTH,2,NAD83,3702,32129 +3800,"RHODE ISLAND",,1,NAD83,3800,32130 +3900,"SOUTH CAROLINA",,2,NAD83,3900,32133 +4001,"SOUTH DAKOTA",NORTH,2,NAD83,4001,32134 +4002,"SOUTH DAKOTA",SOUTH,2,NAD83,4002,32135 +4100,TENNESSEE,,2,NAD83,4100,32136 +4201,TEXAS,NORTH,2,NAD83,4201,32137 +4202,TEXAS,"NORTH CENTRAL",2,NAD83,4202,32138 +4203,TEXAS,CENTRAL,2,NAD83,4203,32139 +4204,TEXAS,"SOUTH CENTRAL",2,NAD83,4204,32140 +4205,TEXAS,SOUTH,2,NAD83,4205,32141 +4301,UTAH,NORTH,2,NAD83,4301,32142 +4302,UTAH,CENTRAL,2,NAD83,4302,32143 +4303,UTAH,SOUTH,2,NAD83,4303,32144 
+4400,VERMONT,,1,NAD83,4400,32145 +4501,VIRGINIA,NORTH,2,NAD83,4501,32146 +4502,VIRGINIA,SOUTH,2,NAD83,4502,32147 +4601,WASHINGTON,NORTH,2,NAD83,4601,32148 +4602,WASHINGTON,SOUTH,2,NAD83,4602,32149 +4701,"WEST VIRGINIA",NORTH,2,NAD83,4701,32150 +4702,"WEST VIRGINIA",SOUTH,2,NAD83,4702,32151 +4801,WISCONSIN,NORTH,2,NAD83,4801,32152 +4802,WISCONSIN,CENTRAL,2,NAD83,4802,32153 +4803,WISCONSIN,SOUTH,2,NAD83,4803,32154 +4901,WYOMING,EAST,1,NAD83,4901,32155 +4902,WYOMING,"EAST CENTRAL",1,NAD83,4902,32156 +4903,WYOMING,"WEST CENTRAL",1,NAD83,4903,32157 +4904,WYOMING,WEST,1,NAD83,4904,32158 +5001,ALASKA,"ZONE NO. 1",4,NAD83,5001,26931 +5002,ALASKA,"ZONE NO. 2",1,NAD83,5002,26932 +5003,ALASKA,"ZONE NO. 3",1,NAD83,5003,26933 +5004,ALASKA,"ZONE NO. 4",1,NAD83,5004,26934 +5005,ALASKA,"ZONE NO. 5",1,NAD83,5005,26935 +5006,ALASKA,"ZONE NO. 6",1,NAD83,5006,26936 +5007,ALASKA,"ZONE NO. 7",1,NAD83,5007,26937 +5008,ALASKA,"ZONE NO. 8",1,NAD83,5008,26938 +5009,ALASKA,"ZONE NO. 9",1,NAD83,5009,26939 +5010,ALASKA,"ZONE NO. 10",2,NAD83,5010,26940 +5101,HAWAII,1,1,NAD83,5101,26961 +5102,HAWAII,2,1,NAD83,5102,26962 +5103,HAWAII,3,1,NAD83,5103,26963 +5104,HAWAII,4,1,NAD83,5104,26964 +5105,HAWAII,5,1,NAD83,5105,26965 +5200,"PUERTO RICO AND","VIRGIN ISLANDS",2,NAD83,5200,32161 +10101,ALABAMA,EAST,1,NAD27,101,26729 +10102,ALABAMA,WEST,1,NAD27,102,26730 +10201,ARIZONA,EAST,1,NAD27,201,26748 +10202,ARIZONA,CENTRAL,1,NAD27,202,26749 +10203,ARIZONA,WEST,1,NAD27,203,26750 +10301,ARKANSAS,NORTH,2,NAD27,301,26751 +10302,ARKANSAS,SOUTH,2,NAD27,302,26752 +10401,CALIFORNIA,I,2,NAD27,401,26741 +10402,CALIFORNIA,II,2,NAD27,402,26742 +10403,CALIFORNIA,III,2,NAD27,403,26743 +10404,CALIFORNIA,IV,2,NAD27,404,26744 +10405,CALIFORNIA,V,2,NAD27,405,26745 +10406,CALIFORNIA,VI,2,NAD27,406,26746 +10407,CALIFORNIA,VII,2,NAD27,407,26799 +10501,COLORADO,NORTH,2,NAD27,501,26753 +10502,COLORADO,CENTRAL,2,NAD27,502,26754 +10503,COLORADO,SOUTH,2,NAD27,503,26755 +10600,CONNECTICUT,,2,NAD27,600,26756 +10700,DELAWARE,,1,NAD27,700,26757 +10901,FLORIDA,EAST,1,NAD27,901,26758 +10902,FLORIDA,WEST,1,NAD27,902,26759 +10903,FLORIDA,NORTH,2,NAD27,903,26760 +11001,GEORGIA,EAST,1,NAD27,1001,26766 +11002,GEORGIA,WEST,1,NAD27,1002,26767 +11101,IDAHO,EAST,1,NAD27,1101,26768 +11102,IDAHO,CENTRAL,1,NAD27,1102,26769 +11103,IDAHO,WEST,1,NAD27,1103,26770 +11201,ILLINOIS,EAST,1,NAD27,1201,26771 +11202,ILLINOIS,WEST,1,NAD27,1202,26772 +11301,INDIANA,EAST,1,NAD27,1301,26773 +11302,INDIANA,WEST,1,NAD27,1302,26774 +11401,IOWA,NORTH,2,NAD27,1401,26775 +11402,IOWA,SOUTH,2,NAD27,1402,26776 +11501,KANSAS,NORTH,2,NAD27,1501,26777 +11502,KANSAS,SOUTH,2,NAD27,1502,26778 +11601,KENTUCKY,NORTH,2,NAD27,1601,26779 +11602,KENTUCKY,SOUTH,2,NAD27,1602,26780 +11701,LOUISIANA,NORTH,2,NAD27,1701,26781 +11702,LOUISIANA,SOUTH,2,NAD27,1702,26782 +11703,LOUISIANA,OFFSHORE,2,NAD27,1703, +11801,MAINE,EAST,1,NAD27,1801,26783 +11802,MAINE,WEST,1,NAD27,1802,26784 +11900,MARYLAND,,2,NAD27,1900,26785 +12001,MASSACHUSETTS,MAINLAND,2,NAD27,2001,26786 +12002,MASSACHUSETTS,ISLAND,2,NAD27,2002,26787 +12101,MICHIGAN,EAST,1,NAD27,2101,26801 +12102,MICHIGAN,"CENTRAL/M",1,NAD27,2102,26802 +12103,MICHIGAN,WEST,1,NAD27,2103,26803 +12111,MICHIGAN,NORTH,2,NAD27,2111,26811 +12112,MICHIGAN,"CENTRAL/L",2,NAD27,2112,26812 +12113,MICHIGAN,SOUTH,2,NAD27,2113,26813 +12201,MINNESOTA,NORTH,2,NAD27,2201,26791 +12202,MINNESOTA,CENTRAL,2,NAD27,2202,26792 +12203,MINNESOTA,SOUTH,2,NAD27,2203,26793 +12301,MISSISSIPPI,EAST,1,NAD27,2301,26794 +12302,MISSISSIPPI,WEST,1,NAD27,2302,26795 +12401,MISSOURI,EAST,1,NAD27,2401,26796 
+12402,MISSOURI,CENTRAL,1,NAD27,2402,26797 +12403,MISSOURI,WEST,1,NAD27,2403,26798 +12501,MONTANA,NORTH,2,NAD27,2501,32001 +12502,MONTANA,CENTRAL,2,NAD27,2502,32002 +12503,MONTANA,SOUTH,2,NAD27,2503,32003 +12601,NEBRASKA,NORTH,2,NAD27,2601,32005 +12602,NEBRASKA,SOUTH,2,NAD27,2602,32006 +12701,NEVADA,EAST,1,NAD27,2701,32007 +12702,NEVADA,CENTRAL,1,NAD27,2702,32008 +12703,NEVADA,WEST,1,NAD27,2703,32009 +12800,"NEW HAMPSHIRE",,1,NAD27,2800,32010 +12900,"NEW JERSEY",,1,NAD27,2900,32011 +13001,"NEW MEXICO",EAST,1,NAD27,3001,32012 +13002,"NEW MEXICO",CENTRAL,1,NAD27,3002,32013 +13003,"NEW MEXICO",WEST,1,NAD27,3003,32014 +13101,"NEW YORK",EAST,1,NAD27,3101,32015 +13102,"NEW YORK",CENTRAL,1,NAD27,3102,32016 +13103,"NEW YORK",WEST,1,NAD27,3103,32017 +13104,"NEW YORK","LONG ISLAND",2,NAD27,3104,32018 +13200,"NORTH CAROLINA",,2,NAD27,3200,32019 +13301,"NORTH DAKOTA",NORTH,2,NAD27,3301,32020 +13302,"NORTH DAKOTA",SOUTH,2,NAD27,3302,32021 +13401,OHIO,NORTH,2,NAD27,3401,32022 +13402,OHIO,SOUTH,2,NAD27,3402,32023 +13501,OKLAHOMA,NORTH,2,NAD27,3501,32024 +13502,OKLAHOMA,SOUTH,2,NAD27,3502,32025 +13601,OREGON,NORTH,2,NAD27,3601,32026 +13602,OREGON,SOUTH,2,NAD27,3602,32027 +13701,PENNSYLVANIA,NORTH,2,NAD27,3701,32028 +13702,PENNSYLVANIA,SOUTH,2,NAD27,3702,32029 +13800,"RHODE ISLAND",,1,NAD27,3800,32030 +13901,"SOUTH CAROLINA",NORTH,2,NAD27,3901,32031 +13902,"SOUTH CAROLINA",SOUTH,2,NAD27,3902,32033 +14001,"SOUTH DAKOTA",NORTH,2,NAD27,4001,32034 +14002,"SOUTH DAKOTA",SOUTH,2,NAD27,4002,32035 +14100,TENNESSEE,,2,NAD27,4100,2204 +14201,TEXAS,NORTH,2,NAD27,4201,32037 +14202,TEXAS,"NORTH CENTRAL",2,NAD27,4202,32038 +14203,TEXAS,CENTRAL,2,NAD27,4203,32039 +14204,TEXAS,"SOUTH CENTRAL",2,NAD27,4204,32040 +14205,TEXAS,SOUTH,2,NAD27,4205,32041 +14301,UTAH,NORTH,2,NAD27,4301,32042 +14302,UTAH,CENTRAL,2,NAD27,4302,32043 +14303,UTAH,SOUTH,2,NAD27,4303,32044 +14400,VERMONT,,1,NAD27,4400,32045 +14501,VIRGINIA,NORTH,2,NAD27,4501,32046 +14502,VIRGINIA,SOUTH,2,NAD27,4502,32047 +14601,WASHINGTON,NORTH,2,NAD27,4601,32048 +14602,WASHINGTON,SOUTH,2,NAD27,4602,32049 +14701,"WEST VIRGINIA",NORTH,2,NAD27,4701,32050 +14702,"WEST VIRGINIA",SOUTH,2,NAD27,4702,32051 +14801,WISCONSIN,NORTH,2,NAD27,4801,32052 +14802,WISCONSIN,CENTRAL,2,NAD27,4802,32053 +14803,WISCONSIN,SOUTH,2,NAD27,4803,32054 +14901,WYOMING,EAST,1,NAD27,4901,32055 +14902,WYOMING,"EAST CENTRAL",1,NAD27,4902,32056 +14903,WYOMING,"WEST CENTRAL",1,NAD27,4903,32057 +14904,WYOMING,WEST,1,NAD27,4904,32058 +15001,ALASKA,"ZONE NO. 1",4,NAD27,5001,26731 +15002,ALASKA,"ZONE NO. 2",1,NAD27,5002,26732 +15003,ALASKA,"ZONE NO. 3",1,NAD27,5003,26733 +15004,ALASKA,"ZONE NO. 4",1,NAD27,5004,26734 +15005,ALASKA,"ZONE NO. 5",1,NAD27,5005,26735 +15006,ALASKA,"ZONE NO. 6",1,NAD27,5006,26736 +15007,ALASKA,"ZONE NO. 7",1,NAD27,5007,26737 +15008,ALASKA,"ZONE NO. 8",1,NAD27,5008,26738 +15009,ALASKA,"ZONE NO. 9",1,NAD27,5009,26739 +15010,ALASKA,"ZONE NO. 10",2,NAD27,5010,26740 +15101,HAWAII,1,1,NAD27,5101,3561 +15102,HAWAII,2,1,NAD27,5102,3562 +15103,HAWAII,3,1,NAD27,5103,3563 +15104,HAWAII,4,1,NAD27,5104,3564 +15105,HAWAII,5,1,NAD27,5105,3565 +15201,"PUERTO RICO AND VIRGIN ISLANDS",,2,NAD27,5201,3991 +15202,"VIRGIN ISLANDS","ST. 
CROIX",2,NAD27,5202,3992 +15300,"AMERICAN SAMOA",,2,NAD27,5300,2155 +15400,"GUAM ISLAND",,3,NAD27,5400, diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/tms_LINZAntarticaMapTileGrid.json b/.venv/lib/python3.12/site-packages/fiona/gdal_data/tms_LINZAntarticaMapTileGrid.json new file mode 100644 index 00000000..9f217059 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/tms_LINZAntarticaMapTileGrid.json @@ -0,0 +1,190 @@ +{ + "type": "TileMatrixSetType", + "title": "LINZ Antarctic Map Tile Grid (Ross Sea Region)", + "identifier": "LINZAntarticaMapTilegrid", + "supportedCRS": "http://www.opengis.net/def/crs/EPSG/0/5482", + "tileMatrix": [ + { + "type": "TileMatrixType", + "identifier": "0", + "scaleDenominator": 409600000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 1, + "matrixHeight": 1 + }, + { + "type": "TileMatrixType", + "identifier": "1", + "scaleDenominator": 204800000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 1, + "matrixHeight": 1 + }, + { + "type": "TileMatrixType", + "identifier": "2", + "scaleDenominator": 102400000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 2, + "matrixHeight": 2 + }, + { + "type": "TileMatrixType", + "identifier": "3", + "scaleDenominator": 51200000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 4, + "matrixHeight": 4 + }, + { + "type": "TileMatrixType", + "identifier": "4", + "scaleDenominator": 25600000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 7, + "matrixHeight": 7 + }, + { + "type": "TileMatrixType", + "identifier": "5", + "scaleDenominator": 12800000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 13, + "matrixHeight": 13 + }, + { + "type": "TileMatrixType", + "identifier": "6", + "scaleDenominator": 6400000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 26, + "matrixHeight": 26 + }, + { + "type": "TileMatrixType", + "identifier": "7", + "scaleDenominator": 3200000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 52, + "matrixHeight": 52 + }, + { + "type": "TileMatrixType", + "identifier": "8", + "scaleDenominator": 1600000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 104, + "matrixHeight": 104 + }, + { + "type": "TileMatrixType", + "identifier": "9", + "scaleDenominator": 800000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 207, + "matrixHeight": 207 + }, + { + "type": "TileMatrixType", + "identifier": "10", + "scaleDenominator": 400000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 413, + "matrixHeight": 413 + }, + { + "type": "TileMatrixType", + "identifier": "11", + "scaleDenominator": 200000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 826, + "matrixHeight": 826 + }, + { + "type": "TileMatrixType", + "identifier": "12", + "scaleDenominator": 100000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + 
"tileHeight": 256, + "matrixWidth": 1652, + "matrixHeight": 1652 + }, + { + "type": "TileMatrixType", + "identifier": "13", + "scaleDenominator": 50000, + "topLeftCorner": [ + 6918457.73, + -918457.73 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 3303, + "matrixHeight": 3303 + } + ] +} \ No newline at end of file diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/tms_MapML_APSTILE.json b/.venv/lib/python3.12/site-packages/fiona/gdal_data/tms_MapML_APSTILE.json new file mode 100644 index 00000000..ec221e1f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/tms_MapML_APSTILE.json @@ -0,0 +1,268 @@ +{ + "type": "TileMatrixSetType", + "identifier": "APSTILE", + "title": "Alaska Polar Stereographic-based tiled coordinate reference system for the Arctic region.", + "supportedCRS": "http://www.opengis.net/def/crs/EPSG/0/5936", + "tileMatrix": [ + { + "matrixHeight": 1, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 1, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 852895761.9785715 + }, + { + "matrixHeight": 2, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 2, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 426447880.98928577 + }, + { + "matrixHeight": 4, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 4, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 213223940.49464288 + }, + { + "matrixHeight": 8, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 8, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 106611970.24732144 + }, + { + "matrixHeight": 16, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 16, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 53305985.12366072 + }, + { + "matrixHeight": 32, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 32, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 26652992.56183036 + }, + { + "matrixHeight": 64, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 64, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 13326496.28091518 + }, + { + "matrixHeight": 128, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 128, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 6663248.14045759 + }, + { + "matrixHeight": 256, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 256, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 3331624.070228795 + }, + { + "matrixHeight": 512, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 512, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 1665812.0351143975 + }, + { + "matrixHeight": 1024, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 1024, + "identifier": "0", + "type": 
"TileMatrixType", + "scaleDenominator": 832906.0175571988 + }, + { + "matrixHeight": 2048, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 2048, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 416453.0087785994 + }, + { + "matrixHeight": 4096, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 4096, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 208226.5043892997 + }, + { + "matrixHeight": 8192, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 8192, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 104113.25219464985 + }, + { + "matrixHeight": 16384, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 16384, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 52056.62609732492 + }, + { + "matrixHeight": 32768, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 32768, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 26028.31304866246 + }, + { + "matrixHeight": 65536, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 65536, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 13014.15652433123 + }, + { + "matrixHeight": 131072, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 131072, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 6507.078262165615 + }, + { + "matrixHeight": 262144, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 262144, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 3253.5391310828077 + }, + { + "matrixHeight": 524288, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -28567784.109255, + 32567784.109255 + ], + "matrixWidth": 524288, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 1626.7695655414038 + } + ] +} diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/tms_MapML_CBMTILE.json b/.venv/lib/python3.12/site-packages/fiona/gdal_data/tms_MapML_CBMTILE.json new file mode 100644 index 00000000..2a391211 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/tms_MapML_CBMTILE.json @@ -0,0 +1,346 @@ +{ + "type": "TileMatrixSetType", + "identifier": "CBMTILE", + "title": "Lambert Conformal Conic-based tiled coordinate reference system for Canada.", + "supportedCRS": "http://www.opengis.net/def/crs/EPSG/0/3978", + "tileMatrix": [ + { + "matrixHeight": 5, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 5, + "identifier": "0", + "type": "TileMatrixType", + "scaleDenominator": 137016643.08090523 + }, + { + "matrixHeight": 9, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 9, + "identifier": "1", + "type": "TileMatrixType", + "scaleDenominator": 80320101.1163927317 + }, + { + "matrixHeight": 15, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 15, + "identifier": "2", + "type": "TileMatrixType", + 
"scaleDenominator": 47247118.3037604243 + }, + { + "matrixHeight": 25, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 25, + "identifier": "3", + "type": "TileMatrixType", + "scaleDenominator": 28348270.982256256 + }, + { + "matrixHeight": 42, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 42, + "identifier": "4", + "type": "TileMatrixType", + "scaleDenominator": 16536491.40631615 + }, + { + "matrixHeight": 73, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 73, + "identifier": "5", + "type": "TileMatrixType", + "scaleDenominator": 9449423.66075208597 + }, + { + "matrixHeight": 121, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 121, + "identifier": "6", + "type": "TileMatrixType", + "scaleDenominator": 5669654.1964512514 + }, + { + "matrixHeight": 208, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 208, + "identifier": "7", + "type": "TileMatrixType", + "scaleDenominator": 3307298.2812632299 + }, + { + "matrixHeight": 363, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 363, + "identifier": "8", + "type": "TileMatrixType", + "scaleDenominator": 1889884.73215041705 + }, + { + "matrixHeight": 605, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 605, + "identifier": "9", + "type": "TileMatrixType", + "scaleDenominator": 1133930.83929025033 + }, + { + "matrixHeight": 1036, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 1036, + "identifier": "10", + "type": "TileMatrixType", + "scaleDenominator": 661459.656252646004 + }, + { + "matrixHeight": 1727, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 1727, + "identifier": "11", + "type": "TileMatrixType", + "scaleDenominator": 396875.793751587567 + }, + { + "matrixHeight": 2900, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 2900, + "identifier": "12", + "type": "TileMatrixType", + "scaleDenominator": 236235.591518802132 + }, + { + "matrixHeight": 5000, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 5000, + "identifier": "13", + "type": "TileMatrixType", + "scaleDenominator": 137016.643080905225 + }, + { + "matrixHeight": 8530, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 8530, + "identifier": "14", + "type": "TileMatrixType", + "scaleDenominator": 80320.1011163927178 + }, + { + "matrixHeight": 14501, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 14501, + "identifier": "15", + "type": "TileMatrixType", + "scaleDenominator": 47247.1183037604278 + }, + { + "matrixHeight": 24167, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 24167, + "identifier": "16", + "type": "TileMatrixType", + "scaleDenominator": 28348.2709822562538 + }, + { + "matrixHeight": 41429, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 41429, + "identifier": "17", + "type": 
"TileMatrixType", + "scaleDenominator": 16536.4914063161486 + }, + { + "matrixHeight": 72500, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 72500, + "identifier": "18", + "type": "TileMatrixType", + "scaleDenominator": 9449.4236607520852 + }, + { + "matrixHeight": 120834, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 120834, + "identifier": "19", + "type": "TileMatrixType", + "scaleDenominator": 5669.65419645125075 + }, + { + "matrixHeight": 207143, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 207143, + "identifier": "20", + "type": "TileMatrixType", + "scaleDenominator": 3307.29828126322991 + }, + { + "matrixHeight": 362501, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 362501, + "identifier": "21", + "type": "TileMatrixType", + "scaleDenominator": 1889.88473215041699 + }, + { + "matrixHeight": 604167, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 604167, + "identifier": "22", + "type": "TileMatrixType", + "scaleDenominator": 1133.93083929025011 + }, + { + "matrixHeight": 1035715, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 1035715, + "identifier": "23", + "type": "TileMatrixType", + "scaleDenominator": 661.459656252645914 + }, + { + "matrixHeight": 1726191, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 1726191, + "identifier": "24", + "type": "TileMatrixType", + "scaleDenominator": 396.875793751587537 + }, + { + "matrixHeight": 2900001, + "tileHeight": 256, + "tileWidth": 256, + "topLeftCorner": [ + -34655800, + 39310000 + ], + "matrixWidth": 2900001, + "identifier": "25", + "type": "TileMatrixType", + "scaleDenominator": 236.235591518802124 + } + ] +} diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/tms_NZTM2000.json b/.venv/lib/python3.12/site-packages/fiona/gdal_data/tms_NZTM2000.json new file mode 100644 index 00000000..779f9b72 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/tms_NZTM2000.json @@ -0,0 +1,243 @@ +{ + "type": "TileMatrixSetType", + "title": "LINZ NZTM2000 Map Tile Grid", + "abstract": "See https://www.linz.govt.nz/data/linz-data-service/guides-and-documentation/nztm2000-map-tile-service-schema", + "identifier": "NZTM2000", + "supportedCRS": "http://www.opengis.net/def/crs/EPSG/0/2193", + "boundingBox": + { + "type": "BoundingBoxType", + "crs": "http://www.opengis.net/def/crs/EPSG/0/2193", + "lowerCorner": [ + 3087000, + 274000 + ], + "upperCorner": [ + 7173000, + 3327000 + ] + }, + "tileMatrix": [ + { + "type": "TileMatrixType", + "identifier": "0", + "scaleDenominator": 32000000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 2, + "matrixHeight": 4 + }, + { + "type": "TileMatrixType", + "identifier": "1", + "scaleDenominator": 16000000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 4, + "matrixHeight": 8 + }, + { + "type": "TileMatrixType", + "identifier": "2", + "scaleDenominator": 8000000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 8, + "matrixHeight": 16 + }, + { + "type": "TileMatrixType", + "identifier": "3", + 
"scaleDenominator": 4000000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 16, + "matrixHeight": 32 + }, + { + "type": "TileMatrixType", + "identifier": "4", + "scaleDenominator": 2000000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 32, + "matrixHeight": 64 + }, + { + "type": "TileMatrixType", + "identifier": "5", + "scaleDenominator": 1000000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 64, + "matrixHeight": 128 + }, + { + "type": "TileMatrixType", + "identifier": "6", + "scaleDenominator": 500000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 128, + "matrixHeight": 256 + }, + { + "type": "TileMatrixType", + "identifier": "7", + "scaleDenominator": 250000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 256, + "matrixHeight": 512 + }, + { + "type": "TileMatrixType", + "identifier": "8", + "scaleDenominator": 100000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 640, + "matrixHeight": 1280 + }, + { + "type": "TileMatrixType", + "identifier": "9", + "scaleDenominator": 50000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 1280, + "matrixHeight": 2560 + }, + { + "type": "TileMatrixType", + "identifier": "10", + "scaleDenominator": 25000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 2560, + "matrixHeight": 5120 + }, + { + "type": "TileMatrixType", + "identifier": "11", + "scaleDenominator": 10000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 6400, + "matrixHeight": 12800 + }, + { + "type": "TileMatrixType", + "identifier": "12", + "scaleDenominator": 5000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 12800, + "matrixHeight": 25600 + }, + { + "type": "TileMatrixType", + "identifier": "13", + "scaleDenominator": 2500, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 25600, + "matrixHeight": 51200 + }, + { + "type": "TileMatrixType", + "identifier": "14", + "scaleDenominator": 1000, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 64000, + "matrixHeight": 128000 + }, + { + "type": "TileMatrixType", + "identifier": "15", + "scaleDenominator": 500, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 128000, + "matrixHeight": 256000 + }, + { + "type": "TileMatrixType", + "identifier": "16", + "scaleDenominator": 250, + "topLeftCorner": [ + 10000000, + -1000000 + ], + "tileWidth": 256, + "tileHeight": 256, + "matrixWidth": 256000, + "matrixHeight": 512000 + } + ] +} \ No newline at end of file diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/trailer.dxf b/.venv/lib/python3.12/site-packages/fiona/gdal_data/trailer.dxf new file mode 100644 index 00000000..19ebd400 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/trailer.dxf @@ -0,0 +1,434 @@ + 0 +ENDSEC + 0 +SECTION + 2 +OBJECTS + 0 +DICTIONARY + 5 +C +330 +0 +100 +AcDbDictionary +281 + 1 + 3 +ACAD_GROUP +350 +D + 3 +ACAD_LAYOUT +350 +1A + 3 
+ACAD_MLEADERSTYLE +350 +43 + 3 +ACAD_PLOTSETTINGS +350 +19 + 3 +ACAD_PLOTSTYLENAME +350 +E + 3 +ACAD_TABLESTYLE +350 +42 + 0 +DICTIONARY + 5 +D +102 +{ACAD_REACTORS +330 +C +102 +} +330 +C +100 +AcDbDictionary +281 + 1 + 0 +DICTIONARY + 5 +1A +102 +{ACAD_REACTORS +330 +C +102 +} +330 +C +100 +AcDbDictionary +281 + 1 + 3 +Layout1 +350 +1E + 3 +Model +350 +22 + 0 +DICTIONARY + 5 +43 +102 +{ACAD_REACTORS +330 +C +102 +} +330 +C +100 +AcDbDictionary +281 + 1 + 0 +DICTIONARY + 5 +19 +102 +{ACAD_REACTORS +330 +C +102 +} +330 +C +100 +AcDbDictionary +281 + 1 + 0 +ACDBDICTIONARYWDFLT + 5 +E +102 +{ACAD_REACTORS +330 +C +102 +} +330 +C +100 +AcDbDictionary +281 + 1 + 3 +Normal +350 +F +100 +AcDbDictionaryWithDefault +340 +F + 0 +DICTIONARY + 5 +42 +102 +{ACAD_REACTORS +330 +C +102 +} +330 +C +100 +AcDbDictionary +281 + 1 + 0 +LAYOUT + 5 +1E +102 +{ACAD_REACTORS +330 +1A +102 +} +330 +1A +100 +AcDbPlotSettings + 1 + + 2 +none_device + 4 + + 6 + + 40 +0.0 + 41 +0.0 + 42 +0.0 + 43 +0.0 + 44 +0.0 + 45 +0.0 + 46 +0.0 + 47 +0.0 + 48 +0.0 + 49 +0.0 +140 +0.0 +141 +0.0 +142 +1.0 +143 +1.0 + 70 + 688 + 72 + 0 + 73 + 0 + 74 + 5 + 7 + + 75 + 16 + 76 + 0 + 77 + 2 + 78 + 300 +147 +1.0 +148 +0.0 +149 +0.0 +100 +AcDbLayout + 1 +Layout1 + 70 + 1 + 71 + 1 + 10 +0.0 + 20 +0.0 + 11 +12.0 + 21 +9.0 + 12 +0.0 + 22 +0.0 + 32 +0.0 + 14 +1.000000000000000E+20 + 24 +1.000000000000000E+20 + 34 +1.000000000000000E+20 + 15 +-1.000000000000000E+20 + 25 +-1.000000000000000E+20 + 35 +-1.000000000000000E+20 +146 +0.0 + 13 +0.0 + 23 +0.0 + 33 +0.0 + 16 +1.0 + 26 +0.0 + 36 +0.0 + 17 +0.0 + 27 +1.0 + 37 +0.0 + 76 + 0 +330 +1B + 0 +LAYOUT + 5 +22 +102 +{ACAD_REACTORS +330 +1A +102 +} +330 +1A +100 +AcDbPlotSettings + 1 + + 2 +none_device + 4 + + 6 + + 40 +0.0 + 41 +0.0 + 42 +0.0 + 43 +0.0 + 44 +0.0 + 45 +0.0 + 46 +0.0 + 47 +0.0 + 48 +0.0 + 49 +0.0 +140 +0.0 +141 +0.0 +142 +1.0 +143 +1.0 + 70 + 1712 + 72 + 0 + 73 + 0 + 74 + 0 + 7 + + 75 + 0 + 76 + 0 + 77 + 2 + 78 + 300 +147 +1.0 +148 +0.0 +149 +0.0 +100 +AcDbLayout + 1 +Model + 70 + 1 + 71 + 0 + 10 +0.0 + 20 +0.0 + 11 +12.0 + 21 +9.0 + 12 +0.0 + 22 +0.0 + 32 +0.0 + 14 +30.0 + 24 +49.75 + 34 +0.0 + 15 +130.5 + 25 +163.1318914119703 + 35 +0.0 +146 +0.0 + 13 +0.0 + 23 +0.0 + 33 +0.0 + 16 +1.0 + 26 +0.0 + 36 +0.0 + 17 +0.0 + 27 +1.0 + 37 +0.0 + 76 + 0 +330 +1F +331 +29 +0 +ACDBPLACEHOLDER + 5 +F +102 +{ACAD_REACTORS +330 +E +102 +} +330 +E + 0 +ENDSEC + 0 +EOF diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/vdv452.xml b/.venv/lib/python3.12/site-packages/fiona/gdal_data/vdv452.xml new file mode 100644 index 00000000..d010fa0d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/vdv452.xml @@ -0,0 +1,367 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/gdal_data/vdv452.xsd b/.venv/lib/python3.12/site-packages/fiona/gdal_data/vdv452.xsd new file mode 100644 index 00000000..a42774bf --- 
/dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/gdal_data/vdv452.xsd @@ -0,0 +1,63 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/.venv/lib/python3.12/site-packages/fiona/inspector.py b/.venv/lib/python3.12/site-packages/fiona/inspector.py new file mode 100644 index 00000000..9174fa18 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/inspector.py @@ -0,0 +1,36 @@ +import code +import logging +import sys + +import fiona + + +logging.basicConfig(stream=sys.stderr, level=logging.INFO) +logger = logging.getLogger('fiona.inspector') + + +def main(srcfile): + """Open a dataset in an interactive session.""" + with fiona.drivers(): + with fiona.open(srcfile) as src: + code.interact( + 'Fiona %s Interactive Inspector (Python %s)\n' + 'Type "src.schema", "next(src)", or "help(src)" ' + "for more information." + % (fiona.__version__, ".".join(map(str, sys.version_info[:3]))), + local=locals(), + ) + + return 1 + + +if __name__ == '__main__': + import argparse + + parser = argparse.ArgumentParser( + prog="python -m fiona.inspector", + description="Open a data file and drop into an interactive interpreter", + ) + parser.add_argument("src", metavar="FILE", help="Input dataset file name") + args = parser.parse_args() + main(args.src) diff --git a/.venv/lib/python3.12/site-packages/fiona/io.py b/.venv/lib/python3.12/site-packages/fiona/io.py new file mode 100644 index 00000000..c793b1a9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/io.py @@ -0,0 +1,222 @@ +"""Classes capable of reading and writing collections +""" + +import logging + +from fiona.ogrext import MemoryFileBase, _listdir, _listlayers +from fiona.collection import Collection +from fiona.meta import supports_vsi +from fiona.errors import DriverError + +log = logging.getLogger(__name__) + + +class MemoryFile(MemoryFileBase): + """A BytesIO-like object, backed by an in-memory file. + + This allows formatted files to be read and written without I/O. + + A MemoryFile created with initial bytes becomes immutable. A + MemoryFile created without initial bytes may be written to using + either file-like or dataset interfaces. + + Parameters + ---------- + file_or_bytes : an open Python file, bytes, or None + If not None, the MemoryFile becomes immutable and read-only. + If None, it is write-only. + filename : str + An optional filename. The default is a UUID-based name. + ext : str + An optional file extension. Some format drivers require a + specific value. + + """ + def __init__(self, file_or_bytes=None, filename=None, ext=""): + if ext and not ext.startswith("."): + ext = "." + ext + super().__init__( + file_or_bytes=file_or_bytes, filename=filename, ext=ext) + + def open( + self, + mode=None, + driver=None, + schema=None, + crs=None, + encoding=None, + layer=None, + vfs=None, + enabled_drivers=None, + crs_wkt=None, + allow_unsupported_drivers=False, + **kwargs + ): + """Open the file and return a Fiona collection object. + + If data has already been written, the file is opened in 'r' + mode. Otherwise, the file is opened in 'w' mode. + + Parameters + ---------- + Note well that there is no `path` parameter: a `MemoryFile` + contains a single dataset and there is no need to specify a + path. + + Other parameters are optional and have the same semantics as the + parameters of `fiona.open()`. 
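+
+        Examples
+        --------
+        A minimal sketch (assuming GDAL's GeoJSON driver is
+        available): write one feature to memory, then read it back.
+
+        >>> from fiona.model import Feature, Geometry, Properties
+        >>> with MemoryFile() as memfile:
+        ...     with memfile.open(driver="GeoJSON", schema={"geometry": "Point", "properties": {}}) as dst:
+        ...         dst.write(Feature(geometry=Geometry(type="Point", coordinates=(0.0, 0.0)), properties=Properties()))
+        ...     with memfile.open() as src:
+        ...         len(src)
+        1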
+ """ + if self.closed: + raise OSError("I/O operation on closed file.") + + if ( + not allow_unsupported_drivers + and driver is not None + and not supports_vsi(driver) + ): + raise DriverError(f"Driver {driver} does not support virtual files.") + + if mode in ('r', 'a') and not self.exists(): + raise OSError("MemoryFile does not exist.") + if layer is None and mode == 'w' and self.exists(): + raise OSError("MemoryFile already exists.") + + if not self.exists() or mode == 'w': + if driver is not None: + self._ensure_extension(driver) + mode = 'w' + elif mode is None: + mode = 'r' + + return Collection( + self.name, + mode, + crs=crs, + driver=driver, + schema=schema, + encoding=encoding, + layer=layer, + enabled_drivers=enabled_drivers, + allow_unsupported_drivers=allow_unsupported_drivers, + crs_wkt=crs_wkt, + **kwargs + ) + + def listdir(self, path=None): + """List files in a directory. + + Parameters + ---------- + path : URI (str or pathlib.Path) + A dataset resource identifier. + + Returns + ------- + list + A list of filename strings. + + """ + if self.closed: + raise OSError("I/O operation on closed file.") + if path: + vsi_path = f"{self.name}/{path.lstrip('/')}" + else: + vsi_path = f"{self.name}" + return _listdir(vsi_path) + + def listlayers(self, path=None): + """List layer names in their index order + + Parameters + ---------- + path : URI (str or pathlib.Path) + A dataset resource identifier. + + Returns + ------- + list + A list of layer name strings. + + """ + if self.closed: + raise OSError("I/O operation on closed file.") + if path: + vsi_path = f"{self.name}/{path.lstrip('/')}" + else: + vsi_path = f"{self.name}" + return _listlayers(vsi_path) + + def __enter__(self): + return self + + def __exit__(self, *args, **kwargs): + self.close() + + +class ZipMemoryFile(MemoryFile): + """A read-only BytesIO-like object backed by an in-memory zip file. + + This allows a zip file containing formatted files to be read + without I/O. + + Parameters + ---------- + file_or_bytes : an open Python file, bytes, or None + If not None, the MemoryFile becomes immutable and read-only. If + None, it is write-only. + filename : str + An optional filename. The default is a UUID-based name. + ext : str + An optional file extension. Some format drivers require a + specific value. The default is ".zip". + """ + + def __init__(self, file_or_bytes=None, filename=None, ext=".zip"): + super().__init__(file_or_bytes, filename=filename, ext=ext) + self.name = f"/vsizip{self.name}" + + def open( + self, + path=None, + driver=None, + encoding=None, + layer=None, + enabled_drivers=None, + allow_unsupported_drivers=False, + **kwargs + ): + """Open a dataset within the zipped stream. + + Parameters + ---------- + path : str + Path to a dataset in the zip file, relative to the root of the + archive. 
+ + Returns + ------- + A Fiona collection object + + """ + if self.closed: + raise OSError("I/O operation on closed file.") + if path: + vsi_path = f"{self.name}/{path.lstrip('/')}" + else: + vsi_path = f"{self.name}" + + return Collection( + vsi_path, + "r", + driver=driver, + encoding=encoding, + layer=layer, + enabled_drivers=enabled_drivers, + allow_unsupported_drivers=allow_unsupported_drivers, + **kwargs + ) diff --git a/.venv/lib/python3.12/site-packages/fiona/logutils.py b/.venv/lib/python3.12/site-packages/fiona/logutils.py new file mode 100644 index 00000000..081e1c14 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/logutils.py @@ -0,0 +1,36 @@ +"""Logging helper classes.""" + +import logging + + +class FieldSkipLogFilter(logging.Filter): + """Filter field skip log messages. + + At most, one message per field skipped per loop will be passed. + """ + + def __init__(self, name=''): + super().__init__(name) + self.seen_msgs = set() + + def filter(self, record): + """Pass record if not seen.""" + msg = record.getMessage() + if msg.startswith("Skipping field"): + retval = msg not in self.seen_msgs + self.seen_msgs.add(msg) + return retval + else: + return 1 + + +class LogFiltering: + def __init__(self, logger, filter): + self.logger = logger + self.filter = filter + + def __enter__(self): + self.logger.addFilter(self.filter) + + def __exit__(self, *args, **kwargs): + self.logger.removeFilter(self.filter) diff --git a/.venv/lib/python3.12/site-packages/fiona/meta.py b/.venv/lib/python3.12/site-packages/fiona/meta.py new file mode 100644 index 00000000..7c0e2a56 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/meta.py @@ -0,0 +1,271 @@ +import logging +import xml.etree.ElementTree as ET + +from fiona.env import require_gdal_version +from fiona.ogrext import _get_metadata_item + +log = logging.getLogger(__name__) + + +class MetadataItem: + # since GDAL 2.0 + CREATION_FIELD_DATA_TYPES = "DMD_CREATIONFIELDDATATYPES" + # since GDAL 2.3 + CREATION_FIELD_DATA_SUB_TYPES = "DMD_CREATIONFIELDDATASUBTYPES" + CREATION_OPTION_LIST = "DMD_CREATIONOPTIONLIST" + LAYER_CREATION_OPTION_LIST = "DS_LAYER_CREATIONOPTIONLIST" + # since GDAL 2.0 + DATASET_OPEN_OPTIONS = "DMD_OPENOPTIONLIST" + # since GDAL 2.0 + EXTENSIONS = "DMD_EXTENSIONS" + EXTENSION = "DMD_EXTENSION" + VIRTUAL_IO = "DCAP_VIRTUALIO" + # since GDAL 2.0 + NOT_NULL_FIELDS = "DCAP_NOTNULL_FIELDS" + # since GDAL 2.3 + NOT_NULL_GEOMETRY_FIELDS = "DCAP_NOTNULL_GEOMFIELDS" + # since GDAL 3.2 + UNIQUE_FIELDS = "DCAP_UNIQUE_FIELDS" + # since GDAL 2.0 + DEFAULT_FIELDS = "DCAP_DEFAULT_FIELDS" + OPEN = "DCAP_OPEN" + CREATE = "DCAP_CREATE" + + +def _parse_options(xml):
 + """Convert metadata xml to dict""" + options = {} + if len(xml) > 0: + + root = ET.fromstring(xml) + for option in root.iter('Option'): + + option_name = option.attrib['name'] + opt = {} + opt.update((k, v) for k, v in option.attrib.items() if not k == 'name') + + values = [] + for value in option.iter('Value'): + values.append(value.text) + if len(values) > 0: + opt['values'] = values + + options[option_name] = opt + + return options + + +@require_gdal_version('2.0') +def dataset_creation_options(driver): + """ Returns dataset creation options for driver + + Parameters + ---------- + driver : str + + Returns + ------- + dict + Dataset creation options + + """ + + xml = _get_metadata_item(driver, MetadataItem.CREATION_OPTION_LIST) + + if xml is None: +
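# No such metadata item: the driver declares no creation options. +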
return {} + + if len(xml) == 0: + return {} + + return _parse_options(xml) + + +@require_gdal_version('2.0') +def layer_creation_options(driver): + """ Returns layer creation options for driver + + Parameters + ---------- + driver : str + + Returns + ------- + dict + Layer creation options + + """ + xml = _get_metadata_item(driver, MetadataItem.LAYER_CREATION_OPTION_LIST) + + if xml is None: + return {} + + if len(xml) == 0: + return {} + + return _parse_options(xml) + + +@require_gdal_version('2.0') +def dataset_open_options(driver): + """ Returns dataset open options for driver + + Parameters + ---------- + driver : str + + Returns + ------- + dict + Dataset open options + + """ + xml = _get_metadata_item(driver, MetadataItem.DATASET_OPEN_OPTIONS) + + if xml is None: + return {} + + if len(xml) == 0: + return {} + + return _parse_options(xml) + + +@require_gdal_version('2.0') +def print_driver_options(driver): + """ Print driver options for dataset open, dataset creation, and layer creation. + + Parameters + ---------- + driver : str + + """ + + for option_type, options in [("Dataset Open Options", dataset_open_options(driver)), + ("Dataset Creation Options", dataset_creation_options(driver)), + ("Layer Creation Options", layer_creation_options(driver))]: + + print(f"{option_type}:") + if len(options) == 0: + print("\tNo options available.") + else: + for option_name in options: + print(f"\t{option_name}:") + if 'description' in options[option_name]: + print(f"\t\tDescription: {options[option_name]['description']}") + if 'type' in options[option_name]: + print(f"\t\tType: {options[option_name]['type']}") + if 'values' in options[option_name] and len(options[option_name]['values']) > 0: + print(f"\t\tAccepted values: {','.join(options[option_name]['values'])}") + for attr_text, attribute in [('Default value', 'default'), + ('Required', 'required'), + ('Alias', 'aliasOf'), + ('Min', 'min'), + ('Max', 'max'), + ('Max size', 'maxsize'), + ('Scope', 'scope'), + ('Alternative configuration option', 'alt_config_option')]: + if attribute in options[option_name]: + print(f"\t\t{attr_text}: {options[option_name][attribute]}") + print("") + + +@require_gdal_version('2.0') +def extensions(driver): + """ Returns file extensions supported by driver + + Parameters + ---------- + driver : str + + Returns + ------- + list + List with file extensions or None if not specified by driver + + """ + + exts = _get_metadata_item(driver, MetadataItem.EXTENSIONS) + + if exts is None: + return None + + return [ext for ext in exts.split(" ") if len(ext) > 0] + + +def extension(driver): + """ Returns file extension of driver + + Parameters + ---------- + driver : str + + Returns + ------- + str + File extensions or None if not specified by driver + + """ + + return _get_metadata_item(driver, MetadataItem.EXTENSION) + + +@require_gdal_version('2.0') +def supports_vsi(driver): + """ Returns True if driver supports GDAL's VSI*L API + + Parameters + ---------- + driver : str + + Returns + ------- + bool + + """ + virtual_io = _get_metadata_item(driver, MetadataItem.VIRTUAL_IO) + return virtual_io is not None and virtual_io.upper() == "YES" + + +@require_gdal_version('2.0') +def supported_field_types(driver): + """ Returns supported field types + + Parameters + ---------- + driver : str + + Returns + ------- + list + List with supported field types or None if not specified by driver + + """ + field_types_str = _get_metadata_item(driver, MetadataItem.CREATION_FIELD_DATA_TYPES) + + if field_types_str is None: + return None 
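+
+    # The metadata item is a single space-delimited string of OGR type names.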
+ + return [field_type for field_type in field_types_str.split(" ") if len(field_type) > 0] + + +@require_gdal_version('2.3') +def supported_sub_field_types(driver): + """ Returns supported sub field types + + Parameters + ---------- + driver : str + + Returns + ------- + list + List with supported field types or None if not specified by driver + + """ + field_types_str = _get_metadata_item(driver, MetadataItem.CREATION_FIELD_DATA_SUB_TYPES) + + if field_types_str is None: + return None + + return [field_type for field_type in field_types_str.split(" ") if len(field_type) > 0] diff --git a/.venv/lib/python3.12/site-packages/fiona/model.py b/.venv/lib/python3.12/site-packages/fiona/model.py new file mode 100644 index 00000000..30f4f186 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/model.py @@ -0,0 +1,461 @@ +"""Fiona data model""" + +from binascii import hexlify +from collections.abc import MutableMapping +from enum import Enum +import itertools +from json import JSONEncoder +import reprlib +from warnings import warn + +from fiona.errors import FionaDeprecationWarning + +_model_repr = reprlib.Repr() +_model_repr.maxlist = 1 +_model_repr.maxdict = 5 + + +class OGRGeometryType(Enum): + Unknown = 0 + Point = 1 + LineString = 2 + Polygon = 3 + MultiPoint = 4 + MultiLineString = 5 + MultiPolygon = 6 + GeometryCollection = 7 + CircularString = 8 + CompoundCurve = 9 + CurvePolygon = 10 + MultiCurve = 11 + MultiSurface = 12 + Curve = 13 + Surface = 14 + PolyhedralSurface = 15 + TIN = 16 + Triangle = 17 + NONE = 100 + LinearRing = 101 + CircularStringZ = 1008 + CompoundCurveZ = 1009 + CurvePolygonZ = 1010 + MultiCurveZ = 1011 + MultiSurfaceZ = 1012 + CurveZ = 1013 + SurfaceZ = 1014 + PolyhedralSurfaceZ = 1015 + TINZ = 1016 + TriangleZ = 1017 + PointM = 2001 + LineStringM = 2002 + PolygonM = 2003 + MultiPointM = 2004 + MultiLineStringM = 2005 + MultiPolygonM = 2006 + GeometryCollectionM = 2007 + CircularStringM = 2008 + CompoundCurveM = 2009 + CurvePolygonM = 2010 + MultiCurveM = 2011 + MultiSurfaceM = 2012 + CurveM = 2013 + SurfaceM = 2014 + PolyhedralSurfaceM = 2015 + TINM = 2016 + TriangleM = 2017 + PointZM = 3001 + LineStringZM = 3002 + PolygonZM = 3003 + MultiPointZM = 3004 + MultiLineStringZM = 3005 + MultiPolygonZM = 3006 + GeometryCollectionZM = 3007 + CircularStringZM = 3008 + CompoundCurveZM = 3009 + CurvePolygonZM = 3010 + MultiCurveZM = 3011 + MultiSurfaceZM = 3012 + CurveZM = 3013 + SurfaceZM = 3014 + PolyhedralSurfaceZM = 3015 + TINZM = 3016 + TriangleZM = 3017 + Point25D = 0x80000001 + LineString25D = 0x80000002 + Polygon25D = 0x80000003 + MultiPoint25D = 0x80000004 + MultiLineString25D = 0x80000005 + MultiPolygon25D = 0x80000006 + GeometryCollection25D = 0x80000007 + + +# Mapping of OGR integer geometry types to GeoJSON type names. 
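+# GEOMETRY_TYPES below extends this mapping with the non-GeoJSON and 3D ("25D") types.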
+_GEO_TYPES = { + OGRGeometryType.Unknown.value: "Unknown", + OGRGeometryType.Point.value: "Point", + OGRGeometryType.LineString.value: "LineString", + OGRGeometryType.Polygon.value: "Polygon", + OGRGeometryType.MultiPoint.value: "MultiPoint", + OGRGeometryType.MultiLineString.value: "MultiLineString", + OGRGeometryType.MultiPolygon.value: "MultiPolygon", + OGRGeometryType.GeometryCollection.value: "GeometryCollection" +} + +GEOMETRY_TYPES = { + **_GEO_TYPES, + OGRGeometryType.NONE.value: "None", + OGRGeometryType.LinearRing.value: "LinearRing", + OGRGeometryType.Point25D.value: "3D Point", + OGRGeometryType.LineString25D.value: "3D LineString", + OGRGeometryType.Polygon25D.value: "3D Polygon", + OGRGeometryType.MultiPoint25D.value: "3D MultiPoint", + OGRGeometryType.MultiLineString25D.value: "3D MultiLineString", + OGRGeometryType.MultiPolygon25D.value: "3D MultiPolygon", + OGRGeometryType.GeometryCollection25D.value: "3D GeometryCollection", +} + + +class Object(MutableMapping): + """Base class for CRS, geometry, and feature objects + + In Fiona 2.0, the implementation of those objects will change. They + will no longer be dicts or derive from dict, and will lose some + features like mutability and default JSON serialization. + + Object will be used for these objects in Fiona 1.9. This class warns + about future deprecation of features. + """ + + _delegated_properties = [] + + def __init__(self, **kwds): + self._data = dict(**kwds) + + def _props(self): + return { + k: getattr(self._delegate, k) + for k in self._delegated_properties + if k is not None # getattr(self._delegate, k) is not None + } + + def __getitem__(self, item): + if item in self._delegated_properties: + return getattr(self._delegate, item) + else: + props = { + k: (dict(v) if isinstance(v, Object) else v) + for k, v in self._props().items() + } + props.update(**self._data) + return props[item] + + def __iter__(self): + props = self._props() + return itertools.chain(iter(props), iter(self._data)) + + def __len__(self): + props = self._props() + return len(props) + len(self._data) + + def __repr__(self): + kvs = [ + f"{k}={v!r}" + for k, v in itertools.chain(self._props().items(), self._data.items()) + ] + return "fiona.{}({})".format(self.__class__.__name__, ", ".join(kvs)) + + def __setitem__(self, key, value): + warn( + "instances of this class -- CRS, geometry, and feature objects -- will become immutable in fiona version 2.0", + FionaDeprecationWarning, + stacklevel=2, + ) + if key in self._delegated_properties: + setattr(self._delegate, key, value) + else: + self._data[key] = value + + def __delitem__(self, key): + warn( + "instances of this class -- CRS, geometry, and feature objects -- will become immutable in fiona version 2.0", + FionaDeprecationWarning, + stacklevel=2, + ) + if key in self._delegated_properties: + setattr(self._delegate, key, None) + else: + del self._data[key] + + def __eq__(self, other): + return dict(**self) == dict(**other) + + +class _Geometry: + def __init__(self, coordinates=None, type=None, geometries=None): + self.coordinates = coordinates + self.type = type + self.geometries = geometries + + +class Geometry(Object): + """A GeoJSON-like geometry + + Notes + ----- + Delegates coordinates and type properties to an instance of + _Geometry, which will become an extension class in Fiona 2.0. 
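+
+    Examples
+    --------
+    >>> Geometry.from_dict({"type": "Point", "coordinates": (0.0, 0.0)}).type
+    'Point'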
+ + """ + + _delegated_properties = ["coordinates", "type", "geometries"] + + def __init__(self, coordinates=None, type=None, geometries=None, **data): + self._delegate = _Geometry( + coordinates=coordinates, type=type, geometries=geometries + ) + super().__init__(**data) + + def __repr__(self): + kvs = [f"{k}={_model_repr.repr(v)}" for k, v in self.items() if v is not None] + return "fiona.Geometry({})".format(", ".join(kvs)) + + @classmethod + def from_dict(cls, ob=None, **kwargs): + if ob is not None: + data = dict(getattr(ob, "__geo_interface__", ob)) + data.update(kwargs) + else: + data = kwargs + + if "geometries" in data and data["type"] == "GeometryCollection": + _ = data.pop("coordinates", None) + _ = data.pop("type", None) + return Geometry( + type="GeometryCollection", + geometries=[ + Geometry.from_dict(part) for part in data.pop("geometries") + ], + **data + ) + else: + _ = data.pop("geometries", None) + return Geometry( + type=data.pop("type", None), + coordinates=data.pop("coordinates", []), + **data + ) + + @property + def coordinates(self): + """The geometry's coordinates + + Returns + ------- + Sequence + + """ + return self._delegate.coordinates + + @property + def type(self): + """The geometry's type + + Returns + ------- + str + + """ + return self._delegate.type + + @property + def geometries(self): + """A collection's geometries. + + Returns + ------- + list + + """ + return self._delegate.geometries + + @property + def __geo_interface__(self): + return ObjectEncoder().default(self) + + +class _Feature: + def __init__(self, geometry=None, id=None, properties=None): + self.geometry = geometry + self.id = id + self.properties = properties + + +class Feature(Object): + """A GeoJSON-like feature + + Notes + ----- + Delegates geometry and properties to an instance of _Feature, which + will become an extension class in Fiona 2.0. 
+ + """ + + _delegated_properties = ["geometry", "id", "properties"] + + def __init__(self, geometry=None, id=None, properties=None, **data): + if properties is None: + properties = Properties() + self._delegate = _Feature(geometry=geometry, id=id, properties=properties) + super().__init__(**data) + + @classmethod + def from_dict(cls, ob=None, **kwargs): + if ob is not None: + data = dict(getattr(ob, "__geo_interface__", ob)) + data.update(kwargs) + else: + data = kwargs + geom_data = data.pop("geometry", None) + + if isinstance(geom_data, Geometry): + geom = geom_data + else: + geom = Geometry.from_dict(geom_data) if geom_data is not None else None + + props_data = data.pop("properties", None) + + if isinstance(props_data, Properties): + props = props_data + else: + props = Properties(**props_data) if props_data is not None else None + + fid = data.pop("id", None) + return Feature(geometry=geom, id=fid, properties=props, **data) + + def __eq__(self, other): + return ( + self.geometry == other.geometry + and self.id == other.id + and self.properties == other.properties + ) + + @property + def geometry(self): + """The feature's geometry object + + Returns + ------- + Geometry + + """ + return self._delegate.geometry + + @property + def id(self): + """The feature's id + + Returns + ------ + object + + """ + return self._delegate.id + + @property + def properties(self): + """The feature's properties + + Returns + ------- + object + + """ + return self._delegate.properties + + @property + def type(self): + """The Feature's type + + Returns + ------- + str + + """ + return "Feature" + + @property + def __geo_interface__(self): + return ObjectEncoder().default(self) + + +class Properties(Object): + """A GeoJSON-like feature's properties""" + + def __init__(self, **kwds): + super().__init__(**kwds) + + @classmethod + def from_dict(cls, mapping=None, **kwargs): + if mapping: + return Properties(**mapping, **kwargs) + return Properties(**kwargs) + + +class ObjectEncoder(JSONEncoder): + """Encodes Geometry, Feature, and Properties.""" + + def default(self, o): + if isinstance(o, Object): + o_dict = { + k: self.default(v) + for k, v in itertools.chain(o._props().items(), o._data.items()) + } + if isinstance(o, Geometry): + if o.type == "GeometryCollection": + _ = o_dict.pop("coordinates", None) + else: + _ = o_dict.pop("geometries", None) + elif isinstance(o, Feature): + o_dict["type"] = "Feature" + return o_dict + elif isinstance(o, bytes): + return hexlify(o) + else: + return o + + +def decode_object(obj): + """A json.loads object_hook + + Parameters + ---------- + obj : dict + A decoded dict. 
+ + Returns + ------- + Feature, Geometry, or dict + + """ + if isinstance(obj, Object): + return obj + else: + obj = obj.get("__geo_interface__", obj) + + _type = obj.get("type", None) + if (_type == "Feature") or "geometry" in obj: + return Feature.from_dict(obj) + elif _type in _GEO_TYPES.values(): + return Geometry.from_dict(obj) + else: + return obj + + +def to_dict(val): + """Converts an object to a dict""" + try: + obj = ObjectEncoder().default(val) + except TypeError: + return val + else: + return obj diff --git a/.venv/lib/python3.12/site-packages/fiona/ogrext.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/fiona/ogrext.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 00000000..04abcab6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/ogrext.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/fiona/ogrext1.pxd b/.venv/lib/python3.12/site-packages/fiona/ogrext1.pxd new file mode 100644 index 00000000..b55493b0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/ogrext1.pxd @@ -0,0 +1,287 @@ +# Copyright (c) 2007, Sean C. Gillies +# All rights reserved. +# See ../LICENSE.txt + +from libc.stdio cimport FILE + + +cdef extern from "gdal.h": + ctypedef void * GDALDriverH + ctypedef void * GDALMajorObjectH + + const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszDomain) + char * GDALVersionInfo (char *pszRequest) + + +cdef extern from "gdal_version.h": + int GDAL_COMPUTE_VERSION(int maj, int min, int rev) + + +cdef extern from "cpl_conv.h": + void * CPLMalloc (size_t) + void CPLFree (void *ptr) + void CPLSetThreadLocalConfigOption (char *key, char *val) + void CPLSetConfigOption (char *key, char *val) + const char *CPLGetConfigOption (char *, char *) + int CPLCheckForFile(char *, char **) + + +cdef extern from "cpl_string.h": + char ** CSLAddNameValue (char **list, char *name, char *value) + char ** CSLSetNameValue (char **list, char *name, char *value) + void CSLDestroy (char **list) + char ** CSLAddString(char **list, const char *string) + int CSLCount(char **papszStrList) + + +cdef extern from "sys/stat.h" nogil: + struct stat: + int st_mode + + +cdef extern from "cpl_vsi.h" nogil: + + ctypedef int vsi_l_offset + ctypedef FILE VSILFILE + ctypedef stat VSIStatBufL + + unsigned char *VSIGetMemFileBuffer(const char *path, + vsi_l_offset *data_len, + int take_ownership) + VSILFILE *VSIFileFromMemBuffer(const char *path, void *data, + vsi_l_offset data_len, int take_ownership) + VSILFILE* VSIFOpenL(const char *path, const char *mode) + int VSIFCloseL(VSILFILE *fp) + int VSIUnlink(const char *path) + int VSIMkdir(const char *path, long mode) + int VSIRmdir(const char *path) + int VSIFFlushL(VSILFILE *fp) + size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + char** VSIReadDir(const char* pszPath) + int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) + vsi_l_offset VSIFTellL(VSILFILE *fp) + int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) + size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) + int VSI_ISDIR(int mode) + + +ctypedef int OGRErr +ctypedef struct OGREnvelope: + double MinX + double MaxX + double MinY + double MaxY + + +cdef extern from "ogr_core.h": + ctypedef enum OGRwkbGeometryType: + wkbUnknown + wkbPoint + wkbLineString + wkbPolygon + wkbMultiPoint + wkbMultiLineString + wkbMultiPolygon + wkbGeometryCollection + 
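# ISO SQL/MM curve and surface types follow the simple-feature types.
+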
wkbCircularString + wkbCompoundCurve + wkbCurvePolygon + wkbMultiCurve + wkbMultiSurface + wkbCurve + wkbSurface + wkbPolyhedralSurface + wkbTIN + wkbTriangle + wkbNone + wkbLinearRing + wkbCircularStringZ + wkbCompoundCurveZ + wkbCurvePolygonZ + wkbMultiCurveZ + wkbMultiSurfaceZ + wkbCurveZ + wkbSurfaceZ + wkbPolyhedralSurfaceZ + wkbTINZ + wkbTriangleZ + wkbPointM + wkbLineStringM + wkbPolygonM + wkbMultiPointM + wkbMultiLineStringM + wkbMultiPolygonM + wkbGeometryCollectionM + wkbCircularStringM + wkbCompoundCurveM + wkbCurvePolygonM + wkbMultiCurveM + wkbMultiSurfaceM + wkbCurveM + wkbSurfaceM + wkbPolyhedralSurfaceM + wkbTINM + wkbTriangleM + wkbPointZM + wkbLineStringZM + wkbPolygonZM + wkbMultiPointZM + wkbMultiLineStringZM + wkbMultiPolygonZM + wkbGeometryCollectionZM + wkbCircularStringZM + wkbCompoundCurveZM + wkbCurvePolygonZM + wkbMultiCurveZM + wkbMultiSurfaceZM + wkbCurveZM + wkbSurfaceZM + wkbPolyhedralSurfaceZM + wkbTINZM + wkbTriangleZM + wkbPoint25D + wkbLineString25D + wkbPolygon25D + wkbMultiPoint25D + wkbMultiLineString25D + wkbMultiPolygon25D + wkbGeometryCollection25D + + ctypedef enum OGRFieldType: + OFTInteger + OFTIntegerList + OFTReal + OFTRealList + OFTString + OFTStringList + OFTWideString + OFTWideStringList + OFTBinary + OFTDate + OFTTime + OFTDateTime + OFTMaxType + + char * OGRGeometryTypeToName(int) + + char * ODsCCreateLayer = "CreateLayer" + char * ODsCDeleteLayer = "DeleteLayer" + +cdef extern from "ogr_srs_api.h": + + ctypedef void * OGRSpatialReferenceH + + void OSRCleanup () + OGRSpatialReferenceH OSRClone (OGRSpatialReferenceH srs) + int OSRFixup (OGRSpatialReferenceH srs) + int OSRExportToProj4 (OGRSpatialReferenceH srs, char **params) + int OSRExportToWkt (OGRSpatialReferenceH srs, char **params) + int OSRImportFromEPSG (OGRSpatialReferenceH, int code) + int OSRImportFromProj4 (OGRSpatialReferenceH srs, const char *proj) + int OSRSetFromUserInput (OGRSpatialReferenceH srs, const char *input) + int OSRAutoIdentifyEPSG (OGRSpatialReferenceH srs) + const char * OSRGetAuthorityName (OGRSpatialReferenceH srs, const char *key) + const char * OSRGetAuthorityCode (OGRSpatialReferenceH srs, const char *key) + OGRSpatialReferenceH OSRNewSpatialReference (char *wkt) + void OSRRelease (OGRSpatialReferenceH srs) + void * OCTNewCoordinateTransformation (OGRSpatialReferenceH source, OGRSpatialReferenceH dest) + void OCTDestroyCoordinateTransformation (void *source) + int OCTTransform (void *ct, int nCount, double *x, double *y, double *z) + +cdef extern from "ogr_api.h": + const char * OGR_Dr_GetName (void *driver) + void * OGR_Dr_CreateDataSource (void *driver, const char *path, char **options) + int OGR_Dr_DeleteDataSource (void *driver, char *) + void * OGR_Dr_Open (void *driver, const char *path, int bupdate) + int OGR_Dr_TestCapability (void *driver, const char *) + int OGR_DS_DeleteLayer (void *datasource, int n) + void * OGR_DS_CreateLayer (void *datasource, char *name, void *crs, int geomType, char **options) + void * OGR_DS_ExecuteSQL (void *datasource, char *name, void *filter, char *dialext) + void OGR_DS_Destroy (void *datasource) + void * OGR_DS_GetDriver (void *layer_defn) + void * OGR_DS_GetLayerByName (void *datasource, char *name) + int OGR_DS_GetLayerCount (void *datasource) + void * OGR_DS_GetLayer (void *datasource, int n) + void OGR_DS_ReleaseResultSet (void *datasource, void *results) + int OGR_DS_SyncToDisk (void *datasource) + int OGR_DS_TestCapability(void *datasource, char *capability) + void * OGR_F_Create (void *featuredefn) + void 
OGR_F_Destroy (void *feature) + long OGR_F_GetFID (void *feature) + int OGR_F_IsFieldSet (void *feature, int n) + int OGR_F_GetFieldAsDateTime (void *feature, int n, int *y, int *m, int *d, int *h, int *m, int *s, int *z) + double OGR_F_GetFieldAsDouble (void *feature, int n) + int OGR_F_GetFieldAsInteger (void *feature, int n) + char * OGR_F_GetFieldAsString (void *feature, int n) + unsigned char * OGR_F_GetFieldAsBinary(void *feature, int n, int *s) + int OGR_F_GetFieldCount (void *feature) + void * OGR_F_GetFieldDefnRef (void *feature, int n) + int OGR_F_GetFieldIndex (void *feature, char *name) + void * OGR_F_GetGeometryRef (void *feature) + void * OGR_F_StealGeometry (void *feature) + void OGR_F_SetFieldDateTime (void *feature, int n, int y, int m, int d, int hh, int mm, int ss, int tz) + void OGR_F_SetFieldDouble (void *feature, int n, double value) + void OGR_F_SetFieldInteger (void *feature, int n, int value) + void OGR_F_SetFieldString (void *feature, int n, char *value) + void OGR_F_SetFieldBinary (void *feature, int n, int l, unsigned char *value) + int OGR_F_SetGeometryDirectly (void *feature, void *geometry) + void * OGR_FD_Create (char *name) + int OGR_FD_GetFieldCount (void *featuredefn) + void * OGR_FD_GetFieldDefn (void *featuredefn, int n) + int OGR_FD_GetGeomType (void *featuredefn) + char * OGR_FD_GetName (void *featuredefn) + void * OGR_Fld_Create (char *name, OGRFieldType fieldtype) + void OGR_Fld_Destroy (void *fielddefn) + char * OGR_Fld_GetNameRef (void *fielddefn) + int OGR_Fld_GetPrecision (void *fielddefn) + int OGR_Fld_GetType (void *fielddefn) + int OGR_Fld_GetWidth (void *fielddefn) + void OGR_Fld_Set (void *fielddefn, char *name, int fieldtype, int width, int precision, int justification) + void OGR_Fld_SetPrecision (void *fielddefn, int n) + void OGR_Fld_SetWidth (void *fielddefn, int n) + OGRErr OGR_G_AddGeometryDirectly (void *geometry, void *part) + void OGR_G_AddPoint (void *geometry, double x, double y, double z) + void OGR_G_AddPoint_2D (void *geometry, double x, double y) + void OGR_G_CloseRings (void *geometry) + void * OGR_G_CreateGeometry (int wkbtypecode) + void OGR_G_DestroyGeometry (void *geometry) + unsigned char * OGR_G_ExportToJson (void *geometry) + void OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) + int OGR_G_GetCoordinateDimension (void *geometry) + int OGR_G_GetGeometryCount (void *geometry) + unsigned char * OGR_G_GetGeometryName (void *geometry) + int OGR_G_GetGeometryType (void *geometry) + void * OGR_G_GetGeometryRef (void *geometry, int n) + int OGR_G_GetPointCount (void *geometry) + double OGR_G_GetX (void *geometry, int n) + double OGR_G_GetY (void *geometry, int n) + double OGR_G_GetZ (void *geometry, int n) + OGRErr OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) + int OGR_G_WkbSize (void *geometry) + void * OGR_G_ForceToMultiPolygon (void *geometry) + void * OGR_G_ForceToPolygon (void *geometry) + void * OGR_G_Clone(void *geometry) + OGRErr OGR_L_CreateFeature (void *layer, void *feature) + OGRErr OGR_L_CreateField (void *layer, void *fielddefn, int flexible) + OGRErr OGR_L_GetExtent (void *layer, void *extent, int force) + void * OGR_L_GetFeature (void *layer, int n) + int OGR_L_GetFeatureCount (void *layer, int m) + void * OGR_L_GetLayerDefn (void *layer) + char * OGR_L_GetName (void *layer) + void * OGR_L_GetNextFeature (void *layer) + void * OGR_L_GetSpatialFilter (void *layer) + void * OGR_L_GetSpatialRef (void *layer) + void OGR_L_ResetReading (void *layer) + void 
OGR_L_SetSpatialFilter (void *layer, void *geometry) + void OGR_L_SetSpatialFilterRect ( + void *layer, double minx, double miny, double maxx, double maxy + ) + int OGR_L_TestCapability (void *layer, char *name) + void * OGRGetDriverByName (char *) + void * OGROpen (char *path, int mode, void *x) + void * OGROpenShared (char *path, int mode, void *x) + int OGRReleaseDataSource (void *datasource) + OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) + OGRErr OGR_L_SetAttributeFilter(void *layer, const char*) + OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) diff --git a/.venv/lib/python3.12/site-packages/fiona/ogrext2.pxd b/.venv/lib/python3.12/site-packages/fiona/ogrext2.pxd new file mode 100644 index 00000000..fe1b20ad --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/ogrext2.pxd @@ -0,0 +1,337 @@ +# Copyright (c) 2007, Sean C. Gillies +# All rights reserved. +# See ../LICENSE.txt + +from libc.stdio cimport FILE + + +cdef extern from "ogr_core.h": + + ctypedef int OGRErr + + ctypedef enum OGRwkbGeometryType: + wkbUnknown + wkbPoint + wkbLineString + wkbPolygon + wkbMultiPoint + wkbMultiLineString + wkbMultiPolygon + wkbGeometryCollection + wkbCircularString + wkbCompoundCurve + wkbCurvePolygon + wkbMultiCurve + wkbMultiSurface + wkbCurve + wkbSurface + wkbPolyhedralSurface + wkbTIN + wkbTriangle + wkbNone + wkbLinearRing + wkbCircularStringZ + wkbCompoundCurveZ + wkbCurvePolygonZ + wkbMultiCurveZ + wkbMultiSurfaceZ + wkbCurveZ + wkbSurfaceZ + wkbPolyhedralSurfaceZ + wkbTINZ + wkbTriangleZ + wkbPointM + wkbLineStringM + wkbPolygonM + wkbMultiPointM + wkbMultiLineStringM + wkbMultiPolygonM + wkbGeometryCollectionM + wkbCircularStringM + wkbCompoundCurveM + wkbCurvePolygonM + wkbMultiCurveM + wkbMultiSurfaceM + wkbCurveM + wkbSurfaceM + wkbPolyhedralSurfaceM + wkbTINM + wkbTriangleM + wkbPointZM + wkbLineStringZM + wkbPolygonZM + wkbMultiPointZM + wkbMultiLineStringZM + wkbMultiPolygonZM + wkbGeometryCollectionZM + wkbCircularStringZM + wkbCompoundCurveZM + wkbCurvePolygonZM + wkbMultiCurveZM + wkbMultiSurfaceZM + wkbCurveZM + wkbSurfaceZM + wkbPolyhedralSurfaceZM + wkbTINZM + wkbTriangleZM + wkbPoint25D + wkbLineString25D + wkbPolygon25D + wkbMultiPoint25D + wkbMultiLineString25D + wkbMultiPolygon25D + wkbGeometryCollection25D + + ctypedef enum OGRFieldType: + OFTInteger + OFTIntegerList + OFTReal + OFTRealList + OFTString + OFTStringList + OFTWideString + OFTWideStringList + OFTBinary + OFTDate + OFTTime + OFTDateTime + OFTInteger64 + OFTInteger64List + OFTMaxType + + ctypedef int OGRFieldSubType + cdef int OFSTNone = 0 + cdef int OFSTBoolean = 1 + cdef int OFSTInt16 = 2 + cdef int OFSTFloat32 = 3 + cdef int OFSTMaxSubType = 3 + + ctypedef struct OGREnvelope: + double MinX + double MaxX + double MinY + double MaxY + + char * OGRGeometryTypeToName(int) + + + char * ODsCCreateLayer = "CreateLayer" + char * ODsCDeleteLayer = "DeleteLayer" + char * ODsCTransactions = "Transactions" + + +cdef extern from "gdal.h": + ctypedef void * GDALDriverH + ctypedef void * GDALMajorObjectH + + char * GDALVersionInfo (char *pszRequest) + void * GDALGetDriverByName(const char * pszName) + void * GDALOpenEx(const char * pszFilename, + unsigned int nOpenFlags, + const char *const *papszAllowedDrivers, + const char *const *papszOpenOptions, + const char *const *papszSiblingFiles + ) + int GDAL_OF_UPDATE + int GDAL_OF_READONLY + int GDAL_OF_VECTOR + int GDAL_OF_VERBOSE_ERROR + int GDALDatasetGetLayerCount(void * hds) + void * GDALDatasetGetLayer(void * hDS, int iLayer) + void 
* GDALDatasetGetLayerByName(void * hDS, char * pszName) + void GDALClose(void * hDS) + void * GDALCreate(void * hDriver, + const char * pszFilename, + int nXSize, + int nYSize, + int nBands, + GDALDataType eBandType, + char ** papszOptions) + void * GDALDatasetCreateLayer(void * hDS, + const char * pszName, + void * hSpatialRef, + int eType, + char ** papszOptions) + int GDALDatasetDeleteLayer(void * hDS, int iLayer) + void GDALFlushCache(void * hDS) + char * GDALGetDriverShortName(void * hDriver) + char * GDALGetDatasetDriver (void * hDataset) + int GDALDeleteDataset(void * hDriver, const char * pszFilename) + OGRErr GDALDatasetStartTransaction (void * hDataset, int bForce) + OGRErr GDALDatasetCommitTransaction (void * hDataset) + OGRErr GDALDatasetRollbackTransaction (void * hDataset) + int GDALDatasetTestCapability (void * hDataset, char *) + const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszDomain) + + ctypedef enum GDALDataType: + GDT_Unknown + GDT_Byte + GDT_UInt16 + GDT_Int16 + GDT_UInt32 + GDT_Int32 + GDT_Float32 + GDT_Float64 + GDT_CInt16 + GDT_CInt32 + GDT_CFloat32 + GDT_CFloat64 + GDT_TypeCount + +cdef extern from "gdal_version.h": + int GDAL_COMPUTE_VERSION(int maj, int min, int rev) + +cdef extern from "cpl_conv.h": + void * CPLMalloc (size_t) + void CPLFree (void *ptr) + void CPLSetThreadLocalConfigOption (char *key, char *val) + const char *CPLGetConfigOption (char *, char *) + int CPLCheckForFile(char *, char **) + + +cdef extern from "cpl_string.h": + char ** CSLAddNameValue (char **list, const char *name, const char *value) + char ** CSLSetNameValue (char **list, const char *name, const char *value) + void CSLDestroy (char **list) + char ** CSLAddString(char **list, const char *string) + int CSLCount(char **papszStrList) + + +cdef extern from "sys/stat.h" nogil: + struct stat: + int st_mode + + +cdef extern from "cpl_vsi.h" nogil: + + ctypedef int vsi_l_offset + ctypedef FILE VSILFILE + ctypedef stat VSIStatBufL + + unsigned char *VSIGetMemFileBuffer(const char *path, + vsi_l_offset *data_len, + int take_ownership) + VSILFILE *VSIFileFromMemBuffer(const char *path, void *data, + vsi_l_offset data_len, int take_ownership) + VSILFILE* VSIFOpenL(const char *path, const char *mode) + int VSIFCloseL(VSILFILE *fp) + int VSIUnlink(const char *path) + int VSIMkdir(const char *path, long mode) + int VSIRmdir(const char *path) + int VSIFFlushL(VSILFILE *fp) + size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + char** VSIReadDir(const char* pszPath) + int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) + vsi_l_offset VSIFTellL(VSILFILE *fp) + int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) + size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) + int VSI_ISDIR(int mode) + + +cdef extern from "ogr_srs_api.h": + + ctypedef void * OGRSpatialReferenceH + + void OSRCleanup () + OGRSpatialReferenceH OSRClone (OGRSpatialReferenceH srs) + int OSRFixup (OGRSpatialReferenceH srs) + int OSRExportToProj4 (OGRSpatialReferenceH srs, char **params) + int OSRExportToWkt (OGRSpatialReferenceH srs, char **params) + int OSRImportFromEPSG (OGRSpatialReferenceH, int code) + int OSRImportFromProj4 (OGRSpatialReferenceH srs, const char *proj) + int OSRSetFromUserInput (OGRSpatialReferenceH srs, const char *input) + int OSRAutoIdentifyEPSG (OGRSpatialReferenceH srs) + const char * OSRGetAuthorityName (OGRSpatialReferenceH srs, const char 
*key) + const char * OSRGetAuthorityCode (OGRSpatialReferenceH srs, const char *key) + OGRSpatialReferenceH OSRNewSpatialReference (char *wkt) + void OSRRelease (OGRSpatialReferenceH srs) + void * OCTNewCoordinateTransformation (OGRSpatialReferenceH source, OGRSpatialReferenceH dest) + void OCTDestroyCoordinateTransformation (void *source) + int OCTTransform (void *ct, int nCount, double *x, double *y, double *z) + +cdef extern from "ogr_api.h": + + const char * OGR_Dr_GetName (void *driver) + int OGR_Dr_TestCapability (void *driver, const char *) + void * OGR_F_Create (void *featuredefn) + void OGR_F_Destroy (void *feature) + long OGR_F_GetFID (void *feature) + int OGR_F_IsFieldSet (void *feature, int n) + int OGR_F_GetFieldAsDateTimeEx (void *feature, int n, int *y, int *m, int *d, int *h, int *m, float *s, int *z) + double OGR_F_GetFieldAsDouble (void *feature, int n) + int OGR_F_GetFieldAsInteger (void *feature, int n) + char * OGR_F_GetFieldAsString (void *feature, int n) + unsigned char * OGR_F_GetFieldAsBinary(void *feature, int n, int *s) + int OGR_F_GetFieldCount (void *feature) + void * OGR_F_GetFieldDefnRef (void *feature, int n) + int OGR_F_GetFieldIndex (void *feature, char *name) + void * OGR_F_GetGeometryRef (void *feature) + void * OGR_F_StealGeometry (void *feature) + void OGR_F_SetFieldDateTimeEx (void *feature, int n, int y, int m, int d, int hh, int mm, float ss, int tz) + void OGR_F_SetFieldDouble (void *feature, int n, double value) + void OGR_F_SetFieldInteger (void *feature, int n, int value) + void OGR_F_SetFieldString (void *feature, int n, char *value) + void OGR_F_SetFieldBinary (void *feature, int n, int l, unsigned char *value) + void OGR_F_SetFieldNull (void *feature, int n) # new in GDAL 2.2 + int OGR_F_SetGeometryDirectly (void *feature, void *geometry) + void * OGR_FD_Create (char *name) + int OGR_FD_GetFieldCount (void *featuredefn) + void * OGR_FD_GetFieldDefn (void *featuredefn, int n) + int OGR_FD_GetGeomType (void *featuredefn) + char * OGR_FD_GetName (void *featuredefn) + void * OGR_Fld_Create (char *name, OGRFieldType fieldtype) + void OGR_Fld_Destroy (void *fielddefn) + char * OGR_Fld_GetNameRef (void *fielddefn) + int OGR_Fld_GetPrecision (void *fielddefn) + int OGR_Fld_GetType (void *fielddefn) + int OGR_Fld_GetWidth (void *fielddefn) + void OGR_Fld_Set (void *fielddefn, char *name, int fieldtype, int width, int precision, int justification) + void OGR_Fld_SetPrecision (void *fielddefn, int n) + void OGR_Fld_SetWidth (void *fielddefn, int n) + OGRFieldSubType OGR_Fld_GetSubType(void *fielddefn) + void OGR_Fld_SetSubType(void *fielddefn, OGRFieldSubType subtype) + OGRErr OGR_G_AddGeometryDirectly (void *geometry, void *part) + void OGR_G_AddPoint (void *geometry, double x, double y, double z) + void OGR_G_AddPoint_2D (void *geometry, double x, double y) + void OGR_G_CloseRings (void *geometry) + void * OGR_G_CreateGeometry (int wkbtypecode) + void OGR_G_DestroyGeometry (void *geometry) + unsigned char * OGR_G_ExportToJson (void *geometry) + OGRErr OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) + int OGR_G_GetCoordinateDimension (void *geometry) + int OGR_G_GetGeometryCount (void *geometry) + unsigned char * OGR_G_GetGeometryName (void *geometry) + int OGR_G_GetGeometryType (void *geometry) + void * OGR_G_GetGeometryRef (void *geometry, int n) + int OGR_G_GetPointCount (void *geometry) + double OGR_G_GetX (void *geometry, int n) + double OGR_G_GetY (void *geometry, int n) + double OGR_G_GetZ (void *geometry, int n) + OGRErr 
OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) + int OGR_G_WkbSize (void *geometry) + void * OGR_G_ForceToMultiPolygon (void *geometry) + void * OGR_G_ForceToPolygon (void *geometry) + void * OGR_G_Clone(void *geometry) + OGRErr OGR_L_CreateFeature (void *layer, void *feature) + OGRErr OGR_L_CreateField (void *layer, void *fielddefn, int flexible) + OGRErr OGR_L_GetExtent (void *layer, void *extent, int force) + void * OGR_L_GetFeature (void *layer, int n) + int OGR_L_GetFeatureCount (void *layer, int m) + void * OGR_G_GetLinearGeometry (void *hGeom, double dfMaxAngleStepSizeDegrees, char **papszOptions) + void * OGR_L_GetLayerDefn (void *layer) + char * OGR_L_GetName (void *layer) + void * OGR_L_GetNextFeature (void *layer) + void * OGR_L_GetSpatialFilter (void *layer) + void * OGR_L_GetSpatialRef (void *layer) + void OGR_L_ResetReading (void *layer) + void OGR_L_SetSpatialFilter (void *layer, void *geometry) + void OGR_L_SetSpatialFilterRect ( + void *layer, double minx, double miny, double maxx, double maxy + ) + int OGR_L_TestCapability (void *layer, char *name) + OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) + OGRErr OGR_L_SetAttributeFilter(void *layer, const char*) + OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) + long long OGR_F_GetFieldAsInteger64 (void *feature, int n) + void OGR_F_SetFieldInteger64 (void *feature, int n, long long value) diff --git a/.venv/lib/python3.12/site-packages/fiona/ogrext3.pxd b/.venv/lib/python3.12/site-packages/fiona/ogrext3.pxd new file mode 100644 index 00000000..2a445eb6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/ogrext3.pxd @@ -0,0 +1,339 @@ +# Copyright (c) 2007, Sean C. Gillies +# All rights reserved. +# See ../LICENSE.txt + +from libc.stdio cimport FILE + + +cdef extern from "ogr_core.h": + + ctypedef int OGRErr + + ctypedef enum OGRwkbGeometryType: + wkbUnknown + wkbPoint + wkbLineString + wkbPolygon + wkbMultiPoint + wkbMultiLineString + wkbMultiPolygon + wkbGeometryCollection + wkbCircularString + wkbCompoundCurve + wkbCurvePolygon + wkbMultiCurve + wkbMultiSurface + wkbCurve + wkbSurface + wkbPolyhedralSurface + wkbTIN + wkbTriangle + wkbNone + wkbLinearRing + wkbCircularStringZ + wkbCompoundCurveZ + wkbCurvePolygonZ + wkbMultiCurveZ + wkbMultiSurfaceZ + wkbCurveZ + wkbSurfaceZ + wkbPolyhedralSurfaceZ + wkbTINZ + wkbTriangleZ + wkbPointM + wkbLineStringM + wkbPolygonM + wkbMultiPointM + wkbMultiLineStringM + wkbMultiPolygonM + wkbGeometryCollectionM + wkbCircularStringM + wkbCompoundCurveM + wkbCurvePolygonM + wkbMultiCurveM + wkbMultiSurfaceM + wkbCurveM + wkbSurfaceM + wkbPolyhedralSurfaceM + wkbTINM + wkbTriangleM + wkbPointZM + wkbLineStringZM + wkbPolygonZM + wkbMultiPointZM + wkbMultiLineStringZM + wkbMultiPolygonZM + wkbGeometryCollectionZM + wkbCircularStringZM + wkbCompoundCurveZM + wkbCurvePolygonZM + wkbMultiCurveZM + wkbMultiSurfaceZM + wkbCurveZM + wkbSurfaceZM + wkbPolyhedralSurfaceZM + wkbTINZM + wkbTriangleZM + wkbPoint25D + wkbLineString25D + wkbPolygon25D + wkbMultiPoint25D + wkbMultiLineString25D + wkbMultiPolygon25D + wkbGeometryCollection25D + + ctypedef enum OGRFieldType: + OFTInteger + OFTIntegerList + OFTReal + OFTRealList + OFTString + OFTStringList + OFTWideString + OFTWideStringList + OFTBinary + OFTDate + OFTTime + OFTDateTime + OFTInteger64 + OFTInteger64List + OFTMaxType + + ctypedef int OGRFieldSubType + cdef int OFSTNone = 0 + cdef int OFSTBoolean = 1 + cdef int OFSTInt16 = 2 + cdef int OFSTFloat32 = 3 + cdef int OFSTMaxSubType = 3 + + 
ctypedef struct OGREnvelope: + double MinX + double MaxX + double MinY + double MaxY + + char * OGRGeometryTypeToName(int) + + + char * ODsCCreateLayer = "CreateLayer" + char * ODsCDeleteLayer = "DeleteLayer" + char * ODsCTransactions = "Transactions" + + +cdef extern from "gdal.h": + ctypedef void * GDALDriverH + ctypedef void * GDALMajorObjectH + + char * GDALVersionInfo (char *pszRequest) + void * GDALGetDriverByName(const char * pszName) + void * GDALOpenEx(const char * pszFilename, + unsigned int nOpenFlags, + const char *const *papszAllowedDrivers, + const char *const *papszOpenOptions, + const char *const *papszSiblingFiles + ) + int GDAL_OF_UPDATE + int GDAL_OF_READONLY + int GDAL_OF_VECTOR + int GDAL_OF_VERBOSE_ERROR + int GDALDatasetGetLayerCount(void * hds) + void * GDALDatasetGetLayer(void * hDS, int iLayer) + void * GDALDatasetGetLayerByName(void * hDS, char * pszName) + void GDALClose(void * hDS) + void * GDALCreate(void * hDriver, + const char * pszFilename, + int nXSize, + int nYSize, + int nBands, + GDALDataType eBandType, + char ** papszOptions) + void * GDALDatasetCreateLayer(void * hDS, + const char * pszName, + void * hSpatialRef, + int eType, + char ** papszOptions) + int GDALDatasetDeleteLayer(void * hDS, int iLayer) + void GDALFlushCache(void * hDS) + char * GDALGetDriverShortName(void * hDriver) + char * GDALGetDatasetDriver (void * hDataset) + int GDALDeleteDataset(void * hDriver, const char * pszFilename) + OGRErr GDALDatasetStartTransaction (void * hDataset, int bForce) + OGRErr GDALDatasetCommitTransaction (void * hDataset) + OGRErr GDALDatasetRollbackTransaction (void * hDataset) + int GDALDatasetTestCapability (void * hDataset, char *) + const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszDomain) + + ctypedef enum GDALDataType: + GDT_Unknown + GDT_Byte + GDT_UInt16 + GDT_Int16 + GDT_UInt32 + GDT_Int32 + GDT_Float32 + GDT_Float64 + GDT_CInt16 + GDT_CInt32 + GDT_CFloat32 + GDT_CFloat64 + GDT_TypeCount + + +cdef extern from "gdal_version.h": + int GDAL_COMPUTE_VERSION(int maj, int min, int rev) + + +cdef extern from "cpl_conv.h": + void * CPLMalloc (size_t) + void CPLFree (void *ptr) + void CPLSetThreadLocalConfigOption (char *key, char *val) + const char *CPLGetConfigOption (char *, char *) + int CPLCheckForFile(char *, char **) + + +cdef extern from "cpl_string.h": + char ** CSLAddNameValue (char **list, const char *name, const char *value) + char ** CSLSetNameValue (char **list, const char *name, const char *value) + void CSLDestroy (char **list) + char ** CSLAddString(char **list, const char *string) + int CSLCount(char **papszStrList) + + +cdef extern from "sys/stat.h" nogil: + struct stat: + int st_mode + + +cdef extern from "cpl_vsi.h" nogil: + + ctypedef int vsi_l_offset + ctypedef FILE VSILFILE + ctypedef stat VSIStatBufL + + unsigned char *VSIGetMemFileBuffer(const char *path, + vsi_l_offset *data_len, + int take_ownership) + VSILFILE *VSIFileFromMemBuffer(const char *path, void *data, + vsi_l_offset data_len, int take_ownership) + VSILFILE* VSIFOpenL(const char *path, const char *mode) + int VSIFCloseL(VSILFILE *fp) + int VSIUnlink(const char *path) + int VSIMkdir(const char *path, long mode) + int VSIRmdir(const char *path) + int VSIFFlushL(VSILFILE *fp) + size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + char** VSIReadDir(const char* pszPath) + int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) + vsi_l_offset VSIFTellL(VSILFILE *fp) + int VSIFTruncateL(VSILFILE *fp, 
vsi_l_offset nNewSize) + size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) + int VSI_ISDIR(int mode) + + +cdef extern from "ogr_srs_api.h": + + ctypedef void * OGRSpatialReferenceH + + void OSRCleanup () + OGRSpatialReferenceH OSRClone (OGRSpatialReferenceH srs) + int OSRExportToProj4 (OGRSpatialReferenceH srs, char **params) + int OSRExportToWkt (OGRSpatialReferenceH srs, char **params) + int OSRImportFromEPSG (OGRSpatialReferenceH, int code) + int OSRImportFromProj4 (OGRSpatialReferenceH srs, const char *proj) + int OSRSetFromUserInput (OGRSpatialReferenceH srs, const char *input) + int OSRAutoIdentifyEPSG (OGRSpatialReferenceH srs) + const char * OSRGetAuthorityName (OGRSpatialReferenceH srs, const char *key) + const char * OSRGetAuthorityCode (OGRSpatialReferenceH srs, const char *key) + OGRSpatialReferenceH OSRNewSpatialReference (char *wkt) + void OSRRelease (OGRSpatialReferenceH srs) + void * OCTNewCoordinateTransformation (OGRSpatialReferenceH source, OGRSpatialReferenceH dest) + void OCTDestroyCoordinateTransformation (void *source) + int OCTTransform (void *ct, int nCount, double *x, double *y, double *z) + void OSRGetPROJVersion (int *pnMajor, int *pnMinor, int *pnPatch) + + +cdef extern from "ogr_api.h": + + const char * OGR_Dr_GetName (void *driver) + int OGR_Dr_TestCapability (void *driver, const char *) + void * OGR_F_Create (void *featuredefn) + void OGR_F_Destroy (void *feature) + long OGR_F_GetFID (void *feature) + int OGR_F_IsFieldSet (void *feature, int n) + int OGR_F_GetFieldAsDateTimeEx (void *feature, int n, int *y, int *m, int *d, int *h, int *m, float *s, int *z) + double OGR_F_GetFieldAsDouble (void *feature, int n) + int OGR_F_GetFieldAsInteger (void *feature, int n) + char * OGR_F_GetFieldAsString (void *feature, int n) + unsigned char * OGR_F_GetFieldAsBinary(void *feature, int n, int *s) + int OGR_F_GetFieldCount (void *feature) + void * OGR_F_GetFieldDefnRef (void *feature, int n) + int OGR_F_GetFieldIndex (void *feature, char *name) + void * OGR_F_GetGeometryRef (void *feature) + void * OGR_F_StealGeometry (void *feature) + void OGR_F_SetFieldDateTimeEx (void *feature, int n, int y, int m, int d, int hh, int mm, float ss, int tz) + void OGR_F_SetFieldDouble (void *feature, int n, double value) + void OGR_F_SetFieldInteger (void *feature, int n, int value) + void OGR_F_SetFieldString (void *feature, int n, char *value) + void OGR_F_SetFieldBinary (void *feature, int n, int l, unsigned char *value) + void OGR_F_SetFieldNull (void *feature, int n) # new in GDAL 2.2 + int OGR_F_SetGeometryDirectly (void *feature, void *geometry) + void * OGR_FD_Create (char *name) + int OGR_FD_GetFieldCount (void *featuredefn) + void * OGR_FD_GetFieldDefn (void *featuredefn, int n) + int OGR_FD_GetGeomType (void *featuredefn) + char * OGR_FD_GetName (void *featuredefn) + void * OGR_Fld_Create (char *name, OGRFieldType fieldtype) + void OGR_Fld_Destroy (void *fielddefn) + char * OGR_Fld_GetNameRef (void *fielddefn) + int OGR_Fld_GetPrecision (void *fielddefn) + int OGR_Fld_GetType (void *fielddefn) + int OGR_Fld_GetWidth (void *fielddefn) + void OGR_Fld_Set (void *fielddefn, char *name, int fieldtype, int width, int precision, int justification) + void OGR_Fld_SetPrecision (void *fielddefn, int n) + void OGR_Fld_SetWidth (void *fielddefn, int n) + OGRFieldSubType OGR_Fld_GetSubType(void *fielddefn) + void OGR_Fld_SetSubType(void *fielddefn, OGRFieldSubType subtype) + OGRErr 
OGR_G_AddGeometryDirectly (void *geometry, void *part) + void OGR_G_AddPoint (void *geometry, double x, double y, double z) + void OGR_G_AddPoint_2D (void *geometry, double x, double y) + void OGR_G_CloseRings (void *geometry) + void * OGR_G_CreateGeometry (int wkbtypecode) + void OGR_G_DestroyGeometry (void *geometry) + unsigned char * OGR_G_ExportToJson (void *geometry) + OGRErr OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) + int OGR_G_GetGeometryCount (void *geometry) + unsigned char * OGR_G_GetGeometryName (void *geometry) + int OGR_G_GetGeometryType (void *geometry) + void * OGR_G_GetGeometryRef (void *geometry, int n) + int OGR_G_GetPointCount (void *geometry) + double OGR_G_GetX (void *geometry, int n) + double OGR_G_GetY (void *geometry, int n) + double OGR_G_GetZ (void *geometry, int n) + OGRErr OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) + int OGR_G_WkbSize (void *geometry) + void * OGR_G_ForceToMultiPolygon (void *geometry) + void * OGR_G_ForceToPolygon (void *geometry) + void * OGR_G_Clone(void *geometry) + OGRErr OGR_L_CreateFeature (void *layer, void *feature) + OGRErr OGR_L_CreateField (void *layer, void *fielddefn, int flexible) + OGRErr OGR_L_GetExtent (void *layer, void *extent, int force) + void * OGR_L_GetFeature (void *layer, int n) + int OGR_L_GetFeatureCount (void *layer, int m) + void * OGR_G_GetLinearGeometry (void *hGeom, double dfMaxAngleStepSizeDegrees, char **papszOptions) + void * OGR_L_GetLayerDefn (void *layer) + char * OGR_L_GetName (void *layer) + void * OGR_L_GetNextFeature (void *layer) + void * OGR_L_GetSpatialFilter (void *layer) + void * OGR_L_GetSpatialRef (void *layer) + void OGR_L_ResetReading (void *layer) + void OGR_L_SetSpatialFilter (void *layer, void *geometry) + void OGR_L_SetSpatialFilterRect ( + void *layer, double minx, double miny, double maxx, double maxy + ) + int OGR_L_TestCapability (void *layer, char *name) + OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) + OGRErr OGR_L_SetAttributeFilter(void *layer, const char*) + OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) + long long OGR_F_GetFieldAsInteger64 (void *feature, int n) + void OGR_F_SetFieldInteger64 (void *feature, int n, long long value) diff --git a/.venv/lib/python3.12/site-packages/fiona/path.py b/.venv/lib/python3.12/site-packages/fiona/path.py new file mode 100644 index 00000000..72f25ac8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/path.py @@ -0,0 +1,17 @@ +"""Dataset paths, identifiers, and filenames + +Note well: this module is deprecated in 1.3.0 and will be removed in a +future version. +""" + +import warnings + +from fiona._path import _ParsedPath as ParsedPath +from fiona._path import _UnparsedPath as UnparsedPath +from fiona._path import _parse_path as parse_path +from fiona._path import _vsi_path as vsi_path +from fiona.errors import FionaDeprecationWarning + +warnings.warn( + "fiona.path will be removed in version 2.0.", FionaDeprecationWarning +) diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/CH b/.venv/lib/python3.12/site-packages/fiona/proj_data/CH new file mode 100644 index 00000000..725328f4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/CH @@ -0,0 +1,22 @@ +# This init file provides definitions for CH1903 and CH1903/LV03 +# projections using the distortion grids developed by Swisstopo. 
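A minimal usage sketch for this init file, assuming pyproj is available (pyproj is not part of this changeset): built without the CHENyx06 grid shift, the <1903_LV03> definition below is essentially EPSG:21781.

    from pyproj import CRS, Transformer

    # CH1903/LV03 from the somerc parameters in the <1903_LV03> section
    # below; the nadgrids shift is omitted, so datum accuracy is approximate.
    lv03 = CRS.from_proj4(
        "+proj=somerc +lat_0=46.95240555555556 +lon_0=7.439583333333333 "
        "+k_0=1 +x_0=600000 +y_0=200000 +ellps=bessel +units=m +no_defs"
    )
    to_lv03 = Transformer.from_crs("EPSG:4326", lv03, always_xy=True)
    # The projection origin (the old Bern observatory) maps to the false
    # origin, so this prints approximately (600000, 200000).
    print(to_lv03.transform(7.439583333333333, 46.95240555555556))

Referencing the file through PROJ's init mechanism instead (e.g. +init=CH:1903_LV03 with this file on the PROJ search path) would pick up the grid shift as well.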
+# See: https://shop.swisstopo.admin.ch/en/products/geo_software/GIS_info +# +# You'll need to download the grids separately and put them in a directory +# scanned by libproj. +# +# Note that an independent effort was made to derive a usable grid +# from the CH1903->CH1903+ grid initially available from the Swisstopo +# website. You can read about this other effort here: +# http://lists.maptools.org/pipermail/proj/2012-February/006093.html +# It may be of interest because the latter was reported by some as being +# more accurate than the former: +# http://lists.maptools.org/pipermail/proj/2012-February/006119.html +# +# This init file uses the official one +# + +<metadata> +origin=Swisstopo +lastupdate=2012-02-27 +# CH1903/LV03 +<1903_LV03> +proj=somerc +lat_0=46.95240555555556 +lon_0=7.439583333333333 +k_0=1 +x_0=600000 +y_0=200000 +ellps=bessel +units=m +nadgrids=CHENyx06_ETRS.gsb +no_defs <> +# CH1903 +<1903> +proj=longlat +ellps=bessel +nadgrids=CHENyx06_ETRS.gsb +no_defs <> diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/GL27 b/.venv/lib/python3.12/site-packages/fiona/proj_data/GL27 new file mode 100644 index 00000000..73fa9754 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/GL27 @@ -0,0 +1,23 @@ +# SCCSID @(#)GL27 1.1 93/08/25 GIE REL +# Great Lakes Grids + +<metadata> +lastupdate=1993-08-25 +<erie> # Lake Erie, Ontario and St. Lawrence River. + proj=omerc ellps=clrk66 k_0=0.9999 + lonc=78d00'W lat_0=44d00'N alpha=55d40' + x_0=-3950000 y_0=-3430000 + no_defs <> +<huron> # Lake Huron + proj=omerc ellps=clrk66 k_0=0.9999 + lonc=82d00'W lat_0=43d00'N alpha=350d37' + x_0=1200000 y_0=-3500000 + no_defs <> +<michigan> # Lake Michigan + proj=omerc ellps=clrk66 k_0=0.9999 + lonc=87d00'W lat_0=44d00'N alpha=15d00' + x_0=-1000000 y_0=-4300000 + no_defs <> +<superior> # Lake Superior, Lake of the Woods + proj=omerc ellps=clrk66 k_0=0.9999 + lonc=88d50'0.256"W lat_0=47d12'21.554"N alpha=285d41'42.593" + x_0=9000000 y_0=-1600000 + no_defs <> diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/ITRF2000 b/.venv/lib/python3.12/site-packages/fiona/proj_data/ITRF2000 new file mode 100644 index 00000000..439d1970 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/ITRF2000 @@ -0,0 +1,24 @@ +# ITRF2000 params are in cm/year, PJ_helmert uses m/year + +<metadata> +version=1.0.0 +origin=ftp://itrf.ensg.ign.fr/pub/itrf/ITRF.TP +lastupdate=2017-07-25 + +# ITRF2000 -> ITRF2005 is only defined the opposite way, so we flip the sign on all +# parameters to get the opposite transformation. Parameters from http://itrf.ign.fr/ITRF_solutions/2005/tp_05-00.php
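The comment above is the key to reading these files: each block is a set of PJ_helmert parameters, already converted to metres and metres per year, that can be run as-is in a PROJ pipeline. A hedged sketch using the sign-flipped ITRF2000 to ITRF2005 block that follows, assuming pyproj is available and using a made-up geocentric point:

    from pyproj import Transformer

    # The first parameter block below, written out as a pipeline string.
    helmert = Transformer.from_pipeline(
        "+proj=pipeline +step +proj=helmert "
        "+x=-0.0001 +y=0.0008 +z=0.0058 +s=-0.0004 "
        "+dx=0.0002 +dy=-0.0001 +dz=0.0018 +ds=-0.00008 "
        "+t_epoch=2000.0 +convention=position_vector"
    )
    # Geocentric X, Y, Z in metres plus a decimal-year epoch; the rate
    # terms (dx, dy, dz, ds) scale with the offset from t_epoch.
    print(helmert.transform(4027894.0, 307045.0, 4919475.0, 2010.0))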
+ +proj=helmert +x=-0.0001 +y=0.0008 +z=0.0058 +s=-0.0004 +dx=0.0002 +dy=-0.0001 +dz=0.0018 +ds=-0.00008 +t_epoch=2000.0 +convention=position_vector + + +proj=helmert +x=0.0067 +y=0.0061 +z=-0.0185 +s=0.00155 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1997.0 +convention=position_vector + + +proj=helmert +x=0.0067 +y=0.0061 +z=-0.0185 +s=0.00155 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1997.0 +convention=position_vector + + +proj=helmert +x=0.0067 +y=0.0061 +z=-0.0185 +s=0.00155 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1997.0 +convention=position_vector + + +proj=helmert +x=0.0127 +y=0.0065 +z=-0.0209 +s=0.00195 +rx=-0.00039 +ry=0.00080 +rz=-0.00114 +dx=-0.0029 +dy=-0.0002 +dz=-0.0006 +ds=0.00001 +drx=-0.00011 +dry=-0.00019 +drz=0.00007 +t_epoch=1988.0 +convention=position_vector + + +proj=helmert +x=0.0147 +y=0.0135 +z=-0.0139 +s=0.00075 +rz=-0.00018 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1988.0 +convention=position_vector + + +proj=helmert +x=0.0267 +y=0.0275 +z=-0.0199 +s=0.00215 +rz=-0.00018 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1988.0 +convention=position_vector + + +proj=helmert +x=0.0247 +y=0.0235 +z=-0.0359 +s=0.00245 +rz=-0.00018 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1988.0 +convention=position_vector + + +proj=helmert +x=0.0297 +y=0.0475 +z=-0.0739 +s=0.00585 +rz=-0.00018 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1988.0 +convention=position_vector + + +proj=helmert +x=0.0247 +y=0.0115 +z=-0.0979 +s=0.00895 +rx=0.0001 +rz=-0.00018 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1988.0 +convention=position_vector diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/ITRF2008 b/.venv/lib/python3.12/site-packages/fiona/proj_data/ITRF2008 new file mode 100644 index 00000000..bd5f7cee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/ITRF2008 @@ -0,0 +1,94 @@ +# ITRF2008 params are in mm/year, PJ_helmert uses m/year + +<metadata> +version=1.0.0 +origin=http://itrf.ign.fr/doc_ITRF/Transfo-ITRF2008_ITRFs.txt +lastupdate=2017-07-26 + + +proj=helmert +x=-0.002 +y=-0.0009 +z=-0.0047 +s=0.00094 +dx=0.0003 +t_epoch=2000.0 +convention=position_vector + + +proj=helmert +x=-0.0019 +y=-0.0017 +z=-0.0105 +s=0.00134 +dx=0.0001 +dy=0.0001 +dz=-0.0018 +ds=0.00008 +t_epoch=2000.0 +convention=position_vector + + +proj=helmert +x=0.0048 +y=0.0026 +z=-0.0332 +s=0.00292 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector + + +proj=helmert +x=0.0048 +y=0.0026 +z=-0.0332 +s=0.00292 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector + + +proj=helmert +x=0.0048 +y=0.0026 +z=-0.0332 +s=0.00292 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector + + +proj=helmert +x=-0.024 +y=0.0024 +z=-0.0386 +s=0.00341 +rx=-0.00171 +ry=-0.00148 +rz=-0.0003 +dx=-0.0028 +dy=-0.0001 +dz=-0.0024 +ds=0.00009 +drx=-0.00011 +dry=-0.00019 +drz=0.00007 +t_epoch=2000.0 +convention=position_vector + + +proj=helmert +x=0.0128 +y=0.0046 +z=-0.0412 +s=0.00221 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector + + +proj=helmert +x=0.0248 +y=0.0186 +z=-0.0472 +s=0.00361 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0
+convention=position_vector + + +proj=helmert +x=0.0228 +y=0.0146 +z=-0.0632 +s=0.00391 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector + + +proj=helmert +x=0.0278 +y=0.0386 +z=-0.1012 +s=0.00731 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector + + +proj=helmert +x=0.0228 +y=0.0026 +z=-0.1252 +s=0.01041 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector + + +# ITRF2008 Plate Motion Model parameters +# +# As described in +# +# Altamimi, Z., L. Métivier, and X. Collilieux (2012), ITRF2008 plate motion model, +# J. Geophys. Res., 117, B07402, doi:10.1029/2011JB008930. + + + +proj=helmert +drx=-0.000190 +dry=-0.000442 +drz=0.000915 +convention=position_vector + + +proj=helmert +drx=-0.000252 +dry=-0.000302 +drz=0.000643 +convention=position_vector + + +proj=helmert +drx=0.001202 +dry=-0.000054 +drz=0.001485 +convention=position_vector + + +proj=helmert +drx=0.001504 +dry=0.001172 +drz=0.001228 +convention=position_vector + + +proj=helmert +drx=0.000049 +dry=-0.001088 +drz=0.000664 +convention=position_vector + + +proj=helmert +drx=-0.000083 +dry=-0.000534 +drz=0.000750 +convention=position_vector + + +proj=helmert +drx=0.001232 +dry=0.000303 +drz=0.001540 +convention=position_vector + + +proj=helmert +drx=-0.000330 +dry=-0.001551 +drz=0.001625 +convention=position_vector + + +proj=helmert +drx=0.000035 +dry=-0.000662 +drz=-0.0001 +convention=position_vector + + +proj=helmert +drx=0.000095 +dry=-0.000598 +drz=0.000723 +convention=position_vector + + +proj=helmert +drx=-0.000411 +dry=0.001036 +drz=-0.002166 +convention=position_vector + + +proj=helmert +drx=-0.000243 +dry=-0.000311 +drz=-0.000154 +convention=position_vector + + +proj=helmert +drx=-0.000080 +dry=-0.000745 +drz=0.000897 +convention=position_vector + + +proj=helmert +drx=0.000047 +dry=-0.001 +drz=0.000975 +convention=position_vector + + +# Plate names suffixed by _T (for Translation) that include the translation +# rates dx=0.00041 dy=0.00022 dz=0.00041 given by Table 2 of the ITRF2008 plate motion model +# paper + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000190 +dry=-0.000442 +drz=0.000915 +convention=position_vector + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000252 +dry=-0.000302 +drz=0.000643 +convention=position_vector + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.001202 +dry=-0.000054 +drz=0.001485 +convention=position_vector + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.001504 +dry=0.001172 +drz=0.001228 +convention=position_vector + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.000049 +dry=-0.001088 +drz=0.000664 +convention=position_vector + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000083 +dry=-0.000534 +drz=0.000750 +convention=position_vector + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.001232 +dry=0.000303 +drz=0.001540 +convention=position_vector + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000330 +dry=-0.001551 +drz=0.001625 +convention=position_vector + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.000035 +dry=-0.000662 +drz=-0.0001 +convention=position_vector + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.000095 +dry=-0.000598 +drz=0.000723 +convention=position_vector + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000411 +dry=0.001036
+drz=-0.002166 +convention=position_vector + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000243 +dry=-0.000311 +drz=-0.000154 +convention=position_vector + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000080 +dry=-0.000745 +drz=0.000897 +convention=position_vector + + +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.000047 +dry=-0.001 +drz=0.000975 +convention=position_vector diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/ITRF2014 b/.venv/lib/python3.12/site-packages/fiona/proj_data/ITRF2014 new file mode 100644 index 00000000..e16fb88c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/ITRF2014 @@ -0,0 +1,55 @@ +# ITRF2014 params are in mm/year, PJ_helmert uses m/year + +<metadata> +version=1.0.0 +origin=http://itrf.ign.fr/doc_ITRF/Transfo-ITRF2014_ITRFs.txt +lastupdate=2017-07-26 + + +proj=helmert +x=0.0016 +y=0.0019 +z=0.0024 +s=-0.00002 +dz=-0.0001 +ds=0.00003 +t_epoch=2010.0 +convention=position_vector + + +proj=helmert +x=0.0026 +y=0.001 +z=-0.0023 +s=0.00092 +dx=0.0003 +dz=-0.0001 +ds=0.00003 +t_epoch=2010.0 +convention=position_vector + + +proj=helmert +x=0.0007 +y=0.0012 +z=-0.0261 +s=0.00212 +dx=0.0001 +dy=0.0001 +dz=-0.0019 +ds=0.00011 +t_epoch=2010.0 +convention=position_vector + + +proj=helmert +x=0.0074 +y=-0.0005 +z=-0.0628 +s=0.0038 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector + + +proj=helmert +x=0.0074 +y=-0.0005 +z=-0.0628 +s=0.0038 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector + + +proj=helmert +x=0.0074 +y=-0.0005 +z=-0.0628 +s=0.0038 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector + + +proj=helmert +x=-0.0504 +y=0.0033 +z=-0.0602 +s=0.00429 +rx=-0.00281 +ry=-0.00338 +rz=0.0004 +dx=-0.0028 +dy=-0.0001 +dz=-0.0025 +ds=0.00012 +drx=-0.00011 +dry=-0.00019 +drz=0.00007 +t_epoch=2010.0 +convention=position_vector + + +proj=helmert +x=0.0154 +y=0.0015 +z=-0.0708 +s=0.00309 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector + + +proj=helmert +x=0.0274 +y=0.0155 +z=-0.0768 +s=0.00449 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector + + +proj=helmert +x=0.0254 +y=0.0115 +z=-0.0928 +s=0.00479 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector + + +proj=helmert +x=0.0304 +y=0.0355 +z=-0.1308 +s=0.00819 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector + + +proj=helmert +x=0.0254 +y=-0.0005 +z=-0.1548 +s=0.01129 +rx=0.0001 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector + +# ITRF2014 Plate Motion Model parameters +# +# As described in +# +# Z.
Altamimi et al, 2017, ITRF2014 plate motion model, +# doi: 10.1093/gji/ggx136 + + +proj=helmert +drx=-0.000248 +dry=-0.000324 +drz=0.000675 +convention=position_vector + + +proj=helmert +drx=0.001154 +dry=-0.000136 +drz=0.001444 +convention=position_vector + + +proj=helmert +drx=0.001510 +dry=0.001182 +drz=0.001215 +convention=position_vector + + +proj=helmert +drx=-0.000085 +dry=-0.000531 +drz=0.000770 +convention=position_vector + + +proj=helmert +drx=0.001154 +dry=-0.000005 +drz=0.001454 +convention=position_vector + + +proj=helmert +drx=-0.000333 +dry=-0.001544 +drz=0.001623 +convention=position_vector + + +proj=helmert +drx=0.000024 +dry=-0.000694 +drz=-0.000063 +convention=position_vector + + +proj=helmert +drx=0.000099 +dry=-0.000614 +drz=0.000733 +convention=position_vector + + +proj=helmert +drx=-0.000409 +dry=0.001047 +drz=-0.002169 +convention=position_vector + + +proj=helmert +drx=-0.000270 +dry=-0.000301 +drz=-0.000140 +convention=position_vector + + +proj=helmert +drx=-0.000121 +dry=-0.000794 +drz=0.000884 +convention=position_vector diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/deformation_model.schema.json b/.venv/lib/python3.12/site-packages/fiona/proj_data/deformation_model.schema.json new file mode 100644 index 00000000..d7a6d162 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/deformation_model.schema.json @@ -0,0 +1,582 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "description": "Schema for deformation models", + "type": "object", + "properties": { + "file_type": { + "type": "string", + "enum": [ + "deformation_model_master_file" + ], + "description": "File type. Always \"deformation_model_master_file\"" + }, + "format_version": { + "type": "string", + "enum": [ + "1.0" + ] + }, + "name": { + "type": "string", + "description": "A brief descriptive name of the deformation model" + }, + "version": { + "type": "string", + "description": "A string identifying the version of the deformation model. The format for specifying version will be defined by the agency responsible for the deformation model" + }, + "publication_date": { + "$ref": "#/definitions/datetime", + "description": "The date on which this version of the deformation model was published (or possibly the date on which it takes effect?)" + }, + "license": { + "type": "string", + "description": "License under which the model is published" + }, + "description": { + "type": "string", + "description": "A text description of the model" + }, + "authority": { + "type": "object", + "description": "Basic information about the agency responsible for the data set", + "properties": { + "name": { + "type": "string", + "description": "The name of the agency" + }, + "url": { + "type": "string", + "description": "The url of the agency website", + "format": "uri" + }, + "address": { + "type": "string", + "description": "The postal address of the agency" + }, + "email": { + "type": "string", + "description": "An email contact address for the agency", + "format": "email" + } + }, + "required": [ + "name" + ], + "additionalProperties": false + }, + "links": { + "type": "array", + "description": "Links to related information", + "items": { + "type": "object", + "properties": { + "href": { + "type": "string", + "description": "The URL holding the information", + "format": "uri" + }, + "rel": { + "type": "string", + "description": "The relationship to the dataset. 
Proposed relationships are:\n- \"about\": a web page for human consumption describing the model\n- \"source\": the authoritative source data from which the deformation model is built.\n- \"metadata\": ISO 19115 XML metadata regarding the deformation model." + }, + "type": { + "type": "string", + "description": "MIME type" + }, + "title": { + "type": "string", + "description": "Description of the link" + } + }, + "required": [ + "href" + ], + "additionalProperties": false + } + }, + "source_crs": { + "$ref": "#/definitions/crs", + "description": "The coordinate reference system to which the deformation model applies" + }, + "target_crs": { + "$ref": "#/definitions/crs", + "description": "For a time dependent coordinate transformation the coordinate reference system resulting from applying the deformation" + }, + "definition_crs": { + "$ref": "#/definitions/crs", + "description": "The coordinate reference system used to define the component spatial models. This proposal only supports using the same value for the source and definition coordinate reference system." + }, + "reference_epoch": { + "$ref": "#/definitions/datetime", + "description": "A nominal reference epoch of the deformation model. This is not necessarily used to calculate the deformation model - each component defines its own time function." + }, + "uncertainty_reference_epoch": { + "$ref": "#/definitions/datetime", + "description": "The uncertainties of the deformation model are calculated in terms of this epoch. This is described below in the Time functions section." + }, + "horizontal_offset_unit": { + "type": "string", + "enum": [ + "metre", + "degree" + ] + }, + "vertical_offset_unit": { + "type": "string", + "enum": [ + "metre" + ] + }, + "horizontal_uncertainty_type": { + "type": "string", + "enum": [ + "circular 95% confidence limit" + ] + }, + "horizontal_uncertainty_unit": { + "type": "string", + "enum": [ + "metre" + ] + }, + "vertical_uncertainty_type": { + "type": "string", + "enum": [ + "95% confidence limit" + ] + }, + "vertical_uncertainty_unit": { + "type": "string", + "enum": [ + "metre" + ] + }, + "horizontal_offset_method": { + "type": "string", + "description": "Defines how the horizontal offsets are applied to geographic coordinates", + "enum": [ + "addition", + "geocentric" + ] + }, + "extent": { + "$ref": "#/definitions/extent", + "description": "Defines the region within which the deformation model is defined. It cannot be calculated outside this region. The region is specified by a type and value. This proposal only supports using a bounding box as an array of [west,south,east,north] coordinate values" + }, + "time_extent": { + "type": "object", + "description": "Defines the range of times for which the model is valid, specified by a first and a last value. 
The deformation model is undefined for dates outside this range.", + "properties": { + "first": { + "$ref": "#/definitions/datetime" + }, + "last": { + "$ref": "#/definitions/datetime" + } + }, + "required": [ + "first", + "last" + ], + "additionalProperties": false + }, + "components": { + "type": "array", + "items": { + "$ref": "#/definitions/component" + } + } + }, + "required": [ + "file_type", + "format_version", + "source_crs", + "target_crs", + "definition_crs", + "extent", + "time_extent", + "components" + ], + "additionalProperties": false, + "definitions": { + "component": { + "type": "object", + "definition": "A component describes an aspect of the deformation, such as glacial isostatic adjustment, secular deformation, earthquakes, etc.", + "properties": { + "description": { + "type": "string", + "description": "A text description of this component of the model" + }, + "extent": { + "$ref": "#/definitions/extent", + "description": "The region within the component is defined. Outside this region the component evaluates to 0. The region is specified by a type and value. This proposal only supports using a bounding box as an array of [west,south,east,north] coordinate values" + }, + "displacement_type": { + "type": "string", + "description": "The displacement parameters defined by the model. The \"none\" option allows for a component which defines uncertainty with different grids to those defining displacement", + "enum": [ + "none", + "horizontal", + "vertical", + "3d" + ] + }, + "uncertainty_type": { + "type": "string", + "description": "The uncertainty parameters defined by the model", + "enum": [ + "none", + "horizontal", + "vertical", + "3d" + ] + }, + "horizontal_uncertainty": { + "type": "number", + "description": "The horizontal uncertainty to use if it is not defined explicitly in the spatial model" + }, + "vertical_uncertainty": { + "type": "number", + "description": "The vertical uncertainty to use if it is not defined explicitly in the spatial model" + }, + "spatial_model": { + "type": "object", + "description": "Defines the spatial model", + "properties": { + "type": { + "type": "string", + "description": "Specifies the type of the spatial model data file. 
Initially it is proposed that only GeoTIFF is supported", + "enum": [ + "GeoTIFF" + ] + }, + "interpolation_method": { + "type": "string", + "description": "Interpolation method", + "enum": [ + "bilinear", + "geocentric_bilinear" + ] + }, + "filename": { + "type": "string", + "description": "Specifies location of the spatial model GeoTIFF file relative to this JSON file" + }, + "md5_checksum": { + "type": "string", + "description": "A hex encoded MD5 checksum of the grid file that can be used to validate that it is the correct version of the file" + } + }, + "required": [ + "type", + "interpolation_method", + "filename" + ], + "additionalProperties": false + }, + "time_function": { + "$ref": "#/definitions/time_function" + } + }, + "required": [ + "description", + "extent", + "displacement_type", + "spatial_model", + "time_function" + ], + "additionalProperties": false + }, + "crs": { + "type": "string", + "pattern": "^[a-zA-Z]+:[a-zA-Z0-9]+$" + }, + "datetime": { + "type": "string", + "format": "date-time", + "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + }, + "extent": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "bbox" + ] + }, + "parameters": { + "type": "object", + "properties": { + "bbox": { + "type": "array", + "minItems": 4, + "maxItems": 4, + "items": { + "type": "number" + } + } + } + } + }, + "required": [ + "type", + "parameters" + ], + "additionalProperties": false + }, + "time_function": { + "description": "Function describing a multiplicative factor to apply to the spatial_model depending on the time", + "oneOf": [ + { + "$ref": "#/definitions/time_function_constant" + }, + { + "$ref": "#/definitions/time_function_velocity" + }, + { + "$ref": "#/definitions/time_function_step" + }, + { + "$ref": "#/definitions/time_function_reverse_step" + }, + { + "$ref": "#/definitions/time_function_piecewise" + }, + { + "$ref": "#/definitions/time_function_exponential" + } + ] + }, + "time_function_constant": { + "description": "The valuation of this function is 1 at any epoch", + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "constant" + ] + }, + "parameters": { + "type": "object", + "properties": { + }, + "additionalProperties": false + } + }, + "required": [ + "type" + ], + "additionalProperties": false + }, + "time_function_velocity": { + "description": "The valuation of this function is 0 at reference_epoch, and proportional to the time difference to it at other times", + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "velocity" + ] + }, + "parameters": { + "type": "object", + "properties": { + "reference_epoch": { + "$ref": "#/definitions/datetime" + } + }, + "required": [ + "reference_epoch" + ], + "additionalProperties": false + } + }, + "required": [ + "type", + "parameters" + ], + "additionalProperties": false + }, + "time_function_step": { + "description": "The valuation of this function is 0 before step_epoch, and 1 starting from it", + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "step" + ] + }, + "parameters": { + "type": "object", + "properties": { + "step_epoch": { + "$ref": "#/definitions/datetime" + } + }, + "required": [ + "step_epoch" + ], + "additionalProperties": false + } + }, + "required": [ + "type", + "parameters" + ], + "additionalProperties": false + }, + "time_function_reverse_step": { + "description": "The valuation of this function is 1 before step_epoch, and 0 starting from it", + 
"type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "reverse_step" + ] + }, + "parameters": { + "type": "object", + "properties": { + "step_epoch": { + "$ref": "#/definitions/datetime" + } + }, + "required": [ + "step_epoch" + ], + "additionalProperties": false + } + }, + "required": [ + "type", + "parameters" + ], + "additionalProperties": false + }, + "time_function_piecewise": { + "description": "Piecewise time function", + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "piecewise" + ] + }, + "parameters": { + "type": "object", + "properties": { + "before_first": { + "type": "string", + "description": "Defines the behaviour of the function before the first defined epoch", + "enum": [ + "zero", + "constant", + "linear" + ] + }, + "after_last": { + "type": "string", + "description": "Defines the behaviour of the function after the last defined epoch", + "enum": [ + "zero", + "constant", + "linear" + ] + }, + "model": { + "type": "array", + "description": "A sorted array data points each defined by two elements, \"epoch\" defines the date/time of the data point, and \"scale_factor\" is the corresponding function value. The array is sorted in order of increasing epoch. Note: where the time function includes a step it is represented by two consecutive data points with the same epoch. The first defines the scale factor that applies before the epoch and the second the scale factor that applies after the epoch", + "items": { + "type": "object", + "properties": { + "epoch": { + "$ref": "#/definitions/datetime" + }, + "scale_factor": { + "type": "number" + } + }, + "required": [ + "epoch", + "scale_factor" + ], + "additionalProperties": false + }, + "minItems": 2 + } + }, + "required": [ + "before_first", + "after_last", + "model" + ], + "additionalProperties": false + } + }, + "required": [ + "type", + "parameters" + ], + "additionalProperties": false + }, + "time_function_exponential": { + "description": "The valuation of this function is an exponential function with a time-based relaxation constant", + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "exponential" + ] + }, + "parameters": { + "type": "object", + "properties": { + "reference_epoch": { + "$ref": "#/definitions/datetime", + "description": "The date/time at which the exponential decay starts" + }, + "end_epoch": { + "$ref": "#/definitions/datetime", + "description": "The date/time at which the exponential decay ends (optional)" + }, + "relaxation_constant": { + "type": "number", + "description": "Relaxation constant in years" + }, + "before_scale_factor": { + "type": "number", + "description": "The scale factor that applies before the reference epoch" + }, + "initial_scale_factor": { + "type": "number", + "description": "The initial scale factor" + }, + "final_scale_factor": { + "type": "number", + "description": "The scale factor the exponential function approaches" + } + }, + "required": [ + "reference_epoch", + "relaxation_constant", + "before_scale_factor", + "initial_scale_factor", + "final_scale_factor" + ], + "additionalProperties": false + } + }, + "required": [ + "type", + "parameters" + ], + "additionalProperties": false + } + } +} \ No newline at end of file diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/nad.lst b/.venv/lib/python3.12/site-packages/fiona/proj_data/nad.lst new file mode 100644 index 00000000..cc427722 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/nad.lst @@ -0,0 
+1,142 @@ + Listing of State Plane North American Datum Zones + + NGS zone number + State and zone 1927 1983 + +Alabama east .................. 101 101 +Alabama west .................. 102 102 +Alaska zone no. 1 ............. 5001 5001 +Alaska zone no. 2 ............. 5002 5002 +Alaska zone no. 3 ............. 5003 5003 +Alaska zone no. 4 ............. 5004 5004 +Alaska zone no. 5 ............. 5005 5005 +Alaska zone no. 6 ............. 5006 5006 +Alaska zone no. 7 ............. 5007 5007 +Alaska zone no. 8 ............. 5008 5008 +Alaska zone no. 9 ............. 5009 5009 +Alaska zone no. 10 ............ 5010 5010 +American Samoa ................ 5300 +Arizona central ............... 202 202 +Arizona east .................. 201 201 +Arizona west .................. 203 203 +Arkansas north ................ 301 301 +Arkansas south ................ 302 302 +California I .................. 401 401 +California II ................. 402 402 +California III ................ 403 403 +California IV ................. 404 404 +California V .................. 405 405 +California VI ................. 406 406 +California VII ................ 407 +Colorado central .............. 502 502 +Colorado north ................ 501 501 +Colorado south ................ 503 503 +Connecticut ................... 600 600 +Delaware ...................... 700 700 +Florida east .................. 901 901 +Florida north ................. 903 903 +Florida west .................. 902 902 +Georgia east .................. 1001 1001 +Georgia west .................. 1002 1002 +Guam Island ................... 5400 +Hawaii 1 ...................... 5101 5101 +Hawaii 2 ...................... 5102 5102 +Hawaii 3 ...................... 5103 5103 +Hawaii 4 ...................... 5104 5104 +Hawaii 5 ...................... 5105 5105 +Idaho central ................. 1102 1102 +Idaho east .................... 1101 1101 +Idaho west .................... 1103 1103 +Illinois east ................. 1201 1201 +Illinois west ................. 1202 1202 +Indiana east .................. 1301 1301 +Indiana west .................. 1302 1302 +Iowa north .................... 1401 1401 +Iowa south .................... 1402 1402 +Kansas north .................. 1501 1501 +Kansas south .................. 1502 1502 +Kentucky north ................ 1601 1601 +Kentucky south ................ 1602 1602 +Louisiana north ............... 1701 1701 +Louisiana offshore ............ 1703 1703 +Louisiana south ............... 1702 1702 +Maine east .................... 1801 1801 +Maine west .................... 1802 1802 +Maryland ...................... 1900 1900 +Massachusetts island .......... 2002 2002 +Massachusetts mainland ........ 2001 2001 +Michigan central/l ............ 2112 2112 current +Michigan central/m ............ 2102 old +Michigan east ................. 2101 old +Michigan north ................ 2111 2111 current +Michigan south ................ 2113 2113 current +Michigan west ................. 2103 old +Minnesota central ............. 2202 2202 +Minnesota north ............... 2201 2201 +Minnesota south ............... 2203 2203 +Mississippi east .............. 2301 2301 +Mississippi west .............. 2302 2302 +Missouri central .............. 2402 2402 +Missouri east ................. 2401 2401 +Missouri west ................. 2403 2403 +Montana ....................... 2500 +Montana central ............... 2502 +Montana north ................. 2501 +Montana south ................. 2503 +Nebraska ...................... 
2600 +Nebraska north ................ 2601 +Nebraska south ................ 2602 +Nevada central ................ 2702 2702 +Nevada east ................... 2701 2701 +Nevada west ................... 2703 2703 +New hampshire ................. 2800 2800 +New jersey .................... 2900 2900 +New mexico central ............ 3002 3002 +New mexico east ............... 3001 3001 +New mexico west ............... 3003 3003 +New york central .............. 3102 3102 +New york east ................. 3101 3101 +New york long island .......... 3104 3104 +New york west ................. 3103 3103 +North carolina ................ 3200 3200 +North dakota north ............ 3301 3301 +North dakota south ............ 3302 3302 +Ohio north .................... 3401 3401 +Ohio south .................... 3402 3402 +Oklahoma north ................ 3501 3501 +Oklahoma south ................ 3502 3502 +Oregon north .................. 3601 3601 +Oregon south .................. 3602 3602 +Pennsylvania north ............ 3701 3701 +Pennsylvania south ............ 3702 3702 +Puerto Rico, Virgin Islands ... 5201 5200 +Rhode Island .................. 3800 3800 +South Carolina ................ 3900 +South Carolina north .......... 3901 +South Carolina south .......... 3902 +South Dakota north ............ 4001 4001 +South Dakota south ............ 4002 4002 +Tennessee ..................... 4100 4100 +Texas central ................. 4203 4203 +Texas north ................... 4201 4201 +Texas north central ........... 4202 4202 +Texas south ................... 4205 4205 +Texas south central ........... 4204 4204 +Utah central .................. 4302 4302 +Utah north .................... 4301 4301 +Utah south .................... 4303 4303 +Vermont ....................... 4400 4400 +Virgin Islands, St. Croix ..... 5202 +Virginia north ................ 4501 4501 +Virginia south ................ 4502 4502 +Washington north .............. 4601 4601 +Washington south .............. 4602 4602 +West Virginia north ........... 4701 4701 +West Virginia south ........... 4702 4702 +Wisconsin central ............. 4802 4802 +Wisconsin north ............... 4801 4801 +Wisconsin south ............... 4803 4803 +Wyoming east .................. 4901 4901 +Wyoming east central .......... 4902 4902 +Wyoming west .................. 4904 4904 +Wyoming west central .......... 4903 4903 diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/nad27 b/.venv/lib/python3.12/site-packages/fiona/proj_data/nad27 new file mode 100644 index 00000000..c5e43962 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/nad27 @@ -0,0 +1,810 @@ +# SCCSID @(#)nad27 4.1 92/12/20 GIE +# proj +init files for: +# +# State Plane Coordinate Systems, +# North American Datum 1927 + + +lastupdate=1992-12-20 +# 101: alabama east: nad27 +<101> proj=tmerc datum=NAD27 +lon_0=-85d50 lat_0=30d30 k=.99996 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 102: alabama west: nad27 +<102> proj=tmerc datum=NAD27 +lon_0=-87d30 lat_0=30 k=.9999333333333333 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5010: alaska zone no. 
10: nad27 +<5010> proj=lcc datum=NAD27 +lon_0=-176 lat_1=53d50 lat_2=51d50 lat_0=51 +x_0=914401.8288036576 y_0=0 +no_defs <> + +# 5300: american samoa: nad27 +<5300> proj=lcc datum=NAD27 +lon_0=-170 lat_1=-14d16 lat_2=-14d16 lat_0=-14d16 +x_0=152400.3048006096 y_0=95169.31165862332 +no_defs <> + +# 201: arizona east: nad27 +<201> proj=tmerc datum=NAD27 +lon_0=-110d10 lat_0=31 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 202: arizona central: nad27 +<202> proj=tmerc datum=NAD27 +lon_0=-111d55 lat_0=31 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 203: arizona west: nad27 +<203> proj=tmerc datum=NAD27 +lon_0=-113d45 lat_0=31 k=.9999333333333333 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 301: arkansas north: nad27 +<301> proj=lcc datum=NAD27 +lon_0=-92 lat_1=36d14 lat_2=34d56 lat_0=34d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 302: arkansas south: nad27 +<302> proj=lcc datum=NAD27 +lon_0=-92 lat_1=34d46 lat_2=33d18 lat_0=32d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 401: california i: nad27 +<401> proj=lcc datum=NAD27 +lon_0=-122 lat_1=41d40 lat_2=40 lat_0=39d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 402: california ii: nad27 +<402> proj=lcc datum=NAD27 +lon_0=-122 lat_1=39d50 lat_2=38d20 lat_0=37d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 403: california iii: nad27 +<403> proj=lcc datum=NAD27 +lon_0=-120d30 lat_1=38d26 lat_2=37d4 lat_0=36d30 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 404: california iv: nad27 +<404> proj=lcc datum=NAD27 +lon_0=-119 lat_1=37d15 lat_2=36 lat_0=35d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 405: california v: nad27 +<405> proj=lcc datum=NAD27 +lon_0=-118 lat_1=35d28 lat_2=34d2 lat_0=33d30 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 406: california vi: nad27 +<406> proj=lcc datum=NAD27 +lon_0=-116d15 lat_1=33d53 lat_2=32d47 lat_0=32d10 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 407: california vii: nad27 +<407> proj=lcc datum=NAD27 +lon_0=-118d20 lat_1=34d25 lat_2=33d52 lat_0=34d8 +x_0=1276106.450596901 y_0=1268253.006858014 +no_defs <> + +# 501: colorado north: nad27 +<501> proj=lcc datum=NAD27 +lon_0=-105d30 lat_1=40d47 lat_2=39d43 lat_0=39d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 502: colorado central: nad27 +<502> proj=lcc datum=NAD27 +lon_0=-105d30 lat_1=39d45 lat_2=38d27 lat_0=37d50 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 503: colorado south: nad27 +<503> proj=lcc datum=NAD27 +lon_0=-105d30 lat_1=38d26 lat_2=37d14 lat_0=36d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 600: connecticut ---: nad27 +<600> proj=lcc datum=NAD27 +lon_0=-72d45 lat_1=41d52 lat_2=41d12 lat_0=40d50 +x_0=182880.3657607315 y_0=0 +no_defs <> + +# 700: delaware ---: nad27 +<700> proj=tmerc datum=NAD27 +lon_0=-75d25 lat_0=38 k=.999995 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 901: florida east: nad27 +<901> proj=tmerc datum=NAD27 +lon_0=-81 lat_0=24d20 k=.9999411764705882 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 902: florida west: nad27 +<902> proj=tmerc datum=NAD27 +lon_0=-82 lat_0=24d20 k=.9999411764705882 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 903: florida north: nad27 +<903> proj=lcc datum=NAD27 +lon_0=-84d30 lat_1=30d45 lat_2=29d35 lat_0=29 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 1001: georgia east: nad27 +<1001> proj=tmerc datum=NAD27 +lon_0=-82d10 lat_0=30 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 1002: georgia west: nad27 +<1002> proj=tmerc datum=NAD27 +lon_0=-84d10 lat_0=30 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5101: hawaii 1: nad27 +<5101> proj=tmerc 
datum=NAD27 +lon_0=-155d30 lat_0=18d50 k=.9999666666666667 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5102: hawaii 2: nad27 +<5102> proj=tmerc datum=NAD27 +lon_0=-156d40 lat_0=20d20 k=.9999666666666667 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5103: hawaii 3: nad27 +<5103> proj=tmerc datum=NAD27 +lon_0=-158 lat_0=21d10 k=.99999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5104: hawaii 4: nad27 +<5104> proj=tmerc datum=NAD27 +lon_0=-159d30 lat_0=21d50 k=.99999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5105: hawaii 5: nad27 +<5105> proj=tmerc datum=NAD27 +lon_0=-160d10 lat_0=21d40 k=1 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 1101: idaho east: nad27 +<1101> proj=tmerc datum=NAD27 +lon_0=-112d10 lat_0=41d40 k=.9999473684210526 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 1102: idaho central: nad27 +<1102> proj=tmerc datum=NAD27 +lon_0=-114 lat_0=41d40 k=.9999473684210526 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 1103: idaho west: nad27 +<1103> proj=tmerc datum=NAD27 +lon_0=-115d45 lat_0=41d40 k=.9999333333333333 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 1201: illinois east: nad27 +<1201> proj=tmerc datum=NAD27 +lon_0=-88d20 lat_0=36d40 k=.999975 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 1202: illinois west: nad27 +<1202> proj=tmerc datum=NAD27 +lon_0=-90d10 lat_0=36d40 k=.9999411764705882 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 1301: indiana east: nad27 +<1301> proj=tmerc datum=NAD27 +lon_0=-85d40 lat_0=37d30 k=.9999666666666667 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 1302: indiana west: nad27 +<1302> proj=tmerc datum=NAD27 +lon_0=-87d5 lat_0=37d30 k=.9999666666666667 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 1401: iowa north: nad27 +<1401> proj=lcc datum=NAD27 +lon_0=-93d30 lat_1=43d16 lat_2=42d4 lat_0=41d30 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 1402: iowa south: nad27 +<1402> proj=lcc datum=NAD27 +lon_0=-93d30 lat_1=41d47 lat_2=40d37 lat_0=40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 1501: kansas north: nad27 +<1501> proj=lcc datum=NAD27 +lon_0=-98 lat_1=39d47 lat_2=38d43 lat_0=38d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 1502: kansas south: nad27 +<1502> proj=lcc datum=NAD27 +lon_0=-98d30 lat_1=38d34 lat_2=37d16 lat_0=36d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 1601: kentucky north: nad27 +<1601> proj=lcc datum=NAD27 +lon_0=-84d15 lat_1=38d58 lat_2=37d58 lat_0=37d30 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 1602: kentucky south: nad27 +<1602> proj=lcc datum=NAD27 +lon_0=-85d45 lat_1=37d56 lat_2=36d44 lat_0=36d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 1701: louisiana north: nad27 +<1701> proj=lcc datum=NAD27 +lon_0=-92d30 lat_1=32d40 lat_2=31d10 lat_0=30d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 1702: louisiana south: nad27 +<1702> proj=lcc datum=NAD27 +lon_0=-91d20 lat_1=30d42 lat_2=29d18 lat_0=28d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 1703: louisiana offshore: nad27 +<1703> proj=lcc datum=NAD27 +lon_0=-91d20 lat_1=27d50 lat_2=26d10 lat_0=25d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 1801: maine east: nad27 +<1801> proj=tmerc datum=NAD27 +lon_0=-68d30 lat_0=43d50 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 1802: maine west: nad27 +<1802> proj=tmerc datum=NAD27 +lon_0=-70d10 lat_0=42d50 k=.9999666666666667 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 1900: maryland ---: nad27 +<1900> proj=lcc datum=NAD27 +lon_0=-77 lat_1=39d27 lat_2=38d18 lat_0=37d50 +x_0=243840.4876809754 y_0=0 +no_defs <> + +# 2001: massachusetts mainland: nad27 +<2001> proj=lcc datum=NAD27 +lon_0=-71d30 
lat_1=42d41 lat_2=41d43 lat_0=41 +x_0=182880.3657607315 y_0=0 +no_defs <> + +# 2002: massachusetts island: nad27 +<2002> proj=lcc datum=NAD27 +lon_0=-70d30 lat_1=41d29 lat_2=41d17 lat_0=41 +x_0=60960.12192024384 y_0=0 +no_defs <> + +# 2101: michigan east: nad27 +<2101> proj=tmerc datum=NAD27 +lon_0=-83d40 lat_0=41d30 k=.9999428571428571 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 2102: michigan central/m: nad27 +<2102> proj=tmerc datum=NAD27 +lon_0=-85d45 lat_0=41d30 k=.9999090909090909 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 2103: michigan west: nad27 +<2103> proj=tmerc datum=NAD27 +lon_0=-88d45 lat_0=41d30 k=.9999090909090909 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 2111: michigan north: nad27 +<2111> proj=lcc a=6378450.047 es=.006768657997291094 +lon_0=-87 lat_1=47d5 lat_2=45d29 lat_0=44d47 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 2112: michigan central/l: nad27 +<2112> proj=lcc a=6378450.047 es=.006768657997291094 +lon_0=-84d20 lat_1=45d42 lat_2=44d11 lat_0=43d19 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 2113: michigan south: nad27 +<2113> proj=lcc a=6378450.047 es=.006768657997291094 +lon_0=-84d20 lat_1=43d40 lat_2=42d6 lat_0=41d30 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 2201: minnesota north: nad27 +<2201> proj=lcc datum=NAD27 +lon_0=-93d6 lat_1=48d38 lat_2=47d2 lat_0=46d30 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 2202: minnesota central: nad27 +<2202> proj=lcc datum=NAD27 +lon_0=-94d15 lat_1=47d3 lat_2=45d37 lat_0=45 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 2203: minnesota south: nad27 +<2203> proj=lcc datum=NAD27 +lon_0=-94 lat_1=45d13 lat_2=43d47 lat_0=43 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 2301: mississippi east: nad27 +<2301> proj=tmerc datum=NAD27 +lon_0=-88d50 lat_0=29d40 k=.99996 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 2302: mississippi west: nad27 +<2302> proj=tmerc datum=NAD27 +lon_0=-90d20 lat_0=30d30 k=.9999411764705882 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 2401: missouri east: nad27 +<2401> proj=tmerc datum=NAD27 +lon_0=-90d30 lat_0=35d50 k=.9999333333333333 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 2402: missouri central: nad27 +<2402> proj=tmerc datum=NAD27 +lon_0=-92d30 lat_0=35d50 k=.9999333333333333 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 2403: missouri west: nad27 +<2403> proj=tmerc datum=NAD27 +lon_0=-94d30 lat_0=36d10 k=.9999411764705882 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 2501: montana north: nad27 +<2501> proj=lcc datum=NAD27 +lon_0=-109d30 lat_1=48d43 lat_2=47d51 lat_0=47 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 2502: montana central: nad27 +<2502> proj=lcc datum=NAD27 +lon_0=-109d30 lat_1=47d53 lat_2=46d27 lat_0=45d50 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 2503: montana south: nad27 +<2503> proj=lcc datum=NAD27 +lon_0=-109d30 lat_1=46d24 lat_2=44d52 lat_0=44 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 2601: nebraska north: nad27 +<2601> proj=lcc datum=NAD27 +lon_0=-100 lat_1=42d49 lat_2=41d51 lat_0=41d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 2602: nebraska south: nad27 +<2602> proj=lcc datum=NAD27 +lon_0=-99d30 lat_1=41d43 lat_2=40d17 lat_0=39d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 2701: nevada east: nad27 +<2701> proj=tmerc datum=NAD27 +lon_0=-115d35 lat_0=34d45 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 2702: nevada central: nad27 +<2702> proj=tmerc datum=NAD27 +lon_0=-116d40 lat_0=34d45 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 2703: nevada west: nad27 +<2703> proj=tmerc datum=NAD27 +lon_0=-118d35 lat_0=34d45 k=.9999 
+x_0=152400.3048006096 y_0=0 +no_defs <> + +# 2800: new hampshire ---: nad27 +<2800> proj=tmerc datum=NAD27 +lon_0=-71d40 lat_0=42d30 k=.9999666666666667 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 2900: new jersey ---: nad27 +<2900> proj=tmerc datum=NAD27 +lon_0=-74d40 lat_0=38d50 k=.999975 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 3001: new mexico east: nad27 +<3001> proj=tmerc datum=NAD27 +lon_0=-104d20 lat_0=31 k=.9999090909090909 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 3002: new mexico central: nad27 +<3002> proj=tmerc datum=NAD27 +lon_0=-106d15 lat_0=31 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 3003: new mexico west: nad27 +<3003> proj=tmerc datum=NAD27 +lon_0=-107d50 lat_0=31 k=.9999166666666667 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 3101: new york east: nad27 +<3101> proj=tmerc datum=NAD27 +lon_0=-74d20 lat_0=40 k=.9999666666666667 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 3102: new york central: nad27 +<3102> proj=tmerc datum=NAD27 +lon_0=-76d35 lat_0=40 k=.9999375 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 3103: new york west: nad27 +<3103> proj=tmerc datum=NAD27 +lon_0=-78d35 lat_0=40 k=.9999375 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 3104: new york long island: nad27 +<3104> proj=lcc datum=NAD27 +lon_0=-74 lat_1=41d2 lat_2=40d40 lat_0=40d30 +x_0=609601.2192024384 y_0=30480.06096012192 +no_defs <> + +# 3200: north carolina ---: nad27 +<3200> proj=lcc datum=NAD27 +lon_0=-79 lat_1=36d10 lat_2=34d20 lat_0=33d45 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 3301: north dakota north: nad27 +<3301> proj=lcc datum=NAD27 +lon_0=-100d30 lat_1=48d44 lat_2=47d26 lat_0=47 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 3302: north dakota south: nad27 +<3302> proj=lcc datum=NAD27 +lon_0=-100d30 lat_1=47d29 lat_2=46d11 lat_0=45d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 3401: ohio north: nad27 +<3401> proj=lcc datum=NAD27 +lon_0=-82d30 lat_1=41d42 lat_2=40d26 lat_0=39d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 3402: ohio south: nad27 +<3402> proj=lcc datum=NAD27 +lon_0=-82d30 lat_1=40d2 lat_2=38d44 lat_0=38 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 3501: oklahoma north: nad27 +<3501> proj=lcc datum=NAD27 +lon_0=-98 lat_1=36d46 lat_2=35d34 lat_0=35 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 3502: oklahoma south: nad27 +<3502> proj=lcc datum=NAD27 +lon_0=-98 lat_1=35d14 lat_2=33d56 lat_0=33d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 3601: oregon north: nad27 +<3601> proj=lcc datum=NAD27 +lon_0=-120d30 lat_1=46 lat_2=44d20 lat_0=43d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 3602: oregon south: nad27 +<3602> proj=lcc datum=NAD27 +lon_0=-120d30 lat_1=44 lat_2=42d20 lat_0=41d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 3701: pennsylvania north: nad27 +<3701> proj=lcc datum=NAD27 +lon_0=-77d45 lat_1=41d57 lat_2=40d53 lat_0=40d10 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 3702: pennsylvania south: nad27 +<3702> proj=lcc datum=NAD27 +lon_0=-77d45 lat_1=40d58 lat_2=39d56 lat_0=39d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 3800: rhode island ---: nad27 +<3800> proj=tmerc datum=NAD27 +lon_0=-71d30 lat_0=41d5 k=.99999375 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 3901: south carolina north: nad27 +<3901> proj=lcc datum=NAD27 +lon_0=-81 lat_1=34d58 lat_2=33d46 lat_0=33 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 3902: south carolina south: nad27 +<3902> proj=lcc datum=NAD27 +lon_0=-81 lat_1=33d40 lat_2=32d20 lat_0=31d50 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4001: south dakota north: nad27 +<4001> proj=lcc 
datum=NAD27 +lon_0=-100 lat_1=45d41 lat_2=44d25 lat_0=43d50 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4002: south dakota south: nad27 +<4002> proj=lcc datum=NAD27 +lon_0=-100d20 lat_1=44d24 lat_2=42d50 lat_0=42d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4100: tennessee ---: nad27 +<4100> proj=lcc datum=NAD27 +lon_0=-86 lat_1=36d25 lat_2=35d15 lat_0=34d40 +x_0=609601.2192024384 y_0=30480.06096012192 +no_defs <> + +# 4201: texas north: nad27 +<4201> proj=lcc datum=NAD27 +lon_0=-101d30 lat_1=36d11 lat_2=34d39 lat_0=34 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4202: texas north central: nad27 +<4202> proj=lcc datum=NAD27 +lon_0=-97d30 lat_1=33d58 lat_2=32d8 lat_0=31d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4203: texas central: nad27 +<4203> proj=lcc datum=NAD27 +lon_0=-100d20 lat_1=31d53 lat_2=30d7 lat_0=29d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4204: texas south central: nad27 +<4204> proj=lcc datum=NAD27 +lon_0=-99 lat_1=30d17 lat_2=28d23 lat_0=27d50 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4205: texas south: nad27 +<4205> proj=lcc datum=NAD27 +lon_0=-98d30 lat_1=27d50 lat_2=26d10 lat_0=25d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4301: utah north: nad27 +<4301> proj=lcc datum=NAD27 +lon_0=-111d30 lat_1=41d47 lat_2=40d43 lat_0=40d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4302: utah central: nad27 +<4302> proj=lcc datum=NAD27 +lon_0=-111d30 lat_1=40d39 lat_2=39d1 lat_0=38d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4303: utah south: nad27 +<4303> proj=lcc datum=NAD27 +lon_0=-111d30 lat_1=38d21 lat_2=37d13 lat_0=36d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4400: vermont ---: nad27 +<4400> proj=tmerc datum=NAD27 +lon_0=-72d30 lat_0=42d30 k=.9999642857142857 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 4501: virginia north: nad27 +<4501> proj=lcc datum=NAD27 +lon_0=-78d30 lat_1=39d12 lat_2=38d2 lat_0=37d40 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4502: virginia south: nad27 +<4502> proj=lcc datum=NAD27 +lon_0=-78d30 lat_1=37d58 lat_2=36d46 lat_0=36d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4601: washington north: nad27 +<4601> proj=lcc datum=NAD27 +lon_0=-120d50 lat_1=48d44 lat_2=47d30 lat_0=47 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4602: washington south: nad27 +<4602> proj=lcc datum=NAD27 +lon_0=-120d30 lat_1=47d20 lat_2=45d50 lat_0=45d20 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4701: west virginia north: nad27 +<4701> proj=lcc datum=NAD27 +lon_0=-79d30 lat_1=40d15 lat_2=39 lat_0=38d30 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4702: west virginia south: nad27 +<4702> proj=lcc datum=NAD27 +lon_0=-81 lat_1=38d53 lat_2=37d29 lat_0=37 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4801: wisconsin north: nad27 +<4801> proj=lcc datum=NAD27 +lon_0=-90 lat_1=46d46 lat_2=45d34 lat_0=45d10 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4802: wisconsin central: nad27 +<4802> proj=lcc datum=NAD27 +lon_0=-90 lat_1=45d30 lat_2=44d15 lat_0=43d50 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4803: wisconsin south: nad27 +<4803> proj=lcc datum=NAD27 +lon_0=-90 lat_1=44d4 lat_2=42d44 lat_0=42 +x_0=609601.2192024384 y_0=0 +no_defs <> + +# 4901: wyoming east: nad27 +<4901> proj=tmerc datum=NAD27 +lon_0=-105d10 lat_0=40d40 k=.9999411764705882 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 4902: wyoming east central: nad27 +<4902> proj=tmerc datum=NAD27 +lon_0=-107d20 lat_0=40d40 k=.9999411764705882 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 4903: wyoming west central: nad27 +<4903> proj=tmerc datum=NAD27 +lon_0=-108d45 
lat_0=40d40 k=.9999411764705882 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 4904: wyoming west: nad27 +<4904> proj=tmerc datum=NAD27 +lon_0=-110d5 lat_0=40d40 k=.9999411764705882 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5001: alaska zone no. 1: nad27 +<5001> proj=omerc datum=NAD27 +k=.9999 lonc=-133d40 lat_0=57 alpha=-36d52'11.6315 +x_0=818585.5672270928 y_0=575219.2451072642 +no_defs <> + +# 5002: alaska zone no. 2: nad27 +<5002> proj=tmerc datum=NAD27 +lon_0=-142 lat_0=54 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5003: alaska zone no. 3: nad27 +<5003> proj=tmerc datum=NAD27 +lon_0=-146 lat_0=54 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5004: alaska zone no. 4: nad27 +<5004> proj=tmerc datum=NAD27 +lon_0=-150 lat_0=54 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5005: alaska zone no. 5: nad27 +<5005> proj=tmerc datum=NAD27 +lon_0=-154 lat_0=54 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5006: alaska zone no. 6: nad27 +<5006> proj=tmerc datum=NAD27 +lon_0=-158 lat_0=54 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5007: alaska zone no. 7: nad27 +<5007> proj=tmerc datum=NAD27 +lon_0=-162 lat_0=54 k=.9999 +x_0=213360.4267208534 y_0=0 +no_defs <> + +# 5008: alaska zone no. 8: nad27 +<5008> proj=tmerc datum=NAD27 +lon_0=-166 lat_0=54 k=.9999 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5009: alaska zone no. 9: nad27 +<5009> proj=tmerc datum=NAD27 +lon_0=-170 lat_0=54 k=.9999 +x_0=182880.3657607315 y_0=0 +no_defs <> + +# 5201: puerto rico and virgin islands: nad27 +<5201> proj=lcc datum=NAD27 +lon_0=-66d26 lat_1=18d26 lat_2=18d2 lat_0=17d50 +x_0=152400.3048006096 y_0=0 +no_defs <> + +# 5202: virgin islands st. croix: nad27 +<5202> proj=lcc datum=NAD27 +lon_0=-66d26 lat_1=18d26 lat_2=18d2 lat_0=17d50 +x_0=152400.3048006096 y_0=30480.06096012192 +no_defs <> + +# 5400: guam island: nad27 +<5400> proj=poly datum=NAD27 +x_0=50000 y_0=50000 lon_0=144d44'55.50254 lat_0=13d28'20.87887 +no_defs <> + diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/nad83 b/.venv/lib/python3.12/site-packages/fiona/proj_data/nad83 new file mode 100644 index 00000000..1b65f519 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/nad83 @@ -0,0 +1,745 @@ +# SCCSID @(#)nad83 4.1 92/12/20 GIE +# proj +init files for: +# +# State Plane Coordinate Systems, +# North American Datum 1983 + + +lastupdate=1992-12-20 +# 101: alabama east: nad83 +<101> proj=tmerc datum=NAD83 +lon_0=-85d50 lat_0=30d30 k=.99996 +x_0=200000 y_0=0 +no_defs <> + +# 102: alabama west: nad83 +<102> proj=tmerc datum=NAD83 +lon_0=-87d30 lat_0=30 k=.9999333333333333 +x_0=600000 y_0=0 +no_defs <> + +# 5010: alaska zone no. 
10: nad83 +<5010> proj=lcc datum=NAD83 +lon_0=-176 lat_1=53d50 lat_2=51d50 lat_0=51 +x_0=1000000 y_0=0 +no_defs <> + +# 201: arizona east: nad83 +<201> proj=tmerc datum=NAD83 +lon_0=-110d10 lat_0=31 k=.9999 +x_0=213360 y_0=0 +no_defs <> + +# 202: arizona central: nad83 +<202> proj=tmerc datum=NAD83 +lon_0=-111d55 lat_0=31 k=.9999 +x_0=213360 y_0=0 +no_defs <> + +# 203: arizona west: nad83 +<203> proj=tmerc datum=NAD83 +lon_0=-113d45 lat_0=31 k=.9999333333333333 +x_0=213360 y_0=0 +no_defs <> + +# 301: arkansas north: nad83 +<301> proj=lcc datum=NAD83 +lon_0=-92 lat_1=36d14 lat_2=34d56 lat_0=34d20 +x_0=400000 y_0=0 +no_defs <> + +# 302: arkansas south: nad83 +<302> proj=lcc datum=NAD83 +lon_0=-92 lat_1=34d46 lat_2=33d18 lat_0=32d40 +x_0=400000 y_0=400000 +no_defs <> + +# 401: california i: nad83 +<401> proj=lcc datum=NAD83 +lon_0=-122 lat_1=41d40 lat_2=40 lat_0=39d20 +x_0=2000000 y_0=500000 +no_defs <> + +# 402: california ii: nad83 +<402> proj=lcc datum=NAD83 +lon_0=-122 lat_1=39d50 lat_2=38d20 lat_0=37d40 +x_0=2000000 y_0=500000 +no_defs <> + +# 403: california iii: nad83 +<403> proj=lcc datum=NAD83 +lon_0=-120d30 lat_1=38d26 lat_2=37d4 lat_0=36d30 +x_0=2000000 y_0=500000 +no_defs <> + +# 404: california iv: nad83 +<404> proj=lcc datum=NAD83 +lon_0=-119 lat_1=37d15 lat_2=36 lat_0=35d20 +x_0=2000000 y_0=500000 +no_defs <> + +# 405: california v: nad83 +<405> proj=lcc datum=NAD83 +lon_0=-118 lat_1=35d28 lat_2=34d2 lat_0=33d30 +x_0=2000000 y_0=500000 +no_defs <> + +# 406: california vi: nad83 +<406> proj=lcc datum=NAD83 +lon_0=-116d15 lat_1=33d53 lat_2=32d47 lat_0=32d10 +x_0=2000000 y_0=500000 +no_defs <> + +# 501: colorado north: nad83 +<501> proj=lcc datum=NAD83 +lon_0=-105d30 lat_1=40d47 lat_2=39d43 lat_0=39d20 +x_0=914401.8289 y_0=304800.6096 +no_defs <> + +# 502: colorado central: nad83 +<502> proj=lcc datum=NAD83 +lon_0=-105d30 lat_1=39d45 lat_2=38d27 lat_0=37d50 +x_0=914401.8289 y_0=304800.6096 +no_defs <> + +# 503: colorado south: nad83 +<503> proj=lcc datum=NAD83 +lon_0=-105d30 lat_1=38d26 lat_2=37d14 lat_0=36d40 +x_0=914401.8289 y_0=304800.6096 +no_defs <> + +# 600: connecticut ---: nad83 +<600> proj=lcc datum=NAD83 +lon_0=-72d45 lat_1=41d52 lat_2=41d12 lat_0=40d50 +x_0=304800.6096 y_0=152400.3048 +no_defs <> + +# 700: delaware ---: nad83 +<700> proj=tmerc datum=NAD83 +lon_0=-75d25 lat_0=38 k=.999995 +x_0=200000 y_0=0 +no_defs <> + +# 901: florida east: nad83 +<901> proj=tmerc datum=NAD83 +lon_0=-81 lat_0=24d20 k=.9999411764705882 +x_0=200000 y_0=0 +no_defs <> + +# 902: florida west: nad83 +<902> proj=tmerc datum=NAD83 +lon_0=-82 lat_0=24d20 k=.9999411764705882 +x_0=200000 y_0=0 +no_defs <> + +# 903: florida north: nad83 +<903> proj=lcc datum=NAD83 +lon_0=-84d30 lat_1=30d45 lat_2=29d35 lat_0=29 +x_0=600000 y_0=0 +no_defs <> + +# 1001: georgia east: nad83 +<1001> proj=tmerc datum=NAD83 +lon_0=-82d10 lat_0=30 k=.9999 +x_0=200000 y_0=0 +no_defs <> + +# 1002: georgia west: nad83 +<1002> proj=tmerc datum=NAD83 +lon_0=-84d10 lat_0=30 k=.9999 +x_0=700000 y_0=0 +no_defs <> + +# 5101: hawaii 1: nad83 +<5101> proj=tmerc datum=NAD83 +lon_0=-155d30 lat_0=18d50 k=.9999666666666667 +x_0=500000 y_0=0 +no_defs <> + +# 5102: hawaii 2: nad83 +<5102> proj=tmerc datum=NAD83 +lon_0=-156d40 lat_0=20d20 k=.9999666666666667 +x_0=500000 y_0=0 +no_defs <> + +# 5103: hawaii 3: nad83 +<5103> proj=tmerc datum=NAD83 +lon_0=-158 lat_0=21d10 k=.99999 +x_0=500000 y_0=0 +no_defs <> + +# 5104: hawaii 4: nad83 +<5104> proj=tmerc datum=NAD83 +lon_0=-159d30 lat_0=21d50 k=.99999 +x_0=500000 y_0=0 +no_defs <> + +# 5105: 
hawaii 5: nad83 +<5105> proj=tmerc datum=NAD83 +lon_0=-160d10 lat_0=21d40 k=1 +x_0=500000 y_0=0 +no_defs <> + +# 1101: idaho east: nad83 +<1101> proj=tmerc datum=NAD83 +lon_0=-112d10 lat_0=41d40 k=.9999473684210526 +x_0=200000 y_0=0 +no_defs <> + +# 1102: idaho central: nad83 +<1102> proj=tmerc datum=NAD83 +lon_0=-114 lat_0=41d40 k=.9999473684210526 +x_0=500000 y_0=0 +no_defs <> + +# 1103: idaho west: nad83 +<1103> proj=tmerc datum=NAD83 +lon_0=-115d45 lat_0=41d40 k=.9999333333333333 +x_0=800000 y_0=0 +no_defs <> + +# 1201: illinois east: nad83 +<1201> proj=tmerc datum=NAD83 +lon_0=-88d20 lat_0=36d40 k=.999975 +x_0=300000 y_0=0 +no_defs <> + +# 1202: illinois west: nad83 +<1202> proj=tmerc datum=NAD83 +lon_0=-90d10 lat_0=36d40 k=.9999411764705882 +x_0=700000 y_0=0 +no_defs <> + +# 1301: indiana east: nad83 +<1301> proj=tmerc datum=NAD83 +lon_0=-85d40 lat_0=37d30 k=.9999666666666667 +x_0=100000 y_0=250000 +no_defs <> + +# 1302: indiana west: nad83 +<1302> proj=tmerc datum=NAD83 +lon_0=-87d5 lat_0=37d30 k=.9999666666666667 +x_0=900000 y_0=250000 +no_defs <> + +# 1401: iowa north: nad83 +<1401> proj=lcc datum=NAD83 +lon_0=-93d30 lat_1=43d16 lat_2=42d4 lat_0=41d30 +x_0=1500000 y_0=1000000 +no_defs <> + +# 1402: iowa south: nad83 +<1402> proj=lcc datum=NAD83 +lon_0=-93d30 lat_1=41d47 lat_2=40d37 lat_0=40 +x_0=500000 y_0=0 +no_defs <> + +# 1501: kansas north: nad83 +<1501> proj=lcc datum=NAD83 +lon_0=-98 lat_1=39d47 lat_2=38d43 lat_0=38d20 +x_0=400000 y_0=0 +no_defs <> + +# 1502: kansas south: nad83 +<1502> proj=lcc datum=NAD83 +lon_0=-98d30 lat_1=38d34 lat_2=37d16 lat_0=36d40 +x_0=400000 y_0=400000 +no_defs <> + +# 1601: kentucky north: nad83 +<1601> proj=lcc datum=NAD83 +lon_0=-84d15 lat_1=38d58 lat_2=37d58 lat_0=37d30 +x_0=500000 y_0=0 +no_defs <> + +# 1602: kentucky south: nad83 +<1602> proj=lcc datum=NAD83 +lon_0=-85d45 lat_1=37d56 lat_2=36d44 lat_0=36d20 +x_0=500000 y_0=500000 +no_defs <> + +# 1701: louisiana north: nad83 +<1701> proj=lcc datum=NAD83 +lon_0=-92d30 lat_1=32d40 lat_2=31d10 lat_0=30d30 +x_0=1000000 y_0=0 +no_defs <> + +# 1702: louisiana south: nad83 +<1702> proj=lcc datum=NAD83 +lon_0=-91d20 lat_1=30d42 lat_2=29d18 lat_0=28d30 +x_0=1000000 y_0=0 +no_defs <> + +# 1703: louisiana offshore: nad83 +<1703> proj=lcc datum=NAD83 +lon_0=-91d20 lat_1=27d50 lat_2=26d10 lat_0=25d30 +x_0=1000000 y_0=0 +no_defs <> + +# 1801: maine east: nad83 +<1801> proj=tmerc datum=NAD83 +lon_0=-68d30 lat_0=43d40 k=.9999 +x_0=300000 y_0=0 +no_defs <> + +# 1802: maine west: nad83 +<1802> proj=tmerc datum=NAD83 +lon_0=-70d10 lat_0=42d50 k=.9999666666666667 +x_0=900000 y_0=0 +no_defs <> + +# 1900: maryland ---: nad83 +<1900> proj=lcc datum=NAD83 +lon_0=-77 lat_1=39d27 lat_2=38d18 lat_0=37d40 +x_0=400000 y_0=0 +no_defs <> + +# 2001: massachusetts mainland: nad83 +<2001> proj=lcc datum=NAD83 +lon_0=-71d30 lat_1=42d41 lat_2=41d43 lat_0=41 +x_0=200000 y_0=750000 +no_defs <> + +# 2002: massachusetts island: nad83 +<2002> proj=lcc datum=NAD83 +lon_0=-70d30 lat_1=41d29 lat_2=41d17 lat_0=41 +x_0=500000 y_0=0 +no_defs <> + +# 2111: michigan north: nad83 +<2111> proj=lcc datum=NAD83 +lon_0=-87 lat_1=47d5 lat_2=45d29 lat_0=44d47 +x_0=8000000 y_0=0 +no_defs <> + +# 2112: michigan central/l: nad83 +<2112> proj=lcc datum=NAD83 +lon_0=-84d22 lat_1=45d42 lat_2=44d11 lat_0=43d19 +x_0=6000000 y_0=0 +no_defs <> + +# 2113: michigan south: nad83 +<2113> proj=lcc datum=NAD83 +lon_0=-84d22 lat_1=43d40 lat_2=42d6 lat_0=41d30 +x_0=4000000 y_0=0 +no_defs <> + +# 2201: minnesota north: nad83 +<2201> proj=lcc datum=NAD83 +lon_0=-93d6 
lat_1=48d38 lat_2=47d2 lat_0=46d30 +x_0=800000 y_0=100000 +no_defs <> + +# 2202: minnesota central: nad83 +<2202> proj=lcc datum=NAD83 +lon_0=-94d15 lat_1=47d3 lat_2=45d37 lat_0=45 +x_0=800000 y_0=100000 +no_defs <> + +# 2203: minnesota south: nad83 +<2203> proj=lcc datum=NAD83 +lon_0=-94 lat_1=45d13 lat_2=43d47 lat_0=43 +x_0=800000 y_0=100000 +no_defs <> + +# 2301: mississippi east: nad83 +<2301> proj=tmerc datum=NAD83 +lon_0=-88d50 lat_0=29d30 k=.99995 +x_0=300000 y_0=0 +no_defs <> + +# 2302: mississippi west: nad83 +<2302> proj=tmerc datum=NAD83 +lon_0=-90d20 lat_0=29d30 k=.99995 +x_0=700000 y_0=0 +no_defs <> + +# 2401: missouri east: nad83 +<2401> proj=tmerc datum=NAD83 +lon_0=-90d30 lat_0=35d50 k=.9999333333333333 +x_0=250000 y_0=0 +no_defs <> + +# 2402: missouri central: nad83 +<2402> proj=tmerc datum=NAD83 +lon_0=-92d30 lat_0=35d50 k=.9999333333333333 +x_0=500000 y_0=0 +no_defs <> + +# 2403: missouri west: nad83 +<2403> proj=tmerc datum=NAD83 +lon_0=-94d30 lat_0=36d10 k=.9999411764705882 +x_0=850000 y_0=0 +no_defs <> + +# 2500: montana: nad83 +<2500> proj=lcc datum=NAD83 +lon_0=-109d30 lat_1=49 lat_2=45 lat_0=44d15 +x_0=600000 y_0=0 +no_defs <> + +# 2600: nebraska: nad83 +<2600> proj=lcc datum=NAD83 +lon_0=-100 lat_1=43 lat_2=40 lat_0=39d50 +x_0=500000 y_0=0 +no_defs <> + +# 2701: nevada east: nad83 +<2701> proj=tmerc datum=NAD83 +lon_0=-115d35 lat_0=34d45 k=.9999 +x_0=200000 y_0=8000000 +no_defs <> + +# 2702: nevada central: nad83 +<2702> proj=tmerc datum=NAD83 +lon_0=-116d40 lat_0=34d45 k=.9999 +x_0=500000 y_0=6000000 +no_defs <> + +# 2703: nevada west: nad83 +<2703> proj=tmerc datum=NAD83 +lon_0=-118d35 lat_0=34d45 k=.9999 +x_0=800000 y_0=4000000 +no_defs <> + +# 2800: new hampshire ---: nad83 +<2800> proj=tmerc datum=NAD83 +lon_0=-71d40 lat_0=42d30 k=.9999666666666667 +x_0=300000 y_0=0 +no_defs <> + +# 2900: new jersey ---: nad83 +<2900> proj=tmerc datum=NAD83 +lon_0=-74d30 lat_0=38d50 k=.9999 +x_0=150000 y_0=0 +no_defs <> + +# 3001: new mexico east: nad83 +<3001> proj=tmerc datum=NAD83 +lon_0=-104d20 lat_0=31 k=.9999090909090909 +x_0=165000 y_0=0 +no_defs <> + +# 3002: new mexico central: nad83 +<3002> proj=tmerc datum=NAD83 +lon_0=-106d15 lat_0=31 k=.9999 +x_0=500000 y_0=0 +no_defs <> + +# 3003: new mexico west: nad83 +<3003> proj=tmerc datum=NAD83 +lon_0=-107d50 lat_0=31 k=.9999166666666667 +x_0=830000 y_0=0 +no_defs <> + +# 3101: new york east: nad83 +<3101> proj=tmerc datum=NAD83 +lon_0=-74d30 lat_0=38d50 k=.9999 +x_0=150000 y_0=0 +no_defs <> + +# 3102: new york central: nad83 +<3102> proj=tmerc datum=NAD83 +lon_0=-76d35 lat_0=40 k=.9999375 +x_0=250000 y_0=0 +no_defs <> + +# 3103: new york west: nad83 +<3103> proj=tmerc datum=NAD83 +lon_0=-78d35 lat_0=40 k=.9999375 +x_0=350000 y_0=0 +no_defs <> + +# 3104: new york long island: nad83 +<3104> proj=lcc datum=NAD83 +lon_0=-74 lat_1=41d2 lat_2=40d40 lat_0=40d10 +x_0=300000 y_0=0 +no_defs <> + +# 3200: north carolina ---: nad83 +<3200> proj=lcc datum=NAD83 +lon_0=-79 lat_1=36d10 lat_2=34d20 lat_0=33d45 +x_0=609601.22 y_0=0 +no_defs <> + +# 3301: north dakota north: nad83 +<3301> proj=lcc datum=NAD83 +lon_0=-100d30 lat_1=48d44 lat_2=47d26 lat_0=47 +x_0=600000 y_0=0 +no_defs <> + +# 3302: north dakota south: nad83 +<3302> proj=lcc datum=NAD83 +lon_0=-100d30 lat_1=47d29 lat_2=46d11 lat_0=45d40 +x_0=600000 y_0=0 +no_defs <> + +# 3401: ohio north: nad83 +<3401> proj=lcc datum=NAD83 +lon_0=-82d30 lat_1=41d42 lat_2=40d26 lat_0=39d40 +x_0=600000 y_0=0 +no_defs <> + +# 3402: ohio south: nad83 +<3402> proj=lcc datum=NAD83 +lon_0=-82d30 
lat_1=40d2 lat_2=38d44 lat_0=38 +x_0=600000 y_0=0 +no_defs <> + +# 3501: oklahoma north: nad83 +<3501> proj=lcc datum=NAD83 +lon_0=-98 lat_1=36d46 lat_2=35d34 lat_0=35 +x_0=600000 y_0=0 +no_defs <> + +# 3502: oklahoma south: nad83 +<3502> proj=lcc datum=NAD83 +lon_0=-98 lat_1=35d14 lat_2=33d56 lat_0=33d20 +x_0=600000 y_0=0 +no_defs <> + +# 3601: oregon north: nad83 +<3601> proj=lcc datum=NAD83 +lon_0=-120d30 lat_1=46 lat_2=44d20 lat_0=43d40 +x_0=2500000 y_0=0 +no_defs <> + +# 3602: oregon south: nad83 +<3602> proj=lcc datum=NAD83 +lon_0=-120d30 lat_1=44 lat_2=42d20 lat_0=41d40 +x_0=1500000 y_0=0 +no_defs <> + +# 3701: pennsylvania north: nad83 +<3701> proj=lcc datum=NAD83 +lon_0=-77d45 lat_1=41d57 lat_2=40d53 lat_0=40d10 +x_0=600000 y_0=0 +no_defs <> + +# 3702: pennsylvania south: nad83 +<3702> proj=lcc datum=NAD83 +lon_0=-77d45 lat_1=40d58 lat_2=39d56 lat_0=39d20 +x_0=600000 y_0=0 +no_defs <> + +# 3800: rhode island ---: nad83 +<3800> proj=tmerc datum=NAD83 +lon_0=-71d30 lat_0=41d5 k=.99999375 +x_0=100000 y_0=0 +no_defs <> + +# 3900: south carolina: nad83 +<3900> proj=lcc datum=NAD83 +lon_0=-81 lat_1=34d50 lat_2=32d30 lat_0=31d50 +x_0=609600 y_0=0 +no_defs <> + +# 4001: south dakota north: nad83 +<4001> proj=lcc datum=NAD83 +lon_0=-100 lat_1=45d41 lat_2=44d25 lat_0=43d50 +x_0=600000 y_0=0 +no_defs <> + +# 4002: south dakota south: nad83 +<4002> proj=lcc datum=NAD83 +lon_0=-100d20 lat_1=44d24 lat_2=42d50 lat_0=42d20 +x_0=600000 y_0=0 +no_defs <> + +# 4100: tennessee ---: nad83 +<4100> proj=lcc datum=NAD83 +lon_0=-86 lat_1=36d25 lat_2=35d15 lat_0=34d20 +x_0=600000 y_0=0 +no_defs <> + +# 4201: texas north: nad83 +<4201> proj=lcc datum=NAD83 +lon_0=-101d30 lat_1=36d11 lat_2=34d39 lat_0=34 +x_0=200000 y_0=1000000 +no_defs <> + +# 4202: texas north central: nad83 +<4202> proj=lcc datum=NAD83 +lon_0=-98d30 lat_1=33d58 lat_2=32d8 lat_0=31d40 +x_0=600000 y_0=2000000 +no_defs <> + +# 4203: texas central: nad83 +<4203> proj=lcc datum=NAD83 +lon_0=-100d20 lat_1=31d53 lat_2=30d7 lat_0=29d40 +x_0=700000 y_0=3000000 +no_defs <> + +# 4204: texas south central: nad83 +<4204> proj=lcc datum=NAD83 +lon_0=-99 lat_1=30d17 lat_2=28d23 lat_0=27d50 +x_0=600000 y_0=4000000 +no_defs <> + +# 4205: texas south: nad83 +<4205> proj=lcc datum=NAD83 +lon_0=-98d30 lat_1=27d50 lat_2=26d10 lat_0=25d40 +x_0=300000 y_0=5000000 +no_defs <> + +# 4301: utah north: nad83 +<4301> proj=lcc datum=NAD83 +lon_0=-111d30 lat_1=41d47 lat_2=40d43 lat_0=40d20 +x_0=500000 y_0=1000000 +no_defs <> + +# 4302: utah central: nad83 +<4302> proj=lcc datum=NAD83 +lon_0=-111d30 lat_1=40d39 lat_2=39d1 lat_0=38d20 +x_0=500000 y_0=2000000 +no_defs <> + +# 4303: utah south: nad83 +<4303> proj=lcc datum=NAD83 +lon_0=-111d30 lat_1=38d21 lat_2=37d13 lat_0=36d40 +x_0=500000 y_0=3000000 +no_defs <> + +# 4400: vermont ---: nad83 +<4400> proj=tmerc datum=NAD83 +lon_0=-72d30 lat_0=42d30 k=.9999642857142857 +x_0=500000 y_0=0 +no_defs <> + +# 4501: virginia north: nad83 +<4501> proj=lcc datum=NAD83 +lon_0=-78d30 lat_1=39d12 lat_2=38d2 lat_0=37d40 +x_0=3500000 y_0=2000000 +no_defs <> + +# 4502: virginia south: nad83 +<4502> proj=lcc datum=NAD83 +lon_0=-78d30 lat_1=37d58 lat_2=36d46 lat_0=36d20 +x_0=3500000 y_0=1000000 +no_defs <> + +# 4601: washington north: nad83 +<4601> proj=lcc datum=NAD83 +lon_0=-120d50 lat_1=48d44 lat_2=47d30 lat_0=47 +x_0=500000 y_0=0 +no_defs <> + +# 4602: washington south: nad83 +<4602> proj=lcc datum=NAD83 +lon_0=-120d30 lat_1=47d20 lat_2=45d50 lat_0=45d20 +x_0=500000 y_0=0 +no_defs <> + +# 4701: west virginia north: nad83 +<4701> 
proj=lcc datum=NAD83 +lon_0=-79d30 lat_1=40d15 lat_2=39 lat_0=38d30 +x_0=600000 y_0=0 +no_defs <> + +# 4702: west virginia south: nad83 +<4702> proj=lcc datum=NAD83 +lon_0=-81 lat_1=38d53 lat_2=37d29 lat_0=37 +x_0=600000 y_0=0 +no_defs <> + +# 4801: wisconsin north: nad83 +<4801> proj=lcc datum=NAD83 +lon_0=-90 lat_1=46d46 lat_2=45d34 lat_0=45d10 +x_0=600000 y_0=0 +no_defs <> + +# 4802: wisconsin central: nad83 +<4802> proj=lcc datum=NAD83 +lon_0=-90 lat_1=45d30 lat_2=44d15 lat_0=43d50 +x_0=600000 y_0=0 +no_defs <> + +# 4803: wisconsin south: nad83 +<4803> proj=lcc datum=NAD83 +lon_0=-90 lat_1=44d4 lat_2=42d44 lat_0=42 +x_0=600000 y_0=0 +no_defs <> + +# 4901: wyoming east: nad83 +<4901> proj=tmerc datum=NAD83 +lon_0=-105d10 lat_0=40d30 k=.9999375 +x_0=200000 y_0=0 +no_defs <> + +# 4902: wyoming east central: nad83 +<4902> proj=tmerc datum=NAD83 +lon_0=-107d20 lat_0=40d30 k=.9999375 +x_0=400000 y_0=100000 +no_defs <> + +# 4903: wyoming west central: nad83 +<4903> proj=tmerc datum=NAD83 +lon_0=-108d45 lat_0=40d30 k=.9999375 +x_0=600000 y_0=0 +no_defs <> + +# 4904: wyoming west: nad83 +<4904> proj=tmerc datum=NAD83 +lon_0=-110d5 lat_0=40d30 k=.9999375 +x_0=800000 y_0=100000 +no_defs <> + +# 5001: alaska zone no. 1: nad83 +<5001> proj=omerc datum=NAD83 +k=.9999 lonc=-133d40 lat_0=57 alpha=-36d52'11.6315 +x_0=818676.7344011233 y_0=575097.6888751927 +no_defs <> + +# 5002: alaska zone no. 2: nad83 +<5002> proj=tmerc datum=NAD83 +lon_0=-142 lat_0=54 k=.9999 +x_0=500000 y_0=0 +no_defs <> + +# 5003: alaska zone no. 3: nad83 +<5003> proj=tmerc datum=NAD83 +lon_0=-146 lat_0=54 k=.9999 +x_0=500000 y_0=0 +no_defs <> + +# 5004: alaska zone no. 4: nad83 +<5004> proj=tmerc datum=NAD83 +lon_0=-150 lat_0=54 k=.9999 +x_0=500000 y_0=0 +no_defs <> + +# 5005: alaska zone no. 5: nad83 +<5005> proj=tmerc datum=NAD83 +lon_0=-154 lat_0=54 k=.9999 +x_0=500000 y_0=0 +no_defs <> + +# 5006: alaska zone no. 6: nad83 +<5006> proj=tmerc datum=NAD83 +lon_0=-158 lat_0=54 k=.9999 +x_0=500000 y_0=0 +no_defs <> + +# 5007: alaska zone no. 7: nad83 +<5007> proj=tmerc datum=NAD83 +lon_0=-162 lat_0=54 k=.9999 +x_0=500000 y_0=0 +no_defs <> + +# 5008: alaska zone no. 8: nad83 +<5008> proj=tmerc datum=NAD83 +lon_0=-166 lat_0=54 k=.9999 +x_0=500000 y_0=0 +no_defs <> + +# 5009: alaska zone no. 
9: nad83 +<5009> proj=tmerc datum=NAD83 +lon_0=-170 lat_0=54 k=.9999 +x_0=500000 y_0=0 +no_defs <> + +# 5200: puerto rico and virgin islands: nad83 +<5200> proj=lcc datum=NAD83 +lon_0=-66d26 lat_1=18d26 lat_2=18d2 lat_0=17d50 +x_0=200000 y_0=200000 +no_defs <> + diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/other.extra b/.venv/lib/python3.12/site-packages/fiona/proj_data/other.extra new file mode 100644 index 00000000..4b5797e9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/other.extra @@ -0,0 +1,53 @@ +## NAD83 / BC Albers (this has been superseded but is kept for compatibility) +<42102> +proj=aea +ellps=GRS80 +lat_0=45 +lon_0=-126.0 +lat_1=50.0 +lat_2=58.5 +x_0=1000000.0 +y_0=0 +datum=NAD83 +units=m no_defs <> + + +# +# OGC-defined extended codes (41000--41999) +# see http://www.digitalearth.gov/wmt/auto.html +# +# WGS84 / Simple Mercator +<41001> +proj=merc +lat_ts=0 +lon_0=0 +k=1.000000 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs no_defs <> +# +# CubeWerx-defined extended codes (42100--42199) +# +# WGS 84 / LCC Canada +<42101> +proj=lcc +lat_1=49 +lat_2=77 +lat_0=0 +lon_0=-95 +x_0=0 +y_0=-8000000 +ellps=WGS84 +datum=WGS84 +units=m +no_defs no_defs <> +#EPSG:42102,"PROJCS[\"NAD83 / BC Albers\",GEOGCS[\"NAD83\",DATUM[\"North_American_Datum_1983\",SPHEROID[\"GRS_1980\",6378137,298.257222101]],PRIMEM[\"Greenwich\",0],UNIT[\"Decimal_Degree\",0.0174532925199433]],PROJECTION[\"Albers_conic_equal_area\"],PARAMETER[\"central_meridian\",-126.0],PARAMETER[\"latitude_of_origin\",45],PARAMETER[\"standard_parallel_1\",50.0],PARAMETER[\"standard_parallel_2\",58.5],PARAMETER[\"false_easting\",1000000.0],PARAMETER[\"false_northing\",0],UNIT[\"Meter\",1]]" +# WGS 84 / LCC USA +<42103> +proj=lcc +lat_1=33 +lat_2=45 +lat_0=0 +lon_0=-100 +x_0=0 +y_0=0 +ellps=WGS72 +datum=WGS84 +units=m +no_defs no_defs <> +# NAD83 / MTM zone 8 Québec +<42104> +proj=tmerc +lat_0=0 +lon_0=-73.5 +k=0.999900 +x_0=304800 +y_0=0 +ellps=GRS80 +units=m +no_defs no_defs <> +# WGS84 / Merc NorthAm +<42105> +proj=merc +lat_ts=0 +lon_0=-96 +k=1.000000 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs no_defs <> +# WGS84 / Lambert Azim Mozambique +<42106> +proj=laea +lat_0=5 +lon_0=20 +x_0=0 +y_0=0 +a=6370997 +b=6370997 +datum=WGS84 +units=m +no_defs no_defs <> +# +# CubeWerx-customer definitions (42300--42399) +# +# NAD27 / Polar Stereographic / CM=-98 +<42301> +proj=stere +lat_0=90 +lon_0=-98 +x_0=0 +y_0=0 +ellps=clrk66 +datum=NAD27 +units=m +no_defs no_defs <> +# JapanOrtho.09 09 +<42302> +proj=tmerc +lat_0=36 +lon_0=139.833333333333 +k=0.999900 +x_0=0 +y_0=0 +ellps=bessel +units=m +no_defs no_defs <> +# NAD83 / Albers NorthAm +<42303> +proj=aea +lat_1=29.5 +lat_2=45.5 +lat_0=23 +lon_0=-96 +x_0=0 +y_0=0 +ellps=GRS80 +datum=NAD83 +units=m +no_defs no_defs <> +# NAD83 / NRCan LCC Canada +<42304> +proj=lcc +lat_1=49 +lat_2=77 +lat_0=49 +lon_0=-95 +x_0=0 +y_0=0 +ellps=GRS80 +datum=NAD83 +units=m +no_defs no_defs <> +# France_II +<42305> +proj=lcc +lat_1=45.898918964419 +lat_2=47.696014502038 +lat_0=46.8 +lon_0=2.337229166666667 +x_0=600000 +y_0=2200000 +a=6378249.2 +b=6356514.999904194 +pm=2.337229166666667 +units=m +no_defs no_defs <> +# NAD83/QC_LCC +<42306> +proj=lcc +lat_1=46 +lat_2=60 +lat_0=44 +lon_0=-68.5 +x_0=0 +y_0=0 +ellps=GRS80 +datum=NAD83 +units=m +no_defs no_defs <> +# NAD83 / Texas Central - feet +<42307> +proj=lcc +lat_1=31.8833333333333 +lat_2=30.1166666666667 +lat_0=29.6666666666667 +lon_0=-100.333333333333 +x_0=700000.0000000001 +y_0=3000000 +ellps=GRS80 
+datum=NAD83 +to_meter=0.3048006096012192 +no_defs no_defs <> +# NAD27 / California Albers +<42308> +proj=aea +lat_1=34 +lat_2=40.5 +lat_0=0 +lon_0=-120 +x_0=0 +y_0=-4000000 +ellps=clrk66 +datum=NAD27 +units=m +no_defs no_defs <> +# NAD 83 / LCC Canada AVHRR-2 +<42309> +proj=lcc +lat_1=49 +lat_2=77 +lat_0=0 +lon_0=-95 +x_0=0 +y_0=0 +ellps=GRS80 +datum=NAD83 +units=m +no_defs no_defs <> +# WGS84+GRS80 / Mercator +<42310> +proj=merc +lat_ts=0 +lon_0=0 +k=1.000000 +x_0=0 +y_0=0 +ellps=GRS80 +datum=WGS84 +units=m +no_defs no_defs <> +# NAD83 / LCC Statcan +<42311> +proj=lcc +lat_1=49 +lat_2=77 +lat_0=63.390675 +lon_0=-91.86666700000001 +x_0=6200000 +y_0=3000000 +ellps=GRS80 +datum=NAD83 +units=m +no_defs no_defs <> +# +# Funny epsgish code for google mercator - you should really use EPSG:3857 +# +<900913> +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs <> diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/proj.db b/.venv/lib/python3.12/site-packages/fiona/proj_data/proj.db new file mode 100644 index 00000000..26696552 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/proj_data/proj.db differ diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/proj.ini b/.venv/lib/python3.12/site-packages/fiona/proj_data/proj.ini new file mode 100644 index 00000000..144562cf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/proj.ini @@ -0,0 +1,51 @@ +[general] +; Lines starting with ; are commented lines. +; + +; Network capabilities disabled by default. +; Can be overridden with the PROJ_NETWORK=ON environment variable. +; Cf https://proj.org/en/latest/usage/network.html +; Valid values = on, off +network = off + +; Endpoint of the Content Delivery Network where remote resources might +; be accessed. Only used if network access is allowed (cf above "network" +; option) +; Can be overridden with the PROJ_NETWORK_ENDPOINT environment variable. +cdn_endpoint = https://cdn.proj.org + +; Whether to enable a cache of remote resources that are accessed, on the +; local file system +; Valid values = on, off +cache_enabled = on + +; Size of the cache in megabytes +cache_size_MB = 300 + +; Time-to-live delay in seconds before already accessed remote resources are +; accessed again to check if they have been updated. +cache_ttl_sec = 86400 + +; Can be set to on so that by default the lack of known resource files needed +; for the best transformation PROJ would normally use causes an error, or off +; to accept missing resource files without errors or warnings. +; This default value itself is overridden by the PROJ_ONLY_BEST_DEFAULT environment +; variable if set, and then by the ONLY_BEST setting that can be +; passed to the proj_create_crs_to_crs() method, or with the --only-best +; option of the cs2cs program. +; (added in PROJ 9.2) +; Valid values = on, off +only_best_default = off + +; Filename of the Certificate Authority (CA) bundle. +; Can be overridden with the PROJ_CURL_CA_BUNDLE / CURL_CA_BUNDLE environment variable. +; (added in PROJ 9.0) +; ca_bundle_path = /path/to/cabundle.pem
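+; Example (an illustrative sketch, not part of the upstream proj.ini):
+; several of the options above can be overridden per process through their
+; documented environment variables instead of editing this file. For
+; instance, to let PROJ fetch missing remote resources (e.g. grids) from
+; the CDN for a single run of a script ("my_script.py" is a hypothetical
+; name):
+;   PROJ_NETWORK=ON python my_script.py
+; which is equivalent to setting, in this file:
+;   network = on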
+ +; Transverse Mercator (and UTM) default algorithm: auto, evenden_snyder or poder_engsager +; * evenden_snyder is the fastest, but less accurate far from central meridian +; * poder_engsager is slower, but more accurate far from central meridian +; * auto will auto-select between the two above depending on the coordinate +; to transform and will use evenden_snyder if the error in doing so is below +; 0.1 mm (for an ellipsoid of the size of Earth) +tmerc_default_algo = poder_engsager diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/projjson.schema.json b/.venv/lib/python3.12/site-packages/fiona/proj_data/projjson.schema.json new file mode 100644 index 00000000..a8578c81 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/projjson.schema.json @@ -0,0 +1,1174 @@ +{ + "$id": "https://proj.org/schemas/v0.7/projjson.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "description": "Schema for PROJJSON (v0.7)", + "$comment": "This document is copyright Even Rouault and PROJ contributors, 2019-2023, and subject to the MIT license. This file exists both in data/ and in schemas/vXXX/. Keep both in sync. And if changing the value of $id, change PROJJSON_DEFAULT_VERSION accordingly in io.cpp", + + "oneOf": [ + { "$ref": "#/definitions/crs" }, + { "$ref": "#/definitions/datum" }, + { "$ref": "#/definitions/datum_ensemble" }, + { "$ref": "#/definitions/ellipsoid" }, + { "$ref": "#/definitions/prime_meridian" }, + { "$ref": "#/definitions/single_operation" }, + { "$ref": "#/definitions/concatenated_operation" }, + { "$ref": "#/definitions/coordinate_metadata" } + ], + + "definitions": { + + "abridged_transformation": { + "type": "object", + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["AbridgedTransformation"] }, + "name": { "type": "string" }, + "source_crs": { + "$ref": "#/definitions/crs", + "$comment": "Only present when the source_crs of the bound_crs does not match the source_crs of the AbridgedTransformation.
No equivalent in WKT" + }, + "method": { "$ref": "#/definitions/method" }, + "parameters": { + "type": "array", + "items": { "$ref": "#/definitions/parameter_value" } + }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "required" : [ "name", "method", "parameters" ], + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ], + "additionalProperties": false + }, + + "axis": { + "type": "object", + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["Axis"] }, + "name": { "type": "string" }, + "abbreviation": { "type": "string" }, + "direction": { "type": "string", + "enum": [ "north", + "northNorthEast", + "northEast", + "eastNorthEast", + "east", + "eastSouthEast", + "southEast", + "southSouthEast", + "south", + "southSouthWest", + "southWest", + "westSouthWest", + "west", + "westNorthWest", + "northWest", + "northNorthWest", + "up", + "down", + "geocentricX", + "geocentricY", + "geocentricZ", + "columnPositive", + "columnNegative", + "rowPositive", + "rowNegative", + "displayRight", + "displayLeft", + "displayUp", + "displayDown", + "forward", + "aft", + "port", + "starboard", + "clockwise", + "counterClockwise", + "towards", + "awayFrom", + "future", + "past", + "unspecified" ] }, + "meridian": { "$ref": "#/definitions/meridian" }, + "unit": { "$ref": "#/definitions/unit" }, + "minimum_value": { "type": "number" }, + "maximum_value": { "type": "number" }, + "range_meaning": { "type": "string", "enum": [ "exact", "wraparound"] }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "required" : [ "name", "abbreviation", "direction" ], + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ], + "additionalProperties": false + }, + + "bbox": { + "type": "object", + "properties": { + "east_longitude": { "type": "number" }, + "west_longitude": { "type": "number" }, + "south_latitude": { "type": "number" }, + "north_latitude": { "type": "number" } + }, + "required" : [ "east_longitude", "west_longitude", + "south_latitude", "north_latitude" ], + "additionalProperties": false + }, + + "bound_crs": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["BoundCRS"] }, + "name": { "type": "string" }, + "source_crs": { "$ref": "#/definitions/crs" }, + "target_crs": { "$ref": "#/definitions/crs" }, + "transformation": { "$ref": "#/definitions/abridged_transformation" }, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "source_crs", "target_crs", "transformation" ], + "additionalProperties": false + }, + + "compound_crs": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", "enum": ["CompoundCRS"] }, + "name": { "type": "string" }, + "components": { + "type": "array", + "items": { "$ref": "#/definitions/crs" } + }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "components" ], + "additionalProperties": false + }, + + "concatenated_operation": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", "enum": ["ConcatenatedOperation"] }, + 
"name": { "type": "string" }, + "source_crs": { "$ref": "#/definitions/crs" }, + "target_crs": { "$ref": "#/definitions/crs" }, + "steps": { + "type": "array", + "items": { "$ref": "#/definitions/single_operation" } + }, + "accuracy": { "type": "string" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "source_crs", "target_crs", "steps" ], + "additionalProperties": false + }, + + "conversion": { + "type": "object", + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["Conversion"] }, + "name": { "type": "string" }, + "method": { "$ref": "#/definitions/method" }, + "parameters": { + "type": "array", + "items": { "$ref": "#/definitions/parameter_value" } + }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "required" : [ "name", "method" ], + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ], + "additionalProperties": false + }, + + "coordinate_metadata": { + "type": "object", + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["CoordinateMetadata"] }, + "crs": { "$ref": "#/definitions/crs" }, + "coordinateEpoch": { "type": "number" } + }, + "required" : [ "crs" ], + "additionalProperties": false + }, + + "coordinate_system": { + "type": "object", + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["CoordinateSystem"] }, + "name": { "type": "string" }, + "subtype": { "type": "string", + "enum": ["Cartesian", + "spherical", + "ellipsoidal", + "vertical", + "ordinal", + "parametric", + "affine", + "TemporalDateTime", + "TemporalCount", + "TemporalMeasure"] }, + "axis": { + "type": "array", + "items": { "$ref": "#/definitions/axis" } + }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "required" : [ "subtype", "axis" ], + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ], + "additionalProperties": false + }, + + "crs": { + "oneOf": [ + { "$ref": "#/definitions/bound_crs" }, + { "$ref": "#/definitions/compound_crs" }, + { "$ref": "#/definitions/derived_engineering_crs" }, + { "$ref": "#/definitions/derived_geodetic_crs" }, + { "$ref": "#/definitions/derived_parametric_crs" }, + { "$ref": "#/definitions/derived_projected_crs" }, + { "$ref": "#/definitions/derived_temporal_crs" }, + { "$ref": "#/definitions/derived_vertical_crs" }, + { "$ref": "#/definitions/engineering_crs" }, + { "$ref": "#/definitions/geodetic_crs" }, + { "$ref": "#/definitions/parametric_crs" }, + { "$ref": "#/definitions/projected_crs" }, + { "$ref": "#/definitions/temporal_crs" }, + { "$ref": "#/definitions/vertical_crs" } + ] + }, + + "datum": { + "oneOf": [ + { "$ref": "#/definitions/geodetic_reference_frame" }, + { "$ref": "#/definitions/vertical_reference_frame" }, + { "$ref": "#/definitions/dynamic_geodetic_reference_frame" }, + { "$ref": "#/definitions/dynamic_vertical_reference_frame" }, + { "$ref": "#/definitions/temporal_datum" }, + { "$ref": "#/definitions/parametric_datum" }, + { "$ref": "#/definitions/engineering_datum" } + ] + }, + + "datum_ensemble": { + "type": "object", + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["DatumEnsemble"] }, + "name": { "type": "string" }, + "members": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": { "type": 
"string" }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "required" : [ "name" ], + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ], + "additionalProperties": false + } + }, + "ellipsoid": { "$ref": "#/definitions/ellipsoid" }, + "accuracy": { "type": "string" }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "required" : [ "name", "members", "accuracy" ], + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ], + "additionalProperties": false + }, + + "deformation_model": { + "description": "Association to a PointMotionOperation", + "type": "object", + "properties": { + "name": { "type": "string" }, + "id": { "$ref": "#/definitions/id" } + }, + "required" : [ "name" ], + "additionalProperties": false + }, + + "derived_engineering_crs": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", + "enum": ["DerivedEngineeringCRS"] }, + "name": { "type": "string" }, + "base_crs": { "$ref": "#/definitions/engineering_crs" }, + "conversion": { "$ref": "#/definitions/conversion" }, + "coordinate_system": { "$ref": "#/definitions/coordinate_system" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "base_crs", "conversion", "coordinate_system" ], + "additionalProperties": false + }, + + "derived_geodetic_crs": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", + "enum": ["DerivedGeodeticCRS", + "DerivedGeographicCRS"] }, + "name": { "type": "string" }, + "base_crs": { "$ref": "#/definitions/geodetic_crs" }, + "conversion": { "$ref": "#/definitions/conversion" }, + "coordinate_system": { "$ref": "#/definitions/coordinate_system" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "base_crs", "conversion", "coordinate_system" ], + "additionalProperties": false + }, + + "derived_parametric_crs": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", + "enum": ["DerivedParametricCRS"] }, + "name": { "type": "string" }, + "base_crs": { "$ref": "#/definitions/parametric_crs" }, + "conversion": { "$ref": "#/definitions/conversion" }, + "coordinate_system": { "$ref": "#/definitions/coordinate_system" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "base_crs", "conversion", "coordinate_system" ], + "additionalProperties": false + }, + + "derived_projected_crs": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", + "enum": ["DerivedProjectedCRS"] }, + "name": { "type": "string" }, + "base_crs": { "$ref": "#/definitions/projected_crs" }, + "conversion": { "$ref": "#/definitions/conversion" }, + "coordinate_system": { "$ref": "#/definitions/coordinate_system" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "base_crs", 
"conversion", "coordinate_system" ], + "additionalProperties": false + }, + + "derived_temporal_crs": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", + "enum": ["DerivedTemporalCRS"] }, + "name": { "type": "string" }, + "base_crs": { "$ref": "#/definitions/temporal_crs" }, + "conversion": { "$ref": "#/definitions/conversion" }, + "coordinate_system": { "$ref": "#/definitions/coordinate_system" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "base_crs", "conversion", "coordinate_system" ], + "additionalProperties": false + }, + + "derived_vertical_crs": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", + "enum": ["DerivedVerticalCRS"] }, + "name": { "type": "string" }, + "base_crs": { "$ref": "#/definitions/vertical_crs" }, + "conversion": { "$ref": "#/definitions/conversion" }, + "coordinate_system": { "$ref": "#/definitions/coordinate_system" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "base_crs", "conversion", "coordinate_system" ], + "additionalProperties": false + }, + + "dynamic_geodetic_reference_frame": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", "enum": ["DynamicGeodeticReferenceFrame"] }, + "name": {}, + "anchor": {}, + "anchor_epoch": {}, + "ellipsoid": {}, + "prime_meridian": {}, + "frame_reference_epoch": { "type": "number" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "ellipsoid", "frame_reference_epoch" ], + "additionalProperties": false + }, + + "dynamic_vertical_reference_frame": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", "enum": ["DynamicVerticalReferenceFrame"] }, + "name": {}, + "anchor": {}, + "anchor_epoch": {}, + "frame_reference_epoch": { "type": "number" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "frame_reference_epoch" ], + "additionalProperties": false + }, + + "ellipsoid": { + "type": "object", + "oneOf":[ + { + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["Ellipsoid"] }, + "name": { "type": "string" }, + "semi_major_axis": { "$ref": "#/definitions/value_in_metre_or_value_and_unit" }, + "semi_minor_axis": { "$ref": "#/definitions/value_in_metre_or_value_and_unit" }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "required" : [ "name", "semi_major_axis", "semi_minor_axis" ], + "additionalProperties": false + }, + { + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["Ellipsoid"] }, + "name": { "type": "string" }, + "semi_major_axis": { "$ref": "#/definitions/value_in_metre_or_value_and_unit" }, + "inverse_flattening": { "type": "number" }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + 
"required" : [ "name", "semi_major_axis", "inverse_flattening" ], + "additionalProperties": false + }, + { + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["Ellipsoid"] }, + "name": { "type": "string" }, + "radius": { "$ref": "#/definitions/value_in_metre_or_value_and_unit" }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "required" : [ "name", "radius" ], + "additionalProperties": false + } + ], + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ] + }, + + "engineering_crs": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", "enum": ["EngineeringCRS"] }, + "name": { "type": "string" }, + "datum": { "$ref": "#/definitions/engineering_datum" }, + "coordinate_system": { "$ref": "#/definitions/coordinate_system" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "datum" ], + "additionalProperties": false + }, + + "engineering_datum": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", "enum": ["EngineeringDatum"] }, + "name": { "type": "string" }, + "anchor": { "type": "string" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name" ], + "additionalProperties": false + }, + + "geodetic_crs": { + "type": "object", + "properties": { + "type": { "type": "string", "enum": ["GeodeticCRS", "GeographicCRS"] }, + "name": { "type": "string" }, + "datum": { + "oneOf": [ + { "$ref": "#/definitions/geodetic_reference_frame" }, + { "$ref": "#/definitions/dynamic_geodetic_reference_frame" } + ] + }, + "datum_ensemble": { "$ref": "#/definitions/datum_ensemble" }, + "coordinate_system": { "$ref": "#/definitions/coordinate_system" }, + "deformation_models": { + "type": "array", + "items": { "$ref": "#/definitions/deformation_model" } + }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name" ], + "description": "One and only one of datum and datum_ensemble must be provided", + "allOf": [ + { "$ref": "#/definitions/object_usage" }, + { "$ref": "#/definitions/one_and_only_one_of_datum_or_datum_ensemble" } + ], + "additionalProperties": false + }, + + "geodetic_reference_frame": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", "enum": ["GeodeticReferenceFrame"] }, + "name": { "type": "string" }, + "anchor": { "type": "string" }, + "anchor_epoch": { "type": "number" }, + "ellipsoid": { "$ref": "#/definitions/ellipsoid" }, + "prime_meridian": { "$ref": "#/definitions/prime_meridian" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "ellipsoid" ], + "additionalProperties": false + }, + + "geoid_model": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "interpolation_crs": { "$ref": "#/definitions/crs" }, + "id": { "$ref": "#/definitions/id" } + }, + "required" : [ "name" ], + "additionalProperties": 
false + }, + + "id": { + "type": "object", + "properties": { + "authority": { "type": "string" }, + "code": { + "oneOf": [ { "type": "string" }, { "type": "integer" } ] + }, + "version": { + "oneOf": [ { "type": "string" }, { "type": "number" } ] + }, + "authority_citation": { "type": "string" }, + "uri": { "type": "string" } + }, + "required" : [ "authority", "code" ], + "additionalProperties": false + }, + + "ids": { + "type": "array", + "items": { "$ref": "#/definitions/id" } + }, + + "method": { + "type": "object", + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["OperationMethod"]}, + "name": { "type": "string" }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "required" : [ "name" ], + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ], + "additionalProperties": false + }, + + "id_ids_mutually_exclusive": { + "not": { + "type": "object", + "required": [ "id", "ids" ] + } + }, + + "one_and_only_one_of_datum_or_datum_ensemble": { + "allOf": [ + { + "not": { + "type": "object", + "required": [ "datum", "datum_ensemble" ] + } + }, + { + "oneOf": [ + { "type": "object", "required": ["datum"] }, + { "type": "object", "required": ["datum_ensemble"] } + ] + } + ] + }, + + "meridian": { + "type": "object", + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["Meridian"] }, + "longitude": { "$ref": "#/definitions/value_in_degree_or_value_and_unit" }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "required" : [ "longitude" ], + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ], + "additionalProperties": false + }, + + "object_usage": { + "anyOf": [ + { + "type": "object", + "properties": { + "$schema" : { "type": "string" }, + "scope": { "type": "string" }, + "area": { "type": "string" }, + "bbox": { "$ref": "#/definitions/bbox" }, + "vertical_extent": { "$ref": "#/definitions/vertical_extent" }, + "temporal_extent": { "$ref": "#/definitions/temporal_extent" }, + "remarks": { "type": "string" }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ] + }, + { + "type": "object", + "properties": { + "$schema" : { "type": "string" }, + "usages": { "$ref": "#/definitions/usages" }, + "remarks": { "type": "string" }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ] + } + ] + }, + + "parameter_value": { + "type": "object", + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["ParameterValue"] }, + "name": { "type": "string" }, + "value": { + "oneOf": [ + { "type": "string" }, + { "type": "number" } + ] + }, + "unit": { "$ref": "#/definitions/unit" }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "required" : [ "name", "value" ], + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ], + "additionalProperties": false + }, + + "parametric_crs": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", "enum": ["ParametricCRS"] }, + "name": { "type": "string" }, + "datum": { "$ref": "#/definitions/parametric_datum" }, + "coordinate_system": { "$ref": "#/definitions/coordinate_system" }, + "$schema" : {}, + "scope": {}, + 
"area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "datum" ], + "additionalProperties": false + }, + + "parametric_datum": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", "enum": ["ParametricDatum"] }, + "name": { "type": "string" }, + "anchor": { "type": "string" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name" ], + "additionalProperties": false + }, + + "point_motion_operation": { + "$comment": "Not implemented in PROJ (at least as of PROJ 9.1)", + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", "enum": ["PointMotionOperation"] }, + "name": { "type": "string" }, + "source_crs": { "$ref": "#/definitions/crs" }, + "method": { "$ref": "#/definitions/method" }, + "parameters": { + "type": "array", + "items": { "$ref": "#/definitions/parameter_value" } + }, + "accuracy": { "type": "string" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "source_crs", "method", "parameters" ], + "additionalProperties": false + }, + + "prime_meridian": { + "type": "object", + "properties": { + "$schema" : { "type": "string" }, + "type": { "type": "string", "enum": ["PrimeMeridian"] }, + "name": { "type": "string" }, + "longitude": { "$ref": "#/definitions/value_in_degree_or_value_and_unit" }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "required" : [ "name" ], + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ], + "additionalProperties": false + }, + + "single_operation": { + "oneOf": [ + { "$ref": "#/definitions/conversion" }, + { "$ref": "#/definitions/transformation" }, + { "$ref": "#/definitions/point_motion_operation" } + ] + }, + + "projected_crs": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", + "enum": ["ProjectedCRS"] }, + "name": { "type": "string" }, + "base_crs": { "$ref": "#/definitions/geodetic_crs" }, + "conversion": { "$ref": "#/definitions/conversion" }, + "coordinate_system": { "$ref": "#/definitions/coordinate_system" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "base_crs", "conversion", "coordinate_system" ], + "additionalProperties": false + }, + + "temporal_crs": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", "enum": ["TemporalCRS"] }, + "name": { "type": "string" }, + "datum": { "$ref": "#/definitions/temporal_datum" }, + "coordinate_system": { "$ref": "#/definitions/coordinate_system" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "datum" ], + "additionalProperties": false + }, + + "temporal_datum": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", 
"enum": ["TemporalDatum"] }, + "name": { "type": "string" }, + "calendar": { "type": "string" }, + "time_origin": { "type": "string" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "calendar" ], + "additionalProperties": false + }, + + "temporal_extent": { + "type": "object", + "properties": { + "start": { "type": "string" }, + "end": { "type": "string" } + }, + "required" : [ "start", "end" ], + "additionalProperties": false + }, + + "transformation": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", "enum": ["Transformation"] }, + "name": { "type": "string" }, + "source_crs": { "$ref": "#/definitions/crs" }, + "target_crs": { "$ref": "#/definitions/crs" }, + "interpolation_crs": { "$ref": "#/definitions/crs" }, + "method": { "$ref": "#/definitions/method" }, + "parameters": { + "type": "array", + "items": { "$ref": "#/definitions/parameter_value" } + }, + "accuracy": { "type": "string" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name", "source_crs", "target_crs", "method", "parameters" ], + "additionalProperties": false + }, + + "unit": { + "oneOf": [ + { + "type": "string", + "enum": ["metre", "degree", "unity"] + }, + { + "type": "object", + "properties": { + "type": { "type": "string", + "enum": ["LinearUnit", "AngularUnit", "ScaleUnit", + "TimeUnit", "ParametricUnit", "Unit"] }, + "name": { "type": "string" }, + "conversion_factor": { "type": "number" }, + "id": { "$ref": "#/definitions/id" }, + "ids": { "$ref": "#/definitions/ids" } + }, + "required" : [ "type", "name" ], + "allOf": [ + { "$ref": "#/definitions/id_ids_mutually_exclusive" } + ], + "additionalProperties": false + } + ] + }, + + "usages": { + "type": "array", + "items": { + "type": "object", + "properties": { + "scope": { "type": "string" }, + "area": { "type": "string" }, + "bbox": { "$ref": "#/definitions/bbox" }, + "vertical_extent": { "$ref": "#/definitions/vertical_extent" }, + "temporal_extent": { "$ref": "#/definitions/temporal_extent" } + }, + "additionalProperties": false + } + }, + + "value_and_unit": { + "type": "object", + "properties": { + "value": { "type": "number" }, + "unit": { "$ref": "#/definitions/unit" } + }, + "required" : [ "value", "unit" ], + "additionalProperties": false + }, + + "value_in_degree_or_value_and_unit": { + "oneOf": [ + { "type": "number" }, + { "$ref": "#/definitions/value_and_unit" } + ] + }, + + "value_in_metre_or_value_and_unit": { + "oneOf": [ + { "type": "number" }, + { "$ref": "#/definitions/value_and_unit" } + ] + }, + + "vertical_crs": { + "type": "object", + "properties": { + "type": { "type": "string", "enum": ["VerticalCRS"] }, + "name": { "type": "string" }, + "datum": { + "oneOf": [ + { "$ref": "#/definitions/vertical_reference_frame" }, + { "$ref": "#/definitions/dynamic_vertical_reference_frame" } + ] + }, + "datum_ensemble": { "$ref": "#/definitions/datum_ensemble" }, + "coordinate_system": { "$ref": "#/definitions/coordinate_system" }, + "geoid_model": { "$ref": "#/definitions/geoid_model" }, + "geoid_models": { + "type": "array", + "items": { "$ref": "#/definitions/geoid_model" } + }, + "deformation_models": { + "type": "array", + "items": { "$ref": "#/definitions/deformation_model" } + }, + 
"$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name"], + "description": "One and only one of datum and datum_ensemble must be provided", + "allOf": [ + { "$ref": "#/definitions/object_usage" }, + { "$ref": "#/definitions/one_and_only_one_of_datum_or_datum_ensemble" }, + { + "not": { + "type": "object", + "required": [ "geoid_model", "geoid_models" ] + } + } + ], + "additionalProperties": false + }, + + "vertical_extent": { + "type": "object", + "properties": { + "minimum": { "type": "number" }, + "maximum": { "type": "number" }, + "unit": { "$ref": "#/definitions/unit" } + }, + "required" : [ "minimum", "maximum" ], + "additionalProperties": false + }, + + "vertical_reference_frame": { + "type": "object", + "allOf": [{ "$ref": "#/definitions/object_usage" }], + "properties": { + "type": { "type": "string", "enum": ["VerticalReferenceFrame"] }, + "name": { "type": "string" }, + "anchor": { "type": "string" }, + "anchor_epoch": { "type": "number" }, + "$schema" : {}, + "scope": {}, + "area": {}, + "bbox": {}, + "vertical_extent": {}, + "temporal_extent": {}, + "usages": {}, + "remarks": {}, + "id": {}, "ids": {} + }, + "required" : [ "name" ], + "additionalProperties": false + } + + } +} diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/triangulation.schema.json b/.venv/lib/python3.12/site-packages/fiona/proj_data/triangulation.schema.json new file mode 100644 index 00000000..8203f5d9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/triangulation.schema.json @@ -0,0 +1,214 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "description": "Schema for triangulation based transformation", + "type": "object", + "properties": { + "file_type": { + "type": "string", + "enum": [ + "triangulation_file" + ], + "description": "File type. Always \"triangulation_file\"" + }, + "format_version": { + "type": "string", + "enum": [ + "1.0", "1.1" + ] + }, + "name": { + "type": "string", + "description": "A brief descriptive name of the triangulation" + }, + "version": { + "type": "string", + "description": "A string identifying the version of the triangulation. 
The format for specifying version will be defined by the agency responsible for the triangulation"
+    },
+    "publication_date": {
+      "$ref": "#/definitions/datetime",
+      "description": "The date on which this version of the triangulation was published (or possibly the date on which it takes effect?)"
+    },
+    "fallback_strategy": {
+      "type": "string",
+      "enum": [
+        "none",
+        "nearest_side",
+        "nearest_centroid"
+      ]
+    },
+    "license": {
+      "type": "string",
+      "description": "License under which the file is published"
+    },
+    "description": {
+      "type": "string",
+      "description": "A text description of the file"
+    },
+    "authority": {
+      "type": "object",
+      "description": "Basic information about the agency responsible for the data set",
+      "properties": {
+        "name": {
+          "type": "string",
+          "description": "The name of the agency"
+        },
+        "url": {
+          "type": "string",
+          "description": "The URL of the agency website",
+          "format": "uri"
+        },
+        "address": {
+          "type": "string",
+          "description": "The postal address of the agency"
+        },
+        "email": {
+          "type": "string",
+          "description": "An email contact address for the agency",
+          "format": "email"
+        }
+      },
+      "required": [
+        "name"
+      ],
+      "additionalProperties": false
+    },
+    "links": {
+      "type": "array",
+      "description": "Links to related information",
+      "items": {
+        "type": "object",
+        "properties": {
+          "href": {
+            "type": "string",
+            "description": "The URL holding the information",
+            "format": "uri"
+          },
+          "rel": {
+            "type": "string",
+            "description": "The relationship to the dataset. Proposed relationships are:\n- \"about\": a web page for human consumption describing the model\n- \"source\": the authoritative source data from which the triangulation is built.\n- \"metadata\": ISO 19115 XML metadata regarding the triangulation."
+          },
+          "type": {
+            "type": "string",
+            "description": "MIME type"
+          },
+          "title": {
+            "type": "string",
+            "description": "Description of the link"
+          }
+        },
+        "required": [
+          "href"
+        ],
+        "additionalProperties": false
+      }
+    },
+    "extent": {
+      "$ref": "#/definitions/extent",
+      "description": "Defines the region within which the triangulation is defined. This should be a bounding box defined as an array of [west,south,east,north] coordinate values in an unspecified geographic CRS. This bounding box should be seen as approximate, given that the triangulation may be defined with projected coordinates, and also because some triangulations may not cover the whole bounding box."
+    },
+    "input_crs": {
+      "$ref": "#/definitions/crs",
+      "description": "String identifying the CRS of source coordinates in the vertices. Typically \"EPSG:XXXX\". If the transformation is for the vertical component, this should be the code for a compound CRS (can be EPSG:XXXX+YYYY where XXXX is the code of the horizontal CRS and YYYY the code of the vertical CRS). For example, for the KKJ->ETRS89 transformation, this is EPSG:2393 (\"KKJ / Finland Uniform Coordinate System\"). The input coordinates are assumed to be passed in the \"normalized for visualisation\" / \"GIS friendly\" order, that is longitude, latitude for geographic coordinates and easting, northing for projected coordinates."
+    },
+    "output_crs": {
+      "$ref": "#/definitions/crs",
+      "description": "String identifying the CRS of target coordinates in the vertices. Typically \"EPSG:XXXX\". If the transformation is for the vertical component, this should be the code for a compound CRS (can be EPSG:XXXX+YYYY where XXXX is the code of the horizontal CRS and YYYY the code of the vertical CRS). 
For example, for the KKJ->ETRS89 transformation, this is EPSG:3067 (\"ETRS89 / TM35FIN(E,N)\"). The output coordinates will be returned in the \"normalized for visualisation\" / \"GIS friendly\" order, that is longitude, latitude for geographic coordinates and easting, northing for projected coordinates."
+    },
+    "transformed_components": {
+      "type": "array",
+      "description": "Specify which components of the coordinates are transformed. Either \"horizontal\", \"vertical\" or both",
+      "minItems": 1,
+      "maxItems": 2,
+      "items": {
+        "type": "string",
+        "enum": [
+          "horizontal",
+          "vertical"
+        ]
+      }
+    },
+    "vertices_columns": {
+      "type": "array",
+      "description": "Specify the name of the columns of the rows in the \"vertices\" array. There must be exactly as many elements in \"vertices_columns\" as in a row of \"vertices\". The following names have a special meaning: \"source_x\", \"source_y\", \"target_x\", \"target_y\", \"source_z\", \"target_z\" and \"offset_z\". \"source_x\" and \"source_y\" are compulsory. \"source_x\" is for the source longitude (in degrees) or easting. \"source_y\" is for the source latitude (in degrees) or northing. \"target_x\" and \"target_y\" are compulsory when \"horizontal\" is specified in \"transformed_components\". (\"source_z\" and \"target_z\") or \"offset_z\" are compulsory when \"vertical\" is specified in \"transformed_components\".",
+      "minItems": 3,
+      "items": {
+        "type": "string"
+      }
+    },
+    "triangles_columns": {
+      "type": "array",
+      "description": "Specify the name of the columns of the rows in the \"triangles\" array. There must be exactly as many elements in \"triangles_columns\" as in a row of \"triangles\". The following names have a special meaning: \"idx_vertex1\", \"idx_vertex2\", \"idx_vertex3\". They are compulsory.",
+      "minItems": 3,
+      "items": {
+        "type": "string"
+      }
+    },
+    "vertices": {
+      "type": "array",
+      "description": "An array whose items are themselves arrays with as many columns as described in \"vertices_columns\"",
+      "items": {
+        "type": "array"
+      }
+    },
+    "triangles": {
+      "type": "array",
+      "description": "An array whose items are themselves arrays with as many columns as described in \"triangles_columns\". The value of the \"idx_vertexN\" columns must be indices (between 0 and len(\"vertices\")-1) of items of the \"vertices\" array",
+      "items": {
+        "type": "array"
+      }
+    }
+  },
+  "required": [
+    "file_type",
+    "format_version",
+    "transformed_components",
+    "vertices_columns",
+    "triangles_columns",
+    "vertices",
+    "triangles"
+  ],
+  "additionalProperties": false,
+  "definitions": {
+    "crs": {
+      "type": "string"
+    },
+    "datetime": {
+      "type": "string",
+      "format": "date-time",
+      "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+    },
+    "extent": {
+      "type": "object",
+      "properties": {
+        "type": {
+          "type": "string",
+          "enum": [
+            "bbox"
+          ]
+        },
+        "name" : {
+          "type": "string",
+          "description": "Name of the extent (e.g. 
\"Finland - mainland south of 66°N\")" + }, + "parameters": { + "type": "object", + "properties": { + "bbox": { + "type": "array", + "minItems": 4, + "maxItems": 4, + "items": { + "type": "number" + } + } + } + } + }, + "required": [ + "type", + "parameters" + ], + "additionalProperties": false + } + } +} diff --git a/.venv/lib/python3.12/site-packages/fiona/proj_data/world b/.venv/lib/python3.12/site-packages/fiona/proj_data/world new file mode 100644 index 00000000..9119eed8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/proj_data/world @@ -0,0 +1,214 @@ +# SCCSID @(#)world 1.2 95/08/05 GIE REL +# proj +init files for various non-U.S. coordinate systems. +# + +lastupdate=2016-12-12 + + # Swiss Coordinate System + +proj=somerc +lat_0=46d57'8.660"N +lon_0=7d26'22.500"E + +ellps=bessel +x_0=600000 +y_0=200000 + +k_0=1. no_defs <> + # Laborde grid for Madagascar + proj=labrd ellps=intl lon_0=46d26'13.95E lat_0=18d54S + azi=18d54 k_0=.9995 x_0=400000 y_0=800000 + no_defs <> + # New Zealand Map Grid (NZMG) + proj=nzmg # Projection unique to N.Z. so all factors fixed + no_defs <> +# Secondary grids DMA TM8358.1, p. 4.3 + # British West Indies + proj=tmerc ellps=clrk80 lon_0=62W + x_0=400000 k_0=0.9995 + no_defs <> + # Costa Rica Norte + proj=lcc ellps=clrk66 lat_1=10d28N lon_0=84d20W + x_0=500000 y_0=217820.522 k_0=0.99995696 + no_defs <> + # Costa Rica Sud + proj=lcc ellps=clrk66 lat_1=9dN lon_0=83d40W + x_0=500000 y_0=327987.436 k_0=0.99995696 + no_defs <> + # Cuba Norte + proj=lcc ellps=clrk66 lat_1=22d21N lon_0=81dW + x_0=500000 y_0=280296.016 k_0=0.99993602 + no_defs <> + # Cuba Sud + proj=lcc ellps=clrk66 lat_1=20d43'N lon_0=76d50'W + x_0=500000 y_0=229126.939 k_0=0.99994848 + no_defs <> + # Dominican Republic + proj=lcc ellps=clrk66 lat_1=18d49'N lon_0=71d30'W + x_0=500000 y_0=277063.657 k_0=0.99991102 + no_defs <> + # Egypt + proj=tmerc ellps=intl lon_0=25d30'E x_0=300000 k_0=0.99985 + no_defs <> + # Egypt + proj=tmerc ellps=intl lon_0=28d30'E x_0=300000 k_0=0.99985 + no_defs <> + # Egypt + proj=tmerc ellps=intl lon_0=31d30'E x_0=300000 k_0=0.99985 + no_defs <> + # Egypt + proj=tmerc ellps=intl lon_0=34d30'E x_0=300000 k_0=0.99985 + no_defs <> + # Egypt + proj=tmerc ellps=intl lon_0=37d30'E x_0=300000 k_0=0.99985 + no_defs <> + # El Salvador + proj=lcc ellps=clrk66 lat_1=13d47'N lon_0=89dW + x_0=500000 y_0=295809.184 k_0=0.99996704 + no_defs <> + # Guatemala Norte + proj=lcc ellps=clrk66 lat_1=16d49'N lon_0=90d20'W + x_0=500000 y_0=292209.579 k_0=0.99992226 + no_defs <> + # Guatemala Sud + proj=lcc ellps=clrk66 lat_1=14d54'N lon_0=90d20'W + x_0=500000 y_0=325992.681 k_0=0.99989906 + no_defs <> + # Haiti + proj=lcc ellps=clrk66 lat_1=18d49'N lon_0=71d30'W + x_0=500000 y_0=277063.657 k_0=0.99991102 + no_defs <> + # Honduras Norte + proj=lcc ellps=clrk66 lat_1=15d30'N lon_0=86d10'W + x_0=500000 y_0=296917.439 k_0=0.99993273 + no_defs <> + # Honduras Sud + proj=lcc ellps=clrk66 lat_1=13d47'N lon_0=87d10'W + x_0=500000 y_0=296215.903 k_0=0.99995140 + no_defs <> + # Levant + proj=lcc ellps=clrk66 lat_1=34d39'N lon_0=37d21'E + x_0=500000 y_0=300000 k_0=0.9996256 + no_defs <> + # Nicaragua Norte + proj=lcc ellps=clrk66 lat_1=13d52'N lon_0=85d30'W + x_0=500000 y_0=359891.816 k_0=0.99990314 + no_defs <> + # Nicaragua Sud + proj=lcc ellps=clrk66 lat_1=11d40'N lon_0=85d30'W + x_0=500000 y_0=288876.327 k_0=0.99992228 + no_defs <> + # Northwest Africa + proj=lcc ellps=clrk80 lat_1=34dN lon_0=0dE + x_0=1000000 y_0=500000 k_0=0.99908 + no_defs <> + # Palestine + proj=tmerc a=6378300.79 
rf=293.488307656
+ lat_0=31d44'2.749"N lon_0=35d12'43.490"E
+ x_0=170251.555 y_0=126867.909 k_0=1
+ no_defs <>
+ # Panama
+ proj=lcc ellps=clrk66 lat_1=8d25'N lon_0=80dW
+ x_0=500000 y_0=294865.303 k_0=0.99989909
+ no_defs <>
+# other grids in DMA TM8358.1
+ # British National Grid
+ proj=tmerc ellps=airy lat_0=49dN lon_0=2dW
+ k_0=0.9996012717 x_0=400000 y_0=-100000
+ no_defs <>
+ # West Malaysian RSO Grid
+ proj=omerc a=6377295.66402 rf=300.8017 alpha=323d01'32.846"
+ no_uoff rot_conv lonc=102d15E lat_0=4dN k_0=0.99984 x_0=804670.240 y_0=0
+ no_defs <>
+ # India Zone I
+ proj=lcc ellps=everest lon_0=68E lat_1=32d30'N
+ x_0=2743185.69 y_0=914395.23 k_0=.998786408
+ no_defs <>
+ # India Zone IIA
+ proj=lcc ellps=everest lon_0=74E lat_1=26N
+ x_0=2743185.69 y_0=914395.23 k_0=.998786408
+ no_defs <>
+ # India Zone IIB
+ proj=lcc ellps=everest lon_0=90E lat_1=26N
+ x_0=2743185.69 y_0=914395.23 k_0=.998786408
+ no_defs <>
+ # India Zone IIIA
+ proj=lcc ellps=everest lon_0=80E lat_1=19N
+ x_0=2743185.69 y_0=914395.23 k_0=.998786408
+ no_defs <>
+ # India Zone IIIB
+ proj=lcc ellps=everest lon_0=100E lat_1=19N
+ x_0=2743185.69 y_0=914395.23 k_0=.998786408
+ no_defs <>
+ # India Zone IVA
+ proj=lcc ellps=everest lon_0=80E lat_1=12N
+ x_0=2743185.69 y_0=914395.23 k_0=.998786408
+ no_defs <>
+ # India Zone IVB
+ proj=lcc ellps=everest lon_0=104E lat_1=12N
+ x_0=2743185.69 y_0=914395.23 k_0=.998786408
+ no_defs <>
+ # Ceylon Belt
+ proj=tmerc ellps=everest lon_0=80d46'18.160"E lat_0=7d0'1.729"N
+ x_0=160933.56048 y_0=160933.56048 k_0=1.
+ no_defs <>
+ # Irish Transverse Mercator Grid
+ proj=tmerc ellps=mod_airy lat_0=53d30'N lon_0=8W
+ x_0=200000 y_0=250000 k_0=1.000035
+ no_defs <>
+ # Netherlands East Indies Equatorial Zone
+ proj=merc ellps=bessel lon_0=110E
+ x_0=3900000 y_0=900000 k_0=0.997
+ no_defs <>
+ # Nord Algerie Grid
+ proj=lcc ellps=clrk80 lon_0=2d42E lat_0=36N
+ x_0=500000 y_0=300000 k_0=0.999625544
+ no_defs <>
+ # Nord Maroc Grid
+ proj=lcc ellps=clrk80 lon_0=5d24'W lat_0=33d18'N
+ x_0=500000 y_0=300000 k_0=0.999625769
+ no_defs <>
+ # Nord Tunisie Grid
+ proj=lcc ellps=clrk80 lon_0=9d54E lat_0=36N
+ x_0=500000 y_0=300000 k_0=0.999625544
+ no_defs <>
+ # Sud Algerie Grid
+ proj=lcc ellps=clrk80 lon_0=2d42E lat_0=33d18'N
+ x_0=500000 y_0=300000 k_0=0.999625769
+ no_defs <>
+ # Sud Maroc Grid
+ proj=lcc ellps=clrk80 lon_0=5d24W lat_0=29d42'N
+ x_0=500000 y_0=300000 k_0=0.999615596
+ no_defs <>
+ # Sud Tunisie Grid
+ proj=lcc ellps=clrk80 lon_0=9d54'E lat_0=33d18'N
+ x_0=500000 y_0=300000 k_0=0.999625769
+ no_defs <>
+# Gauss Krueger Grid for Germany
+#
+# The first figure of the easting is lon_0 divided by 3
+# ( 2 for 6d0E, 3 for 9d0E, 4 for 12d0E)
+# For conversions you have to remove this first figure
+# and convert northings and eastings from km to metres.
+# The other way round, divide by 1000 and add the figure.
+# I made 3 entries for the officially used grids in Germany
+#
+#
+# And again (translated from German):
+# The first digit of the easting identifies the central meridian
+# and is its longitude in degrees divided by 3.
+# To convert to degrees, the first digit of the easting must be
+# removed and, where necessary, km converted to metres.
+# To convert back to Gauss Krueger coordinates, prepend the digit
+# for the central meridian to the easting accordingly.
+# I have added one entry for each of the three central meridians
+# commonly used in Germany.
+#
+#
+# added by Michael Goepel
+#
+ # Gauss Krueger Grid for Germany
+ proj=tmerc ellps=bessel lon_0=6d0E lat_0=0
+ x_0=500000
+ no_defs<>
+ # Gauss Krueger Grid for Germany
+ proj=tmerc ellps=bessel lon_0=9d0E lat_0=0
+ x_0=500000
+ no_defs<>
+ # Gauss Krueger Grid for Germany
+ proj=tmerc ellps=bessel lon_0=12d0E lat_0=0
+ x_0=500000
+ no_defs<>
+ 
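To make the leading-digit arithmetic in the Gauss Krueger comment above concrete, here is a small editorial sketch; it is not part of the shipped world file, and the helper name is invented for illustration:

# Sketch of the Gauss Krueger easting convention described above:
# the first digit of an easting encodes the central meridian
# (digit * 3 degrees east); the remainder is the local easting in metres.
def split_gk_easting(easting_m):
    digit = int(easting_m // 1_000_000)   # e.g. 3 for 3500000
    lon_0 = digit * 3                     # central meridian, degrees east
    return lon_0, easting_m - digit * 1_000_000

# split_gk_easting(3500000.0) returns (9, 500000.0), matching the 9d0E entry above.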
diff --git a/.venv/lib/python3.12/site-packages/fiona/rfc3339.py b/.venv/lib/python3.12/site-packages/fiona/rfc3339.py
new file mode 100644
index 00000000..d736745d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/rfc3339.py
@@ -0,0 +1,122 @@
+# Fiona's date and time is founded on RFC 3339.
+#
+# OGR knows 3 time "zones": GMT, "local time", and "unknown". Fiona, when
+# writing, will convert times with a timezone offset to GMT (Z) and otherwise
+# will write times with the unknown zone.
+
+import logging
+import re
+
+log = logging.getLogger("Fiona")
+
+pattern_date = re.compile(r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)")
+pattern_time = re.compile(
+    r"(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?")
+pattern_datetime = re.compile(
+    r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)(T)?(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?")
+
+
+class group_accessor:
+    def __init__(self, m):
+        self.match = m
+
+    def group(self, i):
+        try:
+            return self.match.group(i) or 0
+        except IndexError:
+            return 0
+
+
+def parse_time(text):
+    """Given a time, returns a datetime tuple
+
+    Parameters
+    ----------
+    text: string to be parsed
+
+    Returns
+    -------
+    (int, int, int, int, int, int, int, int):
+        datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None)
+
+    """
+    match = re.search(pattern_time, text)
+    if match is None:
+        raise ValueError(f"Time data '{text}' does not match pattern")
+    g = group_accessor(match)
+    log.debug("Match groups: %s", match.groups())
+
+    if g.group(8) == '-':
+        tz = -1.0 * (int(g.group(9)) * 60 + int(g.group(11)))
+    elif g.group(8) == '+':
+        tz = int(g.group(9)) * 60 + int(g.group(11))
+    else:
+        tz = None
+
+    return (0, 0, 0,
+            int(g.group(1)),
+            int(g.group(3)),
+            int(g.group(5)),
+            int(1000000.0 * float(g.group(6))),
+            tz
+            )
+
+
+def parse_date(text):
+    """Given a date, returns a datetime tuple
+
+    Parameters
+    ----------
+    text: string to be parsed
+
+    Returns
+    -------
+    (int, int, int, int, int, int, int, int):
+        datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None)
+    """
+    match = re.search(pattern_date, text)
+    if match is None:
+        raise ValueError(f"Time data '{text}' does not match pattern")
+    g = group_accessor(match)
+    log.debug("Match groups: %s", match.groups())
+    return (
+        int(g.group(1)),
+        int(g.group(3)),
+        int(g.group(5)),
+        0, 0, 0, 0, None)
+
+
+def parse_datetime(text):
+    """Given a datetime, returns a datetime tuple
+
+    Parameters
+    ----------
+    text: string to be parsed
+
+    Returns
+    -------
+    (int, int, int, int, int, int, int, int):
+        datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None)
+    """
+    match = re.search(pattern_datetime, text)
+    if match is None:
+        raise ValueError(f"Time data '{text}' does not match pattern")
+    g = group_accessor(match)
+    log.debug("Match groups: %s", match.groups())
+
+    if g.group(14) == '-':
+        tz = -1.0 * (int(g.group(15)) * 60 + int(g.group(17)))
+    elif g.group(14) == '+':
+        tz = int(g.group(15)) * 60 + int(g.group(17))
+    else:
+        tz = None
+
+    return (
+        int(g.group(1)),
+        int(g.group(3)),
+        int(g.group(5)),
+ 
int(g.group(7)), + int(g.group(9)), + int(g.group(11)), + int(1000000.0 * float(g.group(12))), + tz) diff --git a/.venv/lib/python3.12/site-packages/fiona/schema.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/fiona/schema.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 00000000..9295123f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/fiona/schema.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/fiona/session.py b/.venv/lib/python3.12/site-packages/fiona/session.py new file mode 100644 index 00000000..5e169976 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/session.py @@ -0,0 +1,655 @@ +"""Abstraction for sessions in various clouds.""" + +import logging +import os +import warnings + +from fiona._path import _parse_path, _UnparsedPath + +log = logging.getLogger(__name__) + +try: + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + import boto3 +except ImportError: + log.debug("Could not import boto3, continuing with reduced functionality.") + boto3 = None + + +class Session: + """Base for classes that configure access to secured resources. + + Attributes + ---------- + credentials : dict + Keys and values for session credentials. + + Notes + ----- + This class is not intended to be instantiated. + + """ + + @classmethod + def hascreds(cls, config): + """Determine if the given configuration has proper credentials + + Parameters + ---------- + cls : class + A Session class. + config : dict + GDAL configuration as a dict. + + Returns + ------- + bool + + """ + return NotImplemented + + def get_credential_options(self): + """Get credentials as GDAL configuration options + + Returns + ------- + dict + + """ + return NotImplemented + + @staticmethod + def from_foreign_session(session, cls=None): + """Create a session object matching the foreign `session`. + + Parameters + ---------- + session : obj + A foreign session object. + cls : Session class, optional + The class to return. + + Returns + ------- + Session + + """ + if not cls: + return DummySession() + else: + return cls(session) + + @staticmethod + def cls_from_path(path): + """Find the session class suited to the data at `path`. + + Parameters + ---------- + path : str + A dataset path or identifier. + + Returns + ------- + class + + """ + if not path: + return DummySession + + path = _parse_path(path) + + if isinstance(path, _UnparsedPath) or path.is_local: + return DummySession + + elif ( + path.scheme == "s3" or "amazonaws.com" in path.path + ) and "X-Amz-Signature" not in path.path: + if boto3 is not None: + return AWSSession + else: + log.info("boto3 not available, falling back to a DummySession.") + return DummySession + + elif path.scheme == "oss" or "aliyuncs.com" in path.path: + return OSSSession + + elif path.path.startswith("/vsiswift/"): + return SwiftSession + + elif path.scheme == "az": + return AzureSession + + # This factory can be extended to other cloud providers here. + # elif path.scheme == "cumulonimbus": # for example. + # return CumulonimbusSession(*args, **kwargs) + + else: + return DummySession + + @staticmethod + def from_path(path, *args, **kwargs): + """Create a session object suited to the data at `path`. + + Parameters + ---------- + path : str + A dataset path or identifier. + args : sequence + Positional arguments for the foreign session constructor. + kwargs : dict + Keyword arguments for the foreign session constructor. 
+
+        Returns
+        -------
+        Session
+
+        """
+        return Session.cls_from_path(path)(*args, **kwargs)
+
+    @staticmethod
+    def aws_or_dummy(*args, **kwargs):
+        """Create an AWSSession if boto3 is available, else DummySession
+
+        Parameters
+        ----------
+        args : sequence
+            Positional arguments for the session constructor.
+        kwargs : dict
+            Keyword arguments for the session constructor.
+
+        Returns
+        -------
+        Session
+        """
+        if boto3 is not None:
+            return AWSSession(*args, **kwargs)
+        else:
+            return DummySession(*args, **kwargs)
+
+    @staticmethod
+    def from_environ(*args, **kwargs):
+        """Create a session object suited to the environment.
+
+        Parameters
+        ----------
+        args : sequence
+            Positional arguments for the session constructor.
+        kwargs : dict
+            Keyword arguments for the session constructor.
+
+        Returns
+        -------
+        Session
+        """
+        try:
+            session = Session.aws_or_dummy(*args, **kwargs)
+            session.credentials
+        except RuntimeError:
+            log.warning(
+                "Credentials in environment have expired. Creating a DummySession."
+            )
+            session = DummySession(*args, **kwargs)
+        return session
+
+
+class DummySession(Session):
+    """A dummy session.
+
+    Attributes
+    ----------
+    credentials : dict
+        The session credentials.
+
+    """
+
+    def __init__(self, *args, **kwargs):
+        self._session = None
+        self.credentials = {}
+
+    @classmethod
+    def hascreds(cls, config):
+        """Determine if the given configuration has proper credentials
+
+        Parameters
+        ----------
+        cls : class
+            A Session class.
+        config : dict
+            GDAL configuration as a dict.
+
+        Returns
+        -------
+        bool
+
+        """
+        return True
+
+    def get_credential_options(self):
+        """Get credentials as GDAL configuration options
+
+        Returns
+        -------
+        dict
+
+        """
+        return {}
+
+
+class AWSSession(Session):
+    """Configures access to secured resources stored in AWS S3.
+    """
+
+    def __init__(
+        self,
+        session=None,
+        aws_unsigned=False,
+        aws_access_key_id=None,
+        aws_secret_access_key=None,
+        aws_session_token=None,
+        region_name=None,
+        profile_name=None,
+        endpoint_url=None,
+        requester_pays=False,
+    ):
+        """Create a new AWS session
+
+        Parameters
+        ----------
+        session : optional
+            A boto3 session object.
+        aws_unsigned : bool, optional (default: False)
+            If True, requests will be unsigned.
+        aws_access_key_id : str, optional
+            An access key id, as per boto3.
+        aws_secret_access_key : str, optional
+            A secret access key, as per boto3.
+        aws_session_token : str, optional
+            A session token, as per boto3.
+        region_name : str, optional
+            A region name, as per boto3.
+        profile_name : str, optional
+            A shared credentials profile name, as per boto3. 
+ endpoint_url: str, optional + An endpoint_url, as per GDAL's AWS_S3_ENDPOINT + requester_pays : bool, optional + True if the requester agrees to pay transfer costs (default: + False) + + """ + if session: + self._session = session + else: + self._session = boto3.Session( + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + region_name=region_name, + profile_name=profile_name) + + self.requester_pays = requester_pays + self.unsigned = bool(os.getenv("AWS_NO_SIGN_REQUEST", aws_unsigned)) + self.endpoint_url = endpoint_url + self._creds = ( + self._session.get_credentials() + if not self.unsigned and self._session + else None + ) + + @classmethod + def hascreds(cls, config): + """Determine if the given configuration has proper credentials + + Parameters + ---------- + cls : class + A Session class. + config : dict + GDAL configuration as a dict. + + Returns + ------- + bool + + """ + return {"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"}.issubset(config.keys()) + + @property + def credentials(self): + """The session credentials as a dict""" + res = {} + if self._creds: # pragma: no branch + frozen_creds = self._creds.get_frozen_credentials() + if frozen_creds.access_key: # pragma: no branch + res["aws_access_key_id"] = frozen_creds.access_key + if frozen_creds.secret_key: # pragma: no branch + res["aws_secret_access_key"] = frozen_creds.secret_key + if frozen_creds.token: + res["aws_session_token"] = frozen_creds.token + if self._session.region_name: + res["aws_region"] = self._session.region_name + if self.requester_pays: + res["aws_request_payer"] = "requester" + if self.endpoint_url: + res["aws_s3_endpoint"] = self.endpoint_url + return res + + def get_credential_options(self): + """Get credentials as GDAL configuration options + + Returns + ------- + dict + + """ + if self.unsigned: + opts = {"AWS_NO_SIGN_REQUEST": "YES"} + if "aws_region" in self.credentials: + opts["AWS_REGION"] = self.credentials["aws_region"] + return opts + else: + return {k.upper(): v for k, v in self.credentials.items()} + + +class GSSession(Session): + """Configures access to secured resources stored in Google Cloud Storage + """ + def __init__(self, google_application_credentials=None): + """Create new Google Cloud Storage session + + Parameters + ---------- + google_application_credentials: string + Path to the google application credentials JSON file. + + """ + self._creds = {} + if google_application_credentials is not None: + self._creds['google_application_credentials'] = google_application_credentials + + @classmethod + def hascreds(cls, config): + """Determine if the given configuration has proper credentials + + Parameters + ---------- + cls : class + A Session class. + config : dict + GDAL configuration as a dict. 
+
+        Returns
+        -------
+        bool
+
+        """
+        return 'GOOGLE_APPLICATION_CREDENTIALS' in config
+
+    @property
+    def credentials(self):
+        """The session credentials as a dict"""
+        return self._creds
+
+    def get_credential_options(self):
+        """Get credentials as GDAL configuration options
+
+        Returns
+        -------
+        dict
+
+        """
+        return {k.upper(): v for k, v in self.credentials.items()}
+
+
+class OSSSession(Session):
+    """Configures access to secured resources stored in Alibaba Cloud OSS."""
+
+    def __init__(
+        self, oss_access_key_id=None, oss_secret_access_key=None, oss_endpoint=None
+    ):
+        """Create new Alibaba Cloud OSS session
+
+        Parameters
+        ----------
+        oss_access_key_id: string, optional (default: None)
+            An access key id
+        oss_secret_access_key: string, optional (default: None)
+            A secret access key
+        oss_endpoint: string, optional (default: None)
+            The region endpoint attached to the bucket
+
+        """
+        self._creds = {
+            "oss_access_key_id": oss_access_key_id,
+            "oss_secret_access_key": oss_secret_access_key,
+            "oss_endpoint": oss_endpoint,
+        }
+
+    @classmethod
+    def hascreds(cls, config):
+        """Determine if the given configuration has proper credentials
+
+        Parameters
+        ----------
+        cls : class
+            A Session class.
+        config : dict
+            GDAL configuration as a dict.
+
+        Returns
+        -------
+        bool
+
+        """
+        return {"OSS_ACCESS_KEY_ID", "OSS_SECRET_ACCESS_KEY"}.issubset(config.keys())
+
+    @property
+    def credentials(self):
+        """The session credentials as a dict"""
+        return self._creds
+
+    def get_credential_options(self):
+        """Get credentials as GDAL configuration options
+
+        Returns
+        -------
+        dict
+
+        """
+        return {k.upper(): v for k, v in self.credentials.items()}
+
+
+class SwiftSession(Session):
+    """Configures access to secured resources stored in OpenStack Swift Object Storage."""
+
+    def __init__(
+        self,
+        session=None,
+        swift_storage_url=None,
+        swift_auth_token=None,
+        swift_auth_v1_url=None,
+        swift_user=None,
+        swift_key=None,
+    ):
+        """Create new OpenStack Swift Object Storage Session.
+
+        Three methods are possible:
+        1. Create a session with the swiftclient library.
+        2. Pass SWIFT_STORAGE_URL and SWIFT_AUTH_TOKEN directly (the
+           method recommended by the GDAL docs).
+        3. Pass SWIFT_AUTH_V1_URL, SWIFT_USER and SWIFT_KEY (this
+           depends on the swiftclient library).
+
+        Parameters
+        ----------
+        session: optional
+            A swiftclient connection object
+        swift_storage_url: string, optional
+            the storage URL
+        swift_auth_token: string, optional
+            the value of the x-auth-token authorization token
+        swift_auth_v1_url: string, optional
+            authentication URL
+        swift_user: string, optional
+            user name to authenticate as
+        swift_key: string, optional
+            key/password to authenticate with
+
+        Examples
+        --------
+        >>> import fiona
+        >>> from fiona.session import SwiftSession
+        >>> from swiftclient.client import Connection
+        >>> fp = '/vsiswift/bucket/key.shp'
+        >>> conn = Connection(
+        ...     authurl='http://127.0.0.1:7777/auth/v1.0',
+        ...     user='test:tester',
+        ...     key='testing'
+        ... )
+        >>> session = SwiftSession(conn)
+        >>> with fiona.Env(session=session):
+        ...     with fiona.open(fp) as src:
+        ...         print(src.profile)
+
+        """
+        if swift_storage_url and swift_auth_token:
+            self._creds = {
+                "swift_storage_url": swift_storage_url,
+                "swift_auth_token": swift_auth_token,
+            }
+        else:
+            from swiftclient.client import Connection
+
+            if session:
+                self._session = session
+            else:
+                self._session = Connection(
+                    authurl=swift_auth_v1_url, user=swift_user, key=swift_key
+                )
+            self._creds = {
+                "swift_storage_url": self._session.get_auth()[0],
+                "swift_auth_token": self._session.get_auth()[1],
+            }
+
+    @classmethod
+    def hascreds(cls, config):
+        """Determine if the given configuration has proper credentials
+
+        Parameters
+        ----------
+        cls : class
+            A Session class.
+        config : dict
+            GDAL configuration as a dict.
+
+        Returns
+        -------
+        bool
+
+        """
+        return {"SWIFT_STORAGE_URL", "SWIFT_AUTH_TOKEN"}.issubset(config.keys())
+
+    @property
+    def credentials(self):
+        """The session credentials as a dict"""
+        return self._creds
+
+    def get_credential_options(self):
+        """Get credentials as GDAL configuration options
+
+        Returns
+        -------
+        dict
+
+        """
+        return {k.upper(): v for k, v in self.credentials.items()}
+
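As the repeated get_credential_options bodies above show, each session type hands its credentials to GDAL simply by upper-casing the credential keys. A minimal illustration with placeholder values, assuming this module is importable:

# Placeholder credentials, for illustration only.
from fiona.session import SwiftSession

opts = SwiftSession(
    swift_storage_url="https://swift.example.com/v1/AUTH_test",
    swift_auth_token="secret-token",
).get_credential_options()
# opts == {'SWIFT_STORAGE_URL': 'https://swift.example.com/v1/AUTH_test',
#          'SWIFT_AUTH_TOKEN': 'secret-token'}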
+
+class AzureSession(Session):
+    """Configures access to secured resources stored in Microsoft Azure Blob Storage."""
+
+    def __init__(
+        self,
+        azure_storage_connection_string=None,
+        azure_storage_account=None,
+        azure_storage_access_key=None,
+        azure_unsigned=False,
+    ):
+        """Create new Microsoft Azure Blob Storage session
+
+        Parameters
+        ----------
+        azure_storage_connection_string: string
+            A connection string that contains both an account name and a secret key.
+        azure_storage_account: string
+            An account name
+        azure_storage_access_key: string
+            A secret key
+        azure_unsigned : bool, optional (default: False)
+            If True, requests will be unsigned.
+
+        """
+        self.unsigned = bool(os.getenv("AZURE_NO_SIGN_REQUEST", azure_unsigned))
+        self.storage_account = os.getenv("AZURE_STORAGE_ACCOUNT", azure_storage_account)
+
+        if azure_storage_connection_string:
+            self._creds = {
+                "azure_storage_connection_string": azure_storage_connection_string
+            }
+        elif not self.unsigned:
+            self._creds = {
+                "azure_storage_account": self.storage_account,
+                "azure_storage_access_key": azure_storage_access_key,
+            }
+        else:
+            self._creds = {"azure_storage_account": self.storage_account}
+
+    @classmethod
+    def hascreds(cls, config):
+        """Determine if the given configuration has proper credentials
+
+        Parameters
+        ----------
+        cls : class
+            A Session class.
+        config : dict
+            GDAL configuration as a dict.
+
+        Returns
+        -------
+        bool
+
+        """
+        return (
+            "AZURE_STORAGE_CONNECTION_STRING" in config
+            or {"AZURE_STORAGE_ACCOUNT", "AZURE_STORAGE_ACCESS_KEY"}.issubset(
+                config.keys()
+            )
+            or {"AZURE_STORAGE_ACCOUNT", "AZURE_NO_SIGN_REQUEST"}.issubset(
+                config.keys()
+            )
+        )
+
+    @property
+    def credentials(self):
+        """The session credentials as a dict"""
+        return self._creds
+
+    def get_credential_options(self):
+        """Get credentials as GDAL configuration options
+
+        Returns
+        -------
+        dict
+
+        """
+        if self.unsigned:
+            return {
+                "AZURE_NO_SIGN_REQUEST": "YES",
+                "AZURE_STORAGE_ACCOUNT": self.storage_account,
+            }
+        else:
+            return {k.upper(): v for k, v in self.credentials.items()}
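Before the diff moves on to transform.py, a minimal sketch of how these session classes are typically used when reading remote data; the bucket and layer names are placeholders, boto3 must be installed for AWSSession, and fiona.Env is assumed to accept a session keyword as its rasterio counterpart does:

import fiona
from fiona.session import AWSSession

# Hypothetical dataset path; "example-bucket" and "roads.gpkg" are placeholders.
session = AWSSession(region_name="us-east-1")
with fiona.Env(session=session):
    with fiona.open("s3://example-bucket/roads.gpkg") as src:
        print(src.profile)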
diff --git a/.venv/lib/python3.12/site-packages/fiona/transform.py b/.venv/lib/python3.12/site-packages/fiona/transform.py
new file mode 100644
index 00000000..90a651e8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/fiona/transform.py
@@ -0,0 +1,124 @@
+"""Coordinate and geometry warping and reprojection"""
+
+from warnings import warn
+
+from fiona._transform import _transform, _transform_geom
+from fiona.compat import DICT_TYPES
+from fiona.errors import FionaDeprecationWarning
+from fiona.model import decode_object, Geometry
+
+
+def transform(src_crs, dst_crs, xs, ys):
+    """Transform coordinates from one reference system to another.
+
+    Parameters
+    ----------
+    src_crs: str or dict
+        A string like 'EPSG:4326' or a dict of proj4 parameters like
+        {'proj': 'lcc', 'lat_0': 18.0, 'lat_1': 18.0, 'lon_0': -77.0}
+        representing the coordinate reference system on the "source"
+        or "from" side of the transformation.
+    dst_crs: str or dict
+        A string or dict representing the coordinate reference system
+        on the "destination" or "to" side of the transformation.
+    xs: sequence of float
+        A list or tuple of x coordinate values. Must have the same
+        length as the ``ys`` parameter.
+    ys: sequence of float
+        A list or tuple of y coordinate values. Must have the same
+        length as the ``xs`` parameter.
+
+    Returns
+    -------
+    xp, yp: list of float
+        A pair of transformed coordinate sequences. The elements of
+        ``xp`` and ``yp`` correspond exactly to the elements of the
+        ``xs`` and ``ys`` input parameters.
+
+    Examples
+    --------
+
+    >>> transform('EPSG:4326', 'EPSG:26953', [-105.0], [40.0])
+    ([957097.0952383667], [378940.8419189212])
+
+    """
+    # Function is implemented in the _transform C extension module.
+    return _transform(src_crs, dst_crs, xs, ys)
+
+
+def transform_geom(
+    src_crs,
+    dst_crs,
+    geom,
+    antimeridian_cutting=False,
+    antimeridian_offset=10.0,
+    precision=-1,
+):
+    """Transform a geometry obj from one reference system to another.
+
+    Parameters
+    ----------
+    src_crs: str or dict
+        A string like 'EPSG:4326' or a dict of proj4 parameters like
+        {'proj': 'lcc', 'lat_0': 18.0, 'lat_1': 18.0, 'lon_0': -77.0}
+        representing the coordinate reference system on the "source"
+        or "from" side of the transformation.
+    dst_crs: str or dict
+        A string or dict representing the coordinate reference system
+        on the "destination" or "to" side of the transformation.
+    geom: obj
+        A GeoJSON-like geometry object with 'type' and 'coordinates'
+        members or an iterable of GeoJSON-like geometry objects.
+    antimeridian_cutting: bool, optional
+        ``True`` to cut output geometries in two at the antimeridian,
+        the default is ``False``.
+    antimeridian_offset: float, optional
+        A distance in decimal degrees from the antimeridian, outside of
+        which geometries will not be cut. 
+ precision: int, optional + Round geometry coordinates to this number of decimal places. + This parameter is deprecated and will be removed in 2.0. + + Returns + ------- + obj + A new GeoJSON-like geometry (or a list of GeoJSON-like geometries + if an iterable was given as input) with transformed coordinates. Note + that if the output is at the antimeridian, it may be cut and + of a different geometry ``type`` than the input, e.g., a + polygon input may result in multi-polygon output. + + Examples + -------- + + >>> transform_geom( + ... 'EPSG:4326', 'EPSG:26953', + ... {'type': 'Point', 'coordinates': [-105.0, 40.0]}) + {'type': 'Point', 'coordinates': (957097.0952383667, 378940.8419189212)} + + """ + if precision >= 0: + warn( + "The precision keyword argument is deprecated and will be removed in 2.0", + FionaDeprecationWarning, + ) + + # Function is implemented in the _transform C extension module. + if isinstance(geom, (Geometry,) + DICT_TYPES): + return _transform_geom( + src_crs, + dst_crs, + decode_object(geom), + antimeridian_cutting, + antimeridian_offset, + precision, + ) + else: + return _transform_geom( + src_crs, + dst_crs, + (decode_object(g) for g in geom), + antimeridian_cutting, + antimeridian_offset, + precision, + ) diff --git a/.venv/lib/python3.12/site-packages/fiona/vfs.py b/.venv/lib/python3.12/site-packages/fiona/vfs.py new file mode 100644 index 00000000..96c07421 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fiona/vfs.py @@ -0,0 +1,87 @@ +"""Implementation of Apache VFS schemes and URLs.""" + +import sys +import re +from urllib.parse import urlparse + + +# Supported URI schemes and their mapping to GDAL's VSI suffix. +# TODO: extend for other cloud platforms. +SCHEMES = { + 'ftp': 'curl', + 'gzip': 'gzip', + 'http': 'curl', + 'https': 'curl', + 's3': 's3', + 'tar': 'tar', + 'zip': 'zip', + 'gs': 'gs', +} + +CURLSCHEMES = {k for k, v in SCHEMES.items() if v == 'curl'} + +# TODO: extend for other cloud platforms. +REMOTESCHEMES = {k for k, v in SCHEMES.items() if v in ('curl', 's3', 'gs')} + + +def valid_vsi(vsi): + """Ensures all parts of our vsi path are valid schemes.""" + return all(p in SCHEMES for p in vsi.split('+')) + + +def is_remote(scheme): + if scheme is None: + return False + return any(p in REMOTESCHEMES for p in scheme.split('+')) + + +def vsi_path(path, vsi=None, archive=None): + # If a VSI and archive file are specified, we convert the path to + # an OGR VSI path (see cpl_vsi.h). + if vsi: + prefix = '/'.join(f'vsi{SCHEMES[p]}' for p in vsi.split('+')) + if archive: + result = f'/{prefix}/{archive}{path}' + else: + result = f'/{prefix}/{path}' + else: + result = path + + return result + + +def parse_paths(uri, vfs=None): + """Parse a URI or Apache VFS URL into its parts + + Returns: tuple + (path, scheme, archive) + """ + archive = scheme = None + path = uri + # Windows drive letters (e.g. 
"C:\") confuse `urlparse` as they look like + # URL schemes + if sys.platform == "win32" and re.match("^[a-zA-Z]\\:", path): + return path, None, None + if vfs: + parts = urlparse(vfs) + scheme = parts.scheme + archive = parts.path + if parts.netloc and parts.netloc != 'localhost': + archive = parts.netloc + archive + else: + parts = urlparse(path) + scheme = parts.scheme + path = parts.path + if parts.netloc and parts.netloc != 'localhost': + if scheme.split("+")[-1] in CURLSCHEMES: + # We need to deal with cases such as zip+https://server.com/data.zip + path = f"{scheme.split('+')[-1]}://{parts.netloc}{path}" + else: + path = parts.netloc + path + if scheme in SCHEMES: + parts = path.split('!') + path = parts.pop() if parts else None + archive = parts.pop() if parts else None + + scheme = None if not scheme else scheme + return path, scheme, archive diff --git a/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/AUTHORS b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/AUTHORS new file mode 100644 index 00000000..42a5c227 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/AUTHORS @@ -0,0 +1,51 @@ +Original Authors +---------------- +* Armin Rigo +* Christian Tismer + +Contributors +------------ +* Al Stone +* Alexander Schmidt +* Alexey Borzenkov +* Andreas Schwab +* Armin Ronacher +* Bin Wang +* Bob Ippolito +* ChangBo Guo +* Christoph Gohlke +* Denis Bilenko +* Dirk Mueller +* Donovan Preston +* Fantix King +* Floris Bruynooghe +* Fredrik Fornwall +* Gerd Woetzel +* Giel van Schijndel +* Gökhan Karabulut +* Gustavo Niemeyer +* Guy Rozendorn +* Hye-Shik Chang +* Jared Kuolt +* Jason Madden +* Josh Snyder +* Kyle Ambroff +* Laszlo Boszormenyi +* Mao Han +* Marc Abramowitz +* Marc Schlaich +* Marcin Bachry +* Matt Madison +* Matt Turner +* Michael Ellerman +* Michael Matz +* Ralf Schmitt +* Robie Basak +* Ronny Pfannschmidt +* Samual M. Rushing +* Tony Bowles +* Tony Breeds +* Trevor Bowen +* Tulio Magno Quites Machado Filho +* Ulrich Weigand +* Victor Stinner diff --git a/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/LICENSE b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/LICENSE new file mode 100644 index 00000000..b73a4a10 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/LICENSE @@ -0,0 +1,30 @@ +The following files are derived from Stackless Python and are subject to the +same license as Stackless Python: + + src/greenlet/slp_platformselect.h + files in src/greenlet/platform/ directory + +See LICENSE.PSF and http://www.stackless.com/ for details. 
+ +Unless otherwise noted, the files in greenlet have been released under the +following MIT license: + +Copyright (c) Armin Rigo, Christian Tismer and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/LICENSE.PSF b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/LICENSE.PSF new file mode 100644 index 00000000..d3b509a2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/LICENSE.PSF @@ -0,0 +1,47 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011 Python Software Foundation; All Rights Reserved" are retained in Python +alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. 
Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. diff --git a/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/METADATA b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/METADATA new file mode 100644 index 00000000..15294101 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/METADATA @@ -0,0 +1,103 @@ +Metadata-Version: 2.1 +Name: greenlet +Version: 3.1.1 +Summary: Lightweight in-process concurrent programming +Home-page: https://greenlet.readthedocs.io/ +Author: Alexey Borzenkov +Author-email: snaury@gmail.com +Maintainer: Jason Madden +Maintainer-email: jason@seecoresoftware.com +License: MIT License +Project-URL: Bug Tracker, https://github.com/python-greenlet/greenlet/issues +Project-URL: Source Code, https://github.com/python-greenlet/greenlet/ +Project-URL: Documentation, https://greenlet.readthedocs.io/ +Keywords: greenlet coroutine concurrency threads cooperative +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Natural Language :: English +Classifier: Programming Language :: C +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Operating System :: OS Independent +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: LICENSE.PSF +License-File: AUTHORS +Provides-Extra: docs +Requires-Dist: Sphinx ; extra == 'docs' +Requires-Dist: furo ; extra == 'docs' +Provides-Extra: test +Requires-Dist: objgraph ; extra == 'test' +Requires-Dist: psutil ; extra == 'test' + +.. This file is included into docs/history.rst + + +Greenlets are lightweight coroutines for in-process concurrent +programming. + +The "greenlet" package is a spin-off of `Stackless`_, a version of +CPython that supports micro-threads called "tasklets". Tasklets run +pseudo-concurrently (typically in a single or a few OS-level threads) +and are synchronized with data exchanges on "channels". + +A "greenlet", on the other hand, is a still more primitive notion of +micro-thread with no implicit scheduling; coroutines, in other words. +This is useful when you want to control exactly when your code runs. +You can build custom scheduled micro-threads on top of greenlet; +however, it seems that greenlets are useful on their own as a way to +make advanced control flow structures. For example, we can recreate +generators; the difference with Python's own generators is that our +generators can call nested functions and the nested functions can +yield values too. 
(Additionally, you don't need a "yield" keyword. See +the example in `test_generator.py +`_). + +Greenlets are provided as a C extension module for the regular unmodified +interpreter. + +.. _`Stackless`: http://www.stackless.com + + +Who is using Greenlet? +====================== + +There are several libraries that use Greenlet as a more flexible +alternative to Python's built in coroutine support: + + - `Concurrence`_ + - `Eventlet`_ + - `Gevent`_ + +.. _Concurrence: http://opensource.hyves.org/concurrence/ +.. _Eventlet: http://eventlet.net/ +.. _Gevent: http://www.gevent.org/ + +Getting Greenlet +================ + +The easiest way to get Greenlet is to install it with pip:: + + pip install greenlet + + +Source code archives and binary distributions are available on the +python package index at https://pypi.org/project/greenlet + +The source code repository is hosted on github: +https://github.com/python-greenlet/greenlet + +Documentation is available on readthedocs.org: +https://greenlet.readthedocs.io diff --git a/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/RECORD b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/RECORD new file mode 100644 index 00000000..e869da88 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/RECORD @@ -0,0 +1,122 @@ +../../../include/site/python3.12/greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755 +greenlet-3.1.1.dist-info/AUTHORS,sha256=swW28t2knVRxRkaEQNZtO7MP9Sgnompb7B6cNgJM8Gk,849 +greenlet-3.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +greenlet-3.1.1.dist-info/LICENSE,sha256=dpgx1uXfrywggC-sz_H6-0wgJd2PYlPfpH_K1Z1NCXk,1434 +greenlet-3.1.1.dist-info/LICENSE.PSF,sha256=5f88I8EQ5JTNfXNsEP2W1GJFe6_soxCEDbZScpjH1Gs,2424 +greenlet-3.1.1.dist-info/METADATA,sha256=MoOb0T5ZdGbCymjJygO-CkVIAaeI5KcuwE_JPpH1hp4,3830 +greenlet-3.1.1.dist-info/RECORD,, +greenlet-3.1.1.dist-info/WHEEL,sha256=kbc9CtIb3U5F4nd4oqzL_-IwpT6vqgiz5ZydaTuQe_0,152 +greenlet-3.1.1.dist-info/top_level.txt,sha256=YSnRsCRoO61JGlP57o8iKL6rdLWDWuiyKD8ekpWUsDc,9 +greenlet/CObjects.cpp,sha256=OPej1bWBgc4sRrTRQ2aFFML9pzDYKlKhlJSjsI0X_eU,3508 +greenlet/PyGreenlet.cpp,sha256=ogWsQ5VhSdItWRLLpWOgSuqYuM3QwQ4cVCxOQIgHx6E,23441 +greenlet/PyGreenlet.hpp,sha256=2ZQlOxYNoy7QwD7mppFoOXe_At56NIsJ0eNsE_hoSsw,1463 +greenlet/PyGreenletUnswitchable.cpp,sha256=PQE0fSZa_IOyUM44IESHkJoD2KtGW3dkhkmZSYY3WHs,4375 +greenlet/PyModule.cpp,sha256=J2TH06dGcNEarioS6NbWXkdME8hJY05XVbdqLrfO5w4,8587 +greenlet/TBrokenGreenlet.cpp,sha256=smN26uC7ahAbNYiS10rtWPjCeTG4jevM8siA2sjJiXg,1021 +greenlet/TExceptionState.cpp,sha256=U7Ctw9fBdNraS0d174MoQW7bN-ae209Ta0JuiKpcpVI,1359 +greenlet/TGreenlet.cpp,sha256=HGYGKpmKYqQ842tASW-QaaV8wua4a5XV_quYKPDsV_Y,25731 +greenlet/TGreenlet.hpp,sha256=mMHcb_rSuozdDiGJjX3GgyYkWgVM4kuO1UgbUP84BlU,27869 +greenlet/TGreenletGlobals.cpp,sha256=YyEmDjKf1g32bsL-unIUScFLnnA1fzLWf2gOMd-D0Zw,3264 +greenlet/TMainGreenlet.cpp,sha256=fvgb8HHB-FVTPEKjR1s_ifCZSpp5D5YQByik0CnIABg,3276 +greenlet/TPythonState.cpp,sha256=FxRdi76lTGXaQKWwkq82VaCfIRdF2Z-fh-TlRTMjYqg,15359 +greenlet/TStackState.cpp,sha256=V444I8Jj9DhQz-9leVW_9dtiSRjaE1NMlgDG02Xxq-Y,7381 +greenlet/TThreadState.hpp,sha256=2Jgg7DtGggMYR_x3CLAvAFf1mIdIDtQvSSItcdmX4ZQ,19131 +greenlet/TThreadStateCreator.hpp,sha256=uYTexDWooXSSgUc5uh-Mhm5BQi3-kR6CqpizvNynBFQ,2610 +greenlet/TThreadStateDestroy.cpp,sha256=wt7lQwLI0mi_JtnZB_jB4bUmfCa5b6nQhA7XOmnI1yk,9568 +greenlet/TUserGreenlet.cpp,sha256=uemg0lwKXtYB0yzmvyYdIIAsKnNkifXM1OJ2OlrFP1A,23553 
+greenlet/__init__.py,sha256=OOmvT6_vn_SekdPzkj4qm6hjfikXMmdNZYDmGTOaRNo,1723 +greenlet/__pycache__/__init__.cpython-312.pyc,, +greenlet/_greenlet.cpython-312-x86_64-linux-gnu.so,sha256=qJ2uNRkRvJ2QWGGj33mM0fMxULTqwARAX2w9a20m2VE,1429296 +greenlet/greenlet.cpp,sha256=WdItb1yWL9WNsTqJNf0Iw8ZwDHD49pkDP0rIRGBg2pw,10996 +greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755 +greenlet/greenlet_allocator.hpp,sha256=kxyWW4Qdwlrc7ufgdb5vd6Y7jhauQ699Kod0mqiO1iM,1582 +greenlet/greenlet_compiler_compat.hpp,sha256=nRxpLN9iNbnLVyFDeVmOwyeeNm6scQrOed1l7JQYMCM,4346 +greenlet/greenlet_cpython_add_pending.hpp,sha256=apAwIhGlgYrnYn03zWL6Sxy68kltDeb1e0QupZfb3DQ,6043 +greenlet/greenlet_cpython_compat.hpp,sha256=L_jig3dm2bsJWRazrhlokma2NfnwixoQ0cydshh6ce4,3964 +greenlet/greenlet_exceptions.hpp,sha256=06Bx81DtVaJTa6RtiMcV141b-XHv4ppEgVItkblcLWY,4503 +greenlet/greenlet_internal.hpp,sha256=Ajc-_09W4xWzm9XfyXHAeQAFUgKGKsnJwYsTCoNy3ns,2709 +greenlet/greenlet_refs.hpp,sha256=OnbA91yZf3QHH6-eJccvoNDAaN-pQBMMrclFU1Ot3J4,34436 +greenlet/greenlet_slp_switch.hpp,sha256=kM1QHA2iV-gH4cFyN6lfIagHQxvJZjWOVJdIxRE3TlQ,3198 +greenlet/greenlet_thread_support.hpp,sha256=XUJ6ljWjf9OYyuOILiz8e_yHvT3fbaUiHdhiPNQUV4s,867 +greenlet/platform/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +greenlet/platform/__pycache__/__init__.cpython-312.pyc,, +greenlet/platform/setup_switch_x64_masm.cmd,sha256=ZpClUJeU0ujEPSTWNSepP0W2f9XiYQKA8QKSoVou8EU,143 +greenlet/platform/switch_aarch64_gcc.h,sha256=GKC0yWNXnbK2X--X6aguRCMj2Tg7hDU1Zkl3RljDvC8,4307 +greenlet/platform/switch_alpha_unix.h,sha256=Z-SvF8JQV3oxWT8JRbL9RFu4gRFxPdJ7cviM8YayMmw,671 +greenlet/platform/switch_amd64_unix.h,sha256=EcSFCBlodEBhqhKjcJqY_5Dn_jn7pKpkJlOvp7gFXLI,2748 +greenlet/platform/switch_arm32_gcc.h,sha256=Z3KkHszdgq6uU4YN3BxvKMG2AdDnovwCCNrqGWZ1Lyo,2479 +greenlet/platform/switch_arm32_ios.h,sha256=mm5_R9aXB92hyxzFRwB71M60H6AlvHjrpTrc72Pz3l8,1892 +greenlet/platform/switch_arm64_masm.asm,sha256=4kpTtfy7rfcr8j1CpJLAK21EtZpGDAJXWRU68HEy5A8,1245 +greenlet/platform/switch_arm64_masm.obj,sha256=DmLnIB_icoEHAz1naue_pJPTZgR9ElM7-Nmztr-o9_U,746 +greenlet/platform/switch_arm64_msvc.h,sha256=RqK5MHLmXI3Q-FQ7tm32KWnbDNZKnkJdq8CR89cz640,398 +greenlet/platform/switch_csky_gcc.h,sha256=kDikyiPpewP71KoBZQO_MukDTXTXBiC7x-hF0_2DL0w,1331 +greenlet/platform/switch_loongarch64_linux.h,sha256=7M-Dhc4Q8tRbJCJhalDLwU6S9Mx8MjmN1RbTDgIvQTM,779 +greenlet/platform/switch_m68k_gcc.h,sha256=VSa6NpZhvyyvF-Q58CTIWSpEDo4FKygOyTz00whctlw,928 +greenlet/platform/switch_mips_unix.h,sha256=E0tYsqc5anDY1BhenU1l8DW-nVHC_BElzLgJw3TGtPk,1426 +greenlet/platform/switch_ppc64_aix.h,sha256=_BL0iyRr3ZA5iPlr3uk9SJ5sNRWGYLrXcZ5z-CE9anE,3860 +greenlet/platform/switch_ppc64_linux.h,sha256=0rriT5XyxPb0GqsSSn_bP9iQsnjsPbBmu0yqo5goSyQ,3815 +greenlet/platform/switch_ppc_aix.h,sha256=pHA4slEjUFP3J3SYm1TAlNPhgb2G_PAtax5cO8BEe1A,2941 +greenlet/platform/switch_ppc_linux.h,sha256=YwrlKUzxlXuiKMQqr6MFAV1bPzWnmvk6X1AqJZEpOWU,2759 +greenlet/platform/switch_ppc_macosx.h,sha256=Z6KN_ud0n6nC3ltJrNz2qtvER6vnRAVRNH9mdIDpMxY,2624 +greenlet/platform/switch_ppc_unix.h,sha256=-ZG7MSSPEA5N4qO9PQChtyEJ-Fm6qInhyZm_ZBHTtMg,2652 +greenlet/platform/switch_riscv_unix.h,sha256=Xg0wBen8Je21LWzFtLNLvUUYq6p9n_WY7AUQbiBVyyk,865 +greenlet/platform/switch_s390_unix.h,sha256=RRlGu957ybmq95qNNY4Qw1mcaoT3eBnW5KbVwu48KX8,2763 +greenlet/platform/switch_sh_gcc.h,sha256=mcRJBTu-2UBf4kZtX601qofwuDuy-Y-hnxJtrcaB7do,901 +greenlet/platform/switch_sparc_sun_gcc.h,sha256=xZish9GsMHBienUbUMsX1-ZZ-as7hs36sVhYIE3ew8Y,2797 
+greenlet/platform/switch_x32_unix.h,sha256=nM98PKtzTWc1lcM7TRMUZJzskVdR1C69U1UqZRWX0GE,1509 +greenlet/platform/switch_x64_masm.asm,sha256=nu6n2sWyXuXfpPx40d9YmLfHXUc1sHgeTvX1kUzuvEM,1841 +greenlet/platform/switch_x64_masm.obj,sha256=GNtTNxYdo7idFUYsQv-mrXWgyT5EJ93-9q90lN6svtQ,1078 +greenlet/platform/switch_x64_msvc.h,sha256=LIeasyKo_vHzspdMzMHbosRhrBfKI4BkQOh4qcTHyJw,1805 +greenlet/platform/switch_x86_msvc.h,sha256=TtGOwinbFfnn6clxMNkCz8i6OmgB6kVRrShoF5iT9to,12838 +greenlet/platform/switch_x86_unix.h,sha256=VplW9H0FF0cZHw1DhJdIUs5q6YLS4cwb2nYwjF83R1s,3059 +greenlet/slp_platformselect.h,sha256=s-U-BrZ3qwwfI-6W9zWw2rb404OksZYbxYC2w5kSMXM,3841 +greenlet/tests/__init__.py,sha256=cj2-qpMXnlVRLbMLX-rPNNMVJ42ZssdxHd84NSQ3YXw,9246 +greenlet/tests/__pycache__/__init__.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_cpp_exception.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_slp_switch.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-312.pyc,, +greenlet/tests/__pycache__/leakcheck.cpython-312.pyc,, +greenlet/tests/__pycache__/test_contextvars.cpython-312.pyc,, +greenlet/tests/__pycache__/test_cpp.cpython-312.pyc,, +greenlet/tests/__pycache__/test_extension_interface.cpython-312.pyc,, +greenlet/tests/__pycache__/test_gc.cpython-312.pyc,, +greenlet/tests/__pycache__/test_generator.cpython-312.pyc,, +greenlet/tests/__pycache__/test_generator_nested.cpython-312.pyc,, +greenlet/tests/__pycache__/test_greenlet.cpython-312.pyc,, +greenlet/tests/__pycache__/test_greenlet_trash.cpython-312.pyc,, +greenlet/tests/__pycache__/test_leaks.cpython-312.pyc,, +greenlet/tests/__pycache__/test_stack_saved.cpython-312.pyc,, +greenlet/tests/__pycache__/test_throw.cpython-312.pyc,, +greenlet/tests/__pycache__/test_tracing.cpython-312.pyc,, +greenlet/tests/__pycache__/test_version.cpython-312.pyc,, +greenlet/tests/__pycache__/test_weakref.cpython-312.pyc,, +greenlet/tests/_test_extension.c,sha256=vkeGA-6oeJcGILsD7oIrT1qZop2GaTOHXiNT7mcSl-0,5773 +greenlet/tests/_test_extension.cpython-312-x86_64-linux-gnu.so,sha256=OcUSkiCS4G35zlL3jz5K6Xsvurgf6lyAlv2J_EpfXns,37168 +greenlet/tests/_test_extension_cpp.cpp,sha256=e0kVnaB8CCaEhE9yHtNyfqTjevsPDKKx-zgxk7PPK48,6565 +greenlet/tests/_test_extension_cpp.cpython-312-x86_64-linux-gnu.so,sha256=WNTdn6rjiT8hKKgi5viliSiFim1uKnTC6ENTIFZp4m8,57800 +greenlet/tests/fail_clearing_run_switches.py,sha256=o433oA_nUCtOPaMEGc8VEhZIKa71imVHXFw7TsXaP8M,1263 +greenlet/tests/fail_cpp_exception.py,sha256=o_ZbipWikok8Bjc-vjiQvcb5FHh2nVW-McGKMLcMzh0,985 +greenlet/tests/fail_initialstub_already_started.py,sha256=txENn5IyzGx2p-XR1XB7qXmC8JX_4mKDEA8kYBXUQKc,1961 +greenlet/tests/fail_slp_switch.py,sha256=rJBZcZfTWR3e2ERQtPAud6YKShiDsP84PmwOJbp4ey0,524 +greenlet/tests/fail_switch_three_greenlets.py,sha256=zSitV7rkNnaoHYVzAGGLnxz-yPtohXJJzaE8ehFDQ0M,956 +greenlet/tests/fail_switch_three_greenlets2.py,sha256=FPJensn2EJxoropl03JSTVP3kgP33k04h6aDWWozrOk,1285 +greenlet/tests/fail_switch_two_greenlets.py,sha256=1CaI8s3504VbbF1vj1uBYuy-zxBHVzHPIAd1LIc8ONg,817 +greenlet/tests/leakcheck.py,sha256=inbfM7_oVzd8jIKGxCgo4JqpFZaDAnWPkSULJ8vIE1s,11964 +greenlet/tests/test_contextvars.py,sha256=0n5pR_lbpAppc5wFfK0e1SwYLM-fsSFp72B5_ArLPGE,10348 
+greenlet/tests/test_cpp.py,sha256=hpxhFAdKJTpAVZP8CBGs1ZcrKdscI9BaDZk4btkI5d4,2736 +greenlet/tests/test_extension_interface.py,sha256=eJ3cwLacdK2WbsrC-4DgeyHdwLRcG4zx7rrkRtqSzC4,3829 +greenlet/tests/test_gc.py,sha256=PCOaRpIyjNnNlDogGL3FZU_lrdXuM-pv1rxeE5TP5mc,2923 +greenlet/tests/test_generator.py,sha256=tONXiTf98VGm347o1b-810daPiwdla5cbpFg6QI1R1g,1240 +greenlet/tests/test_generator_nested.py,sha256=7v4HOYrf1XZP39dk5IUMubdZ8yc3ynwZcqj9GUJyMSA,3718 +greenlet/tests/test_greenlet.py,sha256=zoAy56MtEyz5P93Iknpt2pPjNO3ePYrgM7SDE8Cw_uI,45990 +greenlet/tests/test_greenlet_trash.py,sha256=n2dBlQfOoEO1ODatFi8QdhboH3fB86YtqzcYMYOXxbw,7947 +greenlet/tests/test_leaks.py,sha256=wskLqCAvqZ3qTZkam_wXzd-E5zelUjlXS5Ss8KshtZY,17465 +greenlet/tests/test_stack_saved.py,sha256=eyzqNY2VCGuGlxhT_In6TvZ6Okb0AXFZVyBEnK1jDwA,446 +greenlet/tests/test_throw.py,sha256=u2TQ_WvvCd6N6JdXWIxVEcXkKu5fepDlz9dktYdmtng,3712 +greenlet/tests/test_tracing.py,sha256=VlwzMU0C1noospZhuUMyB7MHw200emIvGCN_6G2p2ZU,8250 +greenlet/tests/test_version.py,sha256=O9DpAITsOFgiRcjd4odQ7ejmwx_N9Q1zQENVcbtFHIc,1339 +greenlet/tests/test_weakref.py,sha256=F8M23btEF87bIbpptLNBORosbQqNZGiYeKMqYjWrsak,883 diff --git a/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/WHEEL new file mode 100644 index 00000000..a23e9fa7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.1.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_24_x86_64 +Tag: cp312-cp312-manylinux_2_28_x86_64 + diff --git a/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/top_level.txt new file mode 100644 index 00000000..46725be4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet-3.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +greenlet diff --git a/.venv/lib/python3.12/site-packages/greenlet/CObjects.cpp b/.venv/lib/python3.12/site-packages/greenlet/CObjects.cpp new file mode 100644 index 00000000..c135995b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/CObjects.cpp @@ -0,0 +1,157 @@ +#ifndef COBJECTS_CPP +#define COBJECTS_CPP +/***************************************************************************** + * C interface + * + * These are exported using the CObject API + */ +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wunused-function" +#endif + +#include "greenlet_exceptions.hpp" + +#include "greenlet_internal.hpp" +#include "greenlet_refs.hpp" + + +#include "TThreadStateDestroy.cpp" + +#include "PyGreenlet.hpp" + +using greenlet::PyErrOccurred; +using greenlet::Require; + + + +extern "C" { +static PyGreenlet* +PyGreenlet_GetCurrent(void) +{ + return GET_THREAD_STATE().state().get_current().relinquish_ownership(); +} + +static int +PyGreenlet_SetParent(PyGreenlet* g, PyGreenlet* nparent) +{ + return green_setparent((PyGreenlet*)g, (PyObject*)nparent, NULL); +} + +static PyGreenlet* +PyGreenlet_New(PyObject* run, PyGreenlet* parent) +{ + using greenlet::refs::NewDictReference; + // In the past, we didn't use green_new and green_init, but that + // was a maintenance issue because we duplicated code. This way is + // much safer, but slightly slower. If that's a problem, we could + // refactor green_init to separate argument parsing from initialization. 
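+    // The net effect for a C extension caller is roughly the following
+    // (a hedged sketch; ``run_callable`` is a stand-in for any Python
+    // callable, and it assumes the exported API table has already been
+    // populated, e.g. via the PyGreenlet_Import() macro):
+    //
+    //     PyGreenlet* g = PyGreenlet_New(run_callable, NULL);
+    //     if (g) {
+    //         PyObject* r = PyGreenlet_Switch(g, NULL, NULL);
+    //         Py_XDECREF(r);
+    //         Py_DECREF(g);
+    //     }
+    //
+    // which mirrors ``greenlet.greenlet(run_callable).switch()``.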
+ OwnedGreenlet g = OwnedGreenlet::consuming(green_new(&PyGreenlet_Type, nullptr, nullptr)); + if (!g) { + return NULL; + } + + try { + NewDictReference kwargs; + if (run) { + kwargs.SetItem(mod_globs->str_run, run); + } + if (parent) { + kwargs.SetItem("parent", (PyObject*)parent); + } + + Require(green_init(g.borrow(), mod_globs->empty_tuple, kwargs.borrow())); + } + catch (const PyErrOccurred&) { + return nullptr; + } + + return g.relinquish_ownership(); +} + +static PyObject* +PyGreenlet_Switch(PyGreenlet* self, PyObject* args, PyObject* kwargs) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return NULL; + } + + if (args == NULL) { + args = mod_globs->empty_tuple; + } + + if (kwargs == NULL || !PyDict_Check(kwargs)) { + kwargs = NULL; + } + + return green_switch(self, args, kwargs); +} + +static PyObject* +PyGreenlet_Throw(PyGreenlet* self, PyObject* typ, PyObject* val, PyObject* tb) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return nullptr; + } + try { + PyErrPieces err_pieces(typ, val, tb); + return internal_green_throw(self, err_pieces).relinquish_ownership(); + } + catch (const PyErrOccurred&) { + return nullptr; + } +} + + + +static int +Extern_PyGreenlet_MAIN(PyGreenlet* self) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return -1; + } + return self->pimpl->main(); +} + +static int +Extern_PyGreenlet_ACTIVE(PyGreenlet* self) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return -1; + } + return self->pimpl->active(); +} + +static int +Extern_PyGreenlet_STARTED(PyGreenlet* self) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return -1; + } + return self->pimpl->started(); +} + +static PyGreenlet* +Extern_PyGreenlet_GET_PARENT(PyGreenlet* self) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return NULL; + } + // This can return NULL even if there is no exception + return self->pimpl->parent().acquire(); +} +} // extern C. + +/** End C API ****************************************************************/ +#ifdef __clang__ +# pragma clang diagnostic pop +#endif + + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/PyGreenlet.cpp b/.venv/lib/python3.12/site-packages/greenlet/PyGreenlet.cpp new file mode 100644 index 00000000..29c0bba0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/PyGreenlet.cpp @@ -0,0 +1,738 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +#ifndef PYGREENLET_CPP +#define PYGREENLET_CPP +/***************** +The Python slot functions for TGreenlet. 
+ */ + + +#define PY_SSIZE_T_CLEAN +#include +#include "structmember.h" // PyMemberDef + +#include "greenlet_internal.hpp" +#include "TThreadStateDestroy.cpp" +#include "TGreenlet.hpp" +// #include "TUserGreenlet.cpp" +// #include "TMainGreenlet.cpp" +// #include "TBrokenGreenlet.cpp" + + +#include "greenlet_refs.hpp" +#include "greenlet_slp_switch.hpp" + +#include "greenlet_thread_support.hpp" +#include "TGreenlet.hpp" + +#include "TGreenletGlobals.cpp" +#include "TThreadStateDestroy.cpp" +#include "PyGreenlet.hpp" +// #include "TGreenlet.cpp" + +// #include "TExceptionState.cpp" +// #include "TPythonState.cpp" +// #include "TStackState.cpp" + +using greenlet::LockGuard; +using greenlet::LockInitError; +using greenlet::PyErrOccurred; +using greenlet::Require; + +using greenlet::g_handle_exit; +using greenlet::single_result; + +using greenlet::Greenlet; +using greenlet::UserGreenlet; +using greenlet::MainGreenlet; +using greenlet::BrokenGreenlet; +using greenlet::ThreadState; +using greenlet::PythonState; + + + +static PyGreenlet* +green_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds)) +{ + PyGreenlet* o = + (PyGreenlet*)PyBaseObject_Type.tp_new(type, mod_globs->empty_tuple, mod_globs->empty_dict); + if (o) { + new UserGreenlet(o, GET_THREAD_STATE().state().borrow_current()); + assert(Py_REFCNT(o) == 1); + } + return o; +} + + +// green_init is used in the tp_init slot. So it's important that +// it can be called directly from CPython. Thus, we don't use +// BorrowedGreenlet and BorrowedObject --- although in theory +// these should be binary layout compatible, that may not be +// guaranteed to be the case (32-bit linux ppc possibly). +static int +green_init(PyGreenlet* self, PyObject* args, PyObject* kwargs) +{ + PyArgParseParam run; + PyArgParseParam nparent; + static const char* kwlist[] = { + "run", + "parent", + NULL + }; + + // recall: The O specifier does NOT increase the reference count. + if (!PyArg_ParseTupleAndKeywords( + args, kwargs, "|OO:green", (char**)kwlist, &run, &nparent)) { + return -1; + } + + if (run) { + if (green_setrun(self, run, NULL)) { + return -1; + } + } + if (nparent && !nparent.is_None()) { + return green_setparent(self, nparent, NULL); + } + return 0; +} + + + +static int +green_traverse(PyGreenlet* self, visitproc visit, void* arg) +{ + // We must only visit referenced objects, i.e. only objects + // Py_INCREF'ed by this greenlet (directly or indirectly): + // + // - stack_prev is not visited: holds previous stack pointer, but it's not + // referenced + // - frames are not visited as we don't strongly reference them; + // alive greenlets are not garbage collected + // anyway. This can be a problem, however, if this greenlet is + // never allowed to finish, and is referenced from the frame: we + // have an uncollectible cycle in that case. Note that the + // frame object itself is also frequently not even tracked by the GC + // starting with Python 3.7 (frames are allocated by the + // interpreter untracked, and only become tracked when their + // evaluation is finished if they have a refcount > 1). All of + // this is to say that we should probably strongly reference + // the frame object. Doing so, while always allowing GC on a + // greenlet, solves several leaks for us. + + Py_VISIT(self->dict); + if (!self->pimpl) { + // Hmm. I have seen this at interpreter shutdown time, + // I think. That's very odd because this doesn't go away until + // we're ``green_dealloc()``, at which point we shouldn't be + // traversed anymore. 
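+        // Nothing of ours is left to visit in that case; report an
+        // empty traversal rather than dereferencing a null pimpl.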
+ return 0; + } + + return self->pimpl->tp_traverse(visit, arg); +} + +static int +green_is_gc(PyObject* _self) +{ + BorrowedGreenlet self(_self); + int result = 0; + /* Main greenlet can be garbage collected since it can only + become unreachable if the underlying thread exited. + Active greenlets --- including those that are suspended --- + cannot be garbage collected, however. + */ + if (self->main() || !self->active()) { + result = 1; + } + // The main greenlet pointer will eventually go away after the thread dies. + if (self->was_running_in_dead_thread()) { + // Our thread is dead! We can never run again. Might as well + // GC us. Note that if a tuple containing only us and other + // immutable objects had been scanned before this, when we + // would have returned 0, the tuple will take itself out of GC + // tracking and never be investigated again. So that could + // result in both us and the tuple leaking due to an + // unreachable/uncollectible reference. The same goes for + // dictionaries. + // + // It's not a great idea to be changing our GC state on the + // fly. + result = 1; + } + return result; +} + + +static int +green_clear(PyGreenlet* self) +{ + /* Greenlet is only cleared if it is about to be collected. + Since active greenlets are not garbage collectable, we can + be sure that, even if they are deallocated during clear, + nothing they reference is in unreachable or finalizers, + so even if it switches we are relatively safe. */ + // XXX: Are we responsible for clearing weakrefs here? + Py_CLEAR(self->dict); + return self->pimpl->tp_clear(); +} + +/** + * Returns 0 on failure (the object was resurrected) or 1 on success. + **/ +static int +_green_dealloc_kill_started_non_main_greenlet(BorrowedGreenlet self) +{ + /* Hacks hacks hacks copied from instance_dealloc() */ + /* Temporarily resurrect the greenlet. */ + assert(self.REFCNT() == 0); + Py_SET_REFCNT(self.borrow(), 1); + /* Save the current exception, if any. */ + PyErrPieces saved_err; + try { + // BY THE TIME WE GET HERE, the state may actually be going + // away + // if we're shutting down the interpreter and freeing thread + // entries, + // this could result in freeing greenlets that were leaked. So + // we can't try to read the state. + self->deallocing_greenlet_in_thread( + self->thread_state() + ? static_cast(GET_THREAD_STATE()) + : nullptr); + } + catch (const PyErrOccurred&) { + PyErr_WriteUnraisable(self.borrow_o()); + /* XXX what else should we do? */ + } + /* Check for no resurrection must be done while we keep + * our internal reference, otherwise PyFile_WriteObject + * causes recursion if using Py_INCREF/Py_DECREF + */ + if (self.REFCNT() == 1 && self->active()) { + /* Not resurrected, but still not dead! + XXX what else should we do? we complain. */ + PyObject* f = PySys_GetObject("stderr"); + Py_INCREF(self.borrow_o()); /* leak! */ + if (f != NULL) { + PyFile_WriteString("GreenletExit did not kill ", f); + PyFile_WriteObject(self.borrow_o(), f, 0); + PyFile_WriteString("\n", f); + } + } + /* Restore the saved exception. */ + saved_err.PyErrRestore(); + /* Undo the temporary resurrection; can't use DECREF here, + * it would cause a recursive call. + */ + assert(self.REFCNT() > 0); + + Py_ssize_t refcnt = self.REFCNT() - 1; + Py_SET_REFCNT(self.borrow_o(), refcnt); + if (refcnt != 0) { + /* Resurrected! 
*/ + _Py_NewReference(self.borrow_o()); + Py_SET_REFCNT(self.borrow_o(), refcnt); + /* Better to use tp_finalizer slot (PEP 442) + * and call ``PyObject_CallFinalizerFromDealloc``, + * but that's only supported in Python 3.4+; see + * Modules/_io/iobase.c for an example. + * + * The following approach is copied from iobase.c in CPython 2.7. + * (along with much of this function in general). Here's their + * comment: + * + * When called from a heap type's dealloc, the type will be + * decref'ed on return (see e.g. subtype_dealloc in typeobject.c). */ + if (PyType_HasFeature(self.TYPE(), Py_TPFLAGS_HEAPTYPE)) { + Py_INCREF(self.TYPE()); + } + + PyObject_GC_Track((PyObject*)self); + + _Py_DEC_REFTOTAL; +#ifdef COUNT_ALLOCS + --Py_TYPE(self)->tp_frees; + --Py_TYPE(self)->tp_allocs; +#endif /* COUNT_ALLOCS */ + return 0; + } + return 1; +} + + +static void +green_dealloc(PyGreenlet* self) +{ + PyObject_GC_UnTrack(self); + BorrowedGreenlet me(self); + if (me->active() + && me->started() + && !me->main()) { + if (!_green_dealloc_kill_started_non_main_greenlet(me)) { + return; + } + } + + if (self->weakreflist != NULL) { + PyObject_ClearWeakRefs((PyObject*)self); + } + Py_CLEAR(self->dict); + + if (self->pimpl) { + // In case deleting this, which frees some memory, + // somehow winds up calling back into us. That's usually a + //bug in our code. + Greenlet* p = self->pimpl; + self->pimpl = nullptr; + delete p; + } + // and finally we're done. self is now invalid. + Py_TYPE(self)->tp_free((PyObject*)self); +} + + + +static OwnedObject +internal_green_throw(BorrowedGreenlet self, PyErrPieces& err_pieces) +{ + PyObject* result = nullptr; + err_pieces.PyErrRestore(); + assert(PyErr_Occurred()); + if (self->started() && !self->active()) { + /* dead greenlet: turn GreenletExit into a regular return */ + result = g_handle_exit(OwnedObject()).relinquish_ownership(); + } + self->args() <<= result; + + return single_result(self->g_switch()); +} + + + +PyDoc_STRVAR( + green_switch_doc, + "switch(*args, **kwargs)\n" + "\n" + "Switch execution to this greenlet.\n" + "\n" + "If this greenlet has never been run, then this greenlet\n" + "will be switched to using the body of ``self.run(*args, **kwargs)``.\n" + "\n" + "If the greenlet is active (has been run, but was switch()'ed\n" + "out before leaving its run function), then this greenlet will\n" + "be resumed and the return value to its switch call will be\n" + "None if no arguments are given, the given argument if one\n" + "argument is given, or the args tuple and keyword args dict if\n" + "multiple arguments are given.\n" + "\n" + "If the greenlet is dead, or is the current greenlet then this\n" + "function will simply return the arguments using the same rules as\n" + "above.\n"); + +static PyObject* +green_switch(PyGreenlet* self, PyObject* args, PyObject* kwargs) +{ + using greenlet::SwitchingArgs; + SwitchingArgs switch_args(OwnedObject::owning(args), OwnedObject::owning(kwargs)); + self->pimpl->may_switch_away(); + self->pimpl->args() <<= switch_args; + + // If we're switching out of a greenlet, and that switch is the + // last thing the greenlet does, the greenlet ought to be able to + // go ahead and die at that point. Currently, someone else must + // manually switch back to the greenlet so that we "fall off the + // end" and can perform cleanup. You'd think we'd be able to + // figure out that this is happening using the frame's ``f_lasti`` + // member, which is supposed to be an index into + // ``frame->f_code->co_code``, the bytecode string. 
However, in + // recent interpreters, ``f_lasti`` tends not to be updated thanks + // to things like the PREDICT() macros in ceval.c. So it doesn't + // really work to do that in many cases. For example, the Python + // code: + // def run(): + // greenlet.getcurrent().parent.switch() + // produces bytecode of len 16, with the actual call to switch() + // being at index 10 (in Python 3.10). However, the reported + // ``f_lasti`` we actually see is...5! (Which happens to be the + // second byte of the CALL_METHOD op for ``getcurrent()``). + + try { + //OwnedObject result = single_result(self->pimpl->g_switch()); + OwnedObject result(single_result(self->pimpl->g_switch())); +#ifndef NDEBUG + // Note that the current greenlet isn't necessarily self. If self + // finished, we went to one of its parents. + assert(!self->pimpl->args()); + + const BorrowedGreenlet& current = GET_THREAD_STATE().state().borrow_current(); + // It's possible it's never been switched to. + assert(!current->args()); +#endif + PyObject* p = result.relinquish_ownership(); + + if (!p && !PyErr_Occurred()) { + // This shouldn't be happening anymore, so the asserts + // are there for debug builds. Non-debug builds + // crash "gracefully" in this case, although there is an + // argument to be made for killing the process in all + // cases --- for this to be the case, our switches + // probably nested in an incorrect way, so the state is + // suspicious. Nothing should be corrupt though, just + // confused at the Python level. Letting this propagate is + // probably good enough. + assert(p || PyErr_Occurred()); + throw PyErrOccurred( + mod_globs->PyExc_GreenletError, + "Greenlet.switch() returned NULL without an exception set." + ); + } + return p; + } + catch(const PyErrOccurred&) { + return nullptr; + } +} + +PyDoc_STRVAR( + green_throw_doc, + "Switches execution to this greenlet, but immediately raises the\n" + "given exception in this greenlet. If no argument is provided, the " + "exception\n" + "defaults to `greenlet.GreenletExit`. The normal exception\n" + "propagation rules apply, as described for `switch`. Note that calling " + "this\n" + "method is almost equivalent to the following::\n" + "\n" + " def raiser():\n" + " raise typ, val, tb\n" + " g_raiser = greenlet(raiser, parent=g)\n" + " g_raiser.switch()\n" + "\n" + "except that this trick does not work for the\n" + "`greenlet.GreenletExit` exception, which would not propagate\n" + "from ``g_raiser`` to ``g``.\n"); + +static PyObject* +green_throw(PyGreenlet* self, PyObject* args) +{ + PyArgParseParam typ(mod_globs->PyExc_GreenletExit); + PyArgParseParam val; + PyArgParseParam tb; + + if (!PyArg_ParseTuple(args, "|OOO:throw", &typ, &val, &tb)) { + return nullptr; + } + + assert(typ.borrow() || val.borrow()); + + self->pimpl->may_switch_away(); + try { + // Both normalizing the error and the actual throw_greenlet + // could throw PyErrOccurred. + PyErrPieces err_pieces(typ.borrow(), val.borrow(), tb.borrow()); + + return internal_green_throw(self, err_pieces).relinquish_ownership(); + } + catch (const PyErrOccurred&) { + return nullptr; + } +} + +static int +green_bool(PyGreenlet* self) +{ + return self->pimpl->active(); +} + +/** + * CAUTION: Allocates memory, may run GC and arbitrary Python code. 
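+ *
+ * (The ``__dict__`` is created lazily: the PyDict_New() call below
+ * allocates, and an allocation can trigger a collection that runs
+ * arbitrary finalizers.)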
+ */ +static PyObject* +green_getdict(PyGreenlet* self, void* UNUSED(context)) +{ + if (self->dict == NULL) { + self->dict = PyDict_New(); + if (self->dict == NULL) { + return NULL; + } + } + Py_INCREF(self->dict); + return self->dict; +} + +static int +green_setdict(PyGreenlet* self, PyObject* val, void* UNUSED(context)) +{ + PyObject* tmp; + + if (val == NULL) { + PyErr_SetString(PyExc_TypeError, "__dict__ may not be deleted"); + return -1; + } + if (!PyDict_Check(val)) { + PyErr_SetString(PyExc_TypeError, "__dict__ must be a dictionary"); + return -1; + } + tmp = self->dict; + Py_INCREF(val); + self->dict = val; + Py_XDECREF(tmp); + return 0; +} + +static bool +_green_not_dead(BorrowedGreenlet self) +{ + // XXX: Where else should we do this? + // Probably on entry to most Python-facing functions? + if (self->was_running_in_dead_thread()) { + self->deactivate_and_free(); + return false; + } + return self->active() || !self->started(); +} + + +static PyObject* +green_getdead(PyGreenlet* self, void* UNUSED(context)) +{ + if (_green_not_dead(self)) { + Py_RETURN_FALSE; + } + else { + Py_RETURN_TRUE; + } +} + +static PyObject* +green_get_stack_saved(PyGreenlet* self, void* UNUSED(context)) +{ + return PyLong_FromSsize_t(self->pimpl->stack_saved()); +} + + +static PyObject* +green_getrun(PyGreenlet* self, void* UNUSED(context)) +{ + try { + OwnedObject result(BorrowedGreenlet(self)->run()); + return result.relinquish_ownership(); + } + catch(const PyErrOccurred&) { + return nullptr; + } +} + + +static int +green_setrun(PyGreenlet* self, PyObject* nrun, void* UNUSED(context)) +{ + try { + BorrowedGreenlet(self)->run(nrun); + return 0; + } + catch(const PyErrOccurred&) { + return -1; + } +} + +static PyObject* +green_getparent(PyGreenlet* self, void* UNUSED(context)) +{ + return BorrowedGreenlet(self)->parent().acquire_or_None(); +} + + +static int +green_setparent(PyGreenlet* self, PyObject* nparent, void* UNUSED(context)) +{ + try { + BorrowedGreenlet(self)->parent(nparent); + } + catch(const PyErrOccurred&) { + return -1; + } + return 0; +} + + +static PyObject* +green_getcontext(const PyGreenlet* self, void* UNUSED(context)) +{ + const Greenlet *const g = self->pimpl; + try { + OwnedObject result(g->context()); + return result.relinquish_ownership(); + } + catch(const PyErrOccurred&) { + return nullptr; + } +} + +static int +green_setcontext(PyGreenlet* self, PyObject* nctx, void* UNUSED(context)) +{ + try { + BorrowedGreenlet(self)->context(nctx); + return 0; + } + catch(const PyErrOccurred&) { + return -1; + } +} + + +static PyObject* +green_getframe(PyGreenlet* self, void* UNUSED(context)) +{ + const PythonState::OwnedFrame& top_frame = BorrowedGreenlet(self)->top_frame(); + return top_frame.acquire_or_None(); +} + + +static PyObject* +green_getstate(PyGreenlet* self) +{ + PyErr_Format(PyExc_TypeError, + "cannot serialize '%s' object", + Py_TYPE(self)->tp_name); + return nullptr; +} + +static PyObject* +green_repr(PyGreenlet* _self) +{ + BorrowedGreenlet self(_self); + /* + Return a string like + + + The handling of greenlets across threads is not super good. + We mostly use the internal definitions of these terms, but they + generally should make sense to users as well. + */ + PyObject* result; + int never_started = !self->started() && !self->active(); + + const char* const tp_name = Py_TYPE(self)->tp_name; + + if (_green_not_dead(self)) { + /* XXX: The otid= is almost useless because you can't correlate it to + any thread identifier exposed to Python. 
We could use + PyThreadState_GET()->thread_id, but we'd need to save that in the + greenlet, or save the whole PyThreadState object itself. + + As it stands, its only useful for identifying greenlets from the same thread. + */ + const char* state_in_thread; + if (self->was_running_in_dead_thread()) { + // The thread it was running in is dead! + // This can happen, especially at interpreter shut down. + // It complicates debugging output because it may be + // impossible to access the current thread state at that + // time. Thus, don't access the current thread state. + state_in_thread = " (thread exited)"; + } + else { + state_in_thread = GET_THREAD_STATE().state().is_current(self) + ? " current" + : (self->started() ? " suspended" : ""); + } + result = PyUnicode_FromFormat( + "<%s object at %p (otid=%p)%s%s%s%s>", + tp_name, + self.borrow_o(), + self->thread_state(), + state_in_thread, + self->active() ? " active" : "", + never_started ? " pending" : " started", + self->main() ? " main" : "" + ); + } + else { + result = PyUnicode_FromFormat( + "<%s object at %p (otid=%p) %sdead>", + tp_name, + self.borrow_o(), + self->thread_state(), + self->was_running_in_dead_thread() + ? "(thread exited) " + : "" + ); + } + + return result; +} + + +static PyMethodDef green_methods[] = { + { + .ml_name="switch", + .ml_meth=reinterpret_cast(green_switch), + .ml_flags=METH_VARARGS | METH_KEYWORDS, + .ml_doc=green_switch_doc + }, + {.ml_name="throw", .ml_meth=(PyCFunction)green_throw, .ml_flags=METH_VARARGS, .ml_doc=green_throw_doc}, + {.ml_name="__getstate__", .ml_meth=(PyCFunction)green_getstate, .ml_flags=METH_NOARGS, .ml_doc=NULL}, + {.ml_name=NULL, .ml_meth=NULL} /* sentinel */ +}; + +static PyGetSetDef green_getsets[] = { + /* name, getter, setter, doc, context pointer */ + {.name="__dict__", .get=(getter)green_getdict, .set=(setter)green_setdict}, + {.name="run", .get=(getter)green_getrun, .set=(setter)green_setrun}, + {.name="parent", .get=(getter)green_getparent, .set=(setter)green_setparent}, + {.name="gr_frame", .get=(getter)green_getframe }, + { + .name="gr_context", + .get=(getter)green_getcontext, + .set=(setter)green_setcontext + }, + {.name="dead", .get=(getter)green_getdead}, + {.name="_stack_saved", .get=(getter)green_get_stack_saved}, + {.name=NULL} +}; + +static PyMemberDef green_members[] = { + {.name=NULL} +}; + +static PyNumberMethods green_as_number = { + .nb_bool=(inquiry)green_bool, +}; + + +PyTypeObject PyGreenlet_Type = { + .ob_base=PyVarObject_HEAD_INIT(NULL, 0) + .tp_name="greenlet.greenlet", /* tp_name */ + .tp_basicsize=sizeof(PyGreenlet), /* tp_basicsize */ + /* methods */ + .tp_dealloc=(destructor)green_dealloc, /* tp_dealloc */ + .tp_repr=(reprfunc)green_repr, /* tp_repr */ + .tp_as_number=&green_as_number, /* tp_as _number*/ + .tp_flags=G_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + .tp_doc="greenlet(run=None, parent=None) -> greenlet\n\n" + "Creates a new greenlet object (without running it).\n\n" + " - *run* -- The callable to invoke.\n" + " - *parent* -- The parent greenlet. 
The default is the current " + "greenlet.", /* tp_doc */ + .tp_traverse=(traverseproc)green_traverse, /* tp_traverse */ + .tp_clear=(inquiry)green_clear, /* tp_clear */ + .tp_weaklistoffset=offsetof(PyGreenlet, weakreflist), /* tp_weaklistoffset */ + + .tp_methods=green_methods, /* tp_methods */ + .tp_members=green_members, /* tp_members */ + .tp_getset=green_getsets, /* tp_getset */ + .tp_dictoffset=offsetof(PyGreenlet, dict), /* tp_dictoffset */ + .tp_init=(initproc)green_init, /* tp_init */ + .tp_alloc=PyType_GenericAlloc, /* tp_alloc */ + .tp_new=(newfunc)green_new, /* tp_new */ + .tp_free=PyObject_GC_Del, /* tp_free */ + .tp_is_gc=(inquiry)green_is_gc, /* tp_is_gc */ +}; + +#endif + +// Local Variables: +// flycheck-clang-include-path: ("/opt/local/Library/Frameworks/Python.framework/Versions/3.8/include/python3.8") +// End: diff --git a/.venv/lib/python3.12/site-packages/greenlet/PyGreenlet.hpp b/.venv/lib/python3.12/site-packages/greenlet/PyGreenlet.hpp new file mode 100644 index 00000000..df6cd805 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/PyGreenlet.hpp @@ -0,0 +1,35 @@ +#ifndef PYGREENLET_HPP +#define PYGREENLET_HPP + + +#include "greenlet.h" +#include "greenlet_compiler_compat.hpp" +#include "greenlet_refs.hpp" + + +using greenlet::refs::OwnedGreenlet; +using greenlet::refs::BorrowedGreenlet; +using greenlet::refs::BorrowedObject;; +using greenlet::refs::OwnedObject; +using greenlet::refs::PyErrPieces; + + +// XXX: These doesn't really belong here, it's not a Python slot. +static OwnedObject internal_green_throw(BorrowedGreenlet self, PyErrPieces& err_pieces); + +static PyGreenlet* green_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds)); +static int green_clear(PyGreenlet* self); +static int green_init(PyGreenlet* self, PyObject* args, PyObject* kwargs); +static int green_setparent(PyGreenlet* self, PyObject* nparent, void* UNUSED(context)); +static int green_setrun(PyGreenlet* self, PyObject* nrun, void* UNUSED(context)); +static int green_traverse(PyGreenlet* self, visitproc visit, void* arg); +static void green_dealloc(PyGreenlet* self); +static PyObject* green_getparent(PyGreenlet* self, void* UNUSED(context)); + +static int green_is_gc(PyObject* self); +static PyObject* green_getdead(PyGreenlet* self, void* UNUSED(context)); +static PyObject* green_getrun(PyGreenlet* self, void* UNUSED(context)); +static int green_setcontext(PyGreenlet* self, PyObject* nctx, void* UNUSED(context)); +static PyObject* green_getframe(PyGreenlet* self, void* UNUSED(context)); +static PyObject* green_repr(PyGreenlet* self); +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/PyGreenletUnswitchable.cpp b/.venv/lib/python3.12/site-packages/greenlet/PyGreenletUnswitchable.cpp new file mode 100644 index 00000000..1b768ee3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/PyGreenletUnswitchable.cpp @@ -0,0 +1,147 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + Implementation of the Python slots for PyGreenletUnswitchable_Type +*/ +#ifndef PY_GREENLET_UNSWITCHABLE_CPP +#define PY_GREENLET_UNSWITCHABLE_CPP + + + +#define PY_SSIZE_T_CLEAN +#include +#include "structmember.h" // PyMemberDef + +#include "greenlet_internal.hpp" +// Code after this point can assume access to things declared in stdint.h, +// including the fixed-width types. This goes for the platform-specific switch functions +// as well. 
+#include "greenlet_refs.hpp" +#include "greenlet_slp_switch.hpp" + +#include "greenlet_thread_support.hpp" +#include "TGreenlet.hpp" + +#include "TGreenlet.cpp" +#include "TGreenletGlobals.cpp" +#include "TThreadStateDestroy.cpp" + + +using greenlet::LockGuard; +using greenlet::LockInitError; +using greenlet::PyErrOccurred; +using greenlet::Require; + +using greenlet::g_handle_exit; +using greenlet::single_result; + +using greenlet::Greenlet; +using greenlet::UserGreenlet; +using greenlet::MainGreenlet; +using greenlet::BrokenGreenlet; +using greenlet::ThreadState; +using greenlet::PythonState; + + +#include "PyGreenlet.hpp" + +static PyGreenlet* +green_unswitchable_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds)) +{ + PyGreenlet* o = + (PyGreenlet*)PyBaseObject_Type.tp_new(type, mod_globs->empty_tuple, mod_globs->empty_dict); + if (o) { + new BrokenGreenlet(o, GET_THREAD_STATE().state().borrow_current()); + assert(Py_REFCNT(o) == 1); + } + return o; +} + +static PyObject* +green_unswitchable_getforce(PyGreenlet* self, void* UNUSED(context)) +{ + BrokenGreenlet* broken = dynamic_cast(self->pimpl); + return PyBool_FromLong(broken->_force_switch_error); +} + +static int +green_unswitchable_setforce(PyGreenlet* self, PyObject* nforce, void* UNUSED(context)) +{ + if (!nforce) { + PyErr_SetString( + PyExc_AttributeError, + "Cannot delete force_switch_error" + ); + return -1; + } + BrokenGreenlet* broken = dynamic_cast(self->pimpl); + int is_true = PyObject_IsTrue(nforce); + if (is_true == -1) { + return -1; + } + broken->_force_switch_error = is_true; + return 0; +} + +static PyObject* +green_unswitchable_getforceslp(PyGreenlet* self, void* UNUSED(context)) +{ + BrokenGreenlet* broken = dynamic_cast(self->pimpl); + return PyBool_FromLong(broken->_force_slp_switch_error); +} + +static int +green_unswitchable_setforceslp(PyGreenlet* self, PyObject* nforce, void* UNUSED(context)) +{ + if (!nforce) { + PyErr_SetString( + PyExc_AttributeError, + "Cannot delete force_slp_switch_error" + ); + return -1; + } + BrokenGreenlet* broken = dynamic_cast(self->pimpl); + int is_true = PyObject_IsTrue(nforce); + if (is_true == -1) { + return -1; + } + broken->_force_slp_switch_error = is_true; + return 0; +} + +static PyGetSetDef green_unswitchable_getsets[] = { + /* name, getter, setter, doc, closure (context pointer) */ + { + .name="force_switch_error", + .get=(getter)green_unswitchable_getforce, + .set=(setter)green_unswitchable_setforce, + .doc=NULL + }, + { + .name="force_slp_switch_error", + .get=(getter)green_unswitchable_getforceslp, + .set=(setter)green_unswitchable_setforceslp, + .doc=nullptr + }, + {.name=nullptr} +}; + +PyTypeObject PyGreenletUnswitchable_Type = { + .ob_base=PyVarObject_HEAD_INIT(NULL, 0) + .tp_name="greenlet._greenlet.UnswitchableGreenlet", + .tp_dealloc= (destructor)green_dealloc, /* tp_dealloc */ + .tp_flags=G_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + .tp_doc="Undocumented internal class", /* tp_doc */ + .tp_traverse=(traverseproc)green_traverse, /* tp_traverse */ + .tp_clear=(inquiry)green_clear, /* tp_clear */ + + .tp_getset=green_unswitchable_getsets, /* tp_getset */ + .tp_base=&PyGreenlet_Type, /* tp_base */ + .tp_init=(initproc)green_init, /* tp_init */ + .tp_alloc=PyType_GenericAlloc, /* tp_alloc */ + .tp_new=(newfunc)green_unswitchable_new, /* tp_new */ + .tp_free=PyObject_GC_Del, /* tp_free */ + .tp_is_gc=(inquiry)green_is_gc, /* tp_is_gc */ +}; + + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/PyModule.cpp 
b/.venv/lib/python3.12/site-packages/greenlet/PyModule.cpp new file mode 100644 index 00000000..6adcb5c3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/PyModule.cpp @@ -0,0 +1,292 @@ +#ifndef PY_MODULE_CPP +#define PY_MODULE_CPP + +#include "greenlet_internal.hpp" + + +#include "TGreenletGlobals.cpp" +#include "TMainGreenlet.cpp" +#include "TThreadStateDestroy.cpp" + +using greenlet::LockGuard; +using greenlet::ThreadState; + +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wunused-function" +# pragma clang diagnostic ignored "-Wunused-variable" +#endif + +PyDoc_STRVAR(mod_getcurrent_doc, + "getcurrent() -> greenlet\n" + "\n" + "Returns the current greenlet (i.e. the one which called this " + "function).\n"); + +static PyObject* +mod_getcurrent(PyObject* UNUSED(module)) +{ + return GET_THREAD_STATE().state().get_current().relinquish_ownership_o(); +} + +PyDoc_STRVAR(mod_settrace_doc, + "settrace(callback) -> object\n" + "\n" + "Sets a new tracing function and returns the previous one.\n"); +static PyObject* +mod_settrace(PyObject* UNUSED(module), PyObject* args) +{ + PyArgParseParam tracefunc; + if (!PyArg_ParseTuple(args, "O", &tracefunc)) { + return NULL; + } + ThreadState& state = GET_THREAD_STATE(); + OwnedObject previous = state.get_tracefunc(); + if (!previous) { + previous = Py_None; + } + + state.set_tracefunc(tracefunc); + + return previous.relinquish_ownership(); +} + +PyDoc_STRVAR(mod_gettrace_doc, + "gettrace() -> object\n" + "\n" + "Returns the currently set tracing function, or None.\n"); + +static PyObject* +mod_gettrace(PyObject* UNUSED(module)) +{ + OwnedObject tracefunc = GET_THREAD_STATE().state().get_tracefunc(); + if (!tracefunc) { + tracefunc = Py_None; + } + return tracefunc.relinquish_ownership(); +} + + + +PyDoc_STRVAR(mod_set_thread_local_doc, + "set_thread_local(key, value) -> None\n" + "\n" + "Set a value in the current thread-local dictionary. Debugging only.\n"); + +static PyObject* +mod_set_thread_local(PyObject* UNUSED(module), PyObject* args) +{ + PyArgParseParam key; + PyArgParseParam value; + PyObject* result = NULL; + + if (PyArg_UnpackTuple(args, "set_thread_local", 2, 2, &key, &value)) { + if(PyDict_SetItem( + PyThreadState_GetDict(), // borrow + key, + value) == 0 ) { + // success + Py_INCREF(Py_None); + result = Py_None; + } + } + return result; +} + +PyDoc_STRVAR(mod_get_pending_cleanup_count_doc, + "get_pending_cleanup_count() -> Integer\n" + "\n" + "Get the number of greenlet cleanup operations pending. Testing only.\n"); + + +static PyObject* +mod_get_pending_cleanup_count(PyObject* UNUSED(module)) +{ + LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock); + return PyLong_FromSize_t(mod_globs->thread_states_to_destroy.size()); +} + +PyDoc_STRVAR(mod_get_total_main_greenlets_doc, + "get_total_main_greenlets() -> Integer\n" + "\n" + "Quickly return the number of main greenlets that exist. Testing only.\n"); + +static PyObject* +mod_get_total_main_greenlets(PyObject* UNUSED(module)) +{ + return PyLong_FromSize_t(G_TOTAL_MAIN_GREENLETS); +} + + + +PyDoc_STRVAR(mod_get_clocks_used_doing_optional_cleanup_doc, + "get_clocks_used_doing_optional_cleanup() -> Integer\n" + "\n" + "Get the number of clock ticks the program has used doing optional " + "greenlet cleanup.\n" + "Beginning in greenlet 2.0, greenlet tries to find and dispose of greenlets\n" + "that leaked after a thread exited. 
This requires invoking Python's garbage collector,\n" + "which may have a performance cost proportional to the number of live objects.\n" + "This function returns the amount of processor time\n" + "greenlet has used to do this. In programs that run with very large amounts of live\n" + "objects, this metric can be used to decide whether the cost of doing this cleanup\n" + "is worth the memory leak being corrected. If not, you can disable the cleanup\n" + "using ``enable_optional_cleanup(False)``.\n" + "The units are arbitrary and can only be compared to themselves (similarly to ``time.clock()``);\n" + "for example, to see how it scales with your heap. You can attempt to convert them into seconds\n" + "by dividing by the value of CLOCKS_PER_SEC." + "If cleanup has been disabled, returns None." + "\n" + "This is an implementation specific, provisional API. It may be changed or removed\n" + "in the future.\n" + ".. versionadded:: 2.0" + ); +static PyObject* +mod_get_clocks_used_doing_optional_cleanup(PyObject* UNUSED(module)) +{ + std::clock_t& clocks = ThreadState::clocks_used_doing_gc(); + + if (clocks == std::clock_t(-1)) { + Py_RETURN_NONE; + } + // This might not actually work on some implementations; clock_t + // is an opaque type. + return PyLong_FromSsize_t(clocks); +} + +PyDoc_STRVAR(mod_enable_optional_cleanup_doc, + "mod_enable_optional_cleanup(bool) -> None\n" + "\n" + "Enable or disable optional cleanup operations.\n" + "See ``get_clocks_used_doing_optional_cleanup()`` for details.\n" + ); +static PyObject* +mod_enable_optional_cleanup(PyObject* UNUSED(module), PyObject* flag) +{ + int is_true = PyObject_IsTrue(flag); + if (is_true == -1) { + return nullptr; + } + + std::clock_t& clocks = ThreadState::clocks_used_doing_gc(); + if (is_true) { + // If we already have a value, we don't want to lose it. + if (clocks == std::clock_t(-1)) { + clocks = 0; + } + } + else { + clocks = std::clock_t(-1); + } + Py_RETURN_NONE; +} + + + + +#if !GREENLET_PY313 +PyDoc_STRVAR(mod_get_tstate_trash_delete_nesting_doc, + "get_tstate_trash_delete_nesting() -> Integer\n" + "\n" + "Return the 'trash can' nesting level. 
Testing only.\n"); +static PyObject* +mod_get_tstate_trash_delete_nesting(PyObject* UNUSED(module)) +{ + PyThreadState* tstate = PyThreadState_GET(); + +#if GREENLET_PY312 + return PyLong_FromLong(tstate->trash.delete_nesting); +#else + return PyLong_FromLong(tstate->trash_delete_nesting); +#endif +} +#endif + + + + +static PyMethodDef GreenMethods[] = { + { + .ml_name="getcurrent", + .ml_meth=(PyCFunction)mod_getcurrent, + .ml_flags=METH_NOARGS, + .ml_doc=mod_getcurrent_doc + }, + { + .ml_name="settrace", + .ml_meth=(PyCFunction)mod_settrace, + .ml_flags=METH_VARARGS, + .ml_doc=mod_settrace_doc + }, + { + .ml_name="gettrace", + .ml_meth=(PyCFunction)mod_gettrace, + .ml_flags=METH_NOARGS, + .ml_doc=mod_gettrace_doc + }, + { + .ml_name="set_thread_local", + .ml_meth=(PyCFunction)mod_set_thread_local, + .ml_flags=METH_VARARGS, + .ml_doc=mod_set_thread_local_doc + }, + { + .ml_name="get_pending_cleanup_count", + .ml_meth=(PyCFunction)mod_get_pending_cleanup_count, + .ml_flags=METH_NOARGS, + .ml_doc=mod_get_pending_cleanup_count_doc + }, + { + .ml_name="get_total_main_greenlets", + .ml_meth=(PyCFunction)mod_get_total_main_greenlets, + .ml_flags=METH_NOARGS, + .ml_doc=mod_get_total_main_greenlets_doc + }, + { + .ml_name="get_clocks_used_doing_optional_cleanup", + .ml_meth=(PyCFunction)mod_get_clocks_used_doing_optional_cleanup, + .ml_flags=METH_NOARGS, + .ml_doc=mod_get_clocks_used_doing_optional_cleanup_doc + }, + { + .ml_name="enable_optional_cleanup", + .ml_meth=(PyCFunction)mod_enable_optional_cleanup, + .ml_flags=METH_O, + .ml_doc=mod_enable_optional_cleanup_doc + }, +#if !GREENLET_PY313 + { + .ml_name="get_tstate_trash_delete_nesting", + .ml_meth=(PyCFunction)mod_get_tstate_trash_delete_nesting, + .ml_flags=METH_NOARGS, + .ml_doc=mod_get_tstate_trash_delete_nesting_doc + }, +#endif + {.ml_name=NULL, .ml_meth=NULL} /* Sentinel */ +}; + +static const char* const copy_on_greentype[] = { + "getcurrent", + "error", + "GreenletExit", + "settrace", + "gettrace", + NULL +}; + +static struct PyModuleDef greenlet_module_def = { + .m_base=PyModuleDef_HEAD_INIT, + .m_name="greenlet._greenlet", + .m_doc=NULL, + .m_size=-1, + .m_methods=GreenMethods, +}; + + +#endif + +#ifdef __clang__ +# pragma clang diagnostic pop +#elif defined(__GNUC__) +# pragma GCC diagnostic pop +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/TBrokenGreenlet.cpp b/.venv/lib/python3.12/site-packages/greenlet/TBrokenGreenlet.cpp new file mode 100644 index 00000000..7e9ab5be --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/TBrokenGreenlet.cpp @@ -0,0 +1,45 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of greenlet::UserGreenlet. 
+ * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ + +#include "TGreenlet.hpp" + +namespace greenlet { + +void* BrokenGreenlet::operator new(size_t UNUSED(count)) +{ + return allocator.allocate(1); +} + + +void BrokenGreenlet::operator delete(void* ptr) +{ + return allocator.deallocate(static_cast(ptr), + 1); +} + +greenlet::PythonAllocator greenlet::BrokenGreenlet::allocator; + +bool +BrokenGreenlet::force_slp_switch_error() const noexcept +{ + return this->_force_slp_switch_error; +} + +UserGreenlet::switchstack_result_t BrokenGreenlet::g_switchstack(void) +{ + if (this->_force_switch_error) { + return switchstack_result_t(-1); + } + return UserGreenlet::g_switchstack(); +} + +}; //namespace greenlet diff --git a/.venv/lib/python3.12/site-packages/greenlet/TExceptionState.cpp b/.venv/lib/python3.12/site-packages/greenlet/TExceptionState.cpp new file mode 100644 index 00000000..08a94ae8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/TExceptionState.cpp @@ -0,0 +1,62 @@ +#ifndef GREENLET_EXCEPTION_STATE_CPP +#define GREENLET_EXCEPTION_STATE_CPP + +#include +#include "TGreenlet.hpp" + +namespace greenlet { + + +ExceptionState::ExceptionState() +{ + this->clear(); +} + +void ExceptionState::operator<<(const PyThreadState *const tstate) noexcept +{ + this->exc_info = tstate->exc_info; + this->exc_state = tstate->exc_state; +} + +void ExceptionState::operator>>(PyThreadState *const tstate) noexcept +{ + tstate->exc_state = this->exc_state; + tstate->exc_info = + this->exc_info ? this->exc_info : &tstate->exc_state; + this->clear(); +} + +void ExceptionState::clear() noexcept +{ + this->exc_info = nullptr; + this->exc_state.exc_value = nullptr; +#if !GREENLET_PY311 + this->exc_state.exc_type = nullptr; + this->exc_state.exc_traceback = nullptr; +#endif + this->exc_state.previous_item = nullptr; +} + +int ExceptionState::tp_traverse(visitproc visit, void* arg) noexcept +{ + Py_VISIT(this->exc_state.exc_value); +#if !GREENLET_PY311 + Py_VISIT(this->exc_state.exc_type); + Py_VISIT(this->exc_state.exc_traceback); +#endif + return 0; +} + +void ExceptionState::tp_clear() noexcept +{ + Py_CLEAR(this->exc_state.exc_value); +#if !GREENLET_PY311 + Py_CLEAR(this->exc_state.exc_type); + Py_CLEAR(this->exc_state.exc_traceback); +#endif +} + + +}; // namespace greenlet + +#endif // GREENLET_EXCEPTION_STATE_CPP diff --git a/.venv/lib/python3.12/site-packages/greenlet/TGreenlet.cpp b/.venv/lib/python3.12/site-packages/greenlet/TGreenlet.cpp new file mode 100644 index 00000000..4698a178 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/TGreenlet.cpp @@ -0,0 +1,718 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of greenlet::Greenlet. 
+ * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#ifndef TGREENLET_CPP +#define TGREENLET_CPP +#include "greenlet_internal.hpp" +#include "TGreenlet.hpp" + + +#include "TGreenletGlobals.cpp" +#include "TThreadStateDestroy.cpp" + +namespace greenlet { + +Greenlet::Greenlet(PyGreenlet* p) + : Greenlet(p, StackState()) +{ +} + +Greenlet::Greenlet(PyGreenlet* p, const StackState& initial_stack) + : _self(p), stack_state(initial_stack) +{ + assert(p->pimpl == nullptr); + p->pimpl = this; +} + +Greenlet::~Greenlet() +{ + // XXX: Can't do this. tp_clear is a virtual function, and by the + // time we're here, we've sliced off our child classes. + //this->tp_clear(); + this->_self->pimpl = nullptr; +} + +bool +Greenlet::force_slp_switch_error() const noexcept +{ + return false; +} + +void +Greenlet::release_args() +{ + this->switch_args.CLEAR(); +} + +/** + * CAUTION: This will allocate memory and may trigger garbage + * collection and arbitrary Python code. + */ +OwnedObject +Greenlet::throw_GreenletExit_during_dealloc(const ThreadState& UNUSED(current_thread_state)) +{ + // If we're killed because we lost all references in the + // middle of a switch, that's ok. Don't reset the args/kwargs, + // we still want to pass them to the parent. + PyErr_SetString(mod_globs->PyExc_GreenletExit, + "Killing the greenlet because all references have vanished."); + // To get here it had to have run before + return this->g_switch(); +} + +inline void +Greenlet::slp_restore_state() noexcept +{ +#ifdef SLP_BEFORE_RESTORE_STATE + SLP_BEFORE_RESTORE_STATE(); +#endif + this->stack_state.copy_heap_to_stack( + this->thread_state()->borrow_current()->stack_state); +} + + +inline int +Greenlet::slp_save_state(char *const stackref) noexcept +{ + // XXX: This used to happen in the middle, before saving, but + // after finding the next owner. Does that matter? This is + // only defined for Sparc/GCC where it flushes register + // windows to the stack (I think) +#ifdef SLP_BEFORE_SAVE_STATE + SLP_BEFORE_SAVE_STATE(); +#endif + return this->stack_state.copy_stack_to_heap(stackref, + this->thread_state()->borrow_current()->stack_state); +} + +/** + * CAUTION: This will allocate memory and may trigger garbage + * collection and arbitrary Python code. + */ +OwnedObject +Greenlet::on_switchstack_or_initialstub_failure( + Greenlet* target, + const Greenlet::switchstack_result_t& err, + const bool target_was_me, + const bool was_initial_stub) +{ + // If we get here, either g_initialstub() + // failed, or g_switchstack() failed. Either one of those + // cases SHOULD leave us in the original greenlet with a valid stack. + if (!PyErr_Occurred()) { + PyErr_SetString( + PyExc_SystemError, + was_initial_stub + ? "Failed to switch stacks into a greenlet for the first time." + : "Failed to switch stacks into a running greenlet."); + } + this->release_args(); + + if (target && !target_was_me) { + target->murder_in_place(); + } + + assert(!err.the_new_current_greenlet); + assert(!err.origin_greenlet); + return OwnedObject(); + +} + +OwnedGreenlet +Greenlet::g_switchstack_success() noexcept +{ + PyThreadState* tstate = PyThreadState_GET(); + // restore the saved state + this->python_state >> tstate; + this->exception_state >> tstate; + + // The thread state hasn't been changed yet. 
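+    // Swap the thread's notion of "current": remember the previous
+    // current greenlet (the origin of the switch) to hand back to the
+    // caller, and install ourselves in its place.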
+    ThreadState* thread_state = this->thread_state();
+    OwnedGreenlet result(thread_state->get_current());
+    thread_state->set_current(this->self());
+    //assert(thread_state->borrow_current().borrow() == this->_self);
+    return result;
+}
+
+Greenlet::switchstack_result_t
+Greenlet::g_switchstack(void)
+{
+    // if any of these assertions fail, it's likely because we
+    // switched away and tried to switch back to us. Early stages of
+    // switching are not reentrant because we re-use ``this->args()``.
+    // Switching away would happen if we trigger a garbage collection
+    // (by just using some Python APIs that happen to allocate Python
+    // objects) and some garbage had weakref callbacks or __del__ that
+    // switches (people don't write code like that by hand, but with
+    // gevent it's possible without realizing it)
+    assert(this->args() || PyErr_Occurred());
+    { /* save state */
+        if (this->thread_state()->is_current(this->self())) {
+            // Hmm, nothing to do.
+            // TODO: Does this bypass trace events that are
+            // important?
+            return switchstack_result_t(0,
+                this, this->thread_state()->borrow_current());
+        }
+        BorrowedGreenlet current = this->thread_state()->borrow_current();
+        PyThreadState* tstate = PyThreadState_GET();
+
+        current->python_state << tstate;
+        current->exception_state << tstate;
+        this->python_state.will_switch_from(tstate);
+        switching_thread_state = this;
+        current->expose_frames();
+    }
+    assert(this->args() || PyErr_Occurred());
+    // If this is the first switch into a greenlet, this will
+    // return twice, once with 1 in the new greenlet, once with 0
+    // in the origin.
+    int err;
+    if (this->force_slp_switch_error()) {
+        err = -1;
+    }
+    else {
+        err = slp_switch();
+    }
+
+    if (err < 0) { /* error */
+        // Tested by
+        // test_greenlet.TestBrokenGreenlets.test_failed_to_slp_switch_into_running
+        //
+        // It's not clear if it's worth trying to clean up and
+        // continue here. Failing to switch stacks is a big deal which
+        // may not be recoverable (who knows what state the stack is in).
+        // Also, we've stolen references in preparation for calling
+        // ``g_switchstack_success()`` and we don't have a clean
+        // mechanism for backing that all out.
+        Py_FatalError("greenlet: Failed low-level slp_switch(). The stack is probably corrupt.");
+    }
+
+    // No stack-based variables are valid anymore.
+
+    // But the global is volatile so we can reload it without the
+    // compiler caching it from earlier.
+    Greenlet* greenlet_that_switched_in = switching_thread_state; // aka this
+    switching_thread_state = nullptr;
+    // except that no stack variables are valid, we would:
+    // assert(this == greenlet_that_switched_in);
+
+    // switchstack success is where we restore the exception state,
+    // etc. It returns the origin greenlet because it's convenient.
+
+    OwnedGreenlet origin = greenlet_that_switched_in->g_switchstack_success();
+    assert(greenlet_that_switched_in->args() || PyErr_Occurred());
+    return switchstack_result_t(err, greenlet_that_switched_in, origin);
+}
+
+
+inline void
+Greenlet::check_switch_allowed() const
+{
+    // TODO: Make this take a parameter of the current greenlet,
+    // or current main greenlet, to make the check for
+    // cross-thread switching cheaper. Surely somewhere up the
+    // call stack we've already accessed the thread local variable.
+
+    // We expect to always have a main greenlet now; accessing the thread state
+    // created it. However, if we get here and cleanup has already
+    // begun because we're a greenlet that was running in a
+    // (now dead) thread, these invariants will not hold true. In
+    // fact, accessing `this->thread_state` may not even be possible.
+
+    // If the thread this greenlet was running in is dead,
+    // we'll still have a reference to a main greenlet, but the
+    // thread state pointer we have is bogus.
+    // TODO: Give the objects an API to determine if they belong
+    // to a dead thread.
+
+    const BorrowedMainGreenlet main_greenlet = this->find_main_greenlet_in_lineage();
+
+    if (!main_greenlet) {
+        throw PyErrOccurred(mod_globs->PyExc_GreenletError,
+                            "cannot switch to a garbage collected greenlet");
+    }
+
+    if (!main_greenlet->thread_state()) {
+        throw PyErrOccurred(mod_globs->PyExc_GreenletError,
+                            "cannot switch to a different thread (which happens to have exited)");
+    }
+
+    // The main greenlet we found was from the .parent lineage.
+    // That may or may not have any relationship to the main
+    // greenlet of the running thread. We can't actually access
+    // our this->thread_state members to try to check that,
+    // because it could be in the process of getting destroyed,
+    // but setting the main_greenlet->thread_state member to NULL
+    // may not be visible yet. So we need to check against the
+    // current thread state (once the cheaper checks are out of
+    // the way)
+    const BorrowedMainGreenlet current_main_greenlet = GET_THREAD_STATE().state().borrow_main_greenlet();
+    if (
+        // lineage main greenlet is not this thread's greenlet
+        current_main_greenlet != main_greenlet
+        || (
+            // attached to some thread
+            this->main_greenlet()
+            // XXX: Same condition as above. Was this supposed to be
+            // this->main_greenlet()?
+            && current_main_greenlet != main_greenlet)
+        // switching into a known dead thread (XXX: which, if we get here,
+        // is bad, because we just accessed the thread state, which is
+        // gone!)
+        || (!current_main_greenlet->thread_state())) {
+        // CAUTION: This may trigger memory allocations, gc, and
+        // arbitrary Python code.
+        throw PyErrOccurred(
+            mod_globs->PyExc_GreenletError,
+            "Cannot switch to a different thread\n\tCurrent: %R\n\tExpected: %R",
+            current_main_greenlet, main_greenlet);
+    }
+}
+
+const OwnedObject
+Greenlet::context() const
+{
+    using greenlet::PythonStateContext;
+    OwnedObject result;
+
+    if (this->is_currently_running_in_some_thread()) {
+        /* Currently running greenlet: context is stored in the thread state,
+           not the greenlet object. */
+        if (GET_THREAD_STATE().state().is_current(this->self())) {
+            result = PythonStateContext::context(PyThreadState_GET());
+        }
+        else {
+            throw ValueError(
+                "cannot get context of a "
+                "greenlet that is running in a different thread");
+        }
+    }
+    else {
+        /* Greenlet is not running: just return context. */
+        result = this->python_state.context();
+    }
+    if (!result) {
+        result = OwnedObject::None();
+    }
+    return result;
+}
+
+
+void
+Greenlet::context(BorrowedObject given)
+{
+    using greenlet::PythonStateContext;
+    if (!given) {
+        throw AttributeError("can't delete context attribute");
+    }
+    if (given.is_None()) {
+        /* "Empty context" is stored as NULL, not None.
*/ + given = nullptr; + } + + //checks type, incrs refcnt + greenlet::refs::OwnedContext context(given); + PyThreadState* tstate = PyThreadState_GET(); + + if (this->is_currently_running_in_some_thread()) { + if (!GET_THREAD_STATE().state().is_current(this->self())) { + throw ValueError("cannot set context of a greenlet" + " that is running in a different thread"); + } + + /* Currently running greenlet: context is stored in the thread state, + not the greenlet object. */ + OwnedObject octx = OwnedObject::consuming(PythonStateContext::context(tstate)); + PythonStateContext::context(tstate, context.relinquish_ownership()); + } + else { + /* Greenlet is not running: just set context. Note that the + greenlet may be dead.*/ + this->python_state.context() = context; + } +} + +/** + * CAUTION: May invoke arbitrary Python code. + * + * Figure out what the result of ``greenlet.switch(arg, kwargs)`` + * should be and transfers ownership of it to the left-hand-side. + * + * If switch() was just passed an arg tuple, then we'll just return that. + * If only keyword arguments were passed, then we'll pass the keyword + * argument dict. Otherwise, we'll create a tuple of (args, kwargs) and + * return both. + * + * CAUTION: This may allocate a new tuple object, which may + * cause the Python garbage collector to run, which in turn may + * run arbitrary Python code that switches. + */ +OwnedObject& operator<<=(OwnedObject& lhs, greenlet::SwitchingArgs& rhs) noexcept +{ + // Because this may invoke arbitrary Python code, which could + // result in switching back to us, we need to get the + // arguments locally on the stack. + assert(rhs); + OwnedObject args = rhs.args(); + OwnedObject kwargs = rhs.kwargs(); + rhs.CLEAR(); + // We shouldn't be called twice for the same switch. + assert(args || kwargs); + assert(!rhs); + + if (!kwargs) { + lhs = args; + } + else if (!PyDict_Size(kwargs.borrow())) { + lhs = args; + } + else if (!PySequence_Length(args.borrow())) { + lhs = kwargs; + } + else { + // PyTuple_Pack allocates memory, may GC, may run arbitrary + // Python code. + lhs = OwnedObject::consuming(PyTuple_Pack(2, args.borrow(), kwargs.borrow())); + } + return lhs; +} + +static OwnedObject +g_handle_exit(const OwnedObject& greenlet_result) +{ + if (!greenlet_result && mod_globs->PyExc_GreenletExit.PyExceptionMatches()) { + /* catch and ignore GreenletExit */ + PyErrFetchParam val; + PyErr_Fetch(PyErrFetchParam(), val, PyErrFetchParam()); + if (!val) { + return OwnedObject::None(); + } + return OwnedObject(val); + } + + if (greenlet_result) { + // package the result into a 1-tuple + // PyTuple_Pack increments the reference of its arguments, + // so we always need to decref the greenlet result; + // the owner will do that. + return OwnedObject::consuming(PyTuple_Pack(1, greenlet_result.borrow())); + } + + return OwnedObject(); +} + + + +/** + * May run arbitrary Python code. + */ +OwnedObject +Greenlet::g_switch_finish(const switchstack_result_t& err) +{ + assert(err.the_new_current_greenlet == this); + + ThreadState& state = *this->thread_state(); + // Because calling the trace function could do arbitrary things, + // including switching away from this greenlet and then maybe + // switching back, we need to capture the arguments now so that + // they don't change. 
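+    // Illustrative sketch (assumption, not library source) of the value
+    // ``result <<= this->args()`` below produces, per the ``operator<<=``
+    // documentation above; ``g`` is a hypothetical peer greenlet:
+    //
+    //   g.switch(1, 2)    # delivers (1, 2) into g
+    //   g.switch(x=1)     # delivers {'x': 1}
+    //   g.switch(1, x=2)  # delivers ((1,), {'x': 2})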
+    OwnedObject result;
+    if (this->args()) {
+        result <<= this->args();
+    }
+    else {
+        assert(PyErr_Occurred());
+    }
+    assert(!this->args());
+    try {
+        // Our only caller handles the bad error case
+        assert(err.status >= 0);
+        assert(state.borrow_current() == this->self());
+        if (OwnedObject tracefunc = state.get_tracefunc()) {
+            assert(result || PyErr_Occurred());
+            g_calltrace(tracefunc,
+                        result ? mod_globs->event_switch : mod_globs->event_throw,
+                        err.origin_greenlet,
+                        this->self());
+        }
+        // The above could have invoked arbitrary Python code, but
+        // it couldn't switch back to this object and *also*
+        // throw an exception, so the args won't have changed.
+
+        if (PyErr_Occurred()) {
+            // We get here if we fell off the end of the run() function
+            // raising an exception. The switch itself was
+            // successful, but the function raised.
+            // valgrind reports that memory allocated here can still
+            // be reached after a test run.
+            throw PyErrOccurred::from_current();
+        }
+        return result;
+    }
+    catch (const PyErrOccurred&) {
+        /* Turn switch errors into switch throws */
+        /* Turn trace errors into switch throws */
+        this->release_args();
+        throw;
+    }
+}
+
+void
+Greenlet::g_calltrace(const OwnedObject& tracefunc,
+                      const greenlet::refs::ImmortalEventName& event,
+                      const BorrowedGreenlet& origin,
+                      const BorrowedGreenlet& target)
+{
+    PyErrPieces saved_exc;
+    try {
+        TracingGuard tracing_guard;
+        // TODO: We have saved the active exception (if any) that's
+        // about to be raised. In the 'throw' case, we could provide
+        // the exception to the tracefunction, which seems very helpful.
+        tracing_guard.CallTraceFunction(tracefunc, event, origin, target);
+    }
+    catch (const PyErrOccurred&) {
+        // In case of exceptions, the trace function is removed,
+        // and any existing exception is replaced with the tracing
+        // exception.
+        GET_THREAD_STATE().state().set_tracefunc(Py_None);
+        throw;
+    }
+
+    saved_exc.PyErrRestore();
+    assert(
+        (event == mod_globs->event_throw && PyErr_Occurred())
+        || (event == mod_globs->event_switch && !PyErr_Occurred())
+    );
+}
+
+void
+Greenlet::murder_in_place()
+{
+    if (this->active()) {
+        assert(!this->is_currently_running_in_some_thread());
+        this->deactivate_and_free();
+    }
+}
+
+inline void
+Greenlet::deactivate_and_free()
+{
+    if (!this->active()) {
+        return;
+    }
+    // Throw away any saved stack.
+    this->stack_state = StackState();
+    assert(!this->stack_state.active());
+    // Throw away any Python references.
+    // We're holding a borrowed reference to the last
+    // frame we executed. Since we borrowed it, the
+    // normal traversal, clear, and dealloc functions
+    // ignore it, meaning it leaks. (The thread state
+    // object can't find it to clear it when that's
+    // deallocated either, because by definition if we
+    // got an object on this list, it wasn't
+    // running and the thread state doesn't have
+    // this frame.)
+    // So here, we *do* clear it.
+    this->python_state.tp_clear(true);
+}
+
+bool
+Greenlet::belongs_to_thread(const ThreadState* thread_state) const
+{
+    if (!this->thread_state() // not running anywhere, or thread
+                              // exited
+        || !thread_state) {   // same, or there is no thread state.
+        return false;
+    }
+    return true;
+}
+
+
+void
+Greenlet::deallocing_greenlet_in_thread(const ThreadState* current_thread_state)
+{
+    /* Cannot raise an exception to kill the greenlet if
+       it is not running in the same thread!
*/ + if (this->belongs_to_thread(current_thread_state)) { + assert(current_thread_state); + // To get here it had to have run before + /* Send the greenlet a GreenletExit exception. */ + + // We don't care about the return value, only whether an + // exception happened. + this->throw_GreenletExit_during_dealloc(*current_thread_state); + return; + } + + // Not the same thread! Temporarily save the greenlet + // into its thread's deleteme list, *if* it exists. + // If that thread has already exited, and processed its pending + // cleanup, we'll never be able to clean everything up: we won't + // be able to raise an exception. + // That's mostly OK! Since we can't add it to a list, our refcount + // won't increase, and we'll go ahead with the DECREFs later. + ThreadState *const thread_state = this->thread_state(); + if (thread_state) { + thread_state->delete_when_thread_running(this->self()); + } + else { + // The thread is dead, we can't raise an exception. + // We need to make it look non-active, though, so that dealloc + // finishes killing it. + this->deactivate_and_free(); + } + return; +} + + +int +Greenlet::tp_traverse(visitproc visit, void* arg) +{ + + int result; + if ((result = this->exception_state.tp_traverse(visit, arg)) != 0) { + return result; + } + //XXX: This is ugly. But so is handling everything having to do + //with the top frame. + bool visit_top_frame = this->was_running_in_dead_thread(); + // When true, the thread is dead. Our implicit weak reference to the + // frame is now all that's left; we consider ourselves to + // strongly own it now. + if ((result = this->python_state.tp_traverse(visit, arg, visit_top_frame)) != 0) { + return result; + } + return 0; +} + +int +Greenlet::tp_clear() +{ + bool own_top_frame = this->was_running_in_dead_thread(); + this->exception_state.tp_clear(); + this->python_state.tp_clear(own_top_frame); + return 0; +} + +bool Greenlet::is_currently_running_in_some_thread() const +{ + return this->stack_state.active() && !this->python_state.top_frame(); +} + +#if GREENLET_PY312 +void GREENLET_NOINLINE(Greenlet::expose_frames)() +{ + if (!this->python_state.top_frame()) { + return; + } + + _PyInterpreterFrame* last_complete_iframe = nullptr; + _PyInterpreterFrame* iframe = this->python_state.top_frame()->f_frame; + while (iframe) { + // We must make a copy before looking at the iframe contents, + // since iframe might point to a portion of the greenlet's C stack + // that was spilled when switching greenlets. + _PyInterpreterFrame iframe_copy; + this->stack_state.copy_from_stack(&iframe_copy, iframe, sizeof(*iframe)); + if (!_PyFrame_IsIncomplete(&iframe_copy)) { + // If the iframe were OWNED_BY_CSTACK then it would always be + // incomplete. Since it's not incomplete, it's not on the C stack + // and we can access it through the original `iframe` pointer + // directly. This is important since GetFrameObject might + // lazily _create_ the frame object and we don't want the + // interpreter to lose track of it. + assert(iframe_copy.owner != FRAME_OWNED_BY_CSTACK); + + // We really want to just write: + // PyFrameObject* frame = _PyFrame_GetFrameObject(iframe); + // but _PyFrame_GetFrameObject calls _PyFrame_MakeAndSetFrameObject + // which is not a visible symbol in libpython. 
The easiest + // way to get a public function to call it is using + // PyFrame_GetBack, which is defined as follows: + // assert(frame != NULL); + // assert(!_PyFrame_IsIncomplete(frame->f_frame)); + // PyFrameObject *back = frame->f_back; + // if (back == NULL) { + // _PyInterpreterFrame *prev = frame->f_frame->previous; + // prev = _PyFrame_GetFirstComplete(prev); + // if (prev) { + // back = _PyFrame_GetFrameObject(prev); + // } + // } + // return (PyFrameObject*)Py_XNewRef(back); + if (!iframe->frame_obj) { + PyFrameObject dummy_frame; + _PyInterpreterFrame dummy_iframe; + dummy_frame.f_back = nullptr; + dummy_frame.f_frame = &dummy_iframe; + // force the iframe to be considered complete without + // needing to check its code object: + dummy_iframe.owner = FRAME_OWNED_BY_GENERATOR; + dummy_iframe.previous = iframe; + assert(!_PyFrame_IsIncomplete(&dummy_iframe)); + // Drop the returned reference immediately; the iframe + // continues to hold a strong reference + Py_XDECREF(PyFrame_GetBack(&dummy_frame)); + assert(iframe->frame_obj); + } + + // This is a complete frame, so make the last one of those we saw + // point at it, bypassing any incomplete frames (which may have + // been on the C stack) in between the two. We're overwriting + // last_complete_iframe->previous and need that to be reversible, + // so we store the original previous ptr in the frame object + // (which we must have created on a previous iteration through + // this loop). The frame object has a bunch of storage that is + // only used when its iframe is OWNED_BY_FRAME_OBJECT, which only + // occurs when the frame object outlives the frame's execution, + // which can't have happened yet because the frame is currently + // executing as far as the interpreter is concerned. So, we can + // reuse it for our own purposes. + assert(iframe->owner == FRAME_OWNED_BY_THREAD + || iframe->owner == FRAME_OWNED_BY_GENERATOR); + if (last_complete_iframe) { + assert(last_complete_iframe->frame_obj); + memcpy(&last_complete_iframe->frame_obj->_f_frame_data[0], + &last_complete_iframe->previous, sizeof(void *)); + last_complete_iframe->previous = iframe; + } + last_complete_iframe = iframe; + } + // Frames that are OWNED_BY_FRAME_OBJECT are linked via the + // frame's f_back while all others are linked via the iframe's + // previous ptr. Since all the frames we traverse are running + // as far as the interpreter is concerned, we don't have to + // worry about the OWNED_BY_FRAME_OBJECT case. + iframe = iframe_copy.previous; + } + + // Give the outermost complete iframe a null previous pointer to + // account for any potential incomplete/C-stack iframes between it + // and the actual top-of-stack + if (last_complete_iframe) { + assert(last_complete_iframe->frame_obj); + memcpy(&last_complete_iframe->frame_obj->_f_frame_data[0], + &last_complete_iframe->previous, sizeof(void *)); + last_complete_iframe->previous = nullptr; + } +} +#else +void Greenlet::expose_frames() +{ + +} +#endif + +}; // namespace greenlet +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/TGreenlet.hpp b/.venv/lib/python3.12/site-packages/greenlet/TGreenlet.hpp new file mode 100644 index 00000000..512f7fb3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/TGreenlet.hpp @@ -0,0 +1,813 @@ +#ifndef GREENLET_GREENLET_HPP +#define GREENLET_GREENLET_HPP +/* + * Declarations of the core data structures. 
+*/ + +#define PY_SSIZE_T_CLEAN +#include + +#include "greenlet_compiler_compat.hpp" +#include "greenlet_refs.hpp" +#include "greenlet_cpython_compat.hpp" +#include "greenlet_allocator.hpp" + +using greenlet::refs::OwnedObject; +using greenlet::refs::OwnedGreenlet; +using greenlet::refs::OwnedMainGreenlet; +using greenlet::refs::BorrowedGreenlet; + +#if PY_VERSION_HEX < 0x30B00A6 +# define _PyCFrame CFrame +# define _PyInterpreterFrame _interpreter_frame +#endif + +#if GREENLET_PY312 +# define Py_BUILD_CORE +# include "internal/pycore_frame.h" +#endif + +// XXX: TODO: Work to remove all virtual functions +// for speed of calling and size of objects (no vtable). +// One pattern is the Curiously Recurring Template +namespace greenlet +{ + class ExceptionState + { + private: + G_NO_COPIES_OF_CLS(ExceptionState); + + // Even though these are borrowed objects, we actually own + // them, when they're not null. + // XXX: Express that in the API. + private: + _PyErr_StackItem* exc_info; + _PyErr_StackItem exc_state; + public: + ExceptionState(); + void operator<<(const PyThreadState *const tstate) noexcept; + void operator>>(PyThreadState* tstate) noexcept; + void clear() noexcept; + + int tp_traverse(visitproc visit, void* arg) noexcept; + void tp_clear() noexcept; + }; + + template + void operator<<(const PyThreadState *const tstate, T& exc); + + class PythonStateContext + { + protected: + greenlet::refs::OwnedContext _context; + public: + inline const greenlet::refs::OwnedContext& context() const + { + return this->_context; + } + inline greenlet::refs::OwnedContext& context() + { + return this->_context; + } + + inline void tp_clear() + { + this->_context.CLEAR(); + } + + template + inline static PyObject* context(T* tstate) + { + return tstate->context; + } + + template + inline static void context(T* tstate, PyObject* new_context) + { + tstate->context = new_context; + tstate->context_ver++; + } + }; + class SwitchingArgs; + class PythonState : public PythonStateContext + { + public: + typedef greenlet::refs::OwnedReference OwnedFrame; + private: + G_NO_COPIES_OF_CLS(PythonState); + // We own this if we're suspended (although currently we don't + // tp_traverse into it; that's a TODO). If we're running, it's + // empty. If we get deallocated and *still* have a frame, it + // won't be reachable from the place that normally decref's + // it, so we need to do it (hence owning it). + OwnedFrame _top_frame; +#if GREENLET_USE_CFRAME + _PyCFrame* cframe; + int use_tracing; +#endif +#if GREENLET_PY312 + int py_recursion_depth; + int c_recursion_depth; +#else + int recursion_depth; +#endif +#if GREENLET_PY313 + PyObject *delete_later; +#else + int trash_delete_nesting; +#endif +#if GREENLET_PY311 + _PyInterpreterFrame* current_frame; + _PyStackChunk* datastack_chunk; + PyObject** datastack_top; + PyObject** datastack_limit; +#endif + // The PyInterpreterFrame list on 3.12+ contains some entries that are + // on the C stack, which can't be directly accessed while a greenlet is + // suspended. In order to keep greenlet gr_frame introspection working, + // we adjust stack switching to rewrite the interpreter frame list + // to skip these C-stack frames; we call this "exposing" the greenlet's + // frames because it makes them valid to work with in Python. Then when + // the greenlet is resumed we need to remember to reverse the operation + // we did. 
The C-stack frames are "entry frames" which are a low-level + // interpreter detail; they're not needed for introspection, but do + // need to be present for the eval loop to work. + void unexpose_frames(); + + public: + + PythonState(); + // You can use this for testing whether we have a frame + // or not. It returns const so they can't modify it. + const OwnedFrame& top_frame() const noexcept; + + inline void operator<<(const PyThreadState *const tstate) noexcept; + inline void operator>>(PyThreadState* tstate) noexcept; + void clear() noexcept; + + int tp_traverse(visitproc visit, void* arg, bool visit_top_frame) noexcept; + void tp_clear(bool own_top_frame) noexcept; + void set_initial_state(const PyThreadState* const tstate) noexcept; +#if GREENLET_USE_CFRAME + void set_new_cframe(_PyCFrame& frame) noexcept; +#endif + + void may_switch_away() noexcept; + inline void will_switch_from(PyThreadState *const origin_tstate) noexcept; + void did_finish(PyThreadState* tstate) noexcept; + }; + + class StackState + { + // By having only plain C (POD) members, no virtual functions + // or bases, we get a trivial assignment operator generated + // for us. However, that's not safe since we do manage memory. + // So we declare an assignment operator that only works if we + // don't have any memory allocated. (We don't use + // std::shared_ptr for reference counting just to keep this + // object small) + private: + char* _stack_start; + char* stack_stop; + char* stack_copy; + intptr_t _stack_saved; + StackState* stack_prev; + inline int copy_stack_to_heap_up_to(const char* const stop) noexcept; + inline void free_stack_copy() noexcept; + + public: + /** + * Creates a started, but inactive, state, using *current* + * as the previous. + */ + StackState(void* mark, StackState& current); + /** + * Creates an inactive, unstarted, state. + */ + StackState(); + ~StackState(); + StackState(const StackState& other); + StackState& operator=(const StackState& other); + inline void copy_heap_to_stack(const StackState& current) noexcept; + inline int copy_stack_to_heap(char* const stackref, const StackState& current) noexcept; + inline bool started() const noexcept; + inline bool main() const noexcept; + inline bool active() const noexcept; + inline void set_active() noexcept; + inline void set_inactive() noexcept; + inline intptr_t stack_saved() const noexcept; + inline char* stack_start() const noexcept; + static inline StackState make_main() noexcept; +#ifdef GREENLET_USE_STDIO + friend std::ostream& operator<<(std::ostream& os, const StackState& s); +#endif + + // Fill in [dest, dest + n) with the values that would be at + // [src, src + n) while this greenlet is running. This is like memcpy + // except that if the greenlet is suspended it accounts for the portion + // of the greenlet's stack that was spilled to the heap. `src` may + // be on this greenlet's stack, or on the heap, but not on a different + // greenlet's stack. + void copy_from_stack(void* dest, const void* src, size_t n) const; + }; +#ifdef GREENLET_USE_STDIO + std::ostream& operator<<(std::ostream& os, const StackState& s); +#endif + + class SwitchingArgs + { + private: + G_NO_ASSIGNMENT_OF_CLS(SwitchingArgs); + // If args and kwargs are both false (NULL), this is a *throw*, not a + // switch. PyErr_... must have been called already. 
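+        //
+        // Illustrative sketch (not part of the source): at the Python
+        // level, ``g.switch(1, x=2)`` populates both members, while
+        // ``g.throw(ValueError())`` leaves both null after first
+        // setting the pending exception.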
+ OwnedObject _args; + OwnedObject _kwargs; + public: + + SwitchingArgs() + {} + + SwitchingArgs(const OwnedObject& args, const OwnedObject& kwargs) + : _args(args), + _kwargs(kwargs) + {} + + SwitchingArgs(const SwitchingArgs& other) + : _args(other._args), + _kwargs(other._kwargs) + {} + + const OwnedObject& args() + { + return this->_args; + } + + const OwnedObject& kwargs() + { + return this->_kwargs; + } + + /** + * Moves ownership from the argument to this object. + */ + SwitchingArgs& operator<<=(SwitchingArgs& other) + { + if (this != &other) { + this->_args = other._args; + this->_kwargs = other._kwargs; + other.CLEAR(); + } + return *this; + } + + /** + * Acquires ownership of the argument (consumes the reference). + */ + SwitchingArgs& operator<<=(PyObject* args) + { + this->_args = OwnedObject::consuming(args); + this->_kwargs.CLEAR(); + return *this; + } + + /** + * Acquires ownership of the argument. + * + * Sets the args to be the given value; clears the kwargs. + */ + SwitchingArgs& operator<<=(OwnedObject& args) + { + assert(&args != &this->_args); + this->_args = args; + this->_kwargs.CLEAR(); + args.CLEAR(); + + return *this; + } + + explicit operator bool() const noexcept + { + return this->_args || this->_kwargs; + } + + inline void CLEAR() + { + this->_args.CLEAR(); + this->_kwargs.CLEAR(); + } + + const std::string as_str() const noexcept + { + return PyUnicode_AsUTF8( + OwnedObject::consuming( + PyUnicode_FromFormat( + "SwitchingArgs(args=%R, kwargs=%R)", + this->_args.borrow(), + this->_kwargs.borrow() + ) + ).borrow() + ); + } + }; + + class ThreadState; + + class UserGreenlet; + class MainGreenlet; + + class Greenlet + { + private: + G_NO_COPIES_OF_CLS(Greenlet); + PyGreenlet* const _self; + private: + // XXX: Work to remove these. + friend class ThreadState; + friend class UserGreenlet; + friend class MainGreenlet; + protected: + ExceptionState exception_state; + SwitchingArgs switch_args; + StackState stack_state; + PythonState python_state; + Greenlet(PyGreenlet* p, const StackState& initial_state); + public: + // This constructor takes ownership of the PyGreenlet, by + // setting ``p->pimpl = this;``. + Greenlet(PyGreenlet* p); + virtual ~Greenlet(); + + const OwnedObject context() const; + + // You MUST call this _very_ early in the switching process to + // prepare anything that may need prepared. This might perform + // garbage collections or otherwise run arbitrary Python code. + // + // One specific use of it is for Python 3.11+, preventing + // running arbitrary code at unsafe times. See + // PythonState::may_switch_away(). + inline void may_switch_away() + { + this->python_state.may_switch_away(); + } + + inline void context(refs::BorrowedObject new_context); + + inline SwitchingArgs& args() + { + return this->switch_args; + } + + virtual const refs::BorrowedMainGreenlet main_greenlet() const = 0; + + inline intptr_t stack_saved() const noexcept + { + return this->stack_state.stack_saved(); + } + + // This is used by the macro SLP_SAVE_STATE to compute the + // difference in stack sizes. It might be nice to handle the + // computation ourself, but the type of the result + // varies by platform, so doing it in the macro is the + // simplest way. + inline const char* stack_start() const noexcept + { + return this->stack_state.stack_start(); + } + + virtual OwnedObject throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state); + virtual OwnedObject g_switch() = 0; + /** + * Force the greenlet to appear dead. 
Used when it's not
+         * possible to throw an exception into a greenlet anymore.
+         *
+         * This loses access to the thread state and the main greenlet.
+         */
+        virtual void murder_in_place();
+
+        /**
+         * Called when somebody notices we were running in a dead
+         * thread to allow cleaning up resources (because we can't
+         * raise GreenletExit into it anymore).
+         * This is very similar to ``murder_in_place()``, except that
+         * it DOES NOT lose the main greenlet or thread state.
+         */
+        inline void deactivate_and_free();
+
+
+        // Called when some thread wants to deallocate a greenlet
+        // object.
+        // The thread may or may not be the same thread the greenlet
+        // was running in.
+        // The thread state will be null if the thread the greenlet
+        // was running in was known to have exited.
+        void deallocing_greenlet_in_thread(const ThreadState* current_state);
+
+        // Must be called on 3.12+ before exposing a suspended greenlet's
+        // frames to user code. This rewrites the linked list of interpreter
+        // frames to skip the ones that are being stored on the C stack (which
+        // can't be safely accessed while the greenlet is suspended because
+        // that stack space might be hosting a different greenlet), and
+        // sets PythonState::frames_were_exposed so we remember to restore
+        // the original list before resuming the greenlet. The C-stack frames
+        // are a low-level interpreter implementation detail; while they're
+        // important to the bytecode eval loop, they're superfluous for
+        // introspection purposes.
+        void expose_frames();
+
+
+        // TODO: Figure out how to make these non-public.
+        inline void slp_restore_state() noexcept;
+        inline int slp_save_state(char *const stackref) noexcept;
+
+        inline bool is_currently_running_in_some_thread() const;
+        virtual bool belongs_to_thread(const ThreadState* state) const;
+
+        inline bool started() const
+        {
+            return this->stack_state.started();
+        }
+        inline bool active() const
+        {
+            return this->stack_state.active();
+        }
+        inline bool main() const
+        {
+            return this->stack_state.main();
+        }
+        virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const = 0;
+
+        virtual const OwnedGreenlet parent() const = 0;
+        virtual void parent(const refs::BorrowedObject new_parent) = 0;
+
+        inline const PythonState::OwnedFrame& top_frame()
+        {
+            return this->python_state.top_frame();
+        }
+
+        virtual const OwnedObject& run() const = 0;
+        virtual void run(const refs::BorrowedObject nrun) = 0;
+
+
+        virtual int tp_traverse(visitproc visit, void* arg);
+        virtual int tp_clear();
+
+
+        // Return the thread state that the greenlet is running in, or
+        // null if the greenlet is not running or the thread is known
+        // to have exited.
+        virtual ThreadState* thread_state() const noexcept = 0;
+
+        // Return true if the greenlet is known to have been running
+        // (active) in a thread that has now exited.
+        virtual bool was_running_in_dead_thread() const noexcept = 0;
+
+        // Return a borrowed greenlet that is the Python object
+        // this object represents.
+        inline BorrowedGreenlet self() const noexcept
+        {
+            return BorrowedGreenlet(this->_self);
+        }
+
+        // For testing. If this returns true, we should pretend that
+        // slp_switch() failed.
+        virtual bool force_slp_switch_error() const noexcept;
+
+    protected:
+        inline void release_args();
+
+        // The functions that must not be inlined are declared virtual.
+        // We also mark them as protected, not private, so that the
+        // compiler is forced to call them through a function pointer.
+        // (A sufficiently smart compiler could directly call a private
+        // virtual function since it can never be overridden in a
+        // subclass).
+
+        // Also TODO: Switch away from integer error codes and to enums,
+        // or throw exceptions when possible.
+        struct switchstack_result_t
+        {
+            int status;
+            Greenlet* the_new_current_greenlet;
+            OwnedGreenlet origin_greenlet;
+
+            switchstack_result_t()
+                : status(0),
+                  the_new_current_greenlet(nullptr)
+            {}
+
+            switchstack_result_t(int err)
+                : status(err),
+                  the_new_current_greenlet(nullptr)
+            {}
+
+            switchstack_result_t(int err, Greenlet* state, OwnedGreenlet& origin)
+                : status(err),
+                  the_new_current_greenlet(state),
+                  origin_greenlet(origin)
+            {
+            }
+
+            switchstack_result_t(int err, Greenlet* state, const BorrowedGreenlet& origin)
+                : status(err),
+                  the_new_current_greenlet(state),
+                  origin_greenlet(origin)
+            {
+            }
+
+            switchstack_result_t(const switchstack_result_t& other)
+                : status(other.status),
+                  the_new_current_greenlet(other.the_new_current_greenlet),
+                  origin_greenlet(other.origin_greenlet)
+            {}
+
+            switchstack_result_t& operator=(const switchstack_result_t& other)
+            {
+                this->status = other.status;
+                this->the_new_current_greenlet = other.the_new_current_greenlet;
+                this->origin_greenlet = other.origin_greenlet;
+                return *this;
+            }
+        };
+
+        OwnedObject on_switchstack_or_initialstub_failure(
+            Greenlet* target,
+            const switchstack_result_t& err,
+            const bool target_was_me=false,
+            const bool was_initial_stub=false);
+
+        // Returns the previous greenlet we just switched away from.
+        virtual OwnedGreenlet g_switchstack_success() noexcept;
+
+
+        // Check the preconditions for switching to this greenlet; if they
+        // aren't met, throws PyErrOccurred. Most callers will want to
+        // catch this and clear the arguments.
+        inline void check_switch_allowed() const;
+        class GreenletStartedWhileInPython : public std::runtime_error
+        {
+        public:
+            GreenletStartedWhileInPython() : std::runtime_error("")
+            {}
+        };
+
+    protected:
+
+
+        /**
+           Perform a stack switch into this greenlet.
+
+           This temporarily sets the global variable
+           ``switching_thread_state`` to this greenlet; as soon as the
+           call to ``slp_switch`` completes, this is reset to NULL.
+           Consequently, this depends on the GIL.
+
+           TODO: Adopt the stackman model and pass ``slp_switch`` a
+           callback function and context pointer; this eliminates the
+           need for global variables altogether.
+
+           Because the stack switch happens in this function, this
+           function can't use its own stack (local) variables, set
+           before the switch, and then accessed after the switch.
+
+           Further, you can't even access ``g_thread_state_global``
+           before and after the switch from the global variable.
+           Because it is thread local some compilers cache it in a
+           register/on the stack, notably new versions of MSVC; this
+           breaks with strange crashes sometime later, because writing
+           to anything in ``g_thread_state_global`` after the switch
+           is actually writing to random memory. For this reason, we
+           call a non-inlined function to finish the operation. (XXX:
+           The ``/GT`` MSVC compiler argument probably fixes that.)
+
+           It is very important that the stack switch is 'atomic', i.e. no
+           calls into other Python code allowed (except very few that
+           are safe), because global variables are very fragile. (This
+           should no longer be the case with thread-local variables.)
+
+        */
+        // Made virtual to facilitate subclassing UserGreenlet for testing.
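+        // (``BrokenGreenlet``, declared later in this header, overrides
+        // ``g_switchstack`` to force such failures for the test suite.)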
+ virtual switchstack_result_t g_switchstack(void); + +class TracingGuard +{ +private: + PyThreadState* tstate; +public: + TracingGuard() + : tstate(PyThreadState_GET()) + { + PyThreadState_EnterTracing(this->tstate); + } + + ~TracingGuard() + { + PyThreadState_LeaveTracing(this->tstate); + this->tstate = nullptr; + } + + inline void CallTraceFunction(const OwnedObject& tracefunc, + const greenlet::refs::ImmortalEventName& event, + const BorrowedGreenlet& origin, + const BorrowedGreenlet& target) + { + // TODO: This calls tracefunc(event, (origin, target)). Add a shortcut + // function for that that's specialized to avoid the Py_BuildValue + // string parsing, or start with just using "ON" format with PyTuple_Pack(2, + // origin, target). That seems like what the N format is meant + // for. + // XXX: Why does event not automatically cast back to a PyObject? + // It tries to call the "deleted constructor ImmortalEventName + // const" instead. + assert(tracefunc); + assert(event); + assert(origin); + assert(target); + greenlet::refs::NewReference retval( + PyObject_CallFunction( + tracefunc.borrow(), + "O(OO)", + event.borrow(), + origin.borrow(), + target.borrow() + )); + if (!retval) { + throw PyErrOccurred::from_current(); + } + } +}; + + static void + g_calltrace(const OwnedObject& tracefunc, + const greenlet::refs::ImmortalEventName& event, + const greenlet::refs::BorrowedGreenlet& origin, + const BorrowedGreenlet& target); + private: + OwnedObject g_switch_finish(const switchstack_result_t& err); + + }; + + class UserGreenlet : public Greenlet + { + private: + static greenlet::PythonAllocator allocator; + OwnedMainGreenlet _main_greenlet; + OwnedObject _run_callable; + OwnedGreenlet _parent; + public: + static void* operator new(size_t UNUSED(count)); + static void operator delete(void* ptr); + + UserGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent); + virtual ~UserGreenlet(); + + virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const; + virtual bool was_running_in_dead_thread() const noexcept; + virtual ThreadState* thread_state() const noexcept; + virtual OwnedObject g_switch(); + virtual const OwnedObject& run() const + { + if (this->started() || !this->_run_callable) { + throw AttributeError("run"); + } + return this->_run_callable; + } + virtual void run(const refs::BorrowedObject nrun); + + virtual const OwnedGreenlet parent() const; + virtual void parent(const refs::BorrowedObject new_parent); + + virtual const refs::BorrowedMainGreenlet main_greenlet() const; + + virtual void murder_in_place(); + virtual bool belongs_to_thread(const ThreadState* state) const; + virtual int tp_traverse(visitproc visit, void* arg); + virtual int tp_clear(); + class ParentIsCurrentGuard + { + private: + OwnedGreenlet oldparent; + UserGreenlet* greenlet; + G_NO_COPIES_OF_CLS(ParentIsCurrentGuard); + public: + ParentIsCurrentGuard(UserGreenlet* p, const ThreadState& thread_state); + ~ParentIsCurrentGuard(); + }; + virtual OwnedObject throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state); + protected: + virtual switchstack_result_t g_initialstub(void* mark); + private: + // This function isn't meant to return. + // This accepts raw pointers and the ownership of them at the + // same time. The caller should use ``inner_bootstrap(origin.relinquish_ownership())``. 
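+        // (Assumption: ``relinquish_ownership()`` hands over the strong
+        // reference as a raw pointer, so it can cross the stack switch
+        // without a wrapper object on the dying C stack.)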
+ void inner_bootstrap(PyGreenlet* origin_greenlet, PyObject* run); + }; + + class BrokenGreenlet : public UserGreenlet + { + private: + static greenlet::PythonAllocator allocator; + public: + bool _force_switch_error = false; + bool _force_slp_switch_error = false; + + static void* operator new(size_t UNUSED(count)); + static void operator delete(void* ptr); + BrokenGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent) + : UserGreenlet(p, the_parent) + {} + virtual ~BrokenGreenlet() + {} + + virtual switchstack_result_t g_switchstack(void); + virtual bool force_slp_switch_error() const noexcept; + + }; + + class MainGreenlet : public Greenlet + { + private: + static greenlet::PythonAllocator allocator; + refs::BorrowedMainGreenlet _self; + ThreadState* _thread_state; + G_NO_COPIES_OF_CLS(MainGreenlet); + public: + static void* operator new(size_t UNUSED(count)); + static void operator delete(void* ptr); + + MainGreenlet(refs::BorrowedMainGreenlet::PyType*, ThreadState*); + virtual ~MainGreenlet(); + + + virtual const OwnedObject& run() const; + virtual void run(const refs::BorrowedObject nrun); + + virtual const OwnedGreenlet parent() const; + virtual void parent(const refs::BorrowedObject new_parent); + + virtual const refs::BorrowedMainGreenlet main_greenlet() const; + + virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const; + virtual bool was_running_in_dead_thread() const noexcept; + virtual ThreadState* thread_state() const noexcept; + void thread_state(ThreadState*) noexcept; + virtual OwnedObject g_switch(); + virtual int tp_traverse(visitproc visit, void* arg); + }; + + // Instantiate one on the stack to save the GC state, + // and then disable GC. When it goes out of scope, GC will be + // restored to its original state. Sadly, these APIs are only + // available on 3.10+; luckily, we only need them on 3.11+. +#if GREENLET_PY310 + class GCDisabledGuard + { + private: + int was_enabled = 0; + public: + GCDisabledGuard() + : was_enabled(PyGC_IsEnabled()) + { + PyGC_Disable(); + } + + ~GCDisabledGuard() + { + if (this->was_enabled) { + PyGC_Enable(); + } + } + }; +#endif + + OwnedObject& operator<<=(OwnedObject& lhs, greenlet::SwitchingArgs& rhs) noexcept; + + //TODO: Greenlet::g_switch() should call this automatically on its + //return value. As it is, the module code is calling it. + static inline OwnedObject + single_result(const OwnedObject& results) + { + if (results + && PyTuple_Check(results.borrow()) + && PyTuple_GET_SIZE(results.borrow()) == 1) { + PyObject* result = PyTuple_GET_ITEM(results.borrow(), 0); + assert(result); + return OwnedObject::owning(result); + } + return results; + } + + + static OwnedObject + g_handle_exit(const OwnedObject& greenlet_result); + + + template + void operator<<(const PyThreadState *const lhs, T& rhs) + { + rhs.operator<<(lhs); + } + +} // namespace greenlet ; + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/TGreenletGlobals.cpp b/.venv/lib/python3.12/site-packages/greenlet/TGreenletGlobals.cpp new file mode 100644 index 00000000..0087d2ff --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/TGreenletGlobals.cpp @@ -0,0 +1,94 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of GreenletGlobals. 
+ * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#ifndef T_GREENLET_GLOBALS +#define T_GREENLET_GLOBALS + +#include "greenlet_refs.hpp" +#include "greenlet_exceptions.hpp" +#include "greenlet_thread_support.hpp" +#include "greenlet_internal.hpp" + +namespace greenlet { + +// This encapsulates what were previously module global "constants" +// established at init time. +// This is a step towards Python3 style module state that allows +// reloading. +// +// In an earlier iteration of this code, we used placement new to be +// able to allocate this object statically still, so that references +// to its members don't incur an extra pointer indirection. +// But under some scenarios, that could result in crashes at +// shutdown because apparently the destructor was getting run twice? +class GreenletGlobals +{ + +public: + const greenlet::refs::ImmortalEventName event_switch; + const greenlet::refs::ImmortalEventName event_throw; + const greenlet::refs::ImmortalException PyExc_GreenletError; + const greenlet::refs::ImmortalException PyExc_GreenletExit; + const greenlet::refs::ImmortalObject empty_tuple; + const greenlet::refs::ImmortalObject empty_dict; + const greenlet::refs::ImmortalString str_run; + Mutex* const thread_states_to_destroy_lock; + greenlet::cleanup_queue_t thread_states_to_destroy; + + GreenletGlobals() : + event_switch("switch"), + event_throw("throw"), + PyExc_GreenletError("greenlet.error"), + PyExc_GreenletExit("greenlet.GreenletExit", PyExc_BaseException), + empty_tuple(Require(PyTuple_New(0))), + empty_dict(Require(PyDict_New())), + str_run("run"), + thread_states_to_destroy_lock(new Mutex()) + {} + + ~GreenletGlobals() + { + // This object is (currently) effectively immortal, and not + // just because of those placement new tricks; if we try to + // deallocate the static object we allocated, and overwrote, + // we would be doing so at C++ teardown time, which is after + // the final Python GIL is released, and we can't use the API + // then. + // (The members will still be destructed, but they also don't + // do any deallocation.) + } + + void queue_to_destroy(ThreadState* ts) const + { + // we're currently accessed through a static const object, + // implicitly marking our members as const, so code can't just + // call push_back (or pop_back) without casting away the + // const. + // + // Do that for callers. + greenlet::cleanup_queue_t& q = const_cast(this->thread_states_to_destroy); + q.push_back(ts); + } + + ThreadState* take_next_to_destroy() const + { + greenlet::cleanup_queue_t& q = const_cast(this->thread_states_to_destroy); + ThreadState* result = q.back(); + q.pop_back(); + return result; + } +}; + +}; // namespace greenlet + +static const greenlet::GreenletGlobals* mod_globs; + +#endif // T_GREENLET_GLOBALS diff --git a/.venv/lib/python3.12/site-packages/greenlet/TMainGreenlet.cpp b/.venv/lib/python3.12/site-packages/greenlet/TMainGreenlet.cpp new file mode 100644 index 00000000..a2a9cfe4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/TMainGreenlet.cpp @@ -0,0 +1,153 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of greenlet::MainGreenlet. 
+ * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#ifndef T_MAIN_GREENLET_CPP +#define T_MAIN_GREENLET_CPP + +#include "TGreenlet.hpp" + + + +// Protected by the GIL. Incremented when we create a main greenlet, +// in a new thread, decremented when it is destroyed. +static Py_ssize_t G_TOTAL_MAIN_GREENLETS; + +namespace greenlet { +greenlet::PythonAllocator MainGreenlet::allocator; + +void* MainGreenlet::operator new(size_t UNUSED(count)) +{ + return allocator.allocate(1); +} + + +void MainGreenlet::operator delete(void* ptr) +{ + return allocator.deallocate(static_cast(ptr), + 1); +} + + +MainGreenlet::MainGreenlet(PyGreenlet* p, ThreadState* state) + : Greenlet(p, StackState::make_main()), + _self(p), + _thread_state(state) +{ + G_TOTAL_MAIN_GREENLETS++; +} + +MainGreenlet::~MainGreenlet() +{ + G_TOTAL_MAIN_GREENLETS--; + this->tp_clear(); +} + +ThreadState* +MainGreenlet::thread_state() const noexcept +{ + return this->_thread_state; +} + +void +MainGreenlet::thread_state(ThreadState* t) noexcept +{ + assert(!t); + this->_thread_state = t; +} + + +const BorrowedMainGreenlet +MainGreenlet::main_greenlet() const +{ + return this->_self; +} + +BorrowedMainGreenlet +MainGreenlet::find_main_greenlet_in_lineage() const +{ + return BorrowedMainGreenlet(this->_self); +} + +bool +MainGreenlet::was_running_in_dead_thread() const noexcept +{ + return !this->_thread_state; +} + +OwnedObject +MainGreenlet::g_switch() +{ + try { + this->check_switch_allowed(); + } + catch (const PyErrOccurred&) { + this->release_args(); + throw; + } + + switchstack_result_t err = this->g_switchstack(); + if (err.status < 0) { + // XXX: This code path is untested, but it is shared + // with the UserGreenlet path that is tested. + return this->on_switchstack_or_initialstub_failure( + this, + err, + true, // target was me + false // was initial stub + ); + } + + return err.the_new_current_greenlet->g_switch_finish(err); +} + +int +MainGreenlet::tp_traverse(visitproc visit, void* arg) +{ + if (this->_thread_state) { + // we've already traversed main, (self), don't do it again. 
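+        // (The ``false`` argument below is what suppresses that second
+        // visit of the main greenlet.)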
+        int result = this->_thread_state->tp_traverse(visit, arg, false);
+        if (result) {
+            return result;
+        }
+    }
+    return Greenlet::tp_traverse(visit, arg);
+}
+
+const OwnedObject&
+MainGreenlet::run() const
+{
+    throw AttributeError("Main greenlets do not have a run attribute.");
+}
+
+void
+MainGreenlet::run(const BorrowedObject UNUSED(nrun))
+{
+    throw AttributeError("Main greenlets do not have a run attribute.");
+}
+
+void
+MainGreenlet::parent(const BorrowedObject raw_new_parent)
+{
+    if (!raw_new_parent) {
+        throw AttributeError("can't delete attribute");
+    }
+    throw AttributeError("cannot set the parent of a main greenlet");
+}
+
+const OwnedGreenlet
+MainGreenlet::parent() const
+{
+    return OwnedGreenlet(); // null becomes None
+}
+
+}; // namespace greenlet
+
+#endif
diff --git a/.venv/lib/python3.12/site-packages/greenlet/TPythonState.cpp b/.venv/lib/python3.12/site-packages/greenlet/TPythonState.cpp
new file mode 100644
index 00000000..cc5dff51
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/TPythonState.cpp
@@ -0,0 +1,393 @@
+#ifndef GREENLET_PYTHON_STATE_CPP
+#define GREENLET_PYTHON_STATE_CPP
+
+#include <Python.h>
+#include "TGreenlet.hpp"
+
+namespace greenlet {
+
+PythonState::PythonState()
+    : _top_frame()
+#if GREENLET_USE_CFRAME
+    ,cframe(nullptr)
+    ,use_tracing(0)
+#endif
+#if GREENLET_PY312
+    ,py_recursion_depth(0)
+    ,c_recursion_depth(0)
+#else
+    ,recursion_depth(0)
+#endif
+#if GREENLET_PY313
+    ,delete_later(nullptr)
+#else
+    ,trash_delete_nesting(0)
+#endif
+#if GREENLET_PY311
+    ,current_frame(nullptr)
+    ,datastack_chunk(nullptr)
+    ,datastack_top(nullptr)
+    ,datastack_limit(nullptr)
+#endif
+{
+#if GREENLET_USE_CFRAME
+    /*
+      The PyThreadState->cframe pointer usually points to memory on
+      the stack, allocated in a call into PyEval_EvalFrameDefault.
+
+      Initially, before any evaluation begins, it points to the
+      initial PyThreadState object's ``root_cframe`` object, which is
+      statically allocated for the lifetime of the thread.
+
+      A greenlet can last for longer than a call to
+      PyEval_EvalFrameDefault, so we can't set its ``cframe`` pointer
+      to be the current ``PyThreadState->cframe``; nor could we use
+      one from the greenlet parent for the same reason. Yet a further
+      no: we can't allocate one scoped to the greenlet and then
+      destroy it when the greenlet is deallocated, because inside the
+      interpreter the _PyCFrame objects form a linked list, and that too
+      can result in accessing memory beyond its dynamic lifetime (if
+      the greenlet doesn't actually finish before it dies, its entry
+      could still be in the list).
+
+      Using the ``root_cframe`` is problematic, though, because its
+      members are never modified by the interpreter and are set to 0,
+      meaning that its ``use_tracing`` flag is never updated. We don't
+      want to modify that value in the ``root_cframe`` ourself: it
+      *shouldn't* matter much because we should probably never get
+      back to the point where that's the only cframe on the stack;
+      even if it did matter, the major consequence of an incorrect
+      value for ``use_tracing`` is that if it's true the interpreter
+      does some extra work --- however, it's just good code hygiene.
+
+      Our solution: before a greenlet runs, after its initial
+      creation, it uses the ``root_cframe`` just to have something to
+      put there.
However, once the greenlet is actually switched to + for the first time, ``g_initialstub`` (which doesn't actually + "return" while the greenlet is running) stores a new _PyCFrame on + its local stack, and copies the appropriate values from the + currently running _PyCFrame; this is then made the _PyCFrame for the + newly-minted greenlet. ``g_initialstub`` then proceeds to call + ``glet.run()``, which results in ``PyEval_...`` adding the + _PyCFrame to the list. Switches continue as normal. Finally, when + the greenlet finishes, the call to ``glet.run()`` returns and + the _PyCFrame is taken out of the linked list and the stack value + is now unused and free to expire. + + XXX: I think we can do better. If we're deallocing in the same + thread, can't we traverse the list and unlink our frame? + Can we just keep a reference to the thread state in case we + dealloc in another thread? (Is that even possible if we're still + running and haven't returned from g_initialstub?) + */ + this->cframe = &PyThreadState_GET()->root_cframe; +#endif +} + + +inline void PythonState::may_switch_away() noexcept +{ +#if GREENLET_PY311 + // PyThreadState_GetFrame is probably going to have to allocate a + // new frame object. That may trigger garbage collection. Because + // we call this during the early phases of a switch (it doesn't + // matter to which greenlet, as this has a global effect), if a GC + // triggers a switch away, two things can happen, both bad: + // - We might not get switched back to, halting forward progress. + // this is pathological, but possible. + // - We might get switched back to with a different set of + // arguments or a throw instead of a switch. That would corrupt + // our state (specifically, PyErr_Occurred() and this->args() + // would no longer agree). + // + // Thus, when we call this API, we need to have GC disabled. + // This method serves as a bottleneck we call when maybe beginning + // a switch. In this way, it is always safe -- no risk of GC -- to + // use ``_GetFrame()`` whenever we need to, just as it was in + // <=3.10 (because subsequent calls will be cached and not + // allocate memory). + + GCDisabledGuard no_gc; + Py_XDECREF(PyThreadState_GetFrame(PyThreadState_GET())); +#endif +} + +void PythonState::operator<<(const PyThreadState *const tstate) noexcept +{ + this->_context.steal(tstate->context); +#if GREENLET_USE_CFRAME + /* + IMPORTANT: ``cframe`` is a pointer into the STACK. Thus, because + the call to ``slp_switch()`` changes the contents of the stack, + you cannot read from ``ts_current->cframe`` after that call and + necessarily get the same values you get from reading it here. + Anything you need to restore from now to then must be saved in a + global/threadlocal variable (because we can't use stack + variables here either). For things that need to persist across + the switch, use `will_switch_from`. 
+ */ + this->cframe = tstate->cframe; + #if !GREENLET_PY312 + this->use_tracing = tstate->cframe->use_tracing; + #endif +#endif // GREENLET_USE_CFRAME +#if GREENLET_PY311 + #if GREENLET_PY312 + this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; + this->c_recursion_depth = Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining; + #else // not 312 + this->recursion_depth = tstate->recursion_limit - tstate->recursion_remaining; + #endif // GREENLET_PY312 + #if GREENLET_PY313 + this->current_frame = tstate->current_frame; + #elif GREENLET_USE_CFRAME + this->current_frame = tstate->cframe->current_frame; + #endif + this->datastack_chunk = tstate->datastack_chunk; + this->datastack_top = tstate->datastack_top; + this->datastack_limit = tstate->datastack_limit; + + PyFrameObject *frame = PyThreadState_GetFrame((PyThreadState *)tstate); + Py_XDECREF(frame); // PyThreadState_GetFrame gives us a new + // reference. + this->_top_frame.steal(frame); + #if GREENLET_PY313 + this->delete_later = Py_XNewRef(tstate->delete_later); + #elif GREENLET_PY312 + this->trash_delete_nesting = tstate->trash.delete_nesting; + #else // not 312 + this->trash_delete_nesting = tstate->trash_delete_nesting; + #endif // GREENLET_PY312 +#else // Not 311 + this->recursion_depth = tstate->recursion_depth; + this->_top_frame.steal(tstate->frame); + this->trash_delete_nesting = tstate->trash_delete_nesting; +#endif // GREENLET_PY311 +} + +#if GREENLET_PY312 +void GREENLET_NOINLINE(PythonState::unexpose_frames)() +{ + if (!this->top_frame()) { + return; + } + + // See GreenletState::expose_frames() and the comment on frames_were_exposed + // for more information about this logic. + _PyInterpreterFrame *iframe = this->_top_frame->f_frame; + while (iframe != nullptr) { + _PyInterpreterFrame *prev_exposed = iframe->previous; + assert(iframe->frame_obj); + memcpy(&iframe->previous, &iframe->frame_obj->_f_frame_data[0], + sizeof(void *)); + iframe = prev_exposed; + } +} +#else +void PythonState::unexpose_frames() +{} +#endif + +void PythonState::operator>>(PyThreadState *const tstate) noexcept +{ + tstate->context = this->_context.relinquish_ownership(); + /* Incrementing this value invalidates the contextvars cache, + which would otherwise remain valid across switches */ + tstate->context_ver++; +#if GREENLET_USE_CFRAME + tstate->cframe = this->cframe; + /* + If we were tracing, we need to keep tracing. + There should never be the possibility of hitting the + root_cframe here. See note above about why we can't + just copy this from ``origin->cframe->use_tracing``. 
+ */ + #if !GREENLET_PY312 + tstate->cframe->use_tracing = this->use_tracing; + #endif +#endif // GREENLET_USE_CFRAME +#if GREENLET_PY311 + #if GREENLET_PY312 + tstate->py_recursion_remaining = tstate->py_recursion_limit - this->py_recursion_depth; + tstate->c_recursion_remaining = Py_C_RECURSION_LIMIT - this->c_recursion_depth; + this->unexpose_frames(); + #else // \/ 3.11 + tstate->recursion_remaining = tstate->recursion_limit - this->recursion_depth; + #endif // GREENLET_PY312 + #if GREENLET_PY313 + tstate->current_frame = this->current_frame; + #elif GREENLET_USE_CFRAME + tstate->cframe->current_frame = this->current_frame; + #endif + tstate->datastack_chunk = this->datastack_chunk; + tstate->datastack_top = this->datastack_top; + tstate->datastack_limit = this->datastack_limit; + this->_top_frame.relinquish_ownership(); + #if GREENLET_PY313 + Py_XDECREF(tstate->delete_later); + tstate->delete_later = this->delete_later; + Py_CLEAR(this->delete_later); + #elif GREENLET_PY312 + tstate->trash.delete_nesting = this->trash_delete_nesting; + #else // not 3.12 + tstate->trash_delete_nesting = this->trash_delete_nesting; + #endif // GREENLET_PY312 +#else // not 3.11 + tstate->frame = this->_top_frame.relinquish_ownership(); + tstate->recursion_depth = this->recursion_depth; + tstate->trash_delete_nesting = this->trash_delete_nesting; +#endif // GREENLET_PY311 +} + +inline void PythonState::will_switch_from(PyThreadState *const origin_tstate) noexcept +{ +#if GREENLET_USE_CFRAME && !GREENLET_PY312 + // The weird thing is, we don't actually save this for an + // effect on the current greenlet, it's saved for an + // effect on the target greenlet. That is, we want + // continuity of this setting across the greenlet switch. + this->use_tracing = origin_tstate->cframe->use_tracing; +#endif +} + +void PythonState::set_initial_state(const PyThreadState* const tstate) noexcept +{ + this->_top_frame = nullptr; +#if GREENLET_PY312 + this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; + // XXX: TODO: Comment from a reviewer: + // Should this be ``Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining``? + // But to me it looks more like that might not be the right + // initialization either? + this->c_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; +#elif GREENLET_PY311 + this->recursion_depth = tstate->recursion_limit - tstate->recursion_remaining; +#else + this->recursion_depth = tstate->recursion_depth; +#endif +} +// TODO: Better state management about when we own the top frame. +int PythonState::tp_traverse(visitproc visit, void* arg, bool own_top_frame) noexcept +{ + Py_VISIT(this->_context.borrow()); + if (own_top_frame) { + Py_VISIT(this->_top_frame.borrow()); + } + return 0; +} + +void PythonState::tp_clear(bool own_top_frame) noexcept +{ + PythonStateContext::tp_clear(); + // If we get here owning a frame, + // we got dealloc'd without being finished. We may or may not be + // in the same thread. + if (own_top_frame) { + this->_top_frame.CLEAR(); + } +} + +#if GREENLET_USE_CFRAME +void PythonState::set_new_cframe(_PyCFrame& frame) noexcept +{ + frame = *PyThreadState_GET()->cframe; + /* Make the target greenlet refer to the stack value. */ + this->cframe = &frame; + /* + And restore the link to the previous frame so this one gets + unliked appropriately. 
+ */ + this->cframe->previous = &PyThreadState_GET()->root_cframe; +} +#endif + +const PythonState::OwnedFrame& PythonState::top_frame() const noexcept +{ + return this->_top_frame; +} + +void PythonState::did_finish(PyThreadState* tstate) noexcept +{ +#if GREENLET_PY311 + // See https://github.com/gevent/gevent/issues/1924 and + // https://github.com/python-greenlet/greenlet/issues/328. In + // short, Python 3.11 allocates memory for frames as a sort of + // linked list that's kept as part of PyThreadState in the + // ``datastack_chunk`` member and friends. These are saved and + // restored as part of switching greenlets. + // + // When we initially switch to a greenlet, we set those to NULL. + // That causes the frame management code to treat this like a + // brand new thread and start a fresh list of chunks, beginning + // with a new "root" chunk. As we make calls in this greenlet, + // those chunks get added, and as calls return, they get popped. + // But the frame code (pystate.c) is careful to make sure that the + // root chunk never gets popped. + // + // Thus, when a greenlet exits for the last time, there will be at + // least a single root chunk that we must be responsible for + // deallocating. + // + // The complex part is that these chunks are allocated and freed + // using ``_PyObject_VirtualAlloc``/``Free``. Those aren't public + // functions, and they aren't exported for linking. It so happens + // that we know they are just thin wrappers around the Arena + // allocator, so we can use that directly to deallocate in a + // compatible way. + // + // CAUTION: Check this implementation detail on every major version. + // + // It might be nice to be able to do this in our destructor, but + // can we be sure that no one else is using that memory? Plus, as + // described below, our pointers may not even be valid anymore. As + // a special case, there is one time that we know we can do this, + // and that's from the destructor of the associated UserGreenlet + // (NOT main greenlet) + PyObjectArenaAllocator alloc; + _PyStackChunk* chunk = nullptr; + if (tstate) { + // We really did finish, we can never be switched to again. + chunk = tstate->datastack_chunk; + // Unfortunately, we can't do much sanity checking. Our + // this->datastack_chunk pointer is out of date (evaluation may + // have popped down through it already) so we can't verify that + // we deallocate it. I don't think we can even check datastack_top + // for the same reason. + + PyObject_GetArenaAllocator(&alloc); + tstate->datastack_chunk = nullptr; + tstate->datastack_limit = nullptr; + tstate->datastack_top = nullptr; + + } + else if (this->datastack_chunk) { + // The UserGreenlet (NOT the main greenlet!) is being deallocated. If we're + // still holding a stack chunk, it's garbage because we know + // we can never switch back to let cPython clean it up. + // Because the last time we got switched away from, and we + // haven't run since then, we know our chain is valid and can + // be dealloced. + chunk = this->datastack_chunk; + PyObject_GetArenaAllocator(&alloc); + } + + if (alloc.free && chunk) { + // In case the arena mechanism has been torn down already. 
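+        // Walk the chunk chain from the most recent chunk back toward
+        // the root, handing each block back to the same arena
+        // allocator CPython used to create it. Clearing ``previous``
+        // before freeing is purely defensive: nothing should ever
+        // walk through a freed chunk.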
+        while (chunk) {
+            _PyStackChunk *prev = chunk->previous;
+            chunk->previous = nullptr;
+            alloc.free(alloc.ctx, chunk, chunk->size);
+            chunk = prev;
+        }
+    }
+
+    this->datastack_chunk = nullptr;
+    this->datastack_limit = nullptr;
+    this->datastack_top = nullptr;
+#endif
+}
+
+
+}; // namespace greenlet
+
+#endif // GREENLET_PYTHON_STATE_CPP
diff --git a/.venv/lib/python3.12/site-packages/greenlet/TStackState.cpp b/.venv/lib/python3.12/site-packages/greenlet/TStackState.cpp
new file mode 100644
index 00000000..9743ab51
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/TStackState.cpp
@@ -0,0 +1,265 @@
+#ifndef GREENLET_STACK_STATE_CPP
+#define GREENLET_STACK_STATE_CPP
+
+#include "TGreenlet.hpp"
+
+namespace greenlet {
+
+#ifdef GREENLET_USE_STDIO
+#include <iostream>
+using std::cerr;
+using std::endl;
+
+std::ostream& operator<<(std::ostream& os, const StackState& s)
+{
+    os << "StackState(stack_start=" << (void*)s._stack_start
+       << ", stack_stop=" << (void*)s.stack_stop
+       << ", stack_copy=" << (void*)s.stack_copy
+       << ", stack_saved=" << s._stack_saved
+       << ", stack_prev=" << s.stack_prev
+       << ", addr=" << &s
+       << ")";
+    return os;
+}
+#endif
+
+StackState::StackState(void* mark, StackState& current)
+    : _stack_start(nullptr),
+      stack_stop((char*)mark),
+      stack_copy(nullptr),
+      _stack_saved(0),
+      /* Skip a dying greenlet */
+      stack_prev(current._stack_start
+                 ? &current
+                 : current.stack_prev)
+{
+}
+
+StackState::StackState()
+    : _stack_start(nullptr),
+      stack_stop(nullptr),
+      stack_copy(nullptr),
+      _stack_saved(0),
+      stack_prev(nullptr)
+{
+}
+
+StackState::StackState(const StackState& other)
+// can't use a delegating constructor because of
+// MSVC for Python 2.7
+    : _stack_start(nullptr),
+      stack_stop(nullptr),
+      stack_copy(nullptr),
+      _stack_saved(0),
+      stack_prev(nullptr)
+{
+    this->operator=(other);
+}
+
+StackState& StackState::operator=(const StackState& other)
+{
+    if (&other == this) {
+        return *this;
+    }
+    if (other._stack_saved) {
+        throw std::runtime_error("Refusing to steal memory.");
+    }
+
+    //If we have memory allocated, dispose of it
+    this->free_stack_copy();
+
+    this->_stack_start = other._stack_start;
+    this->stack_stop = other.stack_stop;
+    this->stack_copy = other.stack_copy;
+    this->_stack_saved = other._stack_saved;
+    this->stack_prev = other.stack_prev;
+    return *this;
+}
+
+inline void StackState::free_stack_copy() noexcept
+{
+    PyMem_Free(this->stack_copy);
+    this->stack_copy = nullptr;
+    this->_stack_saved = 0;
+}
+
+inline void StackState::copy_heap_to_stack(const StackState& current) noexcept
+{
+
+    /* Restore the heap copy back into the C stack */
+    if (this->_stack_saved != 0) {
+        memcpy(this->_stack_start, this->stack_copy, this->_stack_saved);
+        this->free_stack_copy();
+    }
+    StackState* owner = const_cast<StackState*>(&current);
+    if (!owner->_stack_start) {
+        owner = owner->stack_prev; /* greenlet is dying, skip it */
+    }
+    while (owner && owner->stack_stop <= this->stack_stop) {
+        // cerr << "\tOwner: " << owner << endl;
+        owner = owner->stack_prev; /* find greenlet with more stack */
+    }
+    this->stack_prev = owner;
+    // cerr << "\tFinished with: " << *this << endl;
+}
+
+inline int StackState::copy_stack_to_heap_up_to(const char* const stop) noexcept
+{
+    /* Save more of g's stack into the heap -- at least up to 'stop'
+       g->stack_stop |________|
+                     |        |
+                     |    __ stop       . . . . .
+                     |    |          ==>  .       .
+                     |________|          _______
+                     |        |         |       |
+                     |        |         |       |
+      g->stack_start |        |         |_______| g->stack_copy
+     */
+    intptr_t sz1 = this->_stack_saved;
+    intptr_t sz2 = stop - this->_stack_start;
+    assert(this->_stack_start);
+    if (sz2 > sz1) {
+        char* c = (char*)PyMem_Realloc(this->stack_copy, sz2);
+        if (!c) {
+            PyErr_NoMemory();
+            return -1;
+        }
+        memcpy(c + sz1, this->_stack_start + sz1, sz2 - sz1);
+        this->stack_copy = c;
+        this->_stack_saved = sz2;
+    }
+    return 0;
+}
+
+inline int StackState::copy_stack_to_heap(char* const stackref,
+                                          const StackState& current) noexcept
+{
+    /* must free all the C stack up to target_stop */
+    const char* const target_stop = this->stack_stop;
+
+    StackState* owner = const_cast<StackState*>(&current);
+    assert(owner->_stack_saved == 0); // everything is present on the stack
+    if (!owner->_stack_start) {
+        owner = owner->stack_prev; /* not saved if dying */
+    }
+    else {
+        owner->_stack_start = stackref;
+    }
+
+    while (owner->stack_stop < target_stop) {
+        /* ts_current is entirely within the area to free */
+        if (owner->copy_stack_to_heap_up_to(owner->stack_stop)) {
+            return -1; /* XXX */
+        }
+        owner = owner->stack_prev;
+    }
+    if (owner != this) {
+        if (owner->copy_stack_to_heap_up_to(target_stop)) {
+            return -1; /* XXX */
+        }
+    }
+    return 0;
+}
+
+inline bool StackState::started() const noexcept
+{
+    return this->stack_stop != nullptr;
+}
+
+inline bool StackState::main() const noexcept
+{
+    return this->stack_stop == (char*)-1;
+}
+
+inline bool StackState::active() const noexcept
+{
+    return this->_stack_start != nullptr;
+}
+
+inline void StackState::set_active() noexcept
+{
+    assert(this->_stack_start == nullptr);
+    this->_stack_start = (char*)1;
+}
+
+inline void StackState::set_inactive() noexcept
+{
+    this->_stack_start = nullptr;
+    // XXX: What if we still have memory out there?
+    // That case is actually triggered by
+    // test_issue251_issue252_explicit_reference_not_collectable (greenlet.tests.test_leaks.TestLeaks)
+    // and
+    // test_issue251_issue252_need_to_collect_in_background
+    // (greenlet.tests.test_leaks.TestLeaks)
+    //
+    // Those objects never get deallocated, so the destructor never
+    // runs.
+    // It *seems* safe to clean up the memory here?
+    if (this->_stack_saved) {
+        this->free_stack_copy();
+    }
+}
+
+inline intptr_t StackState::stack_saved() const noexcept
+{
+    return this->_stack_saved;
+}
+
+inline char* StackState::stack_start() const noexcept
+{
+    return this->_stack_start;
+}
+
+
+inline StackState StackState::make_main() noexcept
+{
+    StackState s;
+    s._stack_start = (char*)1;
+    s.stack_stop = (char*)-1;
+    return s;
+}
+
+StackState::~StackState()
+{
+    if (this->_stack_saved != 0) {
+        this->free_stack_copy();
+    }
+}
+
+void StackState::copy_from_stack(void* vdest, const void* vsrc, size_t n) const
+{
+    char* dest = static_cast<char*>(vdest);
+    const char* src = static_cast<const char*>(vsrc);
+    if (src + n <= this->_stack_start
+        || src >= this->_stack_start + this->_stack_saved
+        || this->_stack_saved == 0) {
+        // Nothing we're copying was spilled from the stack
+        memcpy(dest, src, n);
+        return;
+    }
+
+    if (src < this->_stack_start) {
+        // Copy the part before the saved stack.
+        // We know src + n > _stack_start due to the test above.
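+        // Illustrative numbers only (not taken from the source): with
+        // _stack_start=1000 and _stack_saved=100, a request for
+        // src=990, n=30 copies the 10 still-live bytes [990, 1000)
+        // directly here; the remaining 20 bytes come out of
+        // stack_copy in the spilled-range copy below.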
+        const size_t nbefore = this->_stack_start - src;
+        memcpy(dest, src, nbefore);
+        dest += nbefore;
+        src += nbefore;
+        n -= nbefore;
+    }
+    // We know src >= _stack_start after the before-copy, and
+    // src < _stack_start + _stack_saved due to the first if condition
+    size_t nspilled = std::min<size_t>(n, this->_stack_start + this->_stack_saved - src);
+    memcpy(dest, this->stack_copy + (src - this->_stack_start), nspilled);
+    dest += nspilled;
+    src += nspilled;
+    n -= nspilled;
+    if (n > 0) {
+        // Copy the part after the saved stack
+        memcpy(dest, src, n);
+    }
+}
+
+}; // namespace greenlet
+
+#endif // GREENLET_STACK_STATE_CPP
diff --git a/.venv/lib/python3.12/site-packages/greenlet/TThreadState.hpp b/.venv/lib/python3.12/site-packages/greenlet/TThreadState.hpp
new file mode 100644
index 00000000..e4e6f6cb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/TThreadState.hpp
@@ -0,0 +1,497 @@
+#ifndef GREENLET_THREAD_STATE_HPP
+#define GREENLET_THREAD_STATE_HPP
+
+#include <ctime>
+#include <stdexcept>
+
+#include "greenlet_internal.hpp"
+#include "greenlet_refs.hpp"
+#include "greenlet_thread_support.hpp"
+
+using greenlet::refs::BorrowedObject;
+using greenlet::refs::BorrowedGreenlet;
+using greenlet::refs::BorrowedMainGreenlet;
+using greenlet::refs::OwnedMainGreenlet;
+using greenlet::refs::OwnedObject;
+using greenlet::refs::OwnedGreenlet;
+using greenlet::refs::OwnedList;
+using greenlet::refs::PyErrFetchParam;
+using greenlet::refs::PyArgParseParam;
+using greenlet::refs::ImmortalString;
+using greenlet::refs::CreatedModule;
+using greenlet::refs::PyErrPieces;
+using greenlet::refs::NewReference;
+
+namespace greenlet {
+/**
+ * Thread-local state of greenlets.
+ *
+ * Each native thread will get exactly one of these objects,
+ * automatically accessed through the best available thread-local
+ * mechanism the compiler supports (``thread_local`` for C++11
+ * compilers or ``__thread``/``declspec(thread)`` for older GCC/clang
+ * or MSVC, respectively.)
+ *
+ * Previously, we kept thread-local state mostly in a bunch of
+ * ``static volatile`` variables in the main greenlet file. This had
+ * the problem of requiring extra checks, loops, and great care
+ * accessing these variables if we potentially invoked any Python code
+ * that could release the GIL, because the state could change out from
+ * under us. Making the variables thread-local solves this problem.
+ *
+ * When we detected that a greenlet API accessing the current greenlet
+ * was invoked from a different thread than the greenlet belonged to,
+ * we stored a reference to the greenlet in the Python thread
+ * dictionary for the thread the greenlet belonged to. This could lead
+ * to memory leaks if the thread then exited (because of a reference
+ * cycle, as greenlets referred to the thread dictionary, and deleting
+ * non-current greenlets leaked their frame plus perhaps arguments on
+ * the C stack). If a thread exited while still having running
+ * greenlet objects (perhaps that had just switched back to the main
+ * greenlet), and did not invoke one of the greenlet APIs *in that
+ * thread, immediately before it exited, without some other thread
+ * then being invoked*, such a leak was guaranteed.
+ *
+ * This can be partly solved by using compiler thread-local variables
+ * instead of the Python thread dictionary, thus avoiding a cycle.
+ *
+ * To fully solve this problem, we need a reliable way to know that a
+ * thread is done and we should clean up the main greenlet. On POSIX,
+ * we can use the destructor function of ``pthread_key_create``, but
+ * there's nothing similar on Windows; a C++11 thread local object
+ * reliably invokes its destructor when the thread it belongs to exits
+ * (non-C++11 compilers offer ``__thread`` or ``declspec(thread)`` to
+ * create thread-local variables, but they can't hold C++ objects that
+ * invoke destructors; the C++11 version is the most portable solution
+ * I found). When the thread exits, we can drop references and
+ * otherwise manipulate greenlets and frames that we know can no
+ * longer be switched to. For compilers that don't support C++11
+ * thread locals, we have a solution that uses the python thread
+ * dictionary, though it may not collect everything as promptly as
+ * other compilers do, if some other library is using the thread
+ * dictionary and has a cycle or extra reference.
+ *
+ * There are two small wrinkles. The first is that when the thread
+ * exits, it is too late to actually invoke Python APIs: the Python
+ * thread state is gone, and the GIL is released. To solve *this*
+ * problem, our destructor uses ``Py_AddPendingCall`` to transfer the
+ * destruction work to the main thread. (This is not an issue for the
+ * dictionary solution.)
+ *
+ * The second is that once the thread exits, the thread local object
+ * is invalid and we can't even access a pointer to it, so we can't
+ * pass it to ``Py_AddPendingCall``. This is handled by actually using
+ * a second object that's thread local (ThreadStateCreator) and having
+ * it dynamically allocate this object so it can live until the
+ * pending call runs.
+ */
+
+
+
+class ThreadState {
+private:
+    // As of commit 08ad1dd7012b101db953f492e0021fb08634afad
+    // this class needed 56 bytes in a Py_DEBUG build
+    // on 64-bit macOS 11.
+    // Adding the vector takes us up to 80 bytes ()
+
+    /* Strong reference to the main greenlet */
+    OwnedMainGreenlet main_greenlet;
+
+    /* Strong reference to the current greenlet. */
+    OwnedGreenlet current_greenlet;
+
+    /* Strong reference to the trace function, if any. */
+    OwnedObject tracefunc;
+
+    typedef std::vector<PyGreenlet*, PythonAllocator<PyGreenlet*> > deleteme_t;
+    /* A vector of raw PyGreenlet pointers representing things that
+       need to be deleted when this thread is running. The vector owns
+       the references, but you need to manually INCREF/DECREF as you
+       use them. We don't use a vector of owned references because we
+       make a copy of this vector, and that would become O(n) as all
+       the refcounts are incremented in the copy.
+    */
+    deleteme_t deleteme;
+
+#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED
+    void* exception_state;
+#endif
+
+    static std::clock_t _clocks_used_doing_gc;
+    static ImmortalString get_referrers_name;
+    static PythonAllocator<ThreadState> allocator;
+
+    G_NO_COPIES_OF_CLS(ThreadState);
+
+
+    // Allocates a main greenlet for the thread state. If this fails,
+    // exits the process. Called only during constructing a ThreadState.
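+    // (Note: PyType_GenericAlloc should only fail here on memory
+    // exhaustion, at which point there is nothing sensible left to
+    // do, hence the fatal error below instead of error recovery.)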
+    MainGreenlet* alloc_main()
+    {
+        PyGreenlet* gmain;
+
+        /* create the main greenlet for this thread */
+        gmain = reinterpret_cast<PyGreenlet*>(PyType_GenericAlloc(&PyGreenlet_Type, 0));
+        if (gmain == NULL) {
+            throw PyFatalError("alloc_main failed to alloc"); //exits the process
+        }
+
+        MainGreenlet* const main = new MainGreenlet(gmain, this);
+
+        assert(Py_REFCNT(gmain) == 1);
+        assert(gmain->pimpl == main);
+        return main;
+    }
+
+
+public:
+    static void* operator new(size_t UNUSED(count))
+    {
+        return ThreadState::allocator.allocate(1);
+    }
+
+    static void operator delete(void* ptr)
+    {
+        return ThreadState::allocator.deallocate(static_cast<ThreadState*>(ptr),
+                                                 1);
+    }
+
+    static void init()
+    {
+        ThreadState::get_referrers_name = "get_referrers";
+        ThreadState::_clocks_used_doing_gc = 0;
+    }
+
+    ThreadState()
+    {
+
+#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED
+        this->exception_state = slp_get_exception_state();
+#endif
+
+        // XXX: Potentially dangerous, exposing a not fully
+        // constructed object.
+        MainGreenlet* const main = this->alloc_main();
+        this->main_greenlet = OwnedMainGreenlet::consuming(
+            main->self()
+        );
+        assert(this->main_greenlet);
+        this->current_greenlet = main->self();
+        // The main greenlet starts with 1 ref: the returned one. We
+        // then copied it to the current greenlet.
+        assert(this->main_greenlet.REFCNT() == 2);
+    }
+
+    inline void restore_exception_state()
+    {
+#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED
+        // It's probably important this be inlined and only call C
+        // functions to avoid adding an SEH frame.
+        slp_set_exception_state(this->exception_state);
+#endif
+    }
+
+    inline bool has_main_greenlet() const noexcept
+    {
+        return bool(this->main_greenlet);
+    }
+
+    // Called from the ThreadStateCreator when we're in non-standard
+    // threading mode. In that case, there is an object in the Python
+    // thread state dictionary that points to us. The main greenlet
+    // also traverses into us, in which case it's crucial not to
+    // traverse back into the main greenlet.
+    int tp_traverse(visitproc visit, void* arg, bool traverse_main=true)
+    {
+        if (traverse_main) {
+            Py_VISIT(main_greenlet.borrow_o());
+        }
+        if (traverse_main || current_greenlet != main_greenlet) {
+            Py_VISIT(current_greenlet.borrow_o());
+        }
+        Py_VISIT(tracefunc.borrow());
+        return 0;
+    }
+
+    inline BorrowedMainGreenlet borrow_main_greenlet() const noexcept
+    {
+        assert(this->main_greenlet);
+        assert(this->main_greenlet.REFCNT() >= 2);
+        return this->main_greenlet;
+    };
+
+    inline OwnedMainGreenlet get_main_greenlet() const noexcept
+    {
+        return this->main_greenlet;
+    }
+
+    /**
+     * In addition to returning a new reference to the current
+     * greenlet, this performs any maintenance needed.
+     */
+    inline OwnedGreenlet get_current()
+    {
+        /* green_dealloc() cannot delete greenlets from other threads, so
+           it stores them in the thread dict; delete them now. */
+        this->clear_deleteme_list();
+        //assert(this->current_greenlet->main_greenlet == this->main_greenlet);
+        //assert(this->main_greenlet->main_greenlet == this->main_greenlet);
+        return this->current_greenlet;
+    }
+
+    /**
+     * As for the non-const get_current().
+     */
+    inline BorrowedGreenlet borrow_current()
+    {
+        this->clear_deleteme_list();
+        return this->current_greenlet;
+    }
+
+    /**
+     * Does no maintenance.
+     */
+    inline OwnedGreenlet get_current() const
+    {
+        return this->current_greenlet;
+    }
+
+    template<typename T, refs::TypeChecker TC>
+    inline bool is_current(const refs::PyObjectPointer<T, TC>& obj) const
+    {
+        return this->current_greenlet.borrow_o() == obj.borrow_o();
+    }
+
+    inline void set_current(const OwnedGreenlet& target)
+    {
+        this->current_greenlet = target;
+    }
+
+private:
+    /**
+     * Deref and remove the greenlets from the deleteme list. Must be
+     * holding the GIL.
+     *
+     * If *murder* is true, then we must be called from a different
+     * thread than the one that these greenlets were running in.
+     * In that case, if the greenlet was actually running, we destroy
+     * the frame reference and otherwise make it appear dead before
+     * proceeding; otherwise, we would try (and fail) to raise an
+     * exception in it and wind up right back in this list.
+     */
+    inline void clear_deleteme_list(const bool murder=false)
+    {
+        if (!this->deleteme.empty()) {
+            // It's possible we could add items to this list while
+            // running Python code if there's a thread switch, so we
+            // need to defensively copy it before that can happen.
+            deleteme_t copy = this->deleteme;
+            this->deleteme.clear(); // in case things come back on the list
+            for(deleteme_t::iterator it = copy.begin(), end = copy.end();
+                it != end;
+                ++it ) {
+                PyGreenlet* to_del = *it;
+                if (murder) {
+                    // Force each greenlet to appear dead; we can't raise an
+                    // exception into it anymore anyway.
+                    to_del->pimpl->murder_in_place();
+                }
+
+                // The only reference to these greenlets should be in
+                // this list, decreffing them should let them be
+                // deleted again, triggering calls to green_dealloc()
+                // in the correct thread (if we're not murdering).
+                // This may run arbitrary Python code and switch
+                // threads or greenlets!
+                Py_DECREF(to_del);
+                if (PyErr_Occurred()) {
+                    PyErr_WriteUnraisable(nullptr);
+                    PyErr_Clear();
+                }
+            }
+        }
+    }
+
+public:
+
+    /**
+     * Returns a new reference, or a false object.
+     */
+    inline OwnedObject get_tracefunc() const
+    {
+        return tracefunc;
+    };
+
+
+    inline void set_tracefunc(BorrowedObject tracefunc)
+    {
+        assert(tracefunc);
+        if (tracefunc == BorrowedObject(Py_None)) {
+            this->tracefunc.CLEAR();
+        }
+        else {
+            this->tracefunc = tracefunc;
+        }
+    }
+
+    /**
+     * Given a reference to a greenlet that some other thread
+     * attempted to delete (has a refcount of 0) store it for later
+     * deletion when the thread this state belongs to is current.
+     */
+    inline void delete_when_thread_running(PyGreenlet* to_del)
+    {
+        Py_INCREF(to_del);
+        this->deleteme.push_back(to_del);
+    }
+
+    /**
+     * Set to std::clock_t(-1) to disable.
+     */
+    inline static std::clock_t& clocks_used_doing_gc()
+    {
+        return ThreadState::_clocks_used_doing_gc;
+    }
+
+    ~ThreadState()
+    {
+        if (!PyInterpreterState_Head()) {
+            // We shouldn't get here (our callers protect us)
+            // but if we do, all we can do is bail early.
+            return;
+        }
+
+        // We should not have an "origin" greenlet; that only exists
+        // for the temporary time during a switch, which should not
+        // be in progress as the thread dies.
+        //assert(!this->switching_state.origin);
+
+        this->tracefunc.CLEAR();
+
+        // Forcibly GC as much as we can.
+        this->clear_deleteme_list(true);
+
+        // The pending call did this.
+        assert(this->main_greenlet->thread_state() == nullptr);
+
+        // If the main greenlet is the current greenlet,
+        // then we "fell off the end" and the thread died.
+        // It's possible that there is some other greenlet
+        // switched to us, leaving a reference to the main greenlet
+        // on the stack, somewhere uncollectible. Try to detect that.
+        if (this->current_greenlet == this->main_greenlet && this->current_greenlet) {
+            assert(this->current_greenlet->is_currently_running_in_some_thread());
+            // Drop one reference we hold.
+            this->current_greenlet.CLEAR();
+            assert(!this->current_greenlet);
+            // Only our reference to the main greenlet should be left,
+            // but hold onto the pointer in case we need to do extra cleanup.
+            PyGreenlet* old_main_greenlet = this->main_greenlet.borrow();
+            Py_ssize_t cnt = this->main_greenlet.REFCNT();
+            this->main_greenlet.CLEAR();
+            if (ThreadState::_clocks_used_doing_gc != std::clock_t(-1)
+                && cnt == 2 && Py_REFCNT(old_main_greenlet) == 1) {
+                // Highly likely that the reference is somewhere on
+                // the stack, not reachable by GC. Verify.
+                // XXX: This is O(n) in the total number of objects.
+                // TODO: Add a way to disable this at runtime, and
+                // another way to report on it.
+                std::clock_t begin = std::clock();
+                NewReference gc(PyImport_ImportModule("gc"));
+                if (gc) {
+                    OwnedObject get_referrers = gc.PyRequireAttr(ThreadState::get_referrers_name);
+                    OwnedList refs(get_referrers.PyCall(old_main_greenlet));
+                    if (refs && refs.empty()) {
+                        assert(refs.REFCNT() == 1);
+                        // We found nothing! So we left a dangling
+                        // reference: Probably the last thing some
+                        // other greenlet did was call
+                        // 'getcurrent().parent.switch()' to switch
+                        // back to us. Clean it up. This will be the
+                        // case on CPython 3.7 and newer, as they use
+                        // an internal calling convention that avoids
+                        // creating method objects and storing them on
+                        // the stack.
+                        Py_DECREF(old_main_greenlet);
+                    }
+                    else if (refs
+                             && refs.size() == 1
+                             && PyCFunction_Check(refs.at(0))
+                             && Py_REFCNT(refs.at(0)) == 2) {
+                        assert(refs.REFCNT() == 1);
+                        // Ok, we found a C method that refers to the
+                        // main greenlet, and it's only referenced
+                        // twice, once in the list we just created,
+                        // once from...somewhere else. If we can't
+                        // find where else, then this is a leak.
+                        // This happens in older versions of CPython
+                        // that create a bound method object somewhere
+                        // on the stack that we'll never get back to.
+                        if (PyCFunction_GetFunction(refs.at(0).borrow()) == (PyCFunction)green_switch) {
+                            BorrowedObject function_w = refs.at(0);
+                            refs.clear(); // destroy the reference
+                                          // from the list.
+                            // back to one reference. Can *it* be
+                            // found?
+                            assert(function_w.REFCNT() == 1);
+                            refs = get_referrers.PyCall(function_w);
+                            if (refs && refs.empty()) {
+                                // Nope, it can't be found so it won't
+                                // ever be GC'd. Drop it.
+                                Py_CLEAR(function_w);
+                            }
+                        }
+                    }
+                    std::clock_t end = std::clock();
+                    ThreadState::_clocks_used_doing_gc += (end - begin);
+                }
+            }
+        }
+
+        // We need to make sure this greenlet appears to be dead,
+        // because otherwise deallocing it would fail to raise an
+        // exception in it (the thread is dead) and put it back in our
+        // deleteme list.
+        if (this->current_greenlet) {
+            this->current_greenlet->murder_in_place();
+            this->current_greenlet.CLEAR();
+        }
+
+        if (this->main_greenlet) {
+            // Couldn't have been the main greenlet that was running
+            // when the thread exited (because we already cleared this
+            // pointer if it was). This shouldn't be possible?
+
+            // If the main greenlet was current when the thread died (it
+            // should be, right?) then we cleared its self pointer above
+            // when we cleared the current greenlet's main greenlet pointer.
+            // assert(this->main_greenlet->main_greenlet == this->main_greenlet
+            //        || !this->main_greenlet->main_greenlet);
+            // // self reference, probably gone
+            // this->main_greenlet->main_greenlet.CLEAR();
+
+            // This will actually go away when the ivar is destructed.
+            this->main_greenlet.CLEAR();
+        }
+
+        if (PyErr_Occurred()) {
+            PyErr_WriteUnraisable(NULL);
+            PyErr_Clear();
+        }
+
+    }
+
+};
+
+ImmortalString ThreadState::get_referrers_name(nullptr);
+PythonAllocator<ThreadState> ThreadState::allocator;
+std::clock_t ThreadState::_clocks_used_doing_gc(0);
+
+
+
+
+
+}; // namespace greenlet
+
+#endif
diff --git a/.venv/lib/python3.12/site-packages/greenlet/TThreadStateCreator.hpp b/.venv/lib/python3.12/site-packages/greenlet/TThreadStateCreator.hpp
new file mode 100644
index 00000000..2ec7ab55
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/TThreadStateCreator.hpp
@@ -0,0 +1,102 @@
+#ifndef GREENLET_THREAD_STATE_CREATOR_HPP
+#define GREENLET_THREAD_STATE_CREATOR_HPP
+
+#include <ctime>
+#include <stdexcept>
+
+#include "greenlet_internal.hpp"
+#include "greenlet_refs.hpp"
+#include "greenlet_thread_support.hpp"
+
+#include "TThreadState.hpp"
+
+namespace greenlet {
+
+
+typedef void (*ThreadStateDestructor)(ThreadState* const);
+
+template <ThreadStateDestructor Destructor>
+class ThreadStateCreator
+{
+private:
+    // Initialized to 1, and, if still 1, created on access.
+    // Set to 0 on destruction.
+    ThreadState* _state;
+    G_NO_COPIES_OF_CLS(ThreadStateCreator);
+
+    inline bool has_initialized_state() const noexcept
+    {
+        return this->_state != (ThreadState*)1;
+    }
+
+    inline bool has_state() const noexcept
+    {
+        return this->has_initialized_state() && this->_state != nullptr;
+    }
+
+public:
+
+    // Only one of these, auto created per thread.
+    // Constructing the state constructs the MainGreenlet.
+    ThreadStateCreator() :
+        _state((ThreadState*)1)
+    {
+    }
+
+    ~ThreadStateCreator()
+    {
+        if (this->has_state()) {
+            Destructor(this->_state);
+        }
+
+        this->_state = nullptr;
+    }
+
+    inline ThreadState& state()
+    {
+        // The main greenlet will own this pointer when it is created,
+        // which will be right after this. The plan is to give every
+        // greenlet a pointer to the main greenlet for the thread it
+        // runs in; if we are doing something cross-thread, we need to
+        // access the pointer from the main greenlet. Deleting the
+        // thread, and hence the thread-local storage, will delete the
+        // state pointer in the main greenlet.
+        if (!this->has_initialized_state()) {
+            // XXX: Assuming allocation never fails
+            this->_state = new ThreadState;
+            // For non-standard threading, we need to store an object
+            // in the Python thread state dictionary so that it can be
+            // DECREF'd when the thread ends (ideally; the dict could
+            // last longer) and clean this object up.
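+            // (The ``_state`` pointer protocol, per the comments
+            // above: (ThreadState*)1 means "not created yet", a real
+            // pointer means "created", and nullptr means "destroyed";
+            // see has_initialized_state() and has_state().)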
+ } + if (!this->_state) { + throw std::runtime_error("Accessing state after destruction."); + } + return *this->_state; + } + + operator ThreadState&() + { + return this->state(); + } + + operator ThreadState*() + { + return &this->state(); + } + + inline int tp_traverse(visitproc visit, void* arg) + { + if (this->has_state()) { + return this->_state->tp_traverse(visit, arg); + } + return 0; + } + +}; + + + +}; // namespace greenlet + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/TThreadStateDestroy.cpp b/.venv/lib/python3.12/site-packages/greenlet/TThreadStateDestroy.cpp new file mode 100644 index 00000000..37fcc8c1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/TThreadStateDestroy.cpp @@ -0,0 +1,258 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of the ThreadState destructors. + * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#ifndef T_THREADSTATE_DESTROY +#define T_THREADSTATE_DESTROY + +#include "TGreenlet.hpp" + +#include "greenlet_thread_support.hpp" +#include "greenlet_cpython_add_pending.hpp" +#include "greenlet_compiler_compat.hpp" +#include "TGreenletGlobals.cpp" +#include "TThreadState.hpp" +#include "TThreadStateCreator.hpp" + +namespace greenlet { + +extern "C" { + +struct ThreadState_DestroyNoGIL +{ + /** + This function uses the same lock that the PendingCallback does + */ + static void + MarkGreenletDeadAndQueueCleanup(ThreadState* const state) + { +#if GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK + return; +#endif + // We are *NOT* holding the GIL. Our thread is in the middle + // of its death throes and the Python thread state is already + // gone so we can't use most Python APIs. One that is safe is + // ``Py_AddPendingCall``, unless the interpreter itself has + // been torn down. There is a limited number of calls that can + // be queued: 32 (NPENDINGCALLS) in CPython 3.10, so we + // coalesce these calls using our own queue. + + if (!MarkGreenletDeadIfNeeded(state)) { + // No state, or no greenlet + return; + } + + // XXX: Because we don't have the GIL, this is a race condition. + if (!PyInterpreterState_Head()) { + // We have to leak the thread state, if the + // interpreter has shut down when we're getting + // deallocated, we can't run the cleanup code that + // deleting it would imply. + return; + } + + AddToCleanupQueue(state); + + } + +private: + + // If the state has an allocated main greenlet: + // - mark the greenlet as dead by disassociating it from the state; + // - return 1 + // Otherwise, return 0. + static bool + MarkGreenletDeadIfNeeded(ThreadState* const state) + { + if (state && state->has_main_greenlet()) { + // mark the thread as dead ASAP. + // this is racy! If we try to throw or switch to a + // greenlet from this thread from some other thread before + // we clear the state pointer, it won't realize the state + // is dead which can crash the process. 
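+            // Fetch the borrowed main greenlet and sever its pointer
+            // back to this ThreadState: cross-thread code treats a
+            // null thread_state() as "that thread already died".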
+            PyGreenlet* p(state->borrow_main_greenlet().borrow());
+            assert(p->pimpl->thread_state() == state || p->pimpl->thread_state() == nullptr);
+            dynamic_cast<MainGreenlet*>(p->pimpl)->thread_state(nullptr);
+            return true;
+        }
+        return false;
+    }
+
+    static void
+    AddToCleanupQueue(ThreadState* const state)
+    {
+        assert(state && state->has_main_greenlet());
+
+        // NOTE: Because we're not holding the GIL here, some other
+        // Python thread could run and call ``os.fork()``, which would
+        // be bad if that happened while we are holding the cleanup
+        // lock (it wouldn't function in the child process).
+        // Make a best effort to try to keep the duration we hold the
+        // lock short.
+        // TODO: On platforms that support it, use ``pthread_atfork`` to
+        // drop this lock.
+        LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock);
+
+        mod_globs->queue_to_destroy(state);
+        if (mod_globs->thread_states_to_destroy.size() == 1) {
+            // We added the first item to the queue. We need to schedule
+            // the cleanup.
+
+            // A size greater than 1 means that we have already added the pending call,
+            // and in fact, it may be executing now.
+            // If it is executing, our lock makes sure that it will see the item we just added
+            // to the queue on its next iteration (after we release the lock)
+            //
+            // A size of 1 means there is no pending call, OR the pending call is
+            // currently executing, has dropped the lock, and is deleting the last item
+            // from the queue; its next iteration will go ahead and delete the item we just added.
+            // And the pending call we schedule here will have no work to do.
+            int result = AddPendingCall(
+                PendingCallback_DestroyQueueWithGIL,
+                nullptr);
+            if (result < 0) {
+                // Hmm, what can we do here?
+                fprintf(stderr,
+                        "greenlet: WARNING: failed in call to Py_AddPendingCall; "
+                        "expect a memory leak.\n");
+            }
+        }
+    }
+
+    static int
+    PendingCallback_DestroyQueueWithGIL(void* UNUSED(arg))
+    {
+        // We're holding the GIL here, so no Python code should be able to
+        // run to call ``os.fork()``.
+        while (1) {
+            ThreadState* to_destroy;
+            {
+                LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock);
+                if (mod_globs->thread_states_to_destroy.empty()) {
+                    break;
+                }
+                to_destroy = mod_globs->take_next_to_destroy();
+            }
+            assert(to_destroy);
+            assert(to_destroy->has_main_greenlet());
+            // Drop the lock while we do the actual deletion.
+            // This allows other calls to MarkGreenletDeadAndQueueCleanup
+            // to enter and add to our queue.
+            DestroyOneWithGIL(to_destroy);
+        }
+        return 0;
+    }
+
+    static void
+    DestroyOneWithGIL(const ThreadState* const state)
+    {
+        // Holding the GIL.
+        // Passed a non-shared pointer to the actual thread state.
+        // state -> main greenlet
+        assert(state->has_main_greenlet());
+        PyGreenlet* main(state->borrow_main_greenlet());
+        // When we need to do cross-thread operations, we check this.
+        // A NULL value means the thread died some time ago.
+        // We do this here, rather than in a Python dealloc function
+        // for the greenlet, in case there's still a reference out
+        // there.
+        dynamic_cast<MainGreenlet*>(main->pimpl)->thread_state(nullptr);
+
+        delete state; // Deleting this runs the destructor, DECREFs the main greenlet.
+    }
+
+    // ensure this is actually defined.
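+    // (If the macro were missing entirely, the ``#if`` below would
+    // silently treat it as 0; the static_assert instead fails the
+    // build, because an undefined name cannot appear in a constant
+    // expression here.)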
+    static_assert(GREENLET_BROKEN_PY_ADD_PENDING == 1 || GREENLET_BROKEN_PY_ADD_PENDING == 0,
+                  "GREENLET_BROKEN_PY_ADD_PENDING not defined correctly.");
+
+#if GREENLET_BROKEN_PY_ADD_PENDING
+    static int _push_pending_call(struct _pending_calls *pending,
+                                  int (*func)(void *), void *arg)
+    {
+        int i = pending->last;
+        int j = (i + 1) % NPENDINGCALLS;
+        if (j == pending->first) {
+            return -1; /* Queue full */
+        }
+        pending->calls[i].func = func;
+        pending->calls[i].arg = arg;
+        pending->last = j;
+        return 0;
+    }
+
+    static int AddPendingCall(int (*func)(void *), void *arg)
+    {
+        _PyRuntimeState *runtime = &_PyRuntime;
+        if (!runtime) {
+            // obviously impossible
+            return 0;
+        }
+        struct _pending_calls *pending = &runtime->ceval.pending;
+        if (!pending->lock) {
+            return 0;
+        }
+        int result = 0;
+        PyThread_acquire_lock(pending->lock, WAIT_LOCK);
+        if (!pending->finishing) {
+            result = _push_pending_call(pending, func, arg);
+        }
+        PyThread_release_lock(pending->lock);
+        SIGNAL_PENDING_CALLS(&runtime->ceval);
+        return result;
+    }
+#else
+    // Python < 3.8 or >= 3.9
+    static int AddPendingCall(int (*func)(void*), void* arg)
+    {
+        // If the interpreter is in the middle of finalizing, we can't add a
+        // pending call. Trying to do so will end up in a SIGSEGV, as
+        // Py_AddPendingCall will not be able to get the interpreter and will
+        // try to dereference a NULL pointer. It's possible this can still
+        // segfault if we happen to get context switched, and maybe we should
+        // just always implement our own AddPendingCall, but I'd like to see if
+        // this works first
+#if GREENLET_PY313
+        if (Py_IsFinalizing()) {
+#else
+        if (_Py_IsFinalizing()) {
+#endif
+#ifdef GREENLET_DEBUG
+            // No need to log in the general case. Yes, we'll leak,
+            // but we're shutting down so it should be ok.
+            fprintf(stderr,
+                    "greenlet: WARNING: Interpreter is finalizing. Ignoring "
+                    "call to Py_AddPendingCall; \n");
+#endif
+            return 0;
+        }
+        return Py_AddPendingCall(func, arg);
+    }
+#endif
+
+
+
+
+};
+};
+
+}; // namespace greenlet
+
+// The intent when GET_THREAD_STATE() is needed multiple times in a
+// function is to take a reference to its return value in a local
+// variable, to avoid the thread-local indirection. On some platforms
+// (macOS), accessing a thread-local involves a function call (plus an
+// initial function call in each function that uses a thread local);
+// in contrast, static volatile variables are at some pre-computed
+// offset.
+typedef greenlet::ThreadStateCreator<greenlet::ThreadState_DestroyNoGIL::MarkGreenletDeadAndQueueCleanup> ThreadStateCreator;
+static thread_local ThreadStateCreator g_thread_state_global;
+#define GET_THREAD_STATE() g_thread_state_global
+
+#endif //T_THREADSTATE_DESTROY
diff --git a/.venv/lib/python3.12/site-packages/greenlet/TUserGreenlet.cpp b/.venv/lib/python3.12/site-packages/greenlet/TUserGreenlet.cpp
new file mode 100644
index 00000000..73a81330
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/TUserGreenlet.cpp
@@ -0,0 +1,662 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+/**
+ * Implementation of greenlet::UserGreenlet.
+ *
+ * Format with:
+ *  clang-format -i --style=file src/greenlet/greenlet.c
+ *
+ *
+ * Fix missing braces with:
+ *  clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements"
+*/
+#ifndef T_USER_GREENLET_CPP
+#define T_USER_GREENLET_CPP
+
+#include "greenlet_internal.hpp"
+#include "TGreenlet.hpp"
+
+#include "TThreadStateDestroy.cpp"
+
+
+namespace greenlet {
+using greenlet::refs::BorrowedMainGreenlet;
+greenlet::PythonAllocator<UserGreenlet> UserGreenlet::allocator;
+
+void* UserGreenlet::operator new(size_t UNUSED(count))
+{
+    return allocator.allocate(1);
+}
+
+
+void UserGreenlet::operator delete(void* ptr)
+{
+    return allocator.deallocate(static_cast<UserGreenlet*>(ptr),
+                                1);
+}
+
+
+UserGreenlet::UserGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent)
+    : Greenlet(p), _parent(the_parent)
+{
+}
+
+UserGreenlet::~UserGreenlet()
+{
+    // Python 3.11: If we don't clear out the raw frame datastack
+    // when deleting an unfinished greenlet,
+    // TestLeaks.test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_main fails.
+    this->python_state.did_finish(nullptr);
+    this->tp_clear();
+}
+
+
+const BorrowedMainGreenlet
+UserGreenlet::main_greenlet() const
+{
+    return this->_main_greenlet;
+}
+
+
+BorrowedMainGreenlet
+UserGreenlet::find_main_greenlet_in_lineage() const
+{
+    if (this->started()) {
+        assert(this->_main_greenlet);
+        return BorrowedMainGreenlet(this->_main_greenlet);
+    }
+
+    if (!this->_parent) {
+        /* garbage collected greenlet in chain */
+        // XXX: WHAT?
+        return BorrowedMainGreenlet(nullptr);
+    }
+
+    return this->_parent->find_main_greenlet_in_lineage();
+}
+
+
+/**
+ * CAUTION: This will allocate memory and may trigger garbage
+ * collection and arbitrary Python code.
+ */
+OwnedObject
+UserGreenlet::throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state)
+{
+    /* The dying greenlet cannot be a parent of ts_current
+       because the 'parent' field chain would hold a
+       reference */
+    UserGreenlet::ParentIsCurrentGuard with_current_parent(this, current_thread_state);
+
+    // We don't care about the return value, only whether an
+    // exception happened. Whether or not an exception happens,
+    // we need to restore the parent in case the greenlet gets
+    // resurrected.
+    return Greenlet::throw_GreenletExit_during_dealloc(current_thread_state);
+}
+
+ThreadState*
+UserGreenlet::thread_state() const noexcept
+{
+    // TODO: maybe make this throw, if the thread state isn't there?
+    // if (!this->main_greenlet) {
+    //     throw std::runtime_error("No thread state"); // TODO: Better exception
+    // }
+    if (!this->_main_greenlet) {
+        return nullptr;
+    }
+    return this->_main_greenlet->thread_state();
+}
+
+
+bool
+UserGreenlet::was_running_in_dead_thread() const noexcept
+{
+    return this->_main_greenlet && !this->thread_state();
+}
+
+OwnedObject
+UserGreenlet::g_switch()
+{
+    assert(this->args() || PyErr_Occurred());
+
+    try {
+        this->check_switch_allowed();
+    }
+    catch (const PyErrOccurred&) {
+        this->release_args();
+        throw;
+    }
+
+    // Switching greenlets used to attempt to clean out ones that need
+    // to be deleted *if* we detected a thread switch. Should it still
+    // do that?
+    // An issue is that if we delete a greenlet from another thread,
+    // it gets queued to this thread, and ``kill_greenlet()`` switches
+    // back into the greenlet
+
+    /* find the real target by ignoring dead greenlets,
+       and if necessary starting a greenlet. */
+    switchstack_result_t err;
+    Greenlet* target = this;
+    // TODO: probably cleaner to handle the case where we do
+    // switch to ourself separately from the other cases.
+    // This can probably even further be simplified if we keep
+    // track of the switching_state we're going for and just call
+    // into g_switch() if it's not ourself. The main problem with that
+    // is that we would be using more stack space.
+    bool target_was_me = true;
+    bool was_initial_stub = false;
+    while (target) {
+        if (target->active()) {
+            if (!target_was_me) {
+                target->args() <<= this->args();
+                assert(!this->args());
+            }
+            err = target->g_switchstack();
+            break;
+        }
+        if (!target->started()) {
+            // We never encounter a main greenlet that's not started.
+            assert(!target->main());
+            UserGreenlet* real_target = static_cast<UserGreenlet*>(target);
+            assert(real_target);
+            void* dummymarker;
+            was_initial_stub = true;
+            if (!target_was_me) {
+                target->args() <<= this->args();
+                assert(!this->args());
+            }
+            try {
+                // This can only throw back to us while we're
+                // still in this greenlet. Once the new greenlet
+                // is bootstrapped, it has its own exception state.
+                err = real_target->g_initialstub(&dummymarker);
+            }
+            catch (const PyErrOccurred&) {
+                this->release_args();
+                throw;
+            }
+            catch (const GreenletStartedWhileInPython&) {
+                // The greenlet was started sometime before this
+                // greenlet actually switched to it, i.e.,
+                // "concurrent" calls to switch() or throw().
+                // We need to retry the switch.
+                // Note that the current greenlet has been reset
+                // to this one (or we wouldn't be running!)
+                continue;
+            }
+            break;
+        }
+
+        target = target->parent();
+        target_was_me = false;
+    }
+    // The ``this`` pointer and all other stack or register based
+    // variables are invalid now, at least where things succeed
+    // above.
+    // But this one, probably not so much? It's not clear if it's
+    // safe to throw an exception at this point.
+
+    if (err.status < 0) {
+        // If we get here, either g_initialstub()
+        // failed, or g_switchstack() failed. Either one of those
+        // cases SHOULD leave us in the original greenlet with a valid
+        // stack.
+        return this->on_switchstack_or_initialstub_failure(target, err, target_was_me, was_initial_stub);
+    }
+
+    // err.the_new_current_greenlet would be the same as ``target``,
+    // if target wasn't probably corrupt.
+    return err.the_new_current_greenlet->g_switch_finish(err);
+}
+
+
+
+Greenlet::switchstack_result_t
+UserGreenlet::g_initialstub(void* mark)
+{
+    OwnedObject run;
+
+    // We need to grab a reference to the current switch arguments
+    // in case we're entered concurrently during the call to
+    // GetAttr() and have to try again.
+    // We'll restore them when we return in that case.
+    // Scope them tightly to avoid ref leaks.
+    {
+        SwitchingArgs args(this->args());
+
+        /* save exception in case getattr clears it */
+        PyErrPieces saved;
+
+        /*
+          self.run is the object to call in the new greenlet.
+          This could run arbitrary python code and switch greenlets!
+        */
+        run = this->self().PyRequireAttr(mod_globs->str_run);
+        /* restore saved exception */
+        saved.PyErrRestore();
+
+
+        /* recheck that it's safe to switch in case greenlet reparented anywhere above */
+        this->check_switch_allowed();
+
+        /* by the time we got here another start could happen elsewhere,
+         * that means it should now be a regular switch.
+         * This can happen if the Python code is a subclass that implements
+         * __getattribute__ or __getattr__, or makes ``run`` a descriptor;
+         * all of those can run arbitrary code that switches back into
+         * this greenlet.
+         */
+        if (this->stack_state.started()) {
+            // the successful switch cleared these out, we need to
+            // restore our version. They will be copied on up to the
+            // next target.
+            assert(!this->args());
+            this->args() <<= args;
+            throw GreenletStartedWhileInPython();
+        }
+    }
+
+    // Sweet, if we got here, we have the go-ahead and will switch
+    // greenlets.
+    // Nothing we do from here on out should allow for a thread or
+    // greenlet switch: No arbitrary calls to Python, including
+    // decref'ing
+
+#if GREENLET_USE_CFRAME
+    /* OK, we need it, we're about to switch greenlets, save the state. */
+    /*
+      See green_new(). This is a stack-allocated variable used
+      while *self* is in PyObject_Call().
+      We want to defer copying the state info until we're sure
+      we need it and are in a stable place to do so.
+    */
+    _PyCFrame trace_info;
+
+    this->python_state.set_new_cframe(trace_info);
+#endif
+    /* start the greenlet */
+    ThreadState& thread_state = GET_THREAD_STATE().state();
+    this->stack_state = StackState(mark,
+                                   thread_state.borrow_current()->stack_state);
+    this->python_state.set_initial_state(PyThreadState_GET());
+    this->exception_state.clear();
+    this->_main_greenlet = thread_state.get_main_greenlet();
+
+    /* perform the initial switch */
+    switchstack_result_t err = this->g_switchstack();
+    /* returns twice!
+       The 1st time with ``err == 1``: we are in the new greenlet.
+       This one owns a greenlet that used to be current.
+       The 2nd time with ``err <= 0``: back in the caller's
+       greenlet; this happens if the child finishes or switches
+       explicitly to us. Either way, the ``err`` variable is
+       created twice at the same memory location, but possibly
+       having different ``origin`` values. Note that it's not
+       constructed for the second time until the switch actually happens.
+    */
+    if (err.status == 1) {
+        // In the new greenlet.
+
+        // This never returns! Calling inner_bootstrap steals
+        // the contents of our run object within this stack frame, so
+        // it is not valid to do anything with it.
+        try {
+            this->inner_bootstrap(err.origin_greenlet.relinquish_ownership(),
+                                  run.relinquish_ownership());
+        }
+        // Getting a C++ exception here isn't good. It's probably a
+        // bug in the underlying greenlet, meaning it's probably a
+        // C++ extension. We're going to abort anyway, but try to
+        // display some nice information *if* possible. Some obscure
+        // platforms don't properly support this (old 32-bit Arm, see
+        // https://github.com/python-greenlet/greenlet/issues/385); that's not
+        // great, but should usually be OK because, as mentioned above, we're
+        // terminating anyway.
+        //
+        // The catching is tested by
+        // ``test_cpp.CPPTests.test_unhandled_exception_in_greenlet_aborts``.
+        //
+        // PyErrOccurred can theoretically be thrown by
+        // inner_bootstrap() -> g_switch_finish(), but that should
+        // never make it back to here. It is a std::exception and
+        // would be caught if it is.
+        catch (const std::exception& e) {
+            std::string base = "greenlet: Unhandled C++ exception: ";
+            base += e.what();
+            Py_FatalError(base.c_str());
+        }
+        catch (...) {
+            // Some compilers/runtimes use exceptions internally.
+            // It appears that GCC on Linux with libstdc++ throws an
+            // exception internally at process shutdown time to unwind
+            // stacks and clean up resources. Depending on exactly
+            // where we are when the process exits, that could result
+            // in an unknown exception getting here. If we
+            // Py_FatalError() or abort() here, we interfere with
+            // orderly process shutdown. Throwing the exception on up
+            // is the right thing to do.
+            //
+            // gevent's ``examples/dns_mass_resolve.py`` demonstrates this.
+#ifndef NDEBUG
+            fprintf(stderr,
+                    "greenlet: inner_bootstrap threw unknown exception; "
+                    "is the process terminating?\n");
+#endif
+            throw;
+        }
+        Py_FatalError("greenlet: inner_bootstrap returned with no exception.\n");
+    }
+
+
+    // In contrast, notice that we're keeping the origin greenlet
+    // around as an owned reference; we need it to call the trace
+    // function for the switch back into the parent. It was only
+    // captured at the time the switch actually happened, though,
+    // so we haven't been keeping an extra reference around this
+    // whole time.
+
+    /* back in the parent */
+    if (err.status < 0) {
+        /* start failed badly, restore greenlet state */
+        this->stack_state = StackState();
+        this->_main_greenlet.CLEAR();
+        // CAUTION: This may run arbitrary Python code.
+        run.CLEAR(); // inner_bootstrap didn't run, we own the reference.
+    }
+
+    // In the success case, the spawned code (inner_bootstrap) will
+    // take care of decrefing this, so we relinquish ownership so as
+    // to not double-decref.
+
+    run.relinquish_ownership();
+
+    return err;
+}
+
+
+void
+UserGreenlet::inner_bootstrap(PyGreenlet* origin_greenlet, PyObject* run)
+{
+    // The arguments here would be another great place for move.
+    // As it is, we take them as a reference so that when we clear
+    // them we clear what's on the stack above us. Do that NOW, and
+    // without using a C++ RAII object,
+    // so there's no way that exiting the parent frame can clear it,
+    // or we clear it unexpectedly. This arises in the context of the
+    // interpreter shutting down. See https://github.com/python-greenlet/greenlet/issues/325
+    //PyObject* run = _run.relinquish_ownership();
+
+    /* in the new greenlet */
+    assert(this->thread_state()->borrow_current() == BorrowedGreenlet(this->_self));
+    // C++ exceptions cannot propagate to the parent greenlet from
+    // here. (TODO: Do we need a catch(...) clause, perhaps on the
+    // function itself? All we could do is terminate the program.)
+    // NOTE: On 32-bit Windows, the call chain is extremely
+    // important here in ways that are subtle, having to do with
+    // the depth of the SEH list. The call to restore it MUST NOT
+    // add a new SEH handler to the list, or we'll restore it to
+    // the wrong thing.
+    this->thread_state()->restore_exception_state();
+    /* stack variables from above are no good and also will not unwind! */
+    // EXCEPT: That can't be true, we access run, among others, here.
+
+    this->stack_state.set_active(); /* running */
+
+    // We're about to possibly run Python code again, which
+    // could switch back/away to/from us, so we need to grab the
+    // arguments locally.
+    SwitchingArgs args;
+    args <<= this->args();
+    assert(!this->args());
+
+    // XXX: We could clear this much earlier, right?
+    // Or would that introduce the possibility of running Python
+    // code when we don't want to?
+    // CAUTION: This may run arbitrary Python code.
+    this->_run_callable.CLEAR();
+
+
+    // On the first switch we need to manually call the trace
+    // function here instead of in g_switch_finish, because we
+    // never return there.
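+    // The event reported below is "switch" when arguments arrived
+    // with us, and "throw" when we were resumed only to deliver an
+    // exception (in that case ``args`` is empty).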
+ if (OwnedObject tracefunc = this->thread_state()->get_tracefunc()) { + OwnedGreenlet trace_origin; + trace_origin = origin_greenlet; + try { + g_calltrace(tracefunc, + args ? mod_globs->event_switch : mod_globs->event_throw, + trace_origin, + this->_self); + } + catch (const PyErrOccurred&) { + /* Turn trace errors into switch throws */ + args.CLEAR(); + } + } + + // We no longer need the origin, it was only here for + // tracing. + // We may never actually exit this stack frame so we need + // to explicitly clear it. + // This could run Python code and switch. + Py_CLEAR(origin_greenlet); + + OwnedObject result; + if (!args) { + /* pending exception */ + result = NULL; + } + else { + /* call g.run(*args, **kwargs) */ + // This could result in further switches + try { + //result = run.PyCall(args.args(), args.kwargs()); + // CAUTION: Just invoking this, before the function even + // runs, may cause memory allocations, which may trigger + // GC, which may run arbitrary Python code. + result = OwnedObject::consuming(PyObject_Call(run, args.args().borrow(), args.kwargs().borrow())); + } + catch (...) { + // Unhandled C++ exception! + + // If we declare ourselves as noexcept, if we don't catch + // this here, most platforms will just abort() the + // process. But on 64-bit Windows with older versions of + // the C runtime, this can actually corrupt memory and + // just return. We see this when compiling with the + // Windows 7.0 SDK targeting Windows Server 2008, but not + // when using the Appveyor Visual Studio 2019 image. So + // this currently only affects Python 2.7 on Windows 64. + // That is, the tests pass and the runtime aborts + // everywhere else. + // + // However, if we catch it and try to continue with a + // Python error, then all Windows 64 bit platforms corrupt + // memory. So all we can do is manually abort, hopefully + // with a good error message. (Note that the above was + // tested WITHOUT the `/EHr` switch being used at compile + // time, so MSVC may have "optimized" out important + // checking. Using that switch, we may be in a better + // place in terms of memory corruption.) But sometimes it + // can't be caught here at all, which is confusing but not + // terribly surprising; so again, the G_NOEXCEPT_WIN32 + // plus "/EHr". + // + // Hopefully the basic C stdlib is still functional enough + // for us to at least print an error. + // + // It gets more complicated than that, though, on some + // platforms, specifically at least Linux/gcc/libstdc++. They use + // an exception to unwind the stack when a background + // thread exits. (See comments about noexcept.) So this + // may not actually represent anything untoward. On those + // platforms we allow throws of this to propagate, or + // attempt to anyway. +# if defined(WIN32) || defined(_WIN32) + Py_FatalError( + "greenlet: Unhandled C++ exception from a greenlet run function. " + "Because memory is likely corrupted, terminating process."); + std::abort(); +#else + throw; +#endif + } + } + // These lines may run arbitrary code + args.CLEAR(); + Py_CLEAR(run); + + if (!result + && mod_globs->PyExc_GreenletExit.PyExceptionMatches() + && (this->args())) { + // This can happen, for example, if our only reference + // goes away after we switch back to the parent. 
+ // See test_dealloc_switch_args_not_lost + PyErrPieces clear_error; + result <<= this->args(); + result = single_result(result); + } + this->release_args(); + this->python_state.did_finish(PyThreadState_GET()); + + result = g_handle_exit(result); + assert(this->thread_state()->borrow_current() == this->_self); + + /* jump back to parent */ + this->stack_state.set_inactive(); /* dead */ + + + // TODO: Can we decref some things here? Release our main greenlet + // and maybe parent? + for (Greenlet* parent = this->_parent; + parent; + parent = parent->parent()) { + // We need to somewhere consume a reference to + // the result; in most cases we'll never have control + // back in this stack frame again. Calling + // green_switch actually adds another reference! + // This would probably be clearer with a specific API + // to hand results to the parent. + parent->args() <<= result; + assert(!result); + // The parent greenlet now owns the result; in the + // typical case we'll never get back here to assign to + // result and thus release the reference. + try { + result = parent->g_switch(); + } + catch (const PyErrOccurred&) { + // Ignore, keep passing the error on up. + } + + /* Return here means switch to parent failed, + * in which case we throw *current* exception + * to the next parent in chain. + */ + assert(!result); + } + /* We ran out of parents, cannot continue */ + PyErr_WriteUnraisable(this->self().borrow_o()); + Py_FatalError("greenlet: ran out of parent greenlets while propagating exception; " + "cannot continue"); + std::abort(); +} + +void +UserGreenlet::run(const BorrowedObject nrun) +{ + if (this->started()) { + throw AttributeError( + "run cannot be set " + "after the start of the greenlet"); + } + this->_run_callable = nrun; +} + +const OwnedGreenlet +UserGreenlet::parent() const +{ + return this->_parent; +} + +void +UserGreenlet::parent(const BorrowedObject raw_new_parent) +{ + if (!raw_new_parent) { + throw AttributeError("can't delete attribute"); + } + + BorrowedMainGreenlet main_greenlet_of_new_parent; + BorrowedGreenlet new_parent(raw_new_parent.borrow()); // could + // throw + // TypeError! 
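+    // Walk the proposed parent chain all the way up: reject a cycle
+    // through ourself, and capture the main greenlet at the top so we
+    // can verify below that it lives on our own thread.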
+ for (BorrowedGreenlet p = new_parent; p; p = p->parent()) { + if (p == this->self()) { + throw ValueError("cyclic parent chain"); + } + main_greenlet_of_new_parent = p->main_greenlet(); + } + + if (!main_greenlet_of_new_parent) { + throw ValueError("parent must not be garbage collected"); + } + + if (this->started() + && this->_main_greenlet != main_greenlet_of_new_parent) { + throw ValueError("parent cannot be on a different thread"); + } + + this->_parent = new_parent; +} + +void +UserGreenlet::murder_in_place() +{ + this->_main_greenlet.CLEAR(); + Greenlet::murder_in_place(); +} + +bool +UserGreenlet::belongs_to_thread(const ThreadState* thread_state) const +{ + return Greenlet::belongs_to_thread(thread_state) && this->_main_greenlet == thread_state->borrow_main_greenlet(); +} + + +int +UserGreenlet::tp_traverse(visitproc visit, void* arg) +{ + Py_VISIT(this->_parent.borrow_o()); + Py_VISIT(this->_main_greenlet.borrow_o()); + Py_VISIT(this->_run_callable.borrow_o()); + + return Greenlet::tp_traverse(visit, arg); +} + +int +UserGreenlet::tp_clear() +{ + Greenlet::tp_clear(); + this->_parent.CLEAR(); + this->_main_greenlet.CLEAR(); + this->_run_callable.CLEAR(); + return 0; +} + +UserGreenlet::ParentIsCurrentGuard::ParentIsCurrentGuard(UserGreenlet* p, + const ThreadState& thread_state) + : oldparent(p->_parent), + greenlet(p) +{ + p->_parent = thread_state.get_current(); +} + +UserGreenlet::ParentIsCurrentGuard::~ParentIsCurrentGuard() +{ + this->greenlet->_parent = oldparent; + oldparent.CLEAR(); +} + +}; //namespace greenlet +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/__init__.py b/.venv/lib/python3.12/site-packages/greenlet/__init__.py new file mode 100644 index 00000000..b2dcc9bd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/__init__.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +""" +The root of the greenlet package. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +__all__ = [ + '__version__', + '_C_API', + + 'GreenletExit', + 'error', + + 'getcurrent', + 'greenlet', + + 'gettrace', + 'settrace', +] + +# pylint:disable=no-name-in-module + +### +# Metadata +### +__version__ = '3.1.1' +from ._greenlet import _C_API # pylint:disable=no-name-in-module + +### +# Exceptions +### +from ._greenlet import GreenletExit +from ._greenlet import error + +### +# greenlets +### +from ._greenlet import getcurrent +from ._greenlet import greenlet + +### +# tracing +### +try: + from ._greenlet import gettrace + from ._greenlet import settrace +except ImportError: + # Tracing wasn't supported. + # XXX: The option to disable it was removed in 1.0, + # so this branch should be dead code. + pass + +### +# Constants +# These constants aren't documented and aren't recommended. +# In 1.0, USE_GC and USE_TRACING are always true, and USE_CONTEXT_VARS +# is the same as ``sys.version_info[:2] >= 3.7`` +### +from ._greenlet import GREENLET_USE_CONTEXT_VARS # pylint:disable=unused-import +from ._greenlet import GREENLET_USE_GC # pylint:disable=unused-import +from ._greenlet import GREENLET_USE_TRACING # pylint:disable=unused-import + +# Controlling the use of the gc module. Provisional API for this greenlet +# implementation in 2.0. 
+from ._greenlet import CLOCKS_PER_SEC # pylint:disable=unused-import
+from ._greenlet import enable_optional_cleanup # pylint:disable=unused-import
+from ._greenlet import get_clocks_used_doing_optional_cleanup # pylint:disable=unused-import
+
+# Other APIs in the _greenlet module are for test support.
diff --git a/.venv/lib/python3.12/site-packages/greenlet/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..b907dc22
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/__pycache__/__init__.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/greenlet/_greenlet.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/greenlet/_greenlet.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 00000000..971f70d9
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/_greenlet.cpython-312-x86_64-linux-gnu.so differ
diff --git a/.venv/lib/python3.12/site-packages/greenlet/greenlet.cpp b/.venv/lib/python3.12/site-packages/greenlet/greenlet.cpp
new file mode 100644
index 00000000..e8d92a00
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/greenlet.cpp
@@ -0,0 +1,320 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+/* Format with:
+ *  clang-format -i --style=file src/greenlet/greenlet.c
+ *
+ *
+ * Fix missing braces with:
+ *   clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements"
+*/
+#include <cstdlib>
+#include <string>
+#include <algorithm>
+#include <exception>
+
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include "structmember.h" // PyMemberDef
+
+#include "greenlet_internal.hpp"
+// Code after this point can assume access to things declared in stdint.h,
+// including the fixed-width types. This goes for the platform-specific switch functions
+// as well.
+#include "greenlet_refs.hpp" +#include "greenlet_slp_switch.hpp" + +#include "greenlet_thread_support.hpp" +#include "TGreenlet.hpp" + +#include "TGreenletGlobals.cpp" + +#include "TGreenlet.cpp" +#include "TMainGreenlet.cpp" +#include "TUserGreenlet.cpp" +#include "TBrokenGreenlet.cpp" +#include "TExceptionState.cpp" +#include "TPythonState.cpp" +#include "TStackState.cpp" + +#include "TThreadState.hpp" +#include "TThreadStateCreator.hpp" +#include "TThreadStateDestroy.cpp" + +#include "PyGreenlet.cpp" +#include "PyGreenletUnswitchable.cpp" +#include "CObjects.cpp" + +using greenlet::LockGuard; +using greenlet::LockInitError; +using greenlet::PyErrOccurred; +using greenlet::Require; + +using greenlet::g_handle_exit; +using greenlet::single_result; + +using greenlet::Greenlet; +using greenlet::UserGreenlet; +using greenlet::MainGreenlet; +using greenlet::BrokenGreenlet; +using greenlet::ThreadState; +using greenlet::PythonState; + + + +// ******* Implementation of things from included files +template +greenlet::refs::_BorrowedGreenlet& greenlet::refs::_BorrowedGreenlet::operator=(const greenlet::refs::BorrowedObject& other) +{ + this->_set_raw_pointer(static_cast(other)); + return *this; +} + +template +inline greenlet::refs::_BorrowedGreenlet::operator Greenlet*() const noexcept +{ + if (!this->p) { + return nullptr; + } + return reinterpret_cast(this->p)->pimpl; +} + +template +greenlet::refs::_BorrowedGreenlet::_BorrowedGreenlet(const BorrowedObject& p) + : BorrowedReference(nullptr) +{ + + this->_set_raw_pointer(p.borrow()); +} + +template +inline greenlet::refs::_OwnedGreenlet::operator Greenlet*() const noexcept +{ + if (!this->p) { + return nullptr; + } + return reinterpret_cast(this->p)->pimpl; +} + + + +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wmissing-field-initializers" +# pragma clang diagnostic ignored "-Wwritable-strings" +#elif defined(__GNUC__) +# pragma GCC diagnostic push +// warning: ISO C++ forbids converting a string constant to ‘char*’ +// (The python APIs aren't const correct and accept writable char*) +# pragma GCC diagnostic ignored "-Wwrite-strings" +#endif + + +/*********************************************************** + +A PyGreenlet is a range of C stack addresses that must be +saved and restored in such a way that the full range of the +stack contains valid data when we switch to it. + +Stack layout for a greenlet: + + | ^^^ | + | older data | + | | + stack_stop . |_______________| + . | | + . | greenlet data | + . | in stack | + . * |_______________| . . _____________ stack_copy + stack_saved + . | | | | + . | data | |greenlet data| + . | unrelated | | saved | + . | to | | in heap | + stack_start . | this | . . |_____________| stack_copy + | greenlet | + | | + | newer data | + | vvv | + + +Note that a greenlet's stack data is typically partly at its correct +place in the stack, and partly saved away in the heap, but always in +the above configuration: two blocks, the more recent one in the heap +and the older one still in the stack (either block may be empty). + +Greenlets are chained: each points to the previous greenlet, which is +the one that owns the data currently in the C stack above my +stack_stop. The currently running greenlet is the first element of +this chain. The main (initial) greenlet is the last one. Greenlets +whose stack is entirely in the heap can be skipped from the chain. 
+ +The chain is not related to execution order, but only to the order +in which bits of C stack happen to belong to greenlets at a particular +point in time. + +The main greenlet doesn't have a stack_stop: it is responsible for the +complete rest of the C stack, and we don't know where it begins. We +use (char*) -1, the largest possible address. + +States: + stack_stop == NULL && stack_start == NULL: did not start yet + stack_stop != NULL && stack_start == NULL: already finished + stack_stop != NULL && stack_start != NULL: active + +The running greenlet's stack_start is undefined but not NULL. + + ***********************************************************/ + + + + +/***********************************************************/ + +/* Some functions must not be inlined: + * slp_restore_state, when inlined into slp_switch might cause + it to restore stack over its own local variables + * slp_save_state, when inlined would add its own local + variables to the saved stack, wasting space + * slp_switch, cannot be inlined for obvious reasons + * g_initialstub, when inlined would receive a pointer into its + own stack frame, leading to incomplete stack save/restore + +g_initialstub is a member function and declared virtual so that the +compiler always calls it through a vtable. + +slp_save_state and slp_restore_state are also member functions. They +are called from trampoline functions that themselves are declared as +not eligible for inlining. +*/ + +extern "C" { +static int GREENLET_NOINLINE(slp_save_state_trampoline)(char* stackref) +{ + return switching_thread_state->slp_save_state(stackref); +} +static void GREENLET_NOINLINE(slp_restore_state_trampoline)() +{ + switching_thread_state->slp_restore_state(); +} +} + + +/***********************************************************/ + + +#include "PyModule.cpp" + + + +static PyObject* +greenlet_internal_mod_init() noexcept +{ + static void* _PyGreenlet_API[PyGreenlet_API_pointers]; + + try { + CreatedModule m(greenlet_module_def); + + Require(PyType_Ready(&PyGreenlet_Type)); + Require(PyType_Ready(&PyGreenletUnswitchable_Type)); + + mod_globs = new greenlet::GreenletGlobals; + ThreadState::init(); + + m.PyAddObject("greenlet", PyGreenlet_Type); + m.PyAddObject("UnswitchableGreenlet", PyGreenletUnswitchable_Type); + m.PyAddObject("error", mod_globs->PyExc_GreenletError); + m.PyAddObject("GreenletExit", mod_globs->PyExc_GreenletExit); + + m.PyAddObject("GREENLET_USE_GC", 1); + m.PyAddObject("GREENLET_USE_TRACING", 1); + m.PyAddObject("GREENLET_USE_CONTEXT_VARS", 1L); + m.PyAddObject("GREENLET_USE_STANDARD_THREADING", 1L); + + OwnedObject clocks_per_sec = OwnedObject::consuming(PyLong_FromSsize_t(CLOCKS_PER_SEC)); + m.PyAddObject("CLOCKS_PER_SEC", clocks_per_sec); + + /* also publish module-level data as attributes of the greentype. */ + // XXX: This is weird, and enables a strange pattern of + // confusing the class greenlet with the module greenlet; with + // the exception of (possibly) ``getcurrent()``, this + // shouldn't be encouraged so don't add new items here. 
+ for (const char* const* p = copy_on_greentype; *p; p++) { + OwnedObject o = m.PyRequireAttr(*p); + PyDict_SetItemString(PyGreenlet_Type.tp_dict, *p, o.borrow()); + } + + /* + * Expose C API + */ + + /* types */ + _PyGreenlet_API[PyGreenlet_Type_NUM] = (void*)&PyGreenlet_Type; + + /* exceptions */ + _PyGreenlet_API[PyExc_GreenletError_NUM] = (void*)mod_globs->PyExc_GreenletError; + _PyGreenlet_API[PyExc_GreenletExit_NUM] = (void*)mod_globs->PyExc_GreenletExit; + + /* methods */ + _PyGreenlet_API[PyGreenlet_New_NUM] = (void*)PyGreenlet_New; + _PyGreenlet_API[PyGreenlet_GetCurrent_NUM] = (void*)PyGreenlet_GetCurrent; + _PyGreenlet_API[PyGreenlet_Throw_NUM] = (void*)PyGreenlet_Throw; + _PyGreenlet_API[PyGreenlet_Switch_NUM] = (void*)PyGreenlet_Switch; + _PyGreenlet_API[PyGreenlet_SetParent_NUM] = (void*)PyGreenlet_SetParent; + + /* Previously macros, but now need to be functions externally. */ + _PyGreenlet_API[PyGreenlet_MAIN_NUM] = (void*)Extern_PyGreenlet_MAIN; + _PyGreenlet_API[PyGreenlet_STARTED_NUM] = (void*)Extern_PyGreenlet_STARTED; + _PyGreenlet_API[PyGreenlet_ACTIVE_NUM] = (void*)Extern_PyGreenlet_ACTIVE; + _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM] = (void*)Extern_PyGreenlet_GET_PARENT; + + /* XXX: Note that our module name is ``greenlet._greenlet``, but for + backwards compatibility with existing C code, we need the _C_API to + be directly in greenlet. + */ + const NewReference c_api_object(Require( + PyCapsule_New( + (void*)_PyGreenlet_API, + "greenlet._C_API", + NULL))); + m.PyAddObject("_C_API", c_api_object); + assert(c_api_object.REFCNT() == 2); + + // cerr << "Sizes:" + // << "\n\tGreenlet : " << sizeof(Greenlet) + // << "\n\tUserGreenlet : " << sizeof(UserGreenlet) + // << "\n\tMainGreenlet : " << sizeof(MainGreenlet) + // << "\n\tExceptionState : " << sizeof(greenlet::ExceptionState) + // << "\n\tPythonState : " << sizeof(greenlet::PythonState) + // << "\n\tStackState : " << sizeof(greenlet::StackState) + // << "\n\tSwitchingArgs : " << sizeof(greenlet::SwitchingArgs) + // << "\n\tOwnedObject : " << sizeof(greenlet::refs::OwnedObject) + // << "\n\tBorrowedObject : " << sizeof(greenlet::refs::BorrowedObject) + // << "\n\tPyGreenlet : " << sizeof(PyGreenlet) + // << endl; + + return m.borrow(); // But really it's the main reference. + } + catch (const LockInitError& e) { + PyErr_SetString(PyExc_MemoryError, e.what()); + return NULL; + } + catch (const PyErrOccurred&) { + return NULL; + } + +} + +extern "C" { + +PyMODINIT_FUNC +PyInit__greenlet(void) +{ + return greenlet_internal_mod_init(); +} + +}; // extern C + +#ifdef __clang__ +# pragma clang diagnostic pop +#elif defined(__GNUC__) +# pragma GCC diagnostic pop +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/greenlet.h b/.venv/lib/python3.12/site-packages/greenlet/greenlet.h new file mode 100644 index 00000000..d02a16e4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/greenlet.h @@ -0,0 +1,164 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ + +/* Greenlet object interface */ + +#ifndef Py_GREENLETOBJECT_H +#define Py_GREENLETOBJECT_H + + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* This is deprecated and undocumented. It does not change. 
*/ +#define GREENLET_VERSION "1.0.0" + +#ifndef GREENLET_MODULE +#define implementation_ptr_t void* +#endif + +typedef struct _greenlet { + PyObject_HEAD + PyObject* weakreflist; + PyObject* dict; + implementation_ptr_t pimpl; +} PyGreenlet; + +#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type)) + + +/* C API functions */ + +/* Total number of symbols that are exported */ +#define PyGreenlet_API_pointers 12 + +#define PyGreenlet_Type_NUM 0 +#define PyExc_GreenletError_NUM 1 +#define PyExc_GreenletExit_NUM 2 + +#define PyGreenlet_New_NUM 3 +#define PyGreenlet_GetCurrent_NUM 4 +#define PyGreenlet_Throw_NUM 5 +#define PyGreenlet_Switch_NUM 6 +#define PyGreenlet_SetParent_NUM 7 + +#define PyGreenlet_MAIN_NUM 8 +#define PyGreenlet_STARTED_NUM 9 +#define PyGreenlet_ACTIVE_NUM 10 +#define PyGreenlet_GET_PARENT_NUM 11 + +#ifndef GREENLET_MODULE +/* This section is used by modules that uses the greenlet C API */ +static void** _PyGreenlet_API = NULL; + +# define PyGreenlet_Type \ + (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) + +# define PyExc_GreenletError \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) + +# define PyExc_GreenletExit \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) + +/* + * PyGreenlet_New(PyObject *args) + * + * greenlet.greenlet(run, parent=None) + */ +# define PyGreenlet_New \ + (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ + _PyGreenlet_API[PyGreenlet_New_NUM]) + +/* + * PyGreenlet_GetCurrent(void) + * + * greenlet.getcurrent() + */ +# define PyGreenlet_GetCurrent \ + (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) + +/* + * PyGreenlet_Throw( + * PyGreenlet *greenlet, + * PyObject *typ, + * PyObject *val, + * PyObject *tb) + * + * g.throw(...) + */ +# define PyGreenlet_Throw \ + (*(PyObject * (*)(PyGreenlet * self, \ + PyObject * typ, \ + PyObject * val, \ + PyObject * tb)) \ + _PyGreenlet_API[PyGreenlet_Throw_NUM]) + +/* + * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) + * + * g.switch(*args, **kwargs) + */ +# define PyGreenlet_Switch \ + (*(PyObject * \ + (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ + _PyGreenlet_API[PyGreenlet_Switch_NUM]) + +/* + * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) + * + * g.parent = new_parent + */ +# define PyGreenlet_SetParent \ + (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ + _PyGreenlet_API[PyGreenlet_SetParent_NUM]) + +/* + * PyGreenlet_GetParent(PyObject* greenlet) + * + * return greenlet.parent; + * + * This could return NULL even if there is no exception active. + * If it does not return NULL, you are responsible for decrementing the + * reference count. + */ +# define PyGreenlet_GetParent \ + (*(PyGreenlet* (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM]) + +/* + * deprecated, undocumented alias. + */ +# define PyGreenlet_GET_PARENT PyGreenlet_GetParent + +# define PyGreenlet_MAIN \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_MAIN_NUM]) + +# define PyGreenlet_STARTED \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_STARTED_NUM]) + +# define PyGreenlet_ACTIVE \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_ACTIVE_NUM]) + + + + +/* Macro that imports greenlet and initializes C API */ +/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we + keep the older definition to be sure older code that might have a copy of + the header still works. 
*/
+# define PyGreenlet_Import() \
+    { \
+        _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
+    }
+
+#endif /* GREENLET_MODULE */
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_GREENLETOBJECT_H */
diff --git a/.venv/lib/python3.12/site-packages/greenlet/greenlet_allocator.hpp b/.venv/lib/python3.12/site-packages/greenlet/greenlet_allocator.hpp
new file mode 100644
index 00000000..b452f544
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/greenlet_allocator.hpp
@@ -0,0 +1,63 @@
+#ifndef GREENLET_ALLOCATOR_HPP
+#define GREENLET_ALLOCATOR_HPP
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <memory>
+#include "greenlet_compiler_compat.hpp"
+
+
+namespace greenlet
+{
+    // This allocator is stateless; all instances are identical.
+    // It can *ONLY* be used when we're sure we're holding the GIL
+    // (Python's allocators require the GIL).
+    template <class T>
+    struct PythonAllocator : public std::allocator<T> {
+
+        PythonAllocator(const PythonAllocator& UNUSED(other))
+            : std::allocator<T>()
+        {
+        }
+
+        PythonAllocator(const std::allocator<T> other)
+            : std::allocator<T>(other)
+        {}
+
+        template <class U>
+        PythonAllocator(const std::allocator<U>& other)
+            : std::allocator<T>(other)
+        {
+        }
+
+        PythonAllocator() : std::allocator<T>() {}
+
+        T* allocate(size_t number_objects, const void* UNUSED(hint)=0)
+        {
+            void* p;
+            if (number_objects == 1)
+                p = PyObject_Malloc(sizeof(T));
+            else
+                p = PyMem_Malloc(sizeof(T) * number_objects);
+            return static_cast<T*>(p);
+        }
+
+        void deallocate(T* t, size_t n)
+        {
+            void* p = t;
+            if (n == 1) {
+                PyObject_Free(p);
+            }
+            else
+                PyMem_Free(p);
+        }
+        // This member is deprecated in C++17 and removed in C++20
+        template< class U >
+        struct rebind {
+            typedef PythonAllocator<U> other;
+        };
+
+    };
+}
+
+#endif
diff --git a/.venv/lib/python3.12/site-packages/greenlet/greenlet_compiler_compat.hpp b/.venv/lib/python3.12/site-packages/greenlet/greenlet_compiler_compat.hpp
new file mode 100644
index 00000000..af24bd83
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/greenlet_compiler_compat.hpp
@@ -0,0 +1,98 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+#ifndef GREENLET_COMPILER_COMPAT_HPP
+#define GREENLET_COMPILER_COMPAT_HPP
+
+/**
+ * Definitions to aid with compatibility with different compilers.
+ *
+ * .. caution:: Use extreme care with noexcept.
+ *    Some compilers and runtimes, specifically gcc/libgcc/libstdc++ on
+ *    Linux, implement stack unwinding by throwing an uncatchable
+ *    exception, one that specifically does not appear to be an active
+ *    exception to the rest of the runtime. If this happens while we're in a noexcept function,
+ *    we have violated our dynamic exception contract, and so the runtime
+ *    will call std::terminate(), which kills the process with the
+ *    unhelpful message "terminate called without an active exception".
+ *
+ *    This has happened in this scenario: A background thread is running
+ *    a greenlet that has made a native call and released the GIL.
+ *    Meanwhile, the main thread finishes and starts shutting down the
+ *    interpreter. When the background thread is scheduled again and
+ *    attempts to obtain the GIL, it notices that the interpreter is
+ *    exiting and calls ``pthread_exit()``. This in turn starts to unwind
+ *    the stack by throwing that exception. But we had the ``PyCall``
+ *    functions annotated as noexcept, so the runtime terminated us.
+ *
+ *    #2  0x00007fab26fec2b7 in std::terminate() () from /lib/x86_64-linux-gnu/libstdc++.so.6
+ *    #3  0x00007fab26febb3c in __gxx_personality_v0 () from /lib/x86_64-linux-gnu/libstdc++.so.6
+ *    #4  0x00007fab26f34de6 in ?? () from /lib/x86_64-linux-gnu/libgcc_s.so.1
+ *    #6  0x00007fab276a34c6 in __GI___pthread_unwind at ./nptl/unwind.c:130
+ *    #7  0x00007fab2769bd3a in __do_cancel () at ../sysdeps/nptl/pthreadP.h:280
+ *    #8  __GI___pthread_exit (value=value@entry=0x0) at ./nptl/pthread_exit.c:36
+ *    #9  0x000000000052e567 in PyThread_exit_thread () at ../Python/thread_pthread.h:370
+ *    #10 0x00000000004d60b5 in take_gil at ../Python/ceval_gil.h:224
+ *    #11 0x00000000004d65f9 in PyEval_RestoreThread at ../Python/ceval.c:467
+ *    #12 0x000000000060cce3 in setipaddr at ../Modules/socketmodule.c:1203
+ *    #13 0x00000000006101cd in socket_gethostbyname
+ */
+
+#include <cstdint>
+
+# define G_NO_COPIES_OF_CLS(Cls) private: \
+    Cls(const Cls& other) = delete; \
+    Cls& operator=(const Cls& other) = delete
+
+# define G_NO_ASSIGNMENT_OF_CLS(Cls) private: \
+    Cls& operator=(const Cls& other) = delete
+
+# define G_NO_COPY_CONSTRUCTOR_OF_CLS(Cls) private: \
+    Cls(const Cls& other) = delete;
+
+
+// CAUTION: MSVC is stupidly picky:
+//
+// "The compiler ignores, without warning, any __declspec keywords
+// placed after * or & and in front of the variable identifier in a
+// declaration."
+// (https://docs.microsoft.com/en-us/cpp/cpp/declspec?view=msvc-160)
+//
+// So pointer return types must be handled differently (because of the
+// trailing *), or you get inscrutable compiler warnings like "error
+// C2059: syntax error: ''"
+//
+// In C++11, there is a standard syntax for attributes, and
+// GCC defines an attribute to use with this: [[gnu::noinline]].
+// In the future, this is expected to become standard.
+
+#if defined(__GNUC__) || defined(__clang__)
+/* We used to check for GCC 4+ or 3.4+, but those compilers are
+   laughably out of date. Just assume they support it. */
+#    define GREENLET_NOINLINE(name) __attribute__((noinline)) name
+#    define GREENLET_NOINLINE_P(rtype, name) rtype __attribute__((noinline)) name
+#    define UNUSED(x) UNUSED_ ## x __attribute__((__unused__))
+#elif defined(_MSC_VER)
+/* We used to check for && (_MSC_VER >= 1300) but that's also out of date. */
+#    define GREENLET_NOINLINE(name) __declspec(noinline) name
+#    define GREENLET_NOINLINE_P(rtype, name) __declspec(noinline) rtype name
+#    define UNUSED(x) UNUSED_ ## x
+#endif
+
+#if defined(_MSC_VER)
+#    define G_NOEXCEPT_WIN32 noexcept
+#else
+#    define G_NOEXCEPT_WIN32
+#endif
+
+#if defined(__GNUC__) && defined(__POWERPC__) && defined(__APPLE__)
+// 32-bit PPC/MacOSX. Only known to be tested on unreleased versions
+// of macOS 10.6 using a macports build gcc 14. It appears that
+// running C++ destructors of thread-local variables is broken.
+
+// See https://github.com/python-greenlet/greenlet/pull/419
+# define GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK 1
+#else
+# define GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK 0
+#endif
+
+
+#endif
diff --git a/.venv/lib/python3.12/site-packages/greenlet/greenlet_cpython_add_pending.hpp b/.venv/lib/python3.12/site-packages/greenlet/greenlet_cpython_add_pending.hpp
new file mode 100644
index 00000000..0d28efd3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/greenlet_cpython_add_pending.hpp
@@ -0,0 +1,172 @@
+#ifndef GREENLET_CPYTHON_ADD_PENDING_HPP
+#define GREENLET_CPYTHON_ADD_PENDING_HPP
+
+#if (PY_VERSION_HEX >= 0x30800A0 && PY_VERSION_HEX < 0x3090000) && !(defined(_WIN32) || defined(WIN32))
+// XXX: From Python 3.8a3 [1] up until Python 3.9a6 [2][3],
+// ``Py_AddPendingCall`` would try to produce a Python exception if
+// the interpreter was in the beginning of shutting down when this
+// function is called. However, ``Py_AddPendingCall`` doesn't require
+// the GIL, and we are absolutely not holding it when we make that
+// call. That means that trying to create the Python exception is
+// using the C API in an undefined state; here the C API detects this
+// and aborts the process with an error ("Fatal Python error: Python
+// memory allocator called without holding the GIL": Add ->
+// PyErr_SetString -> PyUnicode_New -> PyObject_Malloc). This arises
+// (obviously) in multi-threaded programs and happens if one thread is
+// exiting and cleaning up its thread-local data while the other
+// thread is trying to shut down the interpreter. A crash on shutdown
+// is still a crash and could result in data loss (e.g., daemon
+// threads are still running, pending signal handlers may be present,
+// buffers may not be flushed, there may be __del__ methods that need
+// to run, etc), so we have to work around it.
+//
+// Of course, we can (and do) check for whether the interpreter is
+// shutting down before calling ``Py_AddPendingCall``, but that's a
+// race condition since we don't hold the GIL, and so we may not
+// actually get the right answer. Plus, ``Py_FinalizeEx`` actually
+// calls ``_Py_FinishPendingCalls`` (which sets the pending->finishing
+// flag, which is used to gate creating the exception) *before*
+// publishing any other data that would let us detect the shutdown
+// (such as runtime->finalizing). So that point is moot.
+//
+// Our solution for those versions is to inline the same code, without
+// the problematic bit that sets the exception. Unfortunately, all of
+// the structure definitions are private/opaque, *and* we can't
+// actually count on being able to include their definitions from
+// ``internal/pycore_*``, because on some platforms those header files
+// are incomplete (i.e., on macOS with macports 3.8, the includes are
+// fine, but on Ubuntu jammy with 3.8 from ppa:deadsnakes or GitHub
+// Actions 3.8 (I think it's Ubuntu 18.04), they can't be used; at
+// least, I couldn't get them to work). So we need to define the
+// structures and _PyRuntime data member ourselves. Yet more
+// unfortunately, _PyRuntime won't link on Windows, so we can only do
+// this on other platforms.
+// +// [1] https://github.com/python/cpython/commit/842a2f07f2f08a935ef470bfdaeef40f87490cfc +// [2] https://github.com/python/cpython/commit/cfc3c2f8b34d3864717ab584c5b6c260014ba55a +// [3] https://github.com/python/cpython/issues/81308 +# define GREENLET_BROKEN_PY_ADD_PENDING 1 + +// When defining these structures, the important thing is to get +// binary compatibility, i.e., structure layout. For that, we only +// need to define fields up to the ones we use; after that they're +// irrelevant UNLESS the structure is included in another structure +// *before* the structure we're interested in --- in that case, it +// must be complete. Ellipsis indicate elided trailing members. +// Pointer types are changed to void* to keep from having to define +// more structures. + +// From "internal/pycore_atomic.h" + +// There are several different definitions of this, including the +// plain ``int`` version, a ``volatile int`` and an ``_Atomic int`` +// I don't think any of those change the size/layout. +typedef struct _Py_atomic_int { + volatile int _value; +} _Py_atomic_int; + +// This needs too much infrastructure, so we just do a regular store. +#define _Py_atomic_store_relaxed(ATOMIC_VAL, NEW_VAL) \ + (ATOMIC_VAL)->_value = NEW_VAL + + + +// From "internal/pycore_pymem.h" +#define NUM_GENERATIONS 3 + + +struct gc_generation { + PyGC_Head head; // We already have this defined. + int threshold; + int count; +}; +struct gc_generation_stats { + Py_ssize_t collections; + Py_ssize_t collected; + Py_ssize_t uncollectable; +}; + +struct _gc_runtime_state { + void *trash_delete_later; + int trash_delete_nesting; + int enabled; + int debug; + struct gc_generation generations[NUM_GENERATIONS]; + void *generation0; + struct gc_generation permanent_generation; + struct gc_generation_stats generation_stats[NUM_GENERATIONS]; + int collecting; + void *garbage; + void *callbacks; + Py_ssize_t long_lived_total; + Py_ssize_t long_lived_pending; +}; + +// From "internal/pycore_pystate.h" +struct _pending_calls { + int finishing; + PyThread_type_lock lock; + _Py_atomic_int calls_to_do; + int async_exc; +#define NPENDINGCALLS 32 + struct { + int (*func)(void *); + void *arg; + } calls[NPENDINGCALLS]; + int first; + int last; +}; + +struct _ceval_runtime_state { + int recursion_limit; + int tracing_possible; + _Py_atomic_int eval_breaker; + _Py_atomic_int gil_drop_request; + struct _pending_calls pending; + // ... +}; + +typedef struct pyruntimestate { + int preinitializing; + int preinitialized; + int core_initialized; + int initialized; + void *finalizing; + + struct pyinterpreters { + PyThread_type_lock mutex; + void *head; + void *main; + int64_t next_id; + } interpreters; + // XXX Remove this field once we have a tp_* slot. + struct _xidregistry { + PyThread_type_lock mutex; + void *head; + } xidregistry; + + unsigned long main_thread; + +#define NEXITFUNCS 32 + void (*exitfuncs[NEXITFUNCS])(void); + int nexitfuncs; + + struct _gc_runtime_state gc; + struct _ceval_runtime_state ceval; + // ... 
+} _PyRuntimeState; + +#define SIGNAL_PENDING_CALLS(ceval) \ + do { \ + _Py_atomic_store_relaxed(&(ceval)->pending.calls_to_do, 1); \ + _Py_atomic_store_relaxed(&(ceval)->eval_breaker, 1); \ + } while (0) + +extern _PyRuntimeState _PyRuntime; + +#else +# define GREENLET_BROKEN_PY_ADD_PENDING 0 +#endif + + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/greenlet_cpython_compat.hpp b/.venv/lib/python3.12/site-packages/greenlet/greenlet_cpython_compat.hpp new file mode 100644 index 00000000..ce5fd882 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/greenlet_cpython_compat.hpp @@ -0,0 +1,142 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +#ifndef GREENLET_CPYTHON_COMPAT_H +#define GREENLET_CPYTHON_COMPAT_H + +/** + * Helpers for compatibility with multiple versions of CPython. + */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" + + +#if PY_VERSION_HEX >= 0x30A00B1 +# define GREENLET_PY310 1 +#else +# define GREENLET_PY310 0 +#endif + +/* +Python 3.10 beta 1 changed tstate->use_tracing to a nested cframe member. +See https://github.com/python/cpython/pull/25276 +We have to save and restore this as well. + +Python 3.13 removed PyThreadState.cframe (GH-108035). +*/ +#if GREENLET_PY310 && PY_VERSION_HEX < 0x30D0000 +# define GREENLET_USE_CFRAME 1 +#else +# define GREENLET_USE_CFRAME 0 +#endif + + +#if PY_VERSION_HEX >= 0x30B00A4 +/* +Greenlet won't compile on anything older than Python 3.11 alpha 4 (see +https://bugs.python.org/issue46090). Summary of breaking internal changes: +- Python 3.11 alpha 1 changed how frame objects are represented internally. + - https://github.com/python/cpython/pull/30122 +- Python 3.11 alpha 3 changed how recursion limits are stored. + - https://github.com/python/cpython/pull/29524 +- Python 3.11 alpha 4 changed how exception state is stored. It also includes a + change to help greenlet save and restore the interpreter frame "data stack". + - https://github.com/python/cpython/pull/30122 + - https://github.com/python/cpython/pull/30234 +*/ +# define GREENLET_PY311 1 +#else +# define GREENLET_PY311 0 +#endif + + +#if PY_VERSION_HEX >= 0x30C0000 +# define GREENLET_PY312 1 +#else +# define GREENLET_PY312 0 +#endif + +#if PY_VERSION_HEX >= 0x30D0000 +# define GREENLET_PY313 1 +#else +# define GREENLET_PY313 0 +#endif + +#ifndef Py_SET_REFCNT +/* Py_REFCNT and Py_SIZE macros are converted to functions +https://bugs.python.org/issue39573 */ +# define Py_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) +#endif + +#ifndef _Py_DEC_REFTOTAL +/* _Py_DEC_REFTOTAL macro has been removed from Python 3.9 by: + https://github.com/python/cpython/commit/49932fec62c616ec88da52642339d83ae719e924 + + The symbol we use to replace it was removed by at least 3.12. +*/ +# ifdef Py_REF_DEBUG +# if GREENLET_PY312 +# define _Py_DEC_REFTOTAL +# else +# define _Py_DEC_REFTOTAL _Py_RefTotal-- +# endif +# else +# define _Py_DEC_REFTOTAL +# endif +#endif +// Define these flags like Cython does if we're on an old version. 
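Before the type-flag shims that follow, one quick illustration of how the refcount shims defined above are meant to be consumed (a sketch; forget_reference is an invented name, not a greenlet function). Py_SET_REFCNT falls back to the old l-value assignment on pre-3.9 CPython and is the official API on 3.9+; _Py_DEC_REFTOTAL keeps Py_REF_DEBUG accounting in step where it exists and expands to nothing elsewhere.

    static void
    forget_reference(PyObject* op)
    {
        /* Drop one reference without running deallocation logic;
         * both macros are defined (or neutralized) above. */
        Py_SET_REFCNT(op, Py_REFCNT(op) - 1);
        _Py_DEC_REFTOTAL;
    }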
+#ifndef Py_TPFLAGS_CHECKTYPES
+  #define Py_TPFLAGS_CHECKTYPES 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_INDEX
+  #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
+  #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+
+#ifndef Py_TPFLAGS_HAVE_VERSION_TAG
+   #define Py_TPFLAGS_HAVE_VERSION_TAG 0
+#endif
+
+#define G_TPFLAGS_DEFAULT Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_VERSION_TAG | Py_TPFLAGS_CHECKTYPES | Py_TPFLAGS_HAVE_NEWBUFFER | Py_TPFLAGS_HAVE_GC
+
+
+#if PY_VERSION_HEX < 0x03090000
+// The official version only became available in 3.9
+# define PyObject_GC_IsTracked(o) _PyObject_GC_IS_TRACKED(o)
+#endif
+
+
+// bpo-43760 added PyThreadState_EnterTracing() to Python 3.11.0a2
+#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION)
+static inline void PyThreadState_EnterTracing(PyThreadState *tstate)
+{
+    tstate->tracing++;
+#if PY_VERSION_HEX >= 0x030A00A1
+    tstate->cframe->use_tracing = 0;
+#else
+    tstate->use_tracing = 0;
+#endif
+}
+#endif
+
+// bpo-43760 added PyThreadState_LeaveTracing() to Python 3.11.0a2
+#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION)
+static inline void PyThreadState_LeaveTracing(PyThreadState *tstate)
+{
+    tstate->tracing--;
+    int use_tracing = (tstate->c_tracefunc != NULL
+                       || tstate->c_profilefunc != NULL);
+#if PY_VERSION_HEX >= 0x030A00A1
+    tstate->cframe->use_tracing = use_tracing;
+#else
+    tstate->use_tracing = use_tracing;
+#endif
+}
+#endif
+
+#if !defined(Py_C_RECURSION_LIMIT) && defined(C_RECURSION_LIMIT)
+# define Py_C_RECURSION_LIMIT C_RECURSION_LIMIT
+#endif
+
+#endif /* GREENLET_CPYTHON_COMPAT_H */
diff --git a/.venv/lib/python3.12/site-packages/greenlet/greenlet_exceptions.hpp b/.venv/lib/python3.12/site-packages/greenlet/greenlet_exceptions.hpp
new file mode 100644
index 00000000..617f07c2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/greenlet_exceptions.hpp
@@ -0,0 +1,171 @@
+#ifndef GREENLET_EXCEPTIONS_HPP
+#define GREENLET_EXCEPTIONS_HPP
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <string>
+#include <stdexcept>
+
+#ifdef __clang__
+#    pragma clang diagnostic push
+#    pragma clang diagnostic ignored "-Wunused-function"
+#endif
+
+namespace greenlet {
+
+    class PyErrOccurred : public std::runtime_error
+    {
+    public:
+
+        // CAUTION: In debug builds, may run arbitrary Python code.
+        static const PyErrOccurred
+        from_current()
+        {
+            assert(PyErr_Occurred());
+#ifndef NDEBUG
+            // This is not exception safe, and
+            // not necessarily safe in general (what if it switches?)
+            // But we only do this in debug mode, where we are in
+            // tight control of what exceptions are getting raised and
+            // can prevent those issues.
+
+            // You can't call PyObject_Str with a pending exception.
+            PyObject* typ;
+            PyObject* val;
+            PyObject* tb;
+
+            PyErr_Fetch(&typ, &val, &tb);
+            PyObject* typs = PyObject_Str(typ);
+            PyObject* vals = PyObject_Str(val ?
val : typ); + const char* typ_msg = PyUnicode_AsUTF8(typs); + const char* val_msg = PyUnicode_AsUTF8(vals); + PyErr_Restore(typ, val, tb); + + std::string msg(typ_msg); + msg += ": "; + msg += val_msg; + PyErrOccurred ex(msg); + Py_XDECREF(typs); + Py_XDECREF(vals); + + return ex; +#else + return PyErrOccurred(); +#endif + } + + PyErrOccurred() : std::runtime_error("") + { + assert(PyErr_Occurred()); + } + + PyErrOccurred(const std::string& msg) : std::runtime_error(msg) + { + assert(PyErr_Occurred()); + } + + PyErrOccurred(PyObject* exc_kind, const char* const msg) + : std::runtime_error(msg) + { + PyErr_SetString(exc_kind, msg); + } + + PyErrOccurred(PyObject* exc_kind, const std::string msg) + : std::runtime_error(msg) + { + // This copies the c_str, so we don't have any lifetime + // issues to worry about. + PyErr_SetString(exc_kind, msg.c_str()); + } + + PyErrOccurred(PyObject* exc_kind, + const std::string msg, //This is the format + //string; that's not + //usually safe! + + PyObject* borrowed_obj_one, PyObject* borrowed_obj_two) + : std::runtime_error(msg) + { + + //This is designed specifically for the + //``check_switch_allowed`` function. + + // PyObject_Str and PyObject_Repr are safe to call with + // NULL pointers; they return the string "" in that + // case. + // This function always returns null. + PyErr_Format(exc_kind, + msg.c_str(), + borrowed_obj_one, borrowed_obj_two); + } + }; + + class TypeError : public PyErrOccurred + { + public: + TypeError(const char* const what) + : PyErrOccurred(PyExc_TypeError, what) + { + } + TypeError(const std::string what) + : PyErrOccurred(PyExc_TypeError, what) + { + } + }; + + class ValueError : public PyErrOccurred + { + public: + ValueError(const char* const what) + : PyErrOccurred(PyExc_ValueError, what) + { + } + }; + + class AttributeError : public PyErrOccurred + { + public: + AttributeError(const char* const what) + : PyErrOccurred(PyExc_AttributeError, what) + { + } + }; + + /** + * Calls `Py_FatalError` when constructed, so you can't actually + * throw this. It just makes static analysis easier. + */ + class PyFatalError : public std::runtime_error + { + public: + PyFatalError(const char* const msg) + : std::runtime_error(msg) + { + Py_FatalError(msg); + } + }; + + static inline PyObject* + Require(PyObject* p, const std::string& msg="") + { + if (!p) { + throw PyErrOccurred(msg); + } + return p; + }; + + static inline void + Require(const int retval) + { + if (retval < 0) { + throw PyErrOccurred(); + } + }; + + +}; +#ifdef __clang__ +# pragma clang diagnostic pop +#endif + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/greenlet_internal.hpp b/.venv/lib/python3.12/site-packages/greenlet/greenlet_internal.hpp new file mode 100644 index 00000000..f2b15d5f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/greenlet_internal.hpp @@ -0,0 +1,107 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +#ifndef GREENLET_INTERNAL_H +#define GREENLET_INTERNAL_H +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wunused-function" +#endif + +/** + * Implementation helpers. + * + * C++ templates and inline functions should go here. 
+ */ +#define PY_SSIZE_T_CLEAN +#include "greenlet_compiler_compat.hpp" +#include "greenlet_cpython_compat.hpp" +#include "greenlet_exceptions.hpp" +#include "TGreenlet.hpp" +#include "greenlet_allocator.hpp" + +#include +#include + +#define GREENLET_MODULE +struct _greenlet; +typedef struct _greenlet PyGreenlet; +namespace greenlet { + + class ThreadState; + // We can't use the PythonAllocator for this, because we push to it + // from the thread state destructor, which doesn't have the GIL, + // and Python's allocators can only be called with the GIL. + typedef std::vector cleanup_queue_t; + +}; + + +#define implementation_ptr_t greenlet::Greenlet* + + +#include "greenlet.h" + +void +greenlet::refs::MainGreenletExactChecker(void *p) +{ + if (!p) { + return; + } + // We control the class of the main greenlet exactly. + if (Py_TYPE(p) != &PyGreenlet_Type) { + std::string err("MainGreenlet: Expected exactly a greenlet, not a "); + err += Py_TYPE(p)->tp_name; + throw greenlet::TypeError(err); + } + + // Greenlets from dead threads no longer respond to main() with a + // true value; so in that case we need to perform an additional + // check. + Greenlet* g = static_cast(p)->pimpl; + if (g->main()) { + return; + } + if (!dynamic_cast(g)) { + std::string err("MainGreenlet: Expected exactly a main greenlet, not a "); + err += Py_TYPE(p)->tp_name; + throw greenlet::TypeError(err); + } +} + + + +template +inline greenlet::Greenlet* greenlet::refs::_OwnedGreenlet::operator->() const noexcept +{ + return reinterpret_cast(this->p)->pimpl; +} + +template +inline greenlet::Greenlet* greenlet::refs::_BorrowedGreenlet::operator->() const noexcept +{ + return reinterpret_cast(this->p)->pimpl; +} + +#include +#include + + +extern PyTypeObject PyGreenlet_Type; + + + +/** + * Forward declarations needed in multiple files. + */ +static PyObject* green_switch(PyGreenlet* self, PyObject* args, PyObject* kwargs); + + +#ifdef __clang__ +# pragma clang diagnostic pop +#endif + + +#endif + +// Local Variables: +// flycheck-clang-include-path: ("../../include" "/opt/local/Library/Frameworks/Python.framework/Versions/3.10/include/python3.10") +// End: diff --git a/.venv/lib/python3.12/site-packages/greenlet/greenlet_refs.hpp b/.venv/lib/python3.12/site-packages/greenlet/greenlet_refs.hpp new file mode 100644 index 00000000..b7e5e3f2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/greenlet_refs.hpp @@ -0,0 +1,1118 @@ +#ifndef GREENLET_REFS_HPP +#define GREENLET_REFS_HPP + +#define PY_SSIZE_T_CLEAN +#include + +#include + +//#include "greenlet_internal.hpp" +#include "greenlet_compiler_compat.hpp" +#include "greenlet_cpython_compat.hpp" +#include "greenlet_exceptions.hpp" + +struct _greenlet; +struct _PyMainGreenlet; + +typedef struct _greenlet PyGreenlet; +extern PyTypeObject PyGreenlet_Type; + + +#ifdef GREENLET_USE_STDIO +#include +using std::cerr; +using std::endl; +#endif + +namespace greenlet +{ + class Greenlet; + + namespace refs + { + // Type checkers throw a TypeError if the argument is not + // null, and isn't of the required Python type. + // (We can't use most of the defined type checkers + // like PyList_Check, etc, directly, because they are + // implemented as macros.) + typedef void (*TypeChecker)(void*); + + void + NoOpChecker(void*) + { + return; + } + + void + GreenletChecker(void *p) + { + if (!p) { + return; + } + + PyTypeObject* typ = Py_TYPE(p); + // fast, common path. 
(PyObject_TypeCheck is a macro or + // static inline function, and it also does a + // direct comparison of the type pointers, but its fast + // path only handles one type) + if (typ == &PyGreenlet_Type) { + return; + } + + if (!PyObject_TypeCheck(p, &PyGreenlet_Type)) { + std::string err("GreenletChecker: Expected any type of greenlet, not "); + err += Py_TYPE(p)->tp_name; + throw TypeError(err); + } + } + + void + MainGreenletExactChecker(void *p); + + template + class PyObjectPointer; + + template + class OwnedReference; + + + template + class BorrowedReference; + + typedef BorrowedReference BorrowedObject; + typedef OwnedReference OwnedObject; + + class ImmortalObject; + class ImmortalString; + + template + class _OwnedGreenlet; + + typedef _OwnedGreenlet OwnedGreenlet; + typedef _OwnedGreenlet OwnedMainGreenlet; + + template + class _BorrowedGreenlet; + + typedef _BorrowedGreenlet BorrowedGreenlet; + + void + ContextExactChecker(void *p) + { + if (!p) { + return; + } + if (!PyContext_CheckExact(p)) { + throw TypeError( + "greenlet context must be a contextvars.Context or None" + ); + } + } + + typedef OwnedReference OwnedContext; + } +} + +namespace greenlet { + + + namespace refs { + // A set of classes to make reference counting rules in python + // code explicit. + // + // Rules of use: + // (1) Functions returning a new reference that the caller of the + // function is expected to dispose of should return a + // ``OwnedObject`` object. This object automatically releases its + // reference when it goes out of scope. It works like a ``std::shared_ptr`` + // and can be copied or used as a function parameter (but don't do + // that). Note that constructing a ``OwnedObject`` from a + // PyObject* steals the reference. + // (2) Parameters to functions should be either a + // ``OwnedObject&``, or, more generally, a ``PyObjectPointer&``. + // If the function needs to create its own new reference, it can + // do so by copying to a local ``OwnedObject``. + // (3) Functions returning an existing pointer that is NOT + // incref'd, and which the caller MUST NOT decref, + // should return a ``BorrowedObject``. + + // XXX: The following two paragraphs do not hold for all platforms. + // Notably, 32-bit PPC Linux passes structs by reference, not by + // value, so this actually doesn't work. (Although that's the only + // platform that doesn't work on.) DO NOT ATTEMPT IT. The + // unfortunate consequence of that is that the slots which we + // *know* are already type safe will wind up calling the type + // checker function (when we had the slots accepting + // BorrowedGreenlet, this was bypassed), so this slows us down. + // TODO: Optimize this again. + + // For a class with a single pointer member, whose constructor + // does nothing but copy a pointer parameter into the member, and + // which can then be converted back to the pointer type, compilers + // generate code that's the same as just passing the pointer. + // That is, func(BorrowedObject x) called like ``PyObject* p = + // ...; f(p)`` has 0 overhead. Similarly, they "unpack" to the + // pointer type with 0 overhead. + // + // If there are no virtual functions, no complex inheritance (maybe?) and + // no destructor, these can be directly used as parameters in + // Python callbacks like tp_init: the layout is the same as a + // single pointer. Only subclasses with trivial constructors that + // do nothing but set the single pointer member are safe to use + // that way. 
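A short sketch of rules (1) and (3) above in action; both helper functions are invented for illustration and are not part of this header.

    // Rule (1): a producer returning a new reference hands back an
    // OwnedObject; the reference is released automatically when the
    // last copy goes out of scope. consuming() steals the new ref.
    static greenlet::refs::OwnedObject
    make_label()
    {
        return greenlet::refs::OwnedObject::consuming(
            PyUnicode_FromString("label"));
    }

    // Rule (3): an existing pointer the caller must NOT decref
    // travels as a BorrowedObject (implicitly built from PyObject*).
    static greenlet::refs::BorrowedObject
    dict_of(PyObject* module)
    {
        return PyModule_GetDict(module); // borrowed per CPython docs
    }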
+ + + // This is the base class for things that can be done with a + // PyObject pointer. It assumes nothing about memory management. + // NOTE: Nothing is virtual, so subclasses shouldn't add new + // storage fields or try to override these methods. + template + class PyObjectPointer + { + public: + typedef T PyType; + protected: + T* p; + public: + PyObjectPointer(T* it=nullptr) : p(it) + { + TC(p); + } + + // We don't allow automatic casting to PyObject* at this + // level, because then we could be passed to Py_DECREF/INCREF, + // but we want nothing to do with memory management. If you + // know better, then you can use the get() method, like on a + // std::shared_ptr. Except we name it borrow() to clarify that + // if this is a reference-tracked object, the pointer you get + // back will go away when the object does. + // TODO: This should probably not exist here, but be moved + // down to relevant sub-types. + + T* borrow() const noexcept + { + return this->p; + } + + PyObject* borrow_o() const noexcept + { + return reinterpret_cast(this->p); + } + + T* operator->() const noexcept + { + return this->p; + } + + bool is_None() const noexcept + { + return this->p == Py_None; + } + + PyObject* acquire_or_None() const noexcept + { + PyObject* result = this->p ? reinterpret_cast(this->p) : Py_None; + Py_INCREF(result); + return result; + } + + explicit operator bool() const noexcept + { + return this->p != nullptr; + } + + bool operator!() const noexcept + { + return this->p == nullptr; + } + + Py_ssize_t REFCNT() const noexcept + { + return p ? Py_REFCNT(p) : -42; + } + + PyTypeObject* TYPE() const noexcept + { + return p ? Py_TYPE(p) : nullptr; + } + + inline OwnedObject PyStr() const noexcept; + inline const std::string as_str() const noexcept; + inline OwnedObject PyGetAttr(const ImmortalObject& name) const noexcept; + inline OwnedObject PyRequireAttr(const char* const name) const; + inline OwnedObject PyRequireAttr(const ImmortalString& name) const; + inline OwnedObject PyCall(const BorrowedObject& arg) const; + inline OwnedObject PyCall(PyGreenlet* arg) const ; + inline OwnedObject PyCall(PyObject* arg) const ; + // PyObject_Call(this, args, kwargs); + inline OwnedObject PyCall(const BorrowedObject args, + const BorrowedObject kwargs) const; + inline OwnedObject PyCall(const OwnedObject& args, + const OwnedObject& kwargs) const; + + protected: + void _set_raw_pointer(void* t) + { + TC(t); + p = reinterpret_cast(t); + } + void* _get_raw_pointer() const + { + return p; + } + }; + +#ifdef GREENLET_USE_STDIO + template + std::ostream& operator<<(std::ostream& os, const PyObjectPointer& s) + { + const std::type_info& t = typeid(s); + os << t.name() + << "(addr=" << s.borrow() + << ", refcnt=" << s.REFCNT() + << ", value=" << s.as_str() + << ")"; + + return os; + } +#endif + + template + inline bool operator==(const PyObjectPointer& lhs, const PyObject* const rhs) noexcept + { + return static_cast(lhs.borrow_o()) == static_cast(rhs); + } + + template + inline bool operator==(const PyObjectPointer& lhs, const PyObjectPointer& rhs) noexcept + { + return lhs.borrow_o() == rhs.borrow_o(); + } + + template + inline bool operator!=(const PyObjectPointer& lhs, + const PyObjectPointer& rhs) noexcept + { + return lhs.borrow_o() != rhs.borrow_o(); + } + + template + class OwnedReference : public PyObjectPointer + { + private: + friend class OwnedList; + + protected: + explicit OwnedReference(T* it) : PyObjectPointer(it) + { + } + + public: + + // Constructors + + static OwnedReference 
consuming(PyObject* p) + { + return OwnedReference(reinterpret_cast(p)); + } + + static OwnedReference owning(T* p) + { + OwnedReference result(p); + Py_XINCREF(result.p); + return result; + } + + OwnedReference() : PyObjectPointer(nullptr) + {} + + explicit OwnedReference(const PyObjectPointer<>& other) + : PyObjectPointer(nullptr) + { + T* op = other.borrow(); + TC(op); + this->p = other.borrow(); + Py_XINCREF(this->p); + } + + // It would be good to make use of the C++11 distinction + // between move and copy operations, e.g., constructing from a + // pointer should be a move operation. + // In the common case of ``OwnedObject x = Py_SomeFunction()``, + // the call to the copy constructor will be elided completely. + OwnedReference(const OwnedReference& other) + : PyObjectPointer(other.p) + { + Py_XINCREF(this->p); + } + + static OwnedReference None() + { + Py_INCREF(Py_None); + return OwnedReference(Py_None); + } + + // We can assign from exactly our type without any extra checking + OwnedReference& operator=(const OwnedReference& other) + { + Py_XINCREF(other.p); + const T* tmp = this->p; + this->p = other.p; + Py_XDECREF(tmp); + return *this; + } + + OwnedReference& operator=(const BorrowedReference other) + { + return this->operator=(other.borrow()); + } + + OwnedReference& operator=(T* const other) + { + TC(other); + Py_XINCREF(other); + T* tmp = this->p; + this->p = other; + Py_XDECREF(tmp); + return *this; + } + + // We can assign from an arbitrary reference type + // if it passes our check. + template + OwnedReference& operator=(const OwnedReference& other) + { + X* op = other.borrow(); + TC(op); + return this->operator=(reinterpret_cast(op)); + } + + inline void steal(T* other) + { + assert(this->p == nullptr); + TC(other); + this->p = other; + } + + T* relinquish_ownership() + { + T* result = this->p; + this->p = nullptr; + return result; + } + + T* acquire() const + { + // Return a new reference. + // TODO: This may go away when we have reference objects + // throughout the code. + Py_XINCREF(this->p); + return this->p; + } + + // Nothing else declares a destructor, we're the leaf, so we + // should be able to get away without virtual. + ~OwnedReference() + { + Py_CLEAR(this->p); + } + + void CLEAR() + { + Py_CLEAR(this->p); + assert(this->p == nullptr); + } + }; + + static inline + void operator<<=(PyObject*& target, OwnedObject& o) + { + target = o.relinquish_ownership(); + } + + + class NewReference : public OwnedObject + { + private: + G_NO_COPIES_OF_CLS(NewReference); + public: + // Consumes the reference. Only use this + // for API return values. 
+ NewReference(PyObject* it) : OwnedObject(it) + { + } + }; + + class NewDictReference : public NewReference + { + private: + G_NO_COPIES_OF_CLS(NewDictReference); + public: + NewDictReference() : NewReference(PyDict_New()) + { + if (!this->p) { + throw PyErrOccurred(); + } + } + + void SetItem(const char* const key, PyObject* value) + { + Require(PyDict_SetItemString(this->p, key, value)); + } + + void SetItem(const PyObjectPointer<>& key, PyObject* value) + { + Require(PyDict_SetItem(this->p, key.borrow_o(), value)); + } + }; + + template + class _OwnedGreenlet: public OwnedReference + { + private: + protected: + _OwnedGreenlet(T* it) : OwnedReference(it) + {} + + public: + _OwnedGreenlet() : OwnedReference() + {} + + _OwnedGreenlet(const _OwnedGreenlet& other) : OwnedReference(other) + { + } + _OwnedGreenlet(OwnedMainGreenlet& other) : + OwnedReference(reinterpret_cast(other.acquire())) + { + } + _OwnedGreenlet(const BorrowedGreenlet& other); + // Steals a reference. + static _OwnedGreenlet consuming(PyGreenlet* it) + { + return _OwnedGreenlet(reinterpret_cast(it)); + } + + inline _OwnedGreenlet& operator=(const OwnedGreenlet& other) + { + return this->operator=(other.borrow()); + } + + inline _OwnedGreenlet& operator=(const BorrowedGreenlet& other); + + _OwnedGreenlet& operator=(const OwnedMainGreenlet& other) + { + PyGreenlet* owned = other.acquire(); + Py_XDECREF(this->p); + this->p = reinterpret_cast(owned); + return *this; + } + + _OwnedGreenlet& operator=(T* const other) + { + OwnedReference::operator=(other); + return *this; + } + + T* relinquish_ownership() + { + T* result = this->p; + this->p = nullptr; + return result; + } + + PyObject* relinquish_ownership_o() + { + return reinterpret_cast(relinquish_ownership()); + } + + inline Greenlet* operator->() const noexcept; + inline operator Greenlet*() const noexcept; + }; + + template + class BorrowedReference : public PyObjectPointer + { + public: + // Allow implicit creation from PyObject* pointers as we + // transition to using these classes. Also allow automatic + // conversion to PyObject* for passing to C API calls and even + // for Py_INCREF/DECREF, because we ourselves do no memory management. 
+ BorrowedReference(T* it) : PyObjectPointer(it) + {} + + BorrowedReference(const PyObjectPointer& ref) : PyObjectPointer(ref.borrow()) + {} + + BorrowedReference() : PyObjectPointer(nullptr) + {} + + operator T*() const + { + return this->p; + } + }; + + typedef BorrowedReference BorrowedObject; + //typedef BorrowedReference BorrowedGreenlet; + + template + class _BorrowedGreenlet : public BorrowedReference + { + public: + _BorrowedGreenlet() : + BorrowedReference(nullptr) + {} + + _BorrowedGreenlet(T* it) : + BorrowedReference(it) + {} + + _BorrowedGreenlet(const BorrowedObject& it); + + _BorrowedGreenlet(const OwnedGreenlet& it) : + BorrowedReference(it.borrow()) + {} + + _BorrowedGreenlet& operator=(const BorrowedObject& other); + + // We get one of these for PyGreenlet, but one for PyObject + // is handy as well + operator PyObject*() const + { + return reinterpret_cast(this->p); + } + Greenlet* operator->() const noexcept; + operator Greenlet*() const noexcept; + }; + + typedef _BorrowedGreenlet BorrowedGreenlet; + + template + _OwnedGreenlet::_OwnedGreenlet(const BorrowedGreenlet& other) + : OwnedReference(reinterpret_cast(other.borrow())) + { + Py_XINCREF(this->p); + } + + + class BorrowedMainGreenlet + : public _BorrowedGreenlet + { + public: + BorrowedMainGreenlet(const OwnedMainGreenlet& it) : + _BorrowedGreenlet(it.borrow()) + {} + BorrowedMainGreenlet(PyGreenlet* it=nullptr) + : _BorrowedGreenlet(it) + {} + }; + + template + _OwnedGreenlet& _OwnedGreenlet::operator=(const BorrowedGreenlet& other) + { + return this->operator=(other.borrow()); + } + + + class ImmortalObject : public PyObjectPointer<> + { + private: + G_NO_ASSIGNMENT_OF_CLS(ImmortalObject); + public: + explicit ImmortalObject(PyObject* it) : PyObjectPointer<>(it) + { + } + + ImmortalObject(const ImmortalObject& other) + : PyObjectPointer<>(other.p) + { + + } + + /** + * Become the new owner of the object. Does not change the + * reference count. + */ + ImmortalObject& operator=(PyObject* it) + { + assert(this->p == nullptr); + this->p = it; + return *this; + } + + static ImmortalObject consuming(PyObject* it) + { + return ImmortalObject(it); + } + + inline operator PyObject*() const + { + return this->p; + } + }; + + class ImmortalString : public ImmortalObject + { + private: + G_NO_COPIES_OF_CLS(ImmortalString); + const char* str; + public: + ImmortalString(const char* const str) : + ImmortalObject(str ? Require(PyUnicode_InternFromString(str)) : nullptr) + { + this->str = str; + } + + inline ImmortalString& operator=(const char* const str) + { + if (!this->p) { + this->p = Require(PyUnicode_InternFromString(str)); + this->str = str; + } + else { + assert(this->str == str); + } + return *this; + } + + inline operator std::string() const + { + return this->str; + } + + }; + + class ImmortalEventName : public ImmortalString + { + private: + G_NO_COPIES_OF_CLS(ImmortalEventName); + public: + ImmortalEventName(const char* const str) : ImmortalString(str) + {} + }; + + class ImmortalException : public ImmortalObject + { + private: + G_NO_COPIES_OF_CLS(ImmortalException); + public: + ImmortalException(const char* const name, PyObject* base=nullptr) : + ImmortalObject(name + // Python 2.7 isn't const correct + ? 
+    class ImmortalException : public ImmortalObject
+    {
+    private:
+        G_NO_COPIES_OF_CLS(ImmortalException);
+    public:
+        ImmortalException(const char* const name, PyObject* base=nullptr) :
+            ImmortalObject(name
+                           // Python 2.7 isn't const correct
+                           ? Require(PyErr_NewException((char*)name, base, nullptr))
+                           : nullptr)
+        {}
+
+        inline bool PyExceptionMatches() const
+        {
+            return PyErr_ExceptionMatches(this->p) > 0;
+        }
+
+    };
+
+    template<typename T, TypeChecker TC>
+    inline OwnedObject PyObjectPointer<T, TC>::PyStr() const noexcept
+    {
+        if (!this->p) {
+            return OwnedObject();
+        }
+        return OwnedObject::consuming(PyObject_Str(reinterpret_cast<PyObject*>(this->p)));
+    }
+
+    template<typename T, TypeChecker TC>
+    inline const std::string PyObjectPointer<T, TC>::as_str() const noexcept
+    {
+        // NOTE: This is not Python exception safe.
+        if (this->p) {
+            // The Python APIs return a cached char* value that's only valid
+            // as long as the original object stays around, and we're
+            // about to (probably) toss it. Hence the copy to std::string.
+            OwnedObject py_str = this->PyStr();
+            if (!py_str) {
+                return "(nil)";
+            }
+            return PyUnicode_AsUTF8(py_str.borrow());
+        }
+        return "(nil)";
+    }
+
+    template<typename T, TypeChecker TC>
+    inline OwnedObject PyObjectPointer<T, TC>::PyGetAttr(const ImmortalObject& name) const noexcept
+    {
+        assert(this->p);
+        return OwnedObject::consuming(PyObject_GetAttr(reinterpret_cast<PyObject*>(this->p), name));
+    }
+
+    template<typename T, TypeChecker TC>
+    inline OwnedObject PyObjectPointer<T, TC>::PyRequireAttr(const char* const name) const
+    {
+        assert(this->p);
+        return OwnedObject::consuming(Require(PyObject_GetAttrString(this->p, name), name));
+    }
+
+    template<typename T, TypeChecker TC>
+    inline OwnedObject PyObjectPointer<T, TC>::PyRequireAttr(const ImmortalString& name) const
+    {
+        assert(this->p);
+        return OwnedObject::consuming(Require(
+            PyObject_GetAttr(
+                reinterpret_cast<PyObject*>(this->p),
+                name
+            ),
+            name
+        ));
+    }
+
+    template<typename T, TypeChecker TC>
+    inline OwnedObject PyObjectPointer<T, TC>::PyCall(const BorrowedObject& arg) const
+    {
+        return this->PyCall(arg.borrow());
+    }
+
+    template<typename T, TypeChecker TC>
+    inline OwnedObject PyObjectPointer<T, TC>::PyCall(PyGreenlet* arg) const
+    {
+        return this->PyCall(reinterpret_cast<PyObject*>(arg));
+    }
+
+    template<typename T, TypeChecker TC>
+    inline OwnedObject PyObjectPointer<T, TC>::PyCall(PyObject* arg) const
+    {
+        assert(this->p);
+        return OwnedObject::consuming(PyObject_CallFunctionObjArgs(this->p, arg, NULL));
+    }
+
+    template<typename T, TypeChecker TC>
+    inline OwnedObject PyObjectPointer<T, TC>::PyCall(const BorrowedObject args,
+                                                      const BorrowedObject kwargs) const
+    {
+        assert(this->p);
+        return OwnedObject::consuming(PyObject_Call(this->p, args, kwargs));
+    }
+
+    template<typename T, TypeChecker TC>
+    inline OwnedObject PyObjectPointer<T, TC>::PyCall(const OwnedObject& args,
+                                                      const OwnedObject& kwargs) const
+    {
+        assert(this->p);
+        return OwnedObject::consuming(PyObject_Call(this->p, args.borrow(), kwargs.borrow()));
+    }
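These PyRequireAttr/PyCall wrappers exist so that the reference produced by every C API call is owned the moment it is created. For contrast, a hand-written "look up an attribute and call it" must get the decref right on each path; a sketch under that assumption, not greenlet code:

    #include <Python.h>

    // Returns a new reference, or NULL with an exception set.
    static PyObject* call_method_no_args(PyObject* obj, const char* name)
    {
        PyObject* meth = PyObject_GetAttrString(obj, name);  // new reference
        if (!meth) {
            return nullptr;                 // AttributeError already set
        }
        // NULL-terminated argument list: call with no arguments.
        PyObject* result = PyObject_CallFunctionObjArgs(meth, NULL);
        Py_DECREF(meth);                    // released on success and failure alike
        return result;
    }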
+    inline void
+    ListChecker(void * p)
+    {
+        if (!p) {
+            return;
+        }
+        if (!PyList_Check(p)) {
+            throw TypeError("Expected a list");
+        }
+    }
+
+    class OwnedList : public OwnedReference<PyObject, ListChecker>
+    {
+    private:
+        G_NO_ASSIGNMENT_OF_CLS(OwnedList);
+    public:
+        // TODO: Would like to use move.
+        explicit OwnedList(const OwnedObject& other)
+            : OwnedReference<PyObject, ListChecker>(other)
+        {
+        }
+
+        OwnedList& operator=(const OwnedObject& other)
+        {
+            if (other && PyList_Check(other.p)) {
+                // Valid list. Own a new reference to it, discard the
+                // reference to what we did own.
+                PyObject* new_ptr = other.p;
+                Py_INCREF(new_ptr);
+                Py_XDECREF(this->p);
+                this->p = new_ptr;
+            }
+            else {
+                // Either the other object was NULL (an error) or it
+                // wasn't a list. Either way, we're now invalidated.
+                Py_XDECREF(this->p);
+                this->p = nullptr;
+            }
+            return *this;
+        }
+
+        inline bool empty() const
+        {
+            return PyList_GET_SIZE(p) == 0;
+        }
+
+        inline Py_ssize_t size() const
+        {
+            return PyList_GET_SIZE(p);
+        }
+
+        inline BorrowedObject at(const Py_ssize_t index) const
+        {
+            return PyList_GET_ITEM(p, index);
+        }
+
+        inline void clear()
+        {
+            PyList_SetSlice(p, 0, PyList_GET_SIZE(p), NULL);
+        }
+    };
+
+    // Use this to represent the module object used at module init
+    // time.
+    // This could either be a borrowed (Py2) or new (Py3) reference;
+    // either way, we don't want to do any memory management
+    // on it here, Python itself will handle that.
+    // XXX: Actually, that's not quite right. On Python 3, if an
+    // exception occurs before we return to the interpreter, this will
+    // leak; but all previous versions also had that problem.
+    class CreatedModule : public PyObjectPointer<>
+    {
+    private:
+        G_NO_COPIES_OF_CLS(CreatedModule);
+    public:
+        CreatedModule(PyModuleDef& mod_def) : PyObjectPointer<>(
+            Require(PyModule_Create(&mod_def)))
+        {
+        }
+
+        // PyAddObject(): Add a reference to the object to the module.
+        // On return, the reference count of the object is unchanged.
+        //
+        // The docs warn that PyModule_AddObject only steals the
+        // reference on success, so if it fails after we've incref'd
+        // or allocated, we're responsible for the decref.
+        void PyAddObject(const char* name, const long new_bool)
+        {
+            OwnedObject p = OwnedObject::consuming(Require(PyBool_FromLong(new_bool)));
+            this->PyAddObject(name, p);
+        }
+
+        void PyAddObject(const char* name, const OwnedObject& new_object)
+        {
+            // The caller already owns a reference they will decref
+            // when their variable goes out of scope, we still need to
+            // incref/decref.
+            this->PyAddObject(name, new_object.borrow());
+        }
+
+        void PyAddObject(const char* name, const ImmortalObject& new_object)
+        {
+            this->PyAddObject(name, new_object.borrow());
+        }
+
+        void PyAddObject(const char* name, PyTypeObject& type)
+        {
+            this->PyAddObject(name, reinterpret_cast<PyObject*>(&type));
+        }
+
+        void PyAddObject(const char* name, PyObject* new_object)
+        {
+            Py_INCREF(new_object);
+            try {
+                Require(PyModule_AddObject(this->p, name, new_object));
+            }
+            catch (const PyErrOccurred&) {
+                // The reference was not stolen on failure; release the
+                // one we took above, not the module itself.
+                Py_DECREF(new_object);
+                throw;
+            }
+        }
+    };
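All the PyAddObject overloads funnel into the PyObject* version, which handles the documented quirk that PyModule_AddObject steals the reference only when it succeeds. The same contract without the exception machinery, as a stand-alone sketch (the helper name is invented):

    #include <Python.h>

    static int add_object_checked(PyObject* module, const char* name, PyObject* obj)
    {
        Py_INCREF(obj);                       // the module gets its own reference
        if (PyModule_AddObject(module, name, obj) < 0) {
            Py_DECREF(obj);                   // not stolen on failure; undo our incref
            return -1;
        }
        return 0;                             // stolen on success; nothing to undo
    }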
+    class PyErrFetchParam : public PyObjectPointer<>
+    {
+        // Not an owned object, because we can't be initialized with
+        // one, and we only sometimes acquire ownership.
+    private:
+        G_NO_COPIES_OF_CLS(PyErrFetchParam);
+    public:
+        // To allow declaring these and passing them to
+        // PyErr_Fetch we implement the empty constructor,
+        // and the address operator.
+        PyErrFetchParam() : PyObjectPointer<>(nullptr)
+        {
+        }
+
+        PyObject** operator&()
+        {
+            return &this->p;
+        }
+
+        // This allows us to pass one directly without the &,
+        // BUT it has higher precedence than the bool operator
+        // if it's not explicit.
+        operator PyObject**()
+        {
+            return &this->p;
+        }
+
+        // We don't want to be able to pass these to Py_DECREF and
+        // such so we don't have the implicit PyObject* conversion.
+
+        inline PyObject* relinquish_ownership()
+        {
+            PyObject* result = this->p;
+            this->p = nullptr;
+            return result;
+        }
+
+        ~PyErrFetchParam()
+        {
+            Py_XDECREF(p);
+        }
+    };
+
+    class OwnedErrPiece : public OwnedObject
+    {
+    private:
+
+    public:
+        // Unlike OwnedObject, this increments the refcount.
+        OwnedErrPiece(PyObject* p=nullptr) : OwnedObject(p)
+        {
+            this->acquire();
+        }
+
+        PyObject** operator&()
+        {
+            return &this->p;
+        }
+
+        inline operator PyObject*() const
+        {
+            return this->p;
+        }
+
+        operator PyTypeObject*() const
+        {
+            return reinterpret_cast<PyTypeObject*>(this->p);
+        }
+    };
+
+    class PyErrPieces
+    {
+    private:
+        OwnedErrPiece type;
+        OwnedErrPiece instance;
+        OwnedErrPiece traceback;
+        bool restored;
+    public:
+        // Takes new references; if we're destroyed before
+        // restoring the error, we drop the references.
+        PyErrPieces(PyObject* t, PyObject* v, PyObject* tb) :
+            type(t),
+            instance(v),
+            traceback(tb),
+            restored(0)
+        {
+            this->normalize();
+        }
+
+        PyErrPieces() :
+            restored(0)
+        {
+            // PyErr_Fetch transfers ownership to us, so
+            // we don't actually need to INCREF; but we *do*
+            // need to DECREF if we're not restored.
+            PyErrFetchParam t, v, tb;
+            PyErr_Fetch(&t, &v, &tb);
+            type.steal(t.relinquish_ownership());
+            instance.steal(v.relinquish_ownership());
+            traceback.steal(tb.relinquish_ownership());
+        }
+
+        void PyErrRestore()
+        {
+            // can only do this once
+            assert(!this->restored);
+            this->restored = true;
+            PyErr_Restore(
+                this->type.relinquish_ownership(),
+                this->instance.relinquish_ownership(),
+                this->traceback.relinquish_ownership());
+            assert(!this->type && !this->instance && !this->traceback);
+        }
+
+    private:
+        void normalize()
+        {
+            // First, check the traceback argument, replacing None
+            // with NULL.
+            if (traceback.is_None()) {
+                traceback = nullptr;
+            }
+
+            if (traceback && !PyTraceBack_Check(traceback.borrow())) {
+                throw PyErrOccurred(PyExc_TypeError,
+                                    "throw() third argument must be a traceback object");
+            }
+
+            if (PyExceptionClass_Check(type)) {
+                // If we just had a type, we'll now have a type and
+                // instance.
+                // The type's refcount will have gone up by one
+                // because of the instance and the instance will have
+                // a refcount of one. Either way, we owned, and still
+                // do own, exactly one reference.
+                PyErr_NormalizeException(&type, &instance, &traceback);
+
+            }
+            else if (PyExceptionInstance_Check(type)) {
+                /* Raising an instance --- usually that means an
+                   object that is a subclass of BaseException, but on
+                   Python 2, that can also mean an arbitrary old-style
+                   object. The value should be a dummy. */
+                if (instance && !instance.is_None()) {
+                    throw PyErrOccurred(
+                        PyExc_TypeError,
+                        "instance exception may not have a separate value");
+                }
+                /* Normalize to raise <class>, <instance> */
+                this->instance = this->type;
+                this->type = PyExceptionInstance_Class(instance.borrow());
+
+                /*
+                  It would be tempting to do this:
+
+                  Py_ssize_t type_count = Py_REFCNT(Py_TYPE(instance.borrow()));
+                  this->type = PyExceptionInstance_Class(instance.borrow());
+                  assert(this->type.REFCNT() == type_count + 1);
+
+                  But that doesn't work on Python 2 in the case of
+                  old-style instances: The result of Py_TYPE is going to
+                  be the global shared <type 'instance'> that all
+                  old-style classes have, while the return of Instance_Class()
+                  will be the Python-level class object. The two are unrelated.
+                */
+            }
+            else {
+                /* Not something you can raise. throw() fails. */
+                PyErr_Format(PyExc_TypeError,
+                             "exceptions must be classes, or instances, not %s",
+                             Py_TYPE(type.borrow())->tp_name);
+                throw PyErrOccurred();
+            }
+        }
+    };
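PyErrPieces packages the fetch/normalize/restore dance so ownership of the three error components cannot leak. The underlying technique, sketched directly with PyErr_Fetch/PyErr_Restore (the pre-3.12 API this code targets; both transfer ownership as noted in the comments):

    #include <Python.h>

    static void run_while_preserving_error(void (*callback)(void))
    {
        PyObject *type, *value, *traceback;
        PyErr_Fetch(&type, &value, &traceback);  // clears the indicator; we own all three

        callback();  // may set and clear its own exceptions freely

        PyErr_Restore(type, value, traceback);   // steals the references back
    }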
+    // PyArg_Parse's O argument returns a borrowed reference.
+    class PyArgParseParam : public BorrowedObject
+    {
+    private:
+        G_NO_COPIES_OF_CLS(PyArgParseParam);
+    public:
+        explicit PyArgParseParam(PyObject* p=nullptr) : BorrowedObject(p)
+        {
+        }
+
+        inline PyObject** operator&()
+        {
+            return &this->p;
+        }
+    };
+
+};};
+
+#endif
diff --git a/.venv/lib/python3.12/site-packages/greenlet/greenlet_slp_switch.hpp b/.venv/lib/python3.12/site-packages/greenlet/greenlet_slp_switch.hpp
new file mode 100644
index 00000000..bd4b7ae1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/greenlet_slp_switch.hpp
@@ -0,0 +1,99 @@
+#ifndef GREENLET_SLP_SWITCH_HPP
+#define GREENLET_SLP_SWITCH_HPP
+
+#include "greenlet_compiler_compat.hpp"
+#include "greenlet_refs.hpp"
+
+/*
+ * the following macros are spliced into the OS/compiler
+ * specific code, in order to simplify maintenance.
+ */
+// We can save about 10% of the time it takes to switch greenlets if
+// we thread the thread state through the slp_save_state() and the
+// following slp_restore_state() calls from
+// slp_switch()->g_switchstack() (which already needs to access it).
+//
+// However:
+//
+// that requires changing the prototypes and implementations of the
+// switching functions. If we just change the prototype of
+// slp_switch() to accept the argument and update the macros, without
+// changing the implementation of slp_switch(), we get crashes on
+// 64-bit Linux and 32-bit x86 (for reasons that aren't 100% clear);
+// on the other hand, 64-bit macOS seems to be fine. Also, 64-bit
+// Windows is an issue because slp_switch is written fully in assembly
+// and currently ignores its argument so some code would have to be
+// adjusted there to pass the argument on to the
+// ``slp_save_state_asm()`` function (but interestingly, because of
+// the calling convention, the extra argument is just ignored and
+// things function fine, albeit slower, if we just modify
+// ``slp_save_state_asm()`` to fetch the pointer to pass to the
+// macro.)
+//
+// Our compromise is to use a *global*, untracked, weak, pointer
+// to the necessary thread state during the process of switching only.
+// This is safe because we're protected by the GIL, and if we're
+// running this code, the thread isn't exiting. This also nets us a
+// 10-12% speed improvement.
+
+static greenlet::Greenlet* volatile switching_thread_state = nullptr;
+
+
+extern "C" {
+static int GREENLET_NOINLINE(slp_save_state_trampoline)(char* stackref);
+static void GREENLET_NOINLINE(slp_restore_state_trampoline)();
+}
+
+
+#define SLP_SAVE_STATE(stackref, stsizediff) \
+do { \
+    assert(switching_thread_state); \
+    stackref += STACK_MAGIC; \
+    if (slp_save_state_trampoline((char*)stackref)) \
+        return -1; \
+    if (!switching_thread_state->active()) \
+        return 1; \
+    stsizediff = switching_thread_state->stack_start() - (char*)stackref; \
+} while (0)
+
+#define SLP_RESTORE_STATE() slp_restore_state_trampoline()
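The SLP_SAVE_STATE macro above encodes a three-way contract: return -1 to abort the switch on error, 1 when the target greenlet has no saved stack to restore (it is starting fresh), and 0 to proceed, with stsizediff telling the platform code how far to slide the stack and frame pointers. A plain C++ model of that control flow, with stand-in state; nothing here is real greenlet code:

    // Stand-in for what the real macro reads from switching_thread_state.
    static char* target_stack_start = nullptr;

    static int save_state(char* stackref)
    {
        (void)stackref;  // the real code copies the live C stack to the heap
        return 0;        // 0 = saved successfully
    }

    static int model_switch(char* current_sp)
    {
        long stsizediff;
        if (save_state(current_sp)) {
            return -1;                    // error: abandon the switch
        }
        if (!target_stack_start) {
            return 1;                     // target never ran: nothing to restore
        }
        stsizediff = target_stack_start - current_sp;
        (void)stsizediff;                 // real code adds this to sp and fp, then restores
        return 0;
    }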
+
+#define SLP_EVAL
+extern "C" {
+#define slp_switch GREENLET_NOINLINE(slp_switch)
+#include "slp_platformselect.h"
+}
+#undef slp_switch
+
+#ifndef STACK_MAGIC
+# error \
+    "greenlet needs to be ported to this platform, or taught how to detect your compiler properly."
+#endif /* !STACK_MAGIC */
+
+
+
+#ifdef EXTERNAL_ASM
+/* CCP addition: Make these functions, to be called from assembler.
+ * The token include file for the given platform should enable the
+ * EXTERNAL_ASM define so that this is included.
+ */
+extern "C" {
+intptr_t
+slp_save_state_asm(intptr_t* ref)
+{
+    intptr_t diff;
+    SLP_SAVE_STATE(ref, diff);
+    return diff;
+}
+
+void
+slp_restore_state_asm(void)
+{
+    SLP_RESTORE_STATE();
+}
+
+extern int slp_switch(void);
+};
+#endif
+
+#endif
diff --git a/.venv/lib/python3.12/site-packages/greenlet/greenlet_thread_support.hpp b/.venv/lib/python3.12/site-packages/greenlet/greenlet_thread_support.hpp
new file mode 100644
index 00000000..3ded7d2b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/greenlet_thread_support.hpp
@@ -0,0 +1,31 @@
+#ifndef GREENLET_THREAD_SUPPORT_HPP
+#define GREENLET_THREAD_SUPPORT_HPP
+
+/**
+ * Defines various utility functions to help greenlet integrate well
+ * with threads. This used to be needed when we supported Python
+ * 2.7 on Windows, which used a very old compiler. We wrote an
+ * alternative implementation using Python APIs and POSIX or Windows
+ * APIs, but that's no longer needed. So this file is a shadow of its
+ * former self --- but may be needed in the future.
+ */
+
+#include <stdexcept>
+#include <thread>
+#include <mutex>
+
+#include "greenlet_compiler_compat.hpp"
+
+namespace greenlet {
+    typedef std::mutex Mutex;
+    typedef std::lock_guard<std::mutex> LockGuard;
+    class LockInitError : public std::runtime_error
+    {
+    public:
+        LockInitError(const char* what) : std::runtime_error(what)
+        {};
+    };
+};
+
+
+#endif /* GREENLET_THREAD_SUPPORT_HPP */
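The header above reduces greenlet's thread support to two typedefs over the standard library; usage is ordinary RAII locking. A tiny illustrative sketch (the lock and counter are invented, not greenlet state):

    #include <mutex>

    static std::mutex registry_lock;
    static int registered_count = 0;

    static void register_thread()
    {
        // Equivalent to greenlet's LockGuard: unlocks automatically on
        // scope exit, including when an exception propagates.
        std::lock_guard<std::mutex> guard(registry_lock);
        ++registered_count;
    }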
diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/__init__.py b/.venv/lib/python3.12/site-packages/greenlet/platform/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/platform/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..35193102
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/platform/__pycache__/__init__.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/setup_switch_x64_masm.cmd b/.venv/lib/python3.12/site-packages/greenlet/platform/setup_switch_x64_masm.cmd
new file mode 100644
index 00000000..09285955
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/platform/setup_switch_x64_masm.cmd
@@ -0,0 +1,2 @@
+call "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcvarsall.bat" amd64
+ml64 /nologo /c /Fo switch_x64_masm.obj switch_x64_masm.asm
diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_aarch64_gcc.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_aarch64_gcc.h
new file mode 100644
index 00000000..058617c4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_aarch64_gcc.h
@@ -0,0 +1,124 @@
+/*
+ * this is the internal transfer function.
+ *
+ * HISTORY
+ * 07-Sep-16 Add clang support using x register naming. Fredrik Fornwall
+ * 13-Apr-13 Add support for strange GCC caller-save decisions
+ * 08-Apr-13 File creation. Michael Matz
+ *
+ * NOTES
+ *
+ * Simply save all callee saved registers
+ *
+ */
+
+#define STACK_REFPLUS 1
+
+#ifdef SLP_EVAL
+#define STACK_MAGIC 0
+#define REGS_TO_SAVE "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26", \
+                     "x27", "x28", "x30" /* aka lr */, \
+                     "v8", "v9", "v10", "v11", \
+                     "v12", "v13", "v14", "v15"
+
+/*
+ * Recall:
+   asm asm-qualifiers ( AssemblerTemplate
+                 : OutputOperands
+                 [ : InputOperands
+                 [ : Clobbers ] ])
+
+   or (if asm-qualifiers contains 'goto')
+
+   asm asm-qualifiers ( AssemblerTemplate
+                       : OutputOperands
+                       : InputOperands
+                       : Clobbers
+                       : GotoLabels)
+
+   and OutputOperands are
+
+    [ [asmSymbolicName] ] constraint (cvariablename)
+
+   When a name is given, refer to it as ``%[the name]``.
+   When not given, ``%i`` where ``i`` is the zero-based index.
+
+   constraints starting with ``=`` mean write-only; ``+`` means
+   reading and writing.
+
+   This is followed by ``r`` (must be register) or ``m`` (must be memory)
+   and these can be combined.
+
+   The ``cvariablename`` is actually an lvalue expression.
+
+   AArch64 has 31 general purpose registers. If named X0... they are
+   64-bit. If named W0... they are the bottom 32 bits of the
+   corresponding 64 bit register.
+
+   XZR and WZR are hardcoded to 0, and ignore writes.
+
+   Arguments are in X0..X7. C++ uses X0 for ``this``. X0 holds simple return
+   values (?)
+
+   Whenever a W register is written, the top half of the X register is zeroed.
+ */
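The constraint syntax walked through above can be seen working in a self-contained function before reading slp_switch itself: one "=r" (write-only register) output, two "r" register inputs, and zero-based %0/%1/%2 operand references. AArch64-only and purely illustrative:

    #if defined(__aarch64__)
    static long add_with_asm(long a, long b)
    {
        long result;
        __asm__ volatile (
            "add %0, %1, %2"          // result = a + b
            : "=r" (result)           // %0: output, any general-purpose register
            : "r" (a), "r" (b)        // %1, %2: inputs in registers
            : /* no clobbers */
        );
        return result;
    }
    #endif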
+static int
+slp_switch(void)
+{
+    int err;
+    void *fp;
+    /* Windows uses a 32-bit long on a 64-bit platform, unlike the rest of
+       the world, and in theory we can be compiled with GCC/llvm on 64-bit
+       Windows. So we need a fixed-width type.
+    */
+    int64_t *stackref, stsizediff;
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    __asm__ volatile ("str x29, %0" : "=m"(fp) : : );
+    __asm__ ("mov %0, sp" : "=r" (stackref));
+    {
+        SLP_SAVE_STATE(stackref, stsizediff);
+        __asm__ volatile (
+            "add sp,sp,%0\n"
+            "add x29,x29,%0\n"
+            :
+            : "r" (stsizediff)
+            );
+        SLP_RESTORE_STATE();
+        /* SLP_SAVE_STATE macro contains some return statements
+           (of -1 and 1). It falls through only when
+           the return value of slp_save_state() is zero, which
+           is placed in x0.
+           In that case we (slp_switch) also want to return zero
+           (also in x0 of course).
+           Now, some GCC versions (seen with 4.8) think it's a
+           good idea to save/restore x0 around the call to
+           slp_restore_state(), instead of simply zeroing it
+           at the return below. But slp_restore_state
+           writes random values to the stack slot used for this
+           save/restore (from when it once was saved above in
+           SLP_SAVE_STATE, when it was still uninitialized), so
+           "restoring" that precious zero actually makes us
+           return random values. There are some ways to make
+           GCC not use that zero value in the normal return path
+           (e.g. making err volatile, but that costs a little
+           stack space), and the simplest is to call a function
+           that returns an unknown value (which happens to be zero),
+           so the saved/restored value is unused.
+
+           Thus, this line stores a 0 into the ``err`` variable
+           (which must be held in a register for this instruction,
+           of course). The ``w`` qualifier causes the instruction
+           to use W0 instead of X0, otherwise we get a warning
+           about a value size mismatch (because err is an int,
+           and aarch64 platforms are LP64: 32-bit int, 64 bit long
+           and pointer).
+        */
+        __asm__ volatile ("mov %w0, #0" : "=r" (err));
+    }
+    __asm__ volatile ("ldr x29, %0" : : "m" (fp) :);
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    return err;
+}
+
+#endif
diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_alpha_unix.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_alpha_unix.h
new file mode 100644
index 00000000..7e07abfc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_alpha_unix.h
@@ -0,0 +1,30 @@
+#define STACK_REFPLUS 1
+
+#ifdef SLP_EVAL
+#define STACK_MAGIC 0
+
+#define REGS_TO_SAVE "$9", "$10", "$11", "$12", "$13", "$14", "$15", \
+                     "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9"
+
+static int
+slp_switch(void)
+{
+    int ret;
+    long *stackref, stsizediff;
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    __asm__ volatile ("mov $30, %0" : "=r" (stackref) : );
+    {
+        SLP_SAVE_STATE(stackref, stsizediff);
+        __asm__ volatile (
+            "addq $30, %0, $30\n\t"
+            : /* no outputs */
+            : "r" (stsizediff)
+            );
+        SLP_RESTORE_STATE();
+    }
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    __asm__ volatile ("mov $31, %0" : "=r" (ret) : );
+    return ret;
+}
+
+#endif
diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_amd64_unix.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_amd64_unix.h
new file mode 100644
index 00000000..d4701105
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_amd64_unix.h
@@ -0,0 +1,87 @@
+/*
+ * this is the internal transfer function.
+ *
+ * HISTORY
+ * 3-May-13 Ralf Schmitt
+ *      Add support for strange GCC caller-save decisions
+ *      (ported from switch_aarch64_gcc.h)
+ * 18-Aug-11 Alexey Borzenkov
+ *      Correctly save rbp, csr and cw
+ * 01-Apr-04 Hye-Shik Chang
+ *      Ported from i386 to amd64.
+ * 24-Nov-02 Christian Tismer
+ *      needed to add another magic constant to ensure
+ *      that f in slp_eval_frame(PyFrameObject *f)
+ *      gets included into the saved stack area.
+ *      STACK_REFPLUS will probably be 1 in most cases.
+ * 17-Sep-02 Christian Tismer
+ *      after virtualizing stack save/restore, the
+ *      stack size shrunk a bit. Needed to introduce
+ *      an adjustment STACK_MAGIC per platform.
+ * 15-Sep-02 Gerd Woetzel
+ *      slightly changed framework for sparc
+ * 31-Avr-02 Armin Rigo
+ *      Added ebx, esi and edi register-saves.
+ * 01-Mar-02 Samual M. Rushing
+ *      Ported from i386.
+ */
+
+#define STACK_REFPLUS 1
+
+#ifdef SLP_EVAL
+
+/* #define STACK_MAGIC 3 */
+/* the above works fine with gcc 2.96, but 2.95.3 wants this */
+#define STACK_MAGIC 0
+
+#define REGS_TO_SAVE "r12", "r13", "r14", "r15"
+
+static int
+slp_switch(void)
+{
+    int err;
+    void* rbp;
+    void* rbx;
+    unsigned int csr;
+    unsigned short cw;
+    /* This used to be declared 'register', but that does nothing in
+       modern compilers and is explicitly forbidden in some new
+       standards.
*/ + long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("fstcw %0" : "=m" (cw)); + __asm__ volatile ("stmxcsr %0" : "=m" (csr)); + __asm__ volatile ("movq %%rbp, %0" : "=m" (rbp)); + __asm__ volatile ("movq %%rbx, %0" : "=m" (rbx)); + __asm__ ("movq %%rsp, %0" : "=g" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addq %0, %%rsp\n" + "addq %0, %%rbp\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + __asm__ volatile ("xorq %%rax, %%rax" : "=a" (err)); + } + __asm__ volatile ("movq %0, %%rbx" : : "m" (rbx)); + __asm__ volatile ("movq %0, %%rbp" : : "m" (rbp)); + __asm__ volatile ("ldmxcsr %0" : : "m" (csr)); + __asm__ volatile ("fldcw %0" : : "m" (cw)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm32_gcc.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm32_gcc.h new file mode 100644 index 00000000..655003aa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm32_gcc.h @@ -0,0 +1,79 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 14-Aug-06 File creation. Ported from Arm Thumb. Sylvain Baro + * 3-Sep-06 Commented out saving of r1-r3 (r4 already commented out) as I + * read that these do not need to be saved. Also added notes and + * errors related to the frame pointer. Richard Tew. + * + * NOTES + * + * It is not possible to detect if fp is used or not, so the supplied + * switch function needs to support it, so that you can remove it if + * it does not apply to you. + * + * POSSIBLE ERRORS + * + * "fp cannot be used in asm here" + * + * - Try commenting out "fp" in REGS_TO_SAVE. 
+ * + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REG_SP "sp" +#define REG_SPSP "sp,sp" +#ifdef __thumb__ +#define REG_FP "r7" +#define REG_FPFP "r7,r7" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r9", "r10", "r11", "lr" +#else +#define REG_FP "fp" +#define REG_FPFP "fp,fp" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r8", "r9", "r10", "lr" +#endif +#if defined(__SOFTFP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL +#elif defined(__VFP_FP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \ + "d12", "d13", "d14", "d15" +#elif defined(__MAVERICK__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "mvf4", "mvf5", "mvf6", "mvf7", \ + "mvf8", "mvf9", "mvf10", "mvf11", \ + "mvf12", "mvf13", "mvf14", "mvf15" +#else +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "f4", "f5", "f6", "f7" +#endif + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + void *fp; + int *stackref, stsizediff; + int result; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mov r0," REG_FP "\n\tstr r0,%0" : "=m" (fp) : : "r0"); + __asm__ ("mov %0," REG_SP : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add " REG_SPSP ",%0\n" + "add " REG_FPFP ",%0\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ldr r0,%1\n\tmov " REG_FP ",r0\n\tmov %0, #0" : "=r" (result) : "m" (fp) : "r0"); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return result; +} + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm32_ios.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm32_ios.h new file mode 100644 index 00000000..9e640e15 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm32_ios.h @@ -0,0 +1,67 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 31-May-15 iOS support. Ported from arm32. Proton + * + * NOTES + * + * It is not possible to detect if fp is used or not, so the supplied + * switch function needs to support it, so that you can remove it if + * it does not apply to you. + * + * POSSIBLE ERRORS + * + * "fp cannot be used in asm here" + * + * - Try commenting out "fp" in REGS_TO_SAVE. 
+ * + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 +#define REG_SP "sp" +#define REG_SPSP "sp,sp" +#define REG_FP "r7" +#define REG_FPFP "r7,r7" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r10", "r11", "lr" +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \ + "d12", "d13", "d14", "d15" + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + void *fp; + int *stackref, stsizediff, result; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("str " REG_FP ",%0" : "=m" (fp)); + __asm__ ("mov %0," REG_SP : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add " REG_SPSP ",%0\n" + "add " REG_FPFP ",%0\n" + : + : "r" (stsizediff) + : REGS_TO_SAVE /* Clobber registers, force compiler to + * recalculate address of void *fp from REG_SP or REG_FP */ + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ( + "ldr " REG_FP ", %1\n\t" + "mov %0, #0" + : "=r" (result) + : "m" (fp) + : REGS_TO_SAVE /* Force compiler to restore saved registers after this */ + ); + return result; +} + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_masm.asm b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_masm.asm new file mode 100644 index 00000000..29f9c225 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_masm.asm @@ -0,0 +1,53 @@ + AREA switch_arm64_masm, CODE, READONLY; + GLOBAL slp_switch [FUNC] + EXTERN slp_save_state_asm + EXTERN slp_restore_state_asm + +slp_switch + ; push callee saved registers to stack + stp x19, x20, [sp, #-16]! + stp x21, x22, [sp, #-16]! + stp x23, x24, [sp, #-16]! + stp x25, x26, [sp, #-16]! + stp x27, x28, [sp, #-16]! + stp x29, x30, [sp, #-16]! + stp d8, d9, [sp, #-16]! + stp d10, d11, [sp, #-16]! + stp d12, d13, [sp, #-16]! + stp d14, d15, [sp, #-16]! + + ; call slp_save_state_asm with stack pointer + mov x0, sp + bl slp_save_state_asm + + ; early return for return value of 1 and -1 + cmp x0, #-1 + b.eq RETURN + cmp x0, #1 + b.eq RETURN + + ; increment stack and frame pointer + add sp, sp, x0 + add x29, x29, x0 + + bl slp_restore_state_asm + + ; store return value for successful completion of routine + mov x0, #0 + +RETURN + ; pop registers from stack + ldp d14, d15, [sp], #16 + ldp d12, d13, [sp], #16 + ldp d10, d11, [sp], #16 + ldp d8, d9, [sp], #16 + ldp x29, x30, [sp], #16 + ldp x27, x28, [sp], #16 + ldp x25, x26, [sp], #16 + ldp x23, x24, [sp], #16 + ldp x21, x22, [sp], #16 + ldp x19, x20, [sp], #16 + + ret + + END diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_masm.obj b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_masm.obj new file mode 100644 index 00000000..f6f220e4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_masm.obj differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_msvc.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_msvc.h new file mode 100644 index 00000000..7ab7f45b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_msvc.h @@ -0,0 +1,17 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 21-Oct-21 Niyas Sait + * First version to enable win/arm64 support. + */ + +#define STACK_REFPLUS 1 +#define STACK_MAGIC 0 + +/* Use the generic support for an external assembly language slp_switch function. 
*/ +#define EXTERNAL_ASM + +#ifdef SLP_EVAL +/* This always uses the external masm assembly file. */ +#endif \ No newline at end of file diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_csky_gcc.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_csky_gcc.h new file mode 100644 index 00000000..ac469d3a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_csky_gcc.h @@ -0,0 +1,48 @@ +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REG_FP "r8" +#ifdef __CSKYABIV2__ +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r9", "r10", "r11", "r15",\ + "r16", "r17", "r18", "r19", "r20", "r21", "r22",\ + "r23", "r24", "r25" + +#if defined (__CSKY_HARD_FLOAT__) || (__CSKY_VDSP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "vr8", "vr9", "vr10", "vr11", "vr12",\ + "vr13", "vr14", "vr15" +#else +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL +#endif +#else +#define REGS_TO_SAVE "r9", "r10", "r11", "r12", "r13", "r15" +#endif + + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + int *stackref, stsizediff; + int result; + + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mov %0, sp" : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addu sp,%0\n" + "addu "REG_FP",%0\n" + : + : "r" (stsizediff) + ); + + SLP_RESTORE_STATE(); + } + __asm__ volatile ("movi %0, 0" : "=r" (result)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + + return result; +} + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_loongarch64_linux.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_loongarch64_linux.h new file mode 100644 index 00000000..9eaf34ef --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_loongarch64_linux.h @@ -0,0 +1,31 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "s0", "s1", "s2", "s3", "s4", "s5", \ + "s6", "s7", "s8", "fp", \ + "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31" + +static int +slp_switch(void) +{ + int ret; + long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("move %0, $sp" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add.d $sp, $sp, %0\n\t" + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("move %0, $zero" : "=r" (ret) : ); + return ret; +} + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_m68k_gcc.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_m68k_gcc.h new file mode 100644 index 00000000..da761c2d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_m68k_gcc.h @@ -0,0 +1,38 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 2014-01-06 Andreas Schwab + * File created. 
+ */ + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "%d2", "%d3", "%d4", "%d5", "%d6", "%d7", \ + "%a2", "%a3", "%a4" + +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + void *fp, *a5; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("move.l %%fp, %0" : "=m"(fp)); + __asm__ volatile ("move.l %%a5, %0" : "=m"(a5)); + __asm__ ("move.l %%sp, %0" : "=r"(stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ("add.l %0, %%sp; add.l %0, %%fp" : : "r"(stsizediff)); + SLP_RESTORE_STATE(); + __asm__ volatile ("clr.l %0" : "=g" (err)); + } + __asm__ volatile ("move.l %0, %%a5" : : "m"(a5)); + __asm__ volatile ("move.l %0, %%fp" : : "m"(fp)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return err; +} + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_mips_unix.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_mips_unix.h new file mode 100644 index 00000000..b9003e94 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_mips_unix.h @@ -0,0 +1,64 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 20-Sep-14 Matt Madison + * Re-code the saving of the gp register for MIPS64. + * 05-Jan-08 Thiemo Seufer + * Ported from ppc. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "$16", "$17", "$18", "$19", "$20", "$21", "$22", \ + "$23", "$30" +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; +#ifdef __mips64 + uint64_t gpsave; +#endif + __asm__ __volatile__ ("" : : : REGS_TO_SAVE); +#ifdef __mips64 + __asm__ __volatile__ ("sd $28,%0" : "=m" (gpsave) : : ); +#endif + __asm__ ("move %0, $29" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ __volatile__ ( +#ifdef __mips64 + "daddu $29, %0\n" +#else + "addu $29, %0\n" +#endif + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } +#ifdef __mips64 + __asm__ __volatile__ ("ld $28,%0" : : "m" (gpsave) : ); +#endif + __asm__ __volatile__ ("" : : : REGS_TO_SAVE); + __asm__ __volatile__ ("move %0, $0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc64_aix.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc64_aix.h new file mode 100644 index 00000000..e7e0b877 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc64_aix.h @@ -0,0 +1,103 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 16-Oct-20 Jesse Gorzinski + * Copied from Linux PPC64 implementation + * 04-Sep-18 Alexey Borzenkov + * Workaround a gcc bug using manual save/restore of r30 + * 21-Mar-18 Tulio Magno Quites Machado Filho + * Added r30 to the list of saved registers in order to fully comply with + * both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this + * register as a nonvolatile register used for local variables. + * 21-Mar-18 Laszlo Boszormenyi + * Save r2 (TOC pointer) manually. + * 10-Dec-13 Ulrich Weigand + * Support ELFv2 ABI. Save float/vector registers. + * 09-Mar-12 Michael Ellerman + * 64-bit implementation, copied from 32-bit. + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! 
WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 6 + +#if defined(__ALTIVEC__) +#define ALTIVEC_REGS \ + "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \ + "v28", "v29", "v30", "v31", +#else +#define ALTIVEC_REGS +#endif + +#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "r31", \ + "fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \ + "fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \ + "fr30", "fr31", \ + ALTIVEC_REGS \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + int err; + long *stackref, stsizediff; + void * toc; + void * r30; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("std 2, %0" : "=m" (toc)); + __asm__ volatile ("std 30, %0" : "=m" (r30)); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ld 30, %0" : : "m" (r30)); + __asm__ volatile ("ld 2, %0" : : "m" (toc)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc64_linux.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc64_linux.h new file mode 100644 index 00000000..3c324d00 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc64_linux.h @@ -0,0 +1,105 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 04-Sep-18 Alexey Borzenkov + * Workaround a gcc bug using manual save/restore of r30 + * 21-Mar-18 Tulio Magno Quites Machado Filho + * Added r30 to the list of saved registers in order to fully comply with + * both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this + * register as a nonvolatile register used for local variables. + * 21-Mar-18 Laszlo Boszormenyi + * Save r2 (TOC pointer) manually. + * 10-Dec-13 Ulrich Weigand + * Support ELFv2 ABI. Save float/vector registers. + * 09-Mar-12 Michael Ellerman + * 64-bit implementation, copied from 32-bit. + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! 
+ * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#if _CALL_ELF == 2 +#define STACK_MAGIC 4 +#else +#define STACK_MAGIC 6 +#endif + +#if defined(__ALTIVEC__) +#define ALTIVEC_REGS \ + "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \ + "v28", "v29", "v30", "v31", +#else +#define ALTIVEC_REGS +#endif + +#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "r31", \ + "fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \ + "fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \ + "fr30", "fr31", \ + ALTIVEC_REGS \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + int err; + long *stackref, stsizediff; + void * toc; + void * r30; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("std 2, %0" : "=m" (toc)); + __asm__ volatile ("std 30, %0" : "=m" (r30)); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ld 30, %0" : : "m" (r30)); + __asm__ volatile ("ld 2, %0" : : "m" (toc)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_aix.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_aix.h new file mode 100644 index 00000000..6d93c132 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_aix.h @@ -0,0 +1,87 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Mar-11 Floris Bruynooghe + * Do not add stsizediff to general purpose + * register (GPR) 30 as this is a non-volatile and + * unused by the PowerOpen Environment, therefore + * this was modifying a user register instead of the + * frame pointer (which does not seem to exist). + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? 
+ * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_linux.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_linux.h new file mode 100644 index 00000000..e83ad70a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_linux.h @@ -0,0 +1,84 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. 
+ */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + "add 30, 30, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_macosx.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_macosx.h new file mode 100644 index 00000000..bd414c68 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_macosx.h @@ -0,0 +1,82 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! 
+ */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("; asm block 2\n\tmr %0, r1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "; asm block 3\n" + "\tmr r11, %0\n" + "\tadd r1, r1, r11\n" + "\tadd r30, r30, r11\n" + : /* no outputs */ + : "r" (stsizediff) + : "r11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_unix.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_unix.h new file mode 100644 index 00000000..bb188080 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_unix.h @@ -0,0 +1,82 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + "add 30, 30, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. 
+ * These features are highly experimental und not + * essential yet. + */ diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_riscv_unix.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_riscv_unix.h new file mode 100644 index 00000000..e74f37af --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_riscv_unix.h @@ -0,0 +1,36 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "s1", "s2", "s3", "s4", "s5", \ + "s6", "s7", "s8", "s9", "s10", "s11", "fs0", "fs1", \ + "fs2", "fs3", "fs4", "fs5", "fs6", "fs7", "fs8", "fs9", \ + "fs10", "fs11" + +static int +slp_switch(void) +{ + long fp; + int ret; + long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mv %0, fp" : "=r" (fp) : ); + __asm__ volatile ("mv %0, sp" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add sp, sp, %0\n\t" + "add fp, fp, %0\n\t" + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("ld fp, %0" : : "m" (fp)); + __asm__ volatile ("mv %0, zero" : "=r" (ret) : ); + return ret; +} + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_s390_unix.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_s390_unix.h new file mode 100644 index 00000000..9199367f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_s390_unix.h @@ -0,0 +1,87 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 25-Jan-12 Alexey Borzenkov + * Fixed Linux/S390 port to work correctly with + * different optimization options both on 31-bit + * and 64-bit. Thanks to Stefan Raabe for lots + * of testing. + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 06-Oct-02 Gustavo Niemeyer + * Ported to Linux/S390. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#ifdef __s390x__ +#define STACK_MAGIC 20 /* 20 * 8 = 160 bytes of function call area */ +#else +#define STACK_MAGIC 24 /* 24 * 4 = 96 bytes of function call area */ +#endif + +/* Technically, r11-r13 also need saving, but function prolog starts + with stm(g) and since there are so many saved registers already + it won't be optimized, resulting in all r6-r15 being saved */ +#define REGS_TO_SAVE "r6", "r7", "r8", "r9", "r10", "r14", \ + "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", \ + "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15" + +static int +slp_switch(void) +{ + int ret; + long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); +#ifdef __s390x__ + __asm__ volatile ("lgr %0, 15" : "=r" (stackref) : ); +#else + __asm__ volatile ("lr %0, 15" : "=r" (stackref) : ); +#endif + { + SLP_SAVE_STATE(stackref, stsizediff); +/* N.B. + r11 may be used as the frame pointer, and in that case it cannot be + clobbered and needs offsetting just like the stack pointer (but in cases + where frame pointer isn't used we might clobber it accidentally). What's + scary is that r11 is 2nd (and even 1st when GOT is used) callee saved + register that gcc would chose for surviving function calls. 
However, + since r6-r10 are clobbered above, their cost for reuse is reduced, so + gcc IRA will chose them over r11 (not seeing r11 is implicitly saved), + making it relatively safe to offset in all cases. :) */ + __asm__ volatile ( +#ifdef __s390x__ + "agr 15, %0\n\t" + "agr 11, %0" +#else + "ar 15, %0\n\t" + "ar 11, %0" +#endif + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("lhi %0, 0" : "=r" (ret) : ); + return ret; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_sh_gcc.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_sh_gcc.h new file mode 100644 index 00000000..5ecc3b39 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_sh_gcc.h @@ -0,0 +1,36 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REGS_TO_SAVE "r8", "r9", "r10", "r11", "r13", \ + "fr12", "fr13", "fr14", "fr15" + +// r12 Global context pointer, GP +// r14 Frame pointer, FP +// r15 Stack pointer, SP + +static int +slp_switch(void) +{ + int err; + void* fp; + int *stackref, stsizediff; + __asm__ volatile("" : : : REGS_TO_SAVE); + __asm__ volatile("mov.l r14, %0" : "=m"(fp) : :); + __asm__("mov r15, %0" : "=r"(stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile( + "add %0, r15\n" + "add %0, r14\n" + : /* no outputs */ + : "r"(stsizediff)); + SLP_RESTORE_STATE(); + __asm__ volatile("mov r0, %0" : "=r"(err) : :); + } + __asm__ volatile("mov.l %0, r14" : : "m"(fp) :); + __asm__ volatile("" : : : REGS_TO_SAVE); + return err; +} + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_sparc_sun_gcc.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_sparc_sun_gcc.h new file mode 100644 index 00000000..96990c39 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_sparc_sun_gcc.h @@ -0,0 +1,92 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 16-May-15 Alexey Borzenkov + * Move stack spilling code inside save/restore functions + * 30-Aug-13 Floris Bruynooghe + Clean the register windows again before returning. + This does not clobber the PIC register as it leaves + the current window intact and is required for multi- + threaded code to work correctly. + * 08-Mar-11 Floris Bruynooghe + * No need to set return value register explicitly + * before the stack and framepointer are adjusted + * as none of the other registers are influenced by + * this. Also don't needlessly clean the windows + * ('ta %0" :: "i" (ST_CLEAN_WINDOWS)') as that + * clobbers the gcc PIC register (%l7). + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. 
+ * 15-Sep-02 Gerd Woetzel + * added support for SunOS sparc with gcc + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + + +#define STACK_MAGIC 0 + + +#if defined(__sparcv9) +#define SLP_FLUSHW __asm__ volatile ("flushw") +#else +#define SLP_FLUSHW __asm__ volatile ("ta 3") /* ST_FLUSH_WINDOWS */ +#endif + +/* On sparc we need to spill register windows inside save/restore functions */ +#define SLP_BEFORE_SAVE_STATE() SLP_FLUSHW +#define SLP_BEFORE_RESTORE_STATE() SLP_FLUSHW + + +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + + /* Put current stack pointer into stackref. + * Register spilling is done in save/restore. + */ + __asm__ volatile ("mov %%sp, %0" : "=r" (stackref)); + + { + /* Thou shalt put SLP_SAVE_STATE into a local block */ + /* Copy the current stack onto the heap */ + SLP_SAVE_STATE(stackref, stsizediff); + + /* Increment stack and frame pointer by stsizediff */ + __asm__ volatile ( + "add %0, %%sp, %%sp\n\t" + "add %0, %%fp, %%fp" + : : "r" (stsizediff)); + + /* Copy new stack from it's save store on the heap */ + SLP_RESTORE_STATE(); + + __asm__ volatile ("mov %1, %0" : "=r" (err) : "i" (0)); + return err; + } +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x32_unix.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x32_unix.h new file mode 100644 index 00000000..893369c7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x32_unix.h @@ -0,0 +1,63 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 17-Aug-12 Fantix King + * Ported from amd64. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "r12", "r13", "r14", "r15" + + +static int +slp_switch(void) +{ + void* ebp; + void* ebx; + unsigned int csr; + unsigned short cw; + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("fstcw %0" : "=m" (cw)); + __asm__ volatile ("stmxcsr %0" : "=m" (csr)); + __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); + __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); + __asm__ ("movl %%esp, %0" : "=g" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addl %0, %%esp\n" + "addl %0, %%ebp\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); + __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); + __asm__ volatile ("ldmxcsr %0" : : "m" (csr)); + __asm__ volatile ("fldcw %0" : : "m" (cw)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. 
+ */ diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_masm.asm b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_masm.asm new file mode 100644 index 00000000..f5c72a27 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_masm.asm @@ -0,0 +1,111 @@ +; +; stack switching code for MASM on x64 +; Kristjan Valur Jonsson, sept 2005 +; + + +;prototypes for our calls +slp_save_state_asm PROTO +slp_restore_state_asm PROTO + + +pushxmm MACRO reg + sub rsp, 16 + .allocstack 16 + movaps [rsp], reg ; faster than movups, but we must be aligned + ; .savexmm128 reg, offset (don't know what offset is, no documentation) +ENDM +popxmm MACRO reg + movaps reg, [rsp] ; faster than movups, but we must be aligned + add rsp, 16 +ENDM + +pushreg MACRO reg + push reg + .pushreg reg +ENDM +popreg MACRO reg + pop reg +ENDM + + +.code +slp_switch PROC FRAME + ;realign stack to 16 bytes after return address push; makes the following faster + sub rsp,8 + .allocstack 8 + + pushxmm xmm15 + pushxmm xmm14 + pushxmm xmm13 + pushxmm xmm12 + pushxmm xmm11 + pushxmm xmm10 + pushxmm xmm9 + pushxmm xmm8 + pushxmm xmm7 + pushxmm xmm6 + + pushreg r15 + pushreg r14 + pushreg r13 + pushreg r12 + + pushreg rbp + pushreg rbx + pushreg rdi + pushreg rsi + + sub rsp, 10h ;allocate the single function argument (must be multiple of 16) + .allocstack 10h +.endprolog + + lea rcx, [rsp+10h] ;load stack base that we are saving + call slp_save_state_asm ;pass stackpointer, return offset in eax + cmp rax, 1 + je EXIT1 + cmp rax, -1 + je EXIT2 + ;actual stack switch: + add rsp, rax + call slp_restore_state_asm + xor rax, rax ;return 0 + +EXIT: + + add rsp, 10h + popreg rsi + popreg rdi + popreg rbx + popreg rbp + + popreg r12 + popreg r13 + popreg r14 + popreg r15 + + popxmm xmm6 + popxmm xmm7 + popxmm xmm8 + popxmm xmm9 + popxmm xmm10 + popxmm xmm11 + popxmm xmm12 + popxmm xmm13 + popxmm xmm14 + popxmm xmm15 + + add rsp, 8 + ret + +EXIT1: + mov rax, 1 + jmp EXIT + +EXIT2: + sar rax, 1 + jmp EXIT + +slp_switch ENDP + +END \ No newline at end of file diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_masm.obj b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_masm.obj new file mode 100644 index 00000000..64e3e6b8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_masm.obj differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_msvc.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_msvc.h new file mode 100644 index 00000000..601ea560 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_msvc.h @@ -0,0 +1,60 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to ensure + * that f in slp_eval_frame(PyFrameObject *f) + * gets included into the saved stack area. + * STACK_REFPLUS will probably be 1 in most cases. + * 26-Sep-02 Christian Tismer + * again as a result of virtualized stack access, + * the compiler used fewer registers. Needed to + * explicitly mention registers in order to get them saved. + * Thanks to Jeff Senn for pointing this out and helping. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform.
+ * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 01-Mar-02 Christian Tismer + * Initial final version after lots of iterations for i386. + */ + +/* Avoid alloca redefined warning on mingw64 */ +#ifndef alloca +#define alloca _alloca +#endif + +#define STACK_REFPLUS 1 +#define STACK_MAGIC 0 + +/* Use the generic support for an external assembly language slp_switch function. */ +#define EXTERNAL_ASM + +#ifdef SLP_EVAL +/* This always uses the external masm assembly file. */ +#endif + +/* + * further self-processing support + */ + +/* we have IsBadReadPtr available, so we can peek at objects */ +/* +#define STACKLESS_SPY + +#ifdef IMPLEMENT_STACKLESSMODULE +#include "Windows.h" +#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) + +static int IS_ON_STACK(void*p) +{ + int stackref; + intptr_t stackbase = ((intptr_t)&stackref) & 0xfffff000; + return (intptr_t)p >= stackbase && (intptr_t)p < stackbase + 0x00100000; +} + +#endif +*/ \ No newline at end of file diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x86_msvc.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x86_msvc.h new file mode 100644 index 00000000..0f3a59f5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x86_msvc.h @@ -0,0 +1,326 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to ensure + * that f in slp_eval_frame(PyFrameObject *f) + * gets included into the saved stack area. + * STACK_REFPLUS will probably be 1 in most cases. + * 26-Sep-02 Christian Tismer + * again as a result of virtualized stack access, + * the compiler used fewer registers. Needed to + * explicitly mention registers in order to get them saved. + * Thanks to Jeff Senn for pointing this out and helping. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 01-Mar-02 Christian Tismer + * Initial final version after lots of iterations for i386. + */ + +#define alloca _alloca + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +/* Some magic to quell warnings and keep slp_switch() from crashing when built + with VC90. Disable global optimizations, and the warning: frame pointer + register 'ebp' modified by inline assembly code. + + We used to just disable global optimizations ("g") but upstream stackless + Python, as well as stackman, turn off all optimizations. + +References: +https://github.com/stackless-dev/stackman/blob/dbc72fe5207a2055e658c819fdeab9731dee78b9/stackman/platforms/switch_x86_msvc.h +https://github.com/stackless-dev/stackless/blob/main-slp/Stackless/platf/switch_x86_msvc.h +*/ +#define WIN32_LEAN_AND_MEAN +#include <windows.h> + +#pragma optimize("", off) /* so that autos are stored on the stack */ +#pragma warning(disable:4731) +#pragma warning(disable:4733) /* disable warning about modifying FS[0] */ + +/** + * Most modern compilers and environments handle C++ exceptions without any + * special help from us. MSVC on 32-bit windows is an exception. There, C++ + * exceptions are dealt with using Windows' Structured Exception Handling + * (SEH). + * + * SEH is implemented as a singly linked list of nodes. The + * head of this list is stored in the Thread Information Block, which itself + * is pointed to from the FS register.
It's the first field in the structure, + * or offset 0, so we can access it using assembly FS:[0], or the compiler + * intrinsics and field offset information from the headers (as we do below). + * Somewhat unusually, the tail of the list doesn't have prev == NULL, it has + * prev == 0xFFFFFFFF. + * + * SEH was designed for C, and traditionally uses the MSVC compiler + * intrinsics __try{}/__except{}. It is also utilized for C++ exceptions by + * MSVC; there, every throw of a C++ exception raises a SEH error with the + * ExceptionCode 0xE06D7363; the SEH handler list is then traversed to + * deal with the exception. + * + * If the SEH list is corrupt, then when a C++ exception is thrown the program + * will abruptly exit with exit code 1. This does not use std::terminate(), so + * std::set_terminate() is useless to debug this. + * + * The SEH list is closely tied to the call stack; entering a function that + * uses __try{} or most C++ functions will push a new handler onto the front + * of the list. Returning from the function will remove the handler. Saving + * and restoring the head node of the SEH list (FS:[0]) per-greenlet is NOT + * ENOUGH to make SEH or exceptions work. + * + * Stack switching breaks SEH because the call stack no longer necessarily + * matches the SEH list. For example, given greenlet A that switches to + * greenlet B, at the moment of entering greenlet B, we will have any SEH + * handlers from greenlet A on the SEH list; greenlet B can then add its own + * handlers to the SEH list. When greenlet B switches back to greenlet A, + * greenlet B's handlers would still be on the SEH stack, but when switch() + * returns control to greenlet A, we have replaced the contents of the stack + * in memory, so all the addresses that greenlet B added to the SEH list are now + * invalid: part of the call stack has been unwound, but the SEH list was out + * of sync with the call stack. The net effect is that exception handling + * stops working. + * + * Thus, when switching greenlets, we need to be sure that the SEH list + * matches the effective call stack, "cutting out" any handlers that were + * pushed by the greenlet that switched out and which are no longer valid. + * + * The easiest way to do this is to capture the SEH list at the time the main + * greenlet for a thread is created, and, when initially starting a greenlet, + * start a new SEH list for it, which contains nothing but the handler + * established for the new greenlet itself, with the tail being the handlers + * for the main greenlet. If we then save and restore the SEH per-greenlet, + * they won't interfere with each other's SEH lists. (No greenlet can unwind + * the call stack past the handlers established by the main greenlet). + * + * By observation, a new thread starts with three SEH handlers on the list. By + * the time we get around to creating the main greenlet, though, there can be + * many more, established by transient calls that lead to the creation of the + * main greenlet. Therefore, 3 is a magic constant telling us when to perform + * the initial slice. + * + * All of this can be debugged using a vectored exception handler, which + * operates independently of the SEH handler list, and is called first. + * Walking the SEH list at key points can also be helpful.
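+ * (Both of those aids exist later in this file: GreenletVectorHandler can be
+ * installed with AddVectoredExceptionHandler, and x86_slp_show_seh_chain()
+ * walks and prints the list; both are compiled only under GREENLET_DEBUG.)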
+ * + * References: + * https://en.wikipedia.org/wiki/Win32_Thread_Information_Block + * https://devblogs.microsoft.com/oldnewthing/20100730-00/?p=13273 + * https://docs.microsoft.com/en-us/cpp/cpp/try-except-statement?view=msvc-160 + * https://docs.microsoft.com/en-us/cpp/cpp/structured-exception-handling-c-cpp?view=msvc-160 + * https://docs.microsoft.com/en-us/windows/win32/debug/structured-exception-handling + * https://docs.microsoft.com/en-us/windows/win32/debug/using-a-vectored-exception-handler + * https://bytepointer.com/resources/pietrek_crash_course_depths_of_win32_seh.htm + */ +#define GREENLET_NEEDS_EXCEPTION_STATE_SAVED + + +typedef struct _GExceptionRegistration { + struct _GExceptionRegistration* prev; + void* handler_f; +} GExceptionRegistration; + +static void +slp_set_exception_state(const void *const seh_state) +{ + // Because the stack from which we do this is ALSO a handler, and + // that one we want to keep, we need to relink the current SEH handler + // frame to point to this one, cutting out the middle men, as it were. + // + // Entering a try block doesn't change the SEH frame, but entering a + // function containing a try block does. + GExceptionRegistration* current_seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); + current_seh_state->prev = (GExceptionRegistration*)seh_state; +} + + +static GExceptionRegistration* +x86_slp_get_third_oldest_handler() +{ + GExceptionRegistration* a = NULL; /* Closest to the top */ + GExceptionRegistration* b = NULL; /* second */ + GExceptionRegistration* c = NULL; + GExceptionRegistration* seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); + a = b = c = seh_state; + + while (seh_state && seh_state != (GExceptionRegistration*)0xFFFFFFFF) { + if ((void*)seh_state->prev < (void*)100) { + fprintf(stderr, "\tERROR: Broken SEH chain.\n"); + return NULL; + } + a = b; + b = c; + c = seh_state; + + seh_state = seh_state->prev; + } + return a ? a : (b ? b : c); +} + + +static void* +slp_get_exception_state() +{ + // XXX: There appear to be three SEH handlers on the stack already at the + // start of the thread. Is that a guarantee? Almost certainly not. Yet in + // all observed cases it has been three. This is consistent with + // faulthandler off or on, and optimizations off or on. It may not be + // consistent with other operating system versions, though: we only have + // CI on one or two versions (don't ask what they are). + // In theory we could capture the number of handlers on the chain when + // PyInit__greenlet is called: there are probably only the default + // handlers at that point (unless we're embedded and people have used + // __try/__except or a C++ handler)? + return x86_slp_get_third_oldest_handler(); +} + +static int +slp_switch(void) +{ + /* MASM syntax is typically reversed from other assemblers. + It is usually + */ + int *stackref, stsizediff; + /* store the structured exception state for this stack */ + DWORD seh_state = __readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); + __asm mov stackref, esp; + /* modify EBX, ESI and EDI in order to get them preserved */ + __asm mov ebx, ebx; + __asm xchg esi, edi; + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm { + mov eax, stsizediff + add esp, eax + add ebp, eax + } + SLP_RESTORE_STATE(); + } + __writefsdword(FIELD_OFFSET(NT_TIB, ExceptionList), seh_state); + return 0; +} + +/* re-enable ebp warning and global optimizations.
*/ +#pragma optimize("", on) +#pragma warning(default:4731) +#pragma warning(default:4733) /* disable warning about modifying FS[0] */ + + +#endif + +/* + * further self-processing support + */ + +/* we have IsBadReadPtr available, so we can peek at objects */ +#define STACKLESS_SPY + +#ifdef GREENLET_DEBUG + +#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) + +static int IS_ON_STACK(void*p) +{ + int stackref; + int stackbase = ((int)&stackref) & 0xfffff000; + return (int)p >= stackbase && (int)p < stackbase + 0x00100000; +} + +static void +x86_slp_show_seh_chain() +{ + GExceptionRegistration* seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); + fprintf(stderr, "====== SEH Chain ======\n"); + while (seh_state && seh_state != (GExceptionRegistration*)0xFFFFFFFF) { + fprintf(stderr, "\tSEH_chain addr: %p handler: %p prev: %p\n", + seh_state, + seh_state->handler_f, seh_state->prev); + if ((void*)seh_state->prev < (void*)100) { + fprintf(stderr, "\tERROR: Broken chain.\n"); + break; + } + seh_state = seh_state->prev; + } + fprintf(stderr, "====== End SEH Chain ======\n"); + fflush(NULL); + return; +} + +//addVectoredExceptionHandler constants: +//CALL_FIRST means call this exception handler first; +//CALL_LAST means call this exception handler last +#define CALL_FIRST 1 +#define CALL_LAST 0 + +LONG WINAPI +GreenletVectorHandler(PEXCEPTION_POINTERS ExceptionInfo) +{ + // We get one of these for every C++ exception, with code + // E06D7363 + // This is a special value that means "C++ exception from MSVC" + // https://devblogs.microsoft.com/oldnewthing/20100730-00/?p=13273 + // + // Install in the module init function with: + // AddVectoredExceptionHandler(CALL_FIRST, GreenletVectorHandler); + PEXCEPTION_RECORD ExceptionRecord = ExceptionInfo->ExceptionRecord; + + fprintf(stderr, + "GOT VECTORED EXCEPTION:\n" + "\tExceptionCode : %p\n" + "\tExceptionFlags : %p\n" + "\tExceptionAddr : %p\n" + "\tNumberparams : %ld\n", + ExceptionRecord->ExceptionCode, + ExceptionRecord->ExceptionFlags, + ExceptionRecord->ExceptionAddress, + ExceptionRecord->NumberParameters + ); + if (ExceptionRecord->ExceptionFlags & 1) { + fprintf(stderr, "\t\tEH_NONCONTINUABLE\n" ); + } + if (ExceptionRecord->ExceptionFlags & 2) { + fprintf(stderr, "\t\tEH_UNWINDING\n" ); + } + if (ExceptionRecord->ExceptionFlags & 4) { + fprintf(stderr, "\t\tEH_EXIT_UNWIND\n" ); + } + if (ExceptionRecord->ExceptionFlags & 8) { + fprintf(stderr, "\t\tEH_STACK_INVALID\n" ); + } + if (ExceptionRecord->ExceptionFlags & 0x10) { + fprintf(stderr, "\t\tEH_NESTED_CALL\n" ); + } + if (ExceptionRecord->ExceptionFlags & 0x20) { + fprintf(stderr, "\t\tEH_TARGET_UNWIND\n" ); + } + if (ExceptionRecord->ExceptionFlags & 0x40) { + fprintf(stderr, "\t\tEH_COLLIDED_UNWIND\n" ); + } + fprintf(stderr, "\n"); + fflush(NULL); + for(DWORD i = 0; i < ExceptionRecord->NumberParameters; i++) { + fprintf(stderr, "\t\t\tParam %ld: %lX\n", i, ExceptionRecord->ExceptionInformation[i]); + } + + if (ExceptionRecord->NumberParameters == 3) { + fprintf(stderr, "\tAbout to traverse SEH chain\n"); + // C++ Exception records have 3 params. 
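+        // (Decoded per the Raymond Chen article linked above: for 32-bit
+        // MSVC the three parameters are the 0x19930520 magic number, a
+        // pointer to the thrown object, and a pointer to its ThrowInfo;
+        // 64-bit builds add the module base as a fourth. Informational
+        // only; nothing below depends on this.)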
+ x86_slp_show_seh_chain(); + } + + return EXCEPTION_CONTINUE_SEARCH; +} + + + + +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x86_unix.h b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x86_unix.h new file mode 100644 index 00000000..493fa6ba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/platform/switch_x86_unix.h @@ -0,0 +1,105 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 3-May-13 Ralf Schmitt + * Add support for strange GCC caller-save decisions + * (ported from switch_aarch64_gcc.h) + * 19-Aug-11 Alexey Borzenkov + * Correctly save ebp, ebx and cw + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'ebx' from the registers saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to ensure + * that f in slp_eval_frame(PyFrameObject *f) + * gets included into the saved stack area. + * STACK_REFPLUS will probably be 1 in most cases. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 31-Apr-02 Armin Rigo + * Added ebx, esi and edi register-saves. + * 01-Mar-02 Samual M. Rushing + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +/* #define STACK_MAGIC 3 */ +/* the above works fine with gcc 2.96, but 2.95.3 wants this */ +#define STACK_MAGIC 0 + +#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) +# define ATTR_NOCLONE __attribute__((noclone)) +#else +# define ATTR_NOCLONE +#endif + +static int +slp_switch(void) +{ + int err; +#ifdef _WIN32 + void *seh; +#endif + void *ebp, *ebx; + unsigned short cw; + int *stackref, stsizediff; + __asm__ volatile ("" : : : "esi", "edi"); + __asm__ volatile ("fstcw %0" : "=m" (cw)); + __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); + __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); +#ifdef _WIN32 + __asm__ volatile ( + "movl %%fs:0x0, %%eax\n" + "movl %%eax, %0\n" + : "=m" (seh) + : + : "eax"); +#endif + __asm__ ("movl %%esp, %0" : "=g" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addl %0, %%esp\n" + "addl %0, %%ebp\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); + } +#ifdef _WIN32 + __asm__ volatile ( + "movl %0, %%eax\n" + "movl %%eax, %%fs:0x0\n" + : + : "m" (seh) + : "eax"); +#endif + __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); + __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); + __asm__ volatile ("fldcw %0" : : "m" (cw)); + __asm__ volatile ("" : : : "esi", "edi"); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental and not + * essential yet.
+ */ diff --git a/.venv/lib/python3.12/site-packages/greenlet/slp_platformselect.h b/.venv/lib/python3.12/site-packages/greenlet/slp_platformselect.h new file mode 100644 index 00000000..49456482 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/slp_platformselect.h @@ -0,0 +1,75 @@ +/* + * Platform Selection for Stackless Python + */ +#ifdef __cplusplus +extern "C" { +#endif + +#if defined(MS_WIN32) && !defined(MS_WIN64) && defined(_M_IX86) && defined(_MSC_VER) +# include "platform/switch_x86_msvc.h" /* MS Visual Studio on X86 */ +#elif defined(MS_WIN64) && defined(_M_X64) && defined(_MSC_VER) || defined(__MINGW64__) +# include "platform/switch_x64_msvc.h" /* MS Visual Studio on X64 */ +#elif defined(MS_WIN64) && defined(_M_ARM64) +# include "platform/switch_arm64_msvc.h" /* MS Visual Studio on ARM64 */ +#elif defined(__GNUC__) && defined(__amd64__) && defined(__ILP32__) +# include "platform/switch_x32_unix.h" /* gcc on amd64 with x32 ABI */ +#elif defined(__GNUC__) && defined(__amd64__) +# include "platform/switch_amd64_unix.h" /* gcc on amd64 */ +#elif defined(__GNUC__) && defined(__i386__) +# include "platform/switch_x86_unix.h" /* gcc on X86 */ +#elif defined(__GNUC__) && defined(__powerpc64__) && (defined(__linux__) || defined(__FreeBSD__)) +# include "platform/switch_ppc64_linux.h" /* gcc on PowerPC 64-bit */ +#elif defined(__GNUC__) && defined(__PPC__) && (defined(__linux__) || defined(__FreeBSD__)) +# include "platform/switch_ppc_linux.h" /* gcc on PowerPC */ +#elif defined(__GNUC__) && defined(__POWERPC__) && defined(__APPLE__) +# include "platform/switch_ppc_macosx.h" /* Apple MacOS X on 32-bit PowerPC */ +#elif defined(__GNUC__) && defined(__powerpc64__) && defined(_AIX) +# include "platform/switch_ppc64_aix.h" /* gcc on AIX/PowerPC 64-bit */ +#elif defined(__GNUC__) && defined(_ARCH_PPC) && defined(_AIX) +# include "platform/switch_ppc_aix.h" /* gcc on AIX/PowerPC */ +#elif defined(__GNUC__) && defined(__powerpc__) && defined(__NetBSD__) +# include "platform/switch_ppc_unix.h" /* gcc on NetBSD/powerpc */ +#elif defined(__GNUC__) && defined(sparc) +# include "platform/switch_sparc_sun_gcc.h" /* SunOS sparc with gcc */ +#elif defined(__SUNPRO_C) && defined(sparc) && defined(sun) +# include "platform/switch_sparc_sun_gcc.h" /* SunStudio on SPARC */ +#elif defined(__SUNPRO_C) && defined(__amd64__) && defined(sun) +# include "platform/switch_amd64_unix.h" /* SunStudio on amd64 */ +#elif defined(__SUNPRO_C) && defined(__i386__) && defined(sun) +# include "platform/switch_x86_unix.h" /* SunStudio on x86 */ +#elif defined(__GNUC__) && defined(__s390__) && defined(__linux__) +# include "platform/switch_s390_unix.h" /* Linux/S390 */ +#elif defined(__GNUC__) && defined(__s390x__) && defined(__linux__) +# include "platform/switch_s390_unix.h" /* Linux/S390 zSeries (64-bit) */ +#elif defined(__GNUC__) && defined(__arm__) +# ifdef __APPLE__ +# include <TargetConditionals.h> +# endif +# if TARGET_OS_IPHONE +# include "platform/switch_arm32_ios.h" /* iPhone OS on arm32 */ +# else +# include "platform/switch_arm32_gcc.h" /* gcc using arm32 */ +# endif +#elif defined(__GNUC__) && defined(__mips__) && defined(__linux__) +# include "platform/switch_mips_unix.h" /* Linux/MIPS */ +#elif defined(__GNUC__) && defined(__aarch64__) +# include "platform/switch_aarch64_gcc.h" /* Aarch64 ABI */ +#elif defined(__GNUC__) && defined(__mc68000__) +# include "platform/switch_m68k_gcc.h" /* gcc on m68k */ +#elif defined(__GNUC__) && defined(__csky__) +# include "platform/switch_csky_gcc.h" /* gcc on csky */ +#elif
defined(__GNUC__) && defined(__riscv) +# include "platform/switch_riscv_unix.h" /* gcc on RISC-V */ +#elif defined(__GNUC__) && defined(__alpha__) +# include "platform/switch_alpha_unix.h" /* gcc on DEC Alpha */ +#elif defined(MS_WIN32) && defined(__llvm__) && defined(__aarch64__) +# include "platform/switch_aarch64_gcc.h" /* LLVM Aarch64 ABI for Windows */ +#elif defined(__GNUC__) && defined(__loongarch64) && defined(__linux__) +# include "platform/switch_loongarch64_linux.h" /* LoongArch64 */ +#elif defined(__GNUC__) && defined(__sh__) +# include "platform/switch_sh_gcc.h" /* SuperH */ +#endif + +#ifdef __cplusplus +}; +#endif diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__init__.py b/.venv/lib/python3.12/site-packages/greenlet/tests/__init__.py new file mode 100644 index 00000000..e69392e4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/__init__.py @@ -0,0 +1,240 @@ +# -*- coding: utf-8 -*- +""" +Tests for greenlet. + +""" +import os +import sys +import unittest + +from gc import collect +from gc import get_objects +from threading import active_count as active_thread_count +from time import sleep +from time import time + +import psutil + +from greenlet import greenlet as RawGreenlet +from greenlet import getcurrent + +from greenlet._greenlet import get_pending_cleanup_count +from greenlet._greenlet import get_total_main_greenlets + +from . import leakcheck + +PY312 = sys.version_info[:2] >= (3, 12) +PY313 = sys.version_info[:2] >= (3, 13) + +WIN = sys.platform.startswith("win") +RUNNING_ON_GITHUB_ACTIONS = os.environ.get('GITHUB_ACTIONS') +RUNNING_ON_TRAVIS = os.environ.get('TRAVIS') or RUNNING_ON_GITHUB_ACTIONS +RUNNING_ON_APPVEYOR = os.environ.get('APPVEYOR') +RUNNING_ON_CI = RUNNING_ON_TRAVIS or RUNNING_ON_APPVEYOR +RUNNING_ON_MANYLINUX = os.environ.get('GREENLET_MANYLINUX') + +class TestCaseMetaClass(type): + # wrap each test method with + # a) leak checks + def __new__(cls, classname, bases, classDict): + # pylint and pep8 fight over what this should be called (mcs or cls). + # pylint gets it right, but we can't scope disable pep8, so we go with + # its convention. + # pylint: disable=bad-mcs-classmethod-argument + check_totalrefcount = True + + # Python 3: must copy, we mutate the classDict. Interestingly enough, + # it doesn't actually error out, but under 3.6 we wind up wrapping + # and re-wrapping the same items over and over and over. + for key, value in list(classDict.items()): + if key.startswith('test') and callable(value): + classDict.pop(key) + if check_totalrefcount: + value = leakcheck.wrap_refcount(value) + classDict[key] = value + return type.__new__(cls, classname, bases, classDict) + + +class TestCase(TestCaseMetaClass( + "NewBase", + (unittest.TestCase,), + {})): + + cleanup_attempt_sleep_duration = 0.001 + cleanup_max_sleep_seconds = 1 + + def wait_for_pending_cleanups(self, + initial_active_threads=None, + initial_main_greenlets=None): + initial_active_threads = initial_active_threads or self.threads_before_test + initial_main_greenlets = initial_main_greenlets or self.main_greenlets_before_test + sleep_time = self.cleanup_attempt_sleep_duration + # NOTE: This is racy! A Python-level thread object may be dead + # and gone, but the C thread may not yet have fired its + # destructors and added to the queue. There's no particular + # way to know that's about to happen. We try to watch the + # Python threads to make sure they, at least, have gone away. 
+ # Counting the main greenlets, which we can easily do deterministically, + # also helps. + + # Always sleep at least once to let other threads run + sleep(sleep_time) + quit_after = time() + self.cleanup_max_sleep_seconds + # TODO: We could add an API that calls us back when a particular main greenlet is deleted? + # It would have to drop the GIL + while ( + get_pending_cleanup_count() + or active_thread_count() > initial_active_threads + or (not self.expect_greenlet_leak + and get_total_main_greenlets() > initial_main_greenlets)): + sleep(sleep_time) + if time() > quit_after: + print("Time limit exceeded.") + print("Threads: Waiting for only", initial_active_threads, + "-->", active_thread_count()) + print("MGlets : Waiting for only", initial_main_greenlets, + "-->", get_total_main_greenlets()) + break + collect() + + def count_objects(self, kind=list, exact_kind=True): + # pylint:disable=unidiomatic-typecheck + # Collect the garbage. + for _ in range(3): + collect() + if exact_kind: + return sum( + 1 + for x in get_objects() + if type(x) is kind + ) + # instances + return sum( + 1 + for x in get_objects() + if isinstance(x, kind) + ) + + greenlets_before_test = 0 + threads_before_test = 0 + main_greenlets_before_test = 0 + expect_greenlet_leak = False + + def count_greenlets(self): + """ + Find all the greenlets and subclasses tracked by the GC. + """ + return self.count_objects(RawGreenlet, False) + + def setUp(self): + # Ensure the main greenlet exists, otherwise the first test + # gets a false positive leak + super().setUp() + getcurrent() + self.threads_before_test = active_thread_count() + self.main_greenlets_before_test = get_total_main_greenlets() + self.wait_for_pending_cleanups(self.threads_before_test, self.main_greenlets_before_test) + self.greenlets_before_test = self.count_greenlets() + + def tearDown(self): + if getattr(self, 'skipTearDown', False): + return + + self.wait_for_pending_cleanups(self.threads_before_test, self.main_greenlets_before_test) + super().tearDown() + + def get_expected_returncodes_for_aborted_process(self): + import signal + # The child should be aborted in an unusual way. On POSIX + # platforms, this is done with abort() and signal.SIGABRT, + # which is reflected in a negative return value; however, on + # Windows, even though we observe the child print "Fatal + # Python error: Aborted" and in older versions of the C + # runtime "This application has requested the Runtime to + # terminate it in an unusual way," it always has an exit code + # of 3. This is interesting because 3 is the error code for + # ERROR_PATH_NOT_FOUND; BUT: the C runtime abort() function + # also uses this code. + # + # If we link to the static C library on Windows, the error + # code changes to '0xc0000409' (hex(3221226505)), which + # apparently is STATUS_STACK_BUFFER_OVERRUN; but "What this + # means is that nowadays when you get a + # STATUS_STACK_BUFFER_OVERRUN, it doesn’t actually mean that + # there is a stack buffer overrun. It just means that the + # application decided to terminate itself with great haste." + # + # + # On windows, we've also seen '0xc0000005' (hex(3221225477)). 
+ # That's "Access Violation" + # + # See + # https://devblogs.microsoft.com/oldnewthing/20110519-00/?p=10623 + # and + # https://docs.microsoft.com/en-us/previous-versions/k089yyh0(v=vs.140)?redirectedfrom=MSDN + # and + # https://devblogs.microsoft.com/oldnewthing/20190108-00/?p=100655 + expected_exit = ( + -signal.SIGABRT, + # But beginning on Python 3.11, the faulthandler + # that prints the C backtraces sometimes segfaults after + # reporting the exception but before printing the stack. + # This has only been seen on linux/gcc. + -signal.SIGSEGV, + ) if not WIN else ( + 3, + 0xc0000409, + 0xc0000005, + ) + return expected_exit + + def get_process_uss(self): + """ + Return the current process's USS in bytes. + + uss is available on Linux, macOS, Windows. Also known as + "Unique Set Size", this is the memory which is unique to a + process and which would be freed if the process was terminated + right now. + + If this is not supported by ``psutil``, this raises the + :exc:`unittest.SkipTest` exception. + """ + try: + return psutil.Process().memory_full_info().uss + except AttributeError as e: + raise unittest.SkipTest("uss not supported") from e + + def run_script(self, script_name, show_output=True): + import subprocess + script = os.path.join( + os.path.dirname(__file__), + script_name, + ) + + try: + return subprocess.check_output([sys.executable, script], + encoding='utf-8', + stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as ex: + if show_output: + print('-----') + print('Failed to run script', script) + print('~~~~~') + print(ex.output) + print('------') + raise + + + def assertScriptRaises(self, script_name, exitcodes=None): + import subprocess + with self.assertRaises(subprocess.CalledProcessError) as exc: + output = self.run_script(script_name, show_output=False) + __traceback_info__ = output + # We're going to fail the assertion if we get here, at least + # preserve the output in the traceback. 
+ + if exitcodes is None: + exitcodes = self.get_expected_returncodes_for_aborted_process() + self.assertIn(exc.exception.returncode, exitcodes) + return exc.exception diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..d5540fd6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-312.pyc new file mode 100644 index 00000000..4510fdb1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_cpp_exception.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_cpp_exception.cpython-312.pyc new file mode 100644 index 00000000..6d56f6fb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_cpp_exception.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-312.pyc new file mode 100644 index 00000000..763160cd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_slp_switch.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_slp_switch.cpython-312.pyc new file mode 100644 index 00000000..c4f5e211 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_slp_switch.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-312.pyc new file mode 100644 index 00000000..c4176803 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-312.pyc new file mode 100644 index 00000000..03f8ce61 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-312.pyc new file mode 100644 index 00000000..381e2d60 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/leakcheck.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/leakcheck.cpython-312.pyc new file mode 100644 index 00000000..776acf80 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/leakcheck.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-312.pyc new file mode 100644 index 00000000..e35d2735 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-312.pyc new file mode 100644 index 00000000..4e3b972d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-312.pyc new file mode 100644 index 00000000..8f5ce28d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_gc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_gc.cpython-312.pyc new file mode 100644 index 00000000..9a487c7a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_gc.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_generator.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_generator.cpython-312.pyc new file mode 100644 index 00000000..a4655c53 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_generator.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-312.pyc new file mode 100644 index 00000000..c1394cc0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-312.pyc new file mode 100644 index 00000000..ca27815d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_greenlet_trash.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_greenlet_trash.cpython-312.pyc new file mode 100644 index 00000000..a7fec568 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_greenlet_trash.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-312.pyc new file mode 100644 index 00000000..c4095da6 Binary files 
/dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-312.pyc new file mode 100644 index 00000000..779c264a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_throw.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_throw.cpython-312.pyc new file mode 100644 index 00000000..a8059221 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_throw.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-312.pyc new file mode 100644 index 00000000..b08eaddb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_version.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_version.cpython-312.pyc new file mode 100644 index 00000000..622a29dd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_version.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-312.pyc b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-312.pyc new file mode 100644 index 00000000..f72abb28 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/_test_extension.c b/.venv/lib/python3.12/site-packages/greenlet/tests/_test_extension.c new file mode 100644 index 00000000..05e81c03 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/_test_extension.c @@ -0,0 +1,231 @@ +/* This is a set of functions used by test_extension_interface.py to test the + * Greenlet C API. 
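+ * Each helper reports failure the usual CPython way: it sets a Python
+ * exception and returns NULL, so the Python-level tests see an ordinary
+ * exception rather than a crash.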
+ */ + +#include "../greenlet.h" + +#ifndef Py_RETURN_NONE +# define Py_RETURN_NONE return Py_INCREF(Py_None), Py_None +#endif + +#define TEST_MODULE_NAME "_test_extension" + +static PyObject* +test_switch(PyObject* self, PyObject* greenlet) +{ + PyObject* result = NULL; + + if (greenlet == NULL || !PyGreenlet_Check(greenlet)) { + PyErr_BadArgument(); + return NULL; + } + + result = PyGreenlet_Switch((PyGreenlet*)greenlet, NULL, NULL); + if (result == NULL) { + if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_AssertionError, + "greenlet.switch() failed for some reason."); + } + return NULL; + } + Py_INCREF(result); + return result; +} + +static PyObject* +test_switch_kwargs(PyObject* self, PyObject* args, PyObject* kwargs) +{ + PyGreenlet* g = NULL; + PyObject* result = NULL; + + PyArg_ParseTuple(args, "O!", &PyGreenlet_Type, &g); + + if (g == NULL || !PyGreenlet_Check(g)) { + PyErr_BadArgument(); + return NULL; + } + + result = PyGreenlet_Switch(g, NULL, kwargs); + if (result == NULL) { + if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_AssertionError, + "greenlet.switch() failed for some reason."); + } + return NULL; + } + Py_XINCREF(result); + return result; +} + +static PyObject* +test_getcurrent(PyObject* self) +{ + PyGreenlet* g = PyGreenlet_GetCurrent(); + if (g == NULL || !PyGreenlet_Check(g) || !PyGreenlet_ACTIVE(g)) { + PyErr_SetString(PyExc_AssertionError, + "getcurrent() returned an invalid greenlet"); + Py_XDECREF(g); + return NULL; + } + Py_DECREF(g); + Py_RETURN_NONE; +} + +static PyObject* +test_setparent(PyObject* self, PyObject* arg) +{ + PyGreenlet* current; + PyGreenlet* greenlet = NULL; + + if (arg == NULL || !PyGreenlet_Check(arg)) { + PyErr_BadArgument(); + return NULL; + } + if ((current = PyGreenlet_GetCurrent()) == NULL) { + return NULL; + } + greenlet = (PyGreenlet*)arg; + if (PyGreenlet_SetParent(greenlet, current)) { + Py_DECREF(current); + return NULL; + } + Py_DECREF(current); + if (PyGreenlet_Switch(greenlet, NULL, NULL) == NULL) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject* +test_new_greenlet(PyObject* self, PyObject* callable) +{ + PyObject* result = NULL; + PyGreenlet* greenlet = PyGreenlet_New(callable, NULL); + + if (!greenlet) { + return NULL; + } + + result = PyGreenlet_Switch(greenlet, NULL, NULL); + Py_CLEAR(greenlet); + if (result == NULL) { + return NULL; + } + + Py_INCREF(result); + return result; +} + +static PyObject* +test_raise_dead_greenlet(PyObject* self) +{ + PyErr_SetString(PyExc_GreenletExit, "test GreenletExit exception."); + return NULL; +} + +static PyObject* +test_raise_greenlet_error(PyObject* self) +{ + PyErr_SetString(PyExc_GreenletError, "test greenlet.error exception"); + return NULL; +} + +static PyObject* +test_throw(PyObject* self, PyGreenlet* g) +{ + const char msg[] = "take that sucka!"; + PyObject* msg_obj = Py_BuildValue("s", msg); + PyGreenlet_Throw(g, PyExc_ValueError, msg_obj, NULL); + Py_DECREF(msg_obj); + if (PyErr_Occurred()) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject* +test_throw_exact(PyObject* self, PyObject* args) +{ + PyGreenlet* g = NULL; + PyObject* typ = NULL; + PyObject* val = NULL; + PyObject* tb = NULL; + + if (!PyArg_ParseTuple(args, "OOOO:throw", &g, &typ, &val, &tb)) { + return NULL; + } + + PyGreenlet_Throw(g, typ, val, tb); + if (PyErr_Occurred()) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"test_switch", + (PyCFunction)test_switch, + METH_O, + "Switch to the provided greenlet sending provided arguments, and \n" + "return the 
results."}, + {"test_switch_kwargs", + (PyCFunction)test_switch_kwargs, + METH_VARARGS | METH_KEYWORDS, + "Switch to the provided greenlet sending the provided keyword args."}, + {"test_getcurrent", + (PyCFunction)test_getcurrent, + METH_NOARGS, + "Test PyGreenlet_GetCurrent()"}, + {"test_setparent", + (PyCFunction)test_setparent, + METH_O, + "Se the parent of the provided greenlet and switch to it."}, + {"test_new_greenlet", + (PyCFunction)test_new_greenlet, + METH_O, + "Test PyGreenlet_New()"}, + {"test_raise_dead_greenlet", + (PyCFunction)test_raise_dead_greenlet, + METH_NOARGS, + "Just raise greenlet.GreenletExit"}, + {"test_raise_greenlet_error", + (PyCFunction)test_raise_greenlet_error, + METH_NOARGS, + "Just raise greenlet.error"}, + {"test_throw", + (PyCFunction)test_throw, + METH_O, + "Throw a ValueError at the provided greenlet"}, + {"test_throw_exact", + (PyCFunction)test_throw_exact, + METH_VARARGS, + "Throw exactly the arguments given at the provided greenlet"}, + {NULL, NULL, 0, NULL} +}; + + +#define INITERROR return NULL + +static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, + TEST_MODULE_NAME, + NULL, + 0, + test_methods, + NULL, + NULL, + NULL, + NULL}; + +PyMODINIT_FUNC +PyInit__test_extension(void) +{ + PyObject* module = NULL; + module = PyModule_Create(&moduledef); + + if (module == NULL) { + return NULL; + } + + PyGreenlet_Import(); + return module; +} diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/_test_extension.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/greenlet/tests/_test_extension.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 00000000..4413bee7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/_test_extension.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/_test_extension_cpp.cpp b/.venv/lib/python3.12/site-packages/greenlet/tests/_test_extension_cpp.cpp new file mode 100644 index 00000000..5cbe6a76 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/_test_extension_cpp.cpp @@ -0,0 +1,226 @@ +/* This is a set of functions used to test C++ exceptions are not + * broken during greenlet switches + */ + +#include "../greenlet.h" +#include "../greenlet_compiler_compat.hpp" +#include +#include + +struct exception_t { + int depth; + exception_t(int depth) : depth(depth) {} +}; + +/* Functions are called via pointers to prevent inlining */ +static void (*p_test_exception_throw_nonstd)(int depth); +static void (*p_test_exception_throw_std)(); +static PyObject* (*p_test_exception_switch_recurse)(int depth, int left); + +static void +test_exception_throw_nonstd(int depth) +{ + throw exception_t(depth); +} + +static void +test_exception_throw_std() +{ + throw std::runtime_error("Thrown from an extension."); +} + +static PyObject* +test_exception_switch_recurse(int depth, int left) +{ + if (left > 0) { + return p_test_exception_switch_recurse(depth, left - 1); + } + + PyObject* result = NULL; + PyGreenlet* self = PyGreenlet_GetCurrent(); + if (self == NULL) + return NULL; + + try { + if (PyGreenlet_Switch(PyGreenlet_GET_PARENT(self), NULL, NULL) == NULL) { + Py_DECREF(self); + return NULL; + } + p_test_exception_throw_nonstd(depth); + PyErr_SetString(PyExc_RuntimeError, + "throwing C++ exception didn't work"); + } + catch (const exception_t& e) { + if (e.depth != depth) + PyErr_SetString(PyExc_AssertionError, "depth mismatch"); + else + result = PyLong_FromLong(depth); + } + catch (...) 
{ + PyErr_SetString(PyExc_RuntimeError, "unexpected C++ exception"); + } + + Py_DECREF(self); + return result; +} + +/* test_exception_switch(int depth) + * - recurses depth times + * - switches to parent inside try/catch block + * - throws an exception (expected to be caught in the same function) + * - verifies depth matches (exceptions shouldn't be caught in other greenlets) + */ +static PyObject* +test_exception_switch(PyObject* UNUSED(self), PyObject* args) +{ + int depth; + if (!PyArg_ParseTuple(args, "i", &depth)) + return NULL; + return p_test_exception_switch_recurse(depth, depth); +} + + +static PyObject* +py_test_exception_throw_nonstd(PyObject* self, PyObject* args) +{ + if (!PyArg_ParseTuple(args, "")) + return NULL; + p_test_exception_throw_nonstd(0); + PyErr_SetString(PyExc_AssertionError, "unreachable code running after throw"); + return NULL; +} + +static PyObject* +py_test_exception_throw_std(PyObject* self, PyObject* args) +{ + if (!PyArg_ParseTuple(args, "")) + return NULL; + p_test_exception_throw_std(); + PyErr_SetString(PyExc_AssertionError, "unreachable code running after throw"); + return NULL; +} + +static PyObject* +py_test_call(PyObject* self, PyObject* arg) +{ + PyObject* noargs = PyTuple_New(0); + PyObject* ret = PyObject_Call(arg, noargs, nullptr); + Py_DECREF(noargs); + return ret; +} + + + +/* test_exception_switch_and_do_in_g2(g2func) + * - creates new greenlet g2 to run g2func + * - switches to g2 inside try/catch block + * - verifies that no exception has been caught + * + * It is used together with test_exception_throw to verify that unhandled + * exceptions thrown in one greenlet do not propagate to other greenlets nor + * segfault the process. + */ +static PyObject* +test_exception_switch_and_do_in_g2(PyObject* self, PyObject* args) +{ + PyObject* g2func = NULL; + PyObject* result = NULL; + + if (!PyArg_ParseTuple(args, "O", &g2func)) + return NULL; + PyGreenlet* g2 = PyGreenlet_New(g2func, NULL); + if (!g2) { + return NULL; + } + + try { + result = PyGreenlet_Switch(g2, NULL, NULL); + if (!result) { + return NULL; + } + } + catch (const exception_t& e) { + /* if we are here, the memory may already be corrupted and the program + * might crash before the Python-level exception below gets printed. + * -> print something to stderr to make it clear that we had entered + * this catch block. + * See comments in inner_bootstrap() + */ +#if defined(WIN32) || defined(_WIN32) + fprintf(stderr, "C++ exception unexpectedly caught in g1\n"); + PyErr_SetString(PyExc_AssertionError, "C++ exception unexpectedly caught in g1"); + Py_XDECREF(result); + return NULL; +#else + throw; +#endif + } + + Py_XDECREF(result); + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"test_exception_switch", + (PyCFunction)&test_exception_switch, + METH_VARARGS, + "Switches to parent twice, to test exception handling and greenlet " + "switching."}, + {"test_exception_switch_and_do_in_g2", + (PyCFunction)&test_exception_switch_and_do_in_g2, + METH_VARARGS, + "Creates new greenlet g2 to run g2func and switches to it inside try/catch " + "block. Used together with test_exception_throw to verify that unhandled " + "C++ exceptions thrown in a greenlet do not corrupt memory."}, + {"test_exception_throw_nonstd", + (PyCFunction)&py_test_exception_throw_nonstd, + METH_VARARGS, + "Throws non-standard C++ exception. Calling this function directly should abort the process."
+    },
+    {"test_exception_throw_std",
+     (PyCFunction)&py_test_exception_throw_std,
+     METH_VARARGS,
+     "Throws a standard C++ exception. Calling this function directly should abort the process."
+    },
+    {"test_call",
+     (PyCFunction)&py_test_call,
+     METH_O,
+     "Call the given callable. Unlike calling it directly, this creates a "
+     "new C-level stack frame, which may be helpful in testing."
+    },
+    {NULL, NULL, 0, NULL}
+};
+
+
+static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT,
+                                       "greenlet.tests._test_extension_cpp",
+                                       NULL,
+                                       0,
+                                       test_methods,
+                                       NULL,
+                                       NULL,
+                                       NULL,
+                                       NULL};
+
+PyMODINIT_FUNC
+PyInit__test_extension_cpp(void)
+{
+    PyObject* module = NULL;
+
+    module = PyModule_Create(&moduledef);
+
+    if (module == NULL) {
+        return NULL;
+    }
+
+    PyGreenlet_Import();
+    if (_PyGreenlet_API == NULL) {
+        return NULL;
+    }
+
+    p_test_exception_throw_nonstd = test_exception_throw_nonstd;
+    p_test_exception_throw_std = test_exception_throw_std;
+    p_test_exception_switch_recurse = test_exception_switch_recurse;
+
+    return module;
+}
diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/_test_extension_cpp.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/greenlet/tests/_test_extension_cpp.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 00000000..e7b30ca4
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/greenlet/tests/_test_extension_cpp.cpython-312-x86_64-linux-gnu.so differ
diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/fail_clearing_run_switches.py b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_clearing_run_switches.py
new file mode 100644
index 00000000..6dd1492f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_clearing_run_switches.py
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+"""
+If we have a run callable passed to the constructor or set as an
+attribute, but we don't actually use that (because ``__getattribute__``
+or the like interferes), then when we clear the callable before beginning
+to run, there's an opportunity for Python code to run.
+
+"""
+import greenlet
+
+g = None
+main = greenlet.getcurrent()
+
+results = []
+
+class RunCallable:
+
+    def __del__(self):
+        results.append(('RunCallable', '__del__'))
+        main.switch('from RunCallable')
+
+
+class G(greenlet.greenlet):
+
+    def __getattribute__(self, name):
+        if name == 'run':
+            results.append(('G.__getattribute__', 'run'))
+            return run_func
+        return object.__getattribute__(self, name)
+
+
+def run_func():
+    results.append(('run_func', 'enter'))
+
+
+g = G(RunCallable())
+# Try to start G. It will get to the point where it deletes
+# its run callable C++ variable in inner_bootstrap. That triggers
+# the __del__ method, which switches back to main before g
+# actually even starts running.
+x = g.switch()
+results.append(('main: g.switch()', x))
+# In the C++ code, this results in g->g_switch() appearing to return, even though
+# it has yet to run.
+print('In main with', x, flush=True)
+g.switch()
+print('RESULTS', results)
diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/fail_cpp_exception.py b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_cpp_exception.py
new file mode 100644
index 00000000..fa4dc2eb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_cpp_exception.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+"""
+Helper for testing that a C++ exception throw aborts the process.
+
+Takes one argument, the name of the function in :mod:`_test_extension_cpp` to call.
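+
+For example, the driver in test_cpp.py runs this script in a child
+process, roughly as follows (invocation illustrative)::
+
+    python fail_cpp_exception.py run_as_greenlet_target
+
+and expects that process to abort rather than exit cleanly.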
+""" +import sys +import greenlet +from greenlet.tests import _test_extension_cpp +print('fail_cpp_exception is running') + +def run_unhandled_exception_in_greenlet_aborts(): + def _(): + _test_extension_cpp.test_exception_switch_and_do_in_g2( + _test_extension_cpp.test_exception_throw_nonstd + ) + g1 = greenlet.greenlet(_) + g1.switch() + + +func_name = sys.argv[1] +try: + func = getattr(_test_extension_cpp, func_name) +except AttributeError: + if func_name == run_unhandled_exception_in_greenlet_aborts.__name__: + func = run_unhandled_exception_in_greenlet_aborts + elif func_name == 'run_as_greenlet_target': + g = greenlet.greenlet(_test_extension_cpp.test_exception_throw_std) + func = g.switch + else: + raise +print('raising', func, flush=True) +func() diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/fail_initialstub_already_started.py b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_initialstub_already_started.py new file mode 100644 index 00000000..c1a44efd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_initialstub_already_started.py @@ -0,0 +1,78 @@ +""" +Testing initialstub throwing an already started exception. +""" + +import greenlet + +a = None +b = None +c = None +main = greenlet.getcurrent() + +# If we switch into a dead greenlet, +# we go looking for its parents. +# if a parent is not yet started, we start it. + +results = [] + +def a_run(*args): + #results.append('A') + results.append(('Begin A', args)) + + +def c_run(): + results.append('Begin C') + b.switch('From C') + results.append('C done') + +class A(greenlet.greenlet): pass + +class B(greenlet.greenlet): + doing_it = False + def __getattribute__(self, name): + if name == 'run' and not self.doing_it: + assert greenlet.getcurrent() is c + self.doing_it = True + results.append('Switch to b from B.__getattribute__ in ' + + type(greenlet.getcurrent()).__name__) + b.switch() + results.append('B.__getattribute__ back from main in ' + + type(greenlet.getcurrent()).__name__) + if name == 'run': + name = '_B_run' + return object.__getattribute__(self, name) + + def _B_run(self, *arg): + results.append(('Begin B', arg)) + results.append('_B_run switching to main') + main.switch('From B') + +class C(greenlet.greenlet): + pass +a = A(a_run) +b = B(parent=a) +c = C(c_run, b) + +# Start a child; while running, it will start B, +# but starting B will ALSO start B. +result = c.switch() +results.append(('main from c', result)) + +# Switch back to C, which was in the middle of switching +# already. This will throw the ``GreenletStartedWhileInPython`` +# exception, which results in parent A getting started (B is finished) +c.switch() + +results.append(('A dead?', a.dead, 'B dead?', b.dead, 'C dead?', c.dead)) + +# A and B should both be dead now. +assert a.dead +assert b.dead +assert not c.dead + +result = c.switch() +results.append(('main from c.2', result)) +# Now C is dead +assert c.dead + +print("RESULTS:", results) diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/fail_slp_switch.py b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_slp_switch.py new file mode 100644 index 00000000..09905269 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_slp_switch.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +""" +A test helper for seeing what happens when slp_switch() +fails. 
+""" +# pragma: no cover + +import greenlet + + +print('fail_slp_switch is running', flush=True) + +runs = [] +def func(): + runs.append(1) + greenlet.getcurrent().parent.switch() + runs.append(2) + greenlet.getcurrent().parent.switch() + runs.append(3) + +g = greenlet._greenlet.UnswitchableGreenlet(func) +g.switch() +assert runs == [1] +g.switch() +assert runs == [1, 2] +g.force_slp_switch_error = True + +# This should crash. +g.switch() diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_three_greenlets.py b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_three_greenlets.py new file mode 100644 index 00000000..e151b19a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_three_greenlets.py @@ -0,0 +1,44 @@ +""" +Uses a trace function to switch greenlets at unexpected times. + +In the trace function, we switch from the current greenlet to another +greenlet, which switches +""" +import greenlet + +g1 = None +g2 = None + +switch_to_g2 = False + +def tracefunc(*args): + print('TRACE', *args) + global switch_to_g2 + if switch_to_g2: + switch_to_g2 = False + g2.switch() + print('\tLEAVE TRACE', *args) + +def g1_run(): + print('In g1_run') + global switch_to_g2 + switch_to_g2 = True + from_parent = greenlet.getcurrent().parent.switch() + print('Return to g1_run') + print('From parent', from_parent) + +def g2_run(): + #g1.switch() + greenlet.getcurrent().parent.switch() + +greenlet.settrace(tracefunc) + +g1 = greenlet.greenlet(g1_run) +g2 = greenlet.greenlet(g2_run) + +# This switch didn't actually finish! +# And if it did, it would raise TypeError +# because g1_run() doesn't take any arguments. +g1.switch(1) +print('Back in main') +g1.switch(2) diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_three_greenlets2.py b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_three_greenlets2.py new file mode 100644 index 00000000..1f6b66bc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_three_greenlets2.py @@ -0,0 +1,55 @@ +""" +Like fail_switch_three_greenlets, but the call into g1_run would actually be +valid. 
+""" +import greenlet + +g1 = None +g2 = None + +switch_to_g2 = True + +results = [] + +def tracefunc(*args): + results.append(('trace', args[0])) + print('TRACE', *args) + global switch_to_g2 + if switch_to_g2: + switch_to_g2 = False + g2.switch('g2 from tracefunc') + print('\tLEAVE TRACE', *args) + +def g1_run(arg): + results.append(('g1 arg', arg)) + print('In g1_run') + from_parent = greenlet.getcurrent().parent.switch('from g1_run') + results.append(('g1 from parent', from_parent)) + return 'g1 done' + +def g2_run(arg): + #g1.switch() + results.append(('g2 arg', arg)) + parent = greenlet.getcurrent().parent.switch('from g2_run') + global switch_to_g2 + switch_to_g2 = False + results.append(('g2 from parent', parent)) + return 'g2 done' + + +greenlet.settrace(tracefunc) + +g1 = greenlet.greenlet(g1_run) +g2 = greenlet.greenlet(g2_run) + +x = g1.switch('g1 from main') +results.append(('main g1', x)) +print('Back in main', x) +x = g1.switch('g2 from main') +results.append(('main g2', x)) +print('back in amain again', x) +x = g1.switch('g1 from main 2') +results.append(('main g1.2', x)) +x = g2.switch() +results.append(('main g2.2', x)) +print("RESULTS:", results) diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_two_greenlets.py b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_two_greenlets.py new file mode 100644 index 00000000..3e52345a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_two_greenlets.py @@ -0,0 +1,41 @@ +""" +Uses a trace function to switch greenlets at unexpected times. + +In the trace function, we switch from the current greenlet to another +greenlet, which switches +""" +import greenlet + +g1 = None +g2 = None + +switch_to_g2 = False + +def tracefunc(*args): + print('TRACE', *args) + global switch_to_g2 + if switch_to_g2: + switch_to_g2 = False + g2.switch() + print('\tLEAVE TRACE', *args) + +def g1_run(): + print('In g1_run') + global switch_to_g2 + switch_to_g2 = True + greenlet.getcurrent().parent.switch() + print('Return to g1_run') + print('Falling off end of g1_run') + +def g2_run(): + g1.switch() + print('Falling off end of g2') + +greenlet.settrace(tracefunc) + +g1 = greenlet.greenlet(g1_run) +g2 = greenlet.greenlet(g2_run) + +g1.switch() +print('Falling off end of main') +g2.switch() diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/leakcheck.py b/.venv/lib/python3.12/site-packages/greenlet/tests/leakcheck.py new file mode 100644 index 00000000..a5152fb2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/leakcheck.py @@ -0,0 +1,319 @@ +# Copyright (c) 2018 gevent community +# Copyright (c) 2021 greenlet community +# +# This was originally part of gevent's test suite. The main author +# (Jason Madden) vendored a copy of it into greenlet. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import print_function + +import os +import sys +import gc + +from functools import wraps +import unittest + + +import objgraph + +# graphviz 0.18 (Nov 7 2021), available only on Python 3.6 and newer, +# has added type hints (sigh). It wants to use ``typing.Literal`` for +# some stuff, but that's only available on Python 3.9+. If that's not +# found, it creates a ``unittest.mock.MagicMock`` object and annotates +# with that. These are GC'able objects, and doing almost *anything* +# with them results in an explosion of objects. For example, trying to +# compare them for equality creates new objects. This causes our +# leakchecks to fail, with reports like: +# +# greenlet.tests.leakcheck.LeakCheckError: refcount increased by [337, 1333, 343, 430, 530, 643, 769] +# _Call 1820 +546 +# dict 4094 +76 +# MagicProxy 585 +73 +# tuple 2693 +66 +# _CallList 24 +3 +# weakref 1441 +1 +# function 5996 +1 +# type 736 +1 +# cell 592 +1 +# MagicMock 8 +1 +# +# To avoid this, we *could* filter this type of object out early. In +# principle it could leak, but we don't use mocks in greenlet, so it +# doesn't leak from us. However, a further issue is that ``MagicMock`` +# objects have subobjects that are also GC'able, like ``_Call``, and +# those create new mocks of their own too. So we'd have to filter them +# as well, and they're not public. That's OK, we can workaround the +# problem by being very careful to never compare by equality or other +# user-defined operators, only using object identity or other builtin +# functions. + +RUNNING_ON_GITHUB_ACTIONS = os.environ.get('GITHUB_ACTIONS') +RUNNING_ON_TRAVIS = os.environ.get('TRAVIS') or RUNNING_ON_GITHUB_ACTIONS +RUNNING_ON_APPVEYOR = os.environ.get('APPVEYOR') +RUNNING_ON_CI = RUNNING_ON_TRAVIS or RUNNING_ON_APPVEYOR +RUNNING_ON_MANYLINUX = os.environ.get('GREENLET_MANYLINUX') +SKIP_LEAKCHECKS = RUNNING_ON_MANYLINUX or os.environ.get('GREENLET_SKIP_LEAKCHECKS') +SKIP_FAILING_LEAKCHECKS = os.environ.get('GREENLET_SKIP_FAILING_LEAKCHECKS') +ONLY_FAILING_LEAKCHECKS = os.environ.get('GREENLET_ONLY_FAILING_LEAKCHECKS') + +def ignores_leakcheck(func): + """ + Ignore the given object during leakchecks. + + Can be applied to a method, in which case the method will run, but + will not be subject to leak checks. + + If applied to a class, the entire class will be skipped during leakchecks. This + is intended to be used for classes that are very slow and cause problems such as + test timeouts; typically it will be used for classes that are subclasses of a base + class and specify variants of behaviour (such as pool sizes). + """ + func.ignore_leakcheck = True + return func + +def fails_leakcheck(func): + """ + Mark that the function is known to leak. 
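+
+    Usage sketch (the test name is illustrative)::
+
+        @fails_leakcheck
+        def test_known_to_leak(self):
+            ...
+
+    The decorated test still runs; the checker treats a leak as the
+    expected outcome and complains only if the leak does *not* occur.
+    When ``GREENLET_SKIP_FAILING_LEAKCHECKS`` is set, such tests are
+    skipped entirely.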
+ """ + func.fails_leakcheck = True + if SKIP_FAILING_LEAKCHECKS: + func = unittest.skip("Skipping known failures")(func) + return func + +class LeakCheckError(AssertionError): + pass + +if hasattr(sys, 'getobjects'): + # In a Python build with ``--with-trace-refs``, make objgraph + # trace *all* the objects, not just those that are tracked by the + # GC + class _MockGC(object): + def get_objects(self): + return sys.getobjects(0) # pylint:disable=no-member + def __getattr__(self, name): + return getattr(gc, name) + objgraph.gc = _MockGC() + fails_strict_leakcheck = fails_leakcheck +else: + def fails_strict_leakcheck(func): + """ + Decorator for a function that is known to fail when running + strict (``sys.getobjects()``) leakchecks. + + This type of leakcheck finds all objects, even those, such as + strings, which are not tracked by the garbage collector. + """ + return func + +class ignores_types_in_strict_leakcheck(object): + def __init__(self, types): + self.types = types + def __call__(self, func): + func.leakcheck_ignore_types = self.types + return func + +class _RefCountChecker(object): + + # Some builtin things that we ignore + # XXX: Those things were ignored by gevent, but they're important here, + # presumably. + IGNORED_TYPES = () #(tuple, dict, types.FrameType, types.TracebackType) + + def __init__(self, testcase, function): + self.testcase = testcase + self.function = function + self.deltas = [] + self.peak_stats = {} + self.ignored_types = () + + # The very first time we are called, we have already been + # self.setUp() by the test runner, so we don't need to do it again. + self.needs_setUp = False + + def _include_object_p(self, obj): + # pylint:disable=too-many-return-statements + # + # See the comment block at the top. We must be careful to + # avoid invoking user-defined operations. + if obj is self: + return False + kind = type(obj) + # ``self._include_object_p == obj`` returns NotImplemented + # for non-function objects, which causes the interpreter + # to try to reverse the order of arguments...which leads + # to the explosion of mock objects. We don't want that, so we implement + # the check manually. + if kind == type(self._include_object_p): + try: + # pylint:disable=not-callable + exact_method_equals = self._include_object_p.__eq__(obj) + except AttributeError: + # Python 2.7 methods may only have __cmp__, and that raises a + # TypeError for non-method arguments + # pylint:disable=no-member + exact_method_equals = self._include_object_p.__cmp__(obj) == 0 + + if exact_method_equals is not NotImplemented and exact_method_equals: + return False + + # Similarly, we need to check identity in our __dict__ to avoid mock explosions. 
+ for x in self.__dict__.values(): + if obj is x: + return False + + + if kind in self.ignored_types or kind in self.IGNORED_TYPES: + return False + + return True + + def _growth(self): + return objgraph.growth(limit=None, peak_stats=self.peak_stats, + filter=self._include_object_p) + + def _report_diff(self, growth): + if not growth: + return "" + + lines = [] + width = max(len(name) for name, _, _ in growth) + for name, count, delta in growth: + lines.append('%-*s%9d %+9d' % (width, name, count, delta)) + + diff = '\n'.join(lines) + return diff + + + def _run_test(self, args, kwargs): + gc_enabled = gc.isenabled() + gc.disable() + + if self.needs_setUp: + self.testcase.setUp() + self.testcase.skipTearDown = False + try: + self.function(self.testcase, *args, **kwargs) + finally: + self.testcase.tearDown() + self.testcase.doCleanups() + self.testcase.skipTearDown = True + self.needs_setUp = True + if gc_enabled: + gc.enable() + + def _growth_after(self): + # Grab post snapshot + # pylint:disable=no-member + if 'urlparse' in sys.modules: + sys.modules['urlparse'].clear_cache() + if 'urllib.parse' in sys.modules: + sys.modules['urllib.parse'].clear_cache() + + return self._growth() + + def _check_deltas(self, growth): + # Return false when we have decided there is no leak, + # true if we should keep looping, raises an assertion + # if we have decided there is a leak. + + deltas = self.deltas + if not deltas: + # We haven't run yet, no data, keep looping + return True + + if gc.garbage: + raise LeakCheckError("Generated uncollectable garbage %r" % (gc.garbage,)) + + + # the following configurations are classified as "no leak" + # [0, 0] + # [x, 0, 0] + # [... a, b, c, d] where a+b+c+d = 0 + # + # the following configurations are classified as "leak" + # [... z, z, z] where z > 0 + + if deltas[-2:] == [0, 0] and len(deltas) in (2, 3): + return False + + if deltas[-3:] == [0, 0, 0]: + return False + + if len(deltas) >= 4 and sum(deltas[-4:]) == 0: + return False + + if len(deltas) >= 3 and deltas[-1] > 0 and deltas[-1] == deltas[-2] and deltas[-2] == deltas[-3]: + diff = self._report_diff(growth) + raise LeakCheckError('refcount increased by %r\n%s' % (deltas, diff)) + + # OK, we don't know for sure yet. Let's search for more + if sum(deltas[-3:]) <= 0 or sum(deltas[-4:]) <= 0 or deltas[-4:].count(0) >= 2: + # this is suspicious, so give a few more runs + limit = 11 + else: + limit = 7 + if len(deltas) >= limit: + raise LeakCheckError('refcount increased by %r\n%s' + % (deltas, + self._report_diff(growth))) + + # We couldn't decide yet, keep going + return True + + def __call__(self, args, kwargs): + for _ in range(3): + gc.collect() + + expect_failure = getattr(self.function, 'fails_leakcheck', False) + if expect_failure: + self.testcase.expect_greenlet_leak = True + self.ignored_types = getattr(self.function, "leakcheck_ignore_types", ()) + + # Capture state before; the incremental will be + # updated by each call to _growth_after + growth = self._growth() + + try: + while self._check_deltas(growth): + self._run_test(args, kwargs) + + growth = self._growth_after() + + self.deltas.append(sum((stat[2] for stat in growth))) + except LeakCheckError: + if not expect_failure: + raise + else: + if expect_failure: + raise LeakCheckError("Expected %s to leak but it did not." 
% (self.function,)) + +def wrap_refcount(method): + if getattr(method, 'ignore_leakcheck', False) or SKIP_LEAKCHECKS: + return method + + @wraps(method) + def wrapper(self, *args, **kwargs): # pylint:disable=too-many-branches + if getattr(self, 'ignore_leakcheck', False): + raise unittest.SkipTest("This class ignored during leakchecks") + if ONLY_FAILING_LEAKCHECKS and not getattr(method, 'fails_leakcheck', False): + raise unittest.SkipTest("Only running tests that fail leakchecks.") + return _RefCountChecker(self, method)(args, kwargs) + + return wrapper diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/test_contextvars.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_contextvars.py new file mode 100644 index 00000000..9a16f671 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_contextvars.py @@ -0,0 +1,310 @@ +from __future__ import print_function + +import gc +import sys +import unittest + +from functools import partial +from unittest import skipUnless +from unittest import skipIf + +from greenlet import greenlet +from greenlet import getcurrent +from . import TestCase + + +try: + from contextvars import Context + from contextvars import ContextVar + from contextvars import copy_context + # From the documentation: + # + # Important: Context Variables should be created at the top module + # level and never in closures. Context objects hold strong + # references to context variables which prevents context variables + # from being properly garbage collected. + ID_VAR = ContextVar("id", default=None) + VAR_VAR = ContextVar("var", default=None) + ContextVar = None +except ImportError: + Context = ContextVar = copy_context = None + +# We don't support testing if greenlet's built-in context var support is disabled. +@skipUnless(Context is not None, "ContextVar not supported") +class ContextVarsTests(TestCase): + def _new_ctx_run(self, *args, **kwargs): + return copy_context().run(*args, **kwargs) + + def _increment(self, greenlet_id, callback, counts, expect): + ctx_var = ID_VAR + if expect is None: + self.assertIsNone(ctx_var.get()) + else: + self.assertEqual(ctx_var.get(), expect) + ctx_var.set(greenlet_id) + for _ in range(2): + counts[ctx_var.get()] += 1 + callback() + + def _test_context(self, propagate_by): + # pylint:disable=too-many-branches + ID_VAR.set(0) + + callback = getcurrent().switch + counts = dict((i, 0) for i in range(5)) + + lets = [ + greenlet(partial( + partial( + copy_context().run, + self._increment + ) if propagate_by == "run" else self._increment, + greenlet_id=i, + callback=callback, + counts=counts, + expect=( + i - 1 if propagate_by == "share" else + 0 if propagate_by in ("set", "run") else None + ) + )) + for i in range(1, 5) + ] + + for let in lets: + if propagate_by == "set": + let.gr_context = copy_context() + elif propagate_by == "share": + let.gr_context = getcurrent().gr_context + + for i in range(2): + counts[ID_VAR.get()] += 1 + for let in lets: + let.switch() + + if propagate_by == "run": + # Must leave each context.run() in reverse order of entry + for let in reversed(lets): + let.switch() + else: + # No context.run(), so fine to exit in any order. + for let in lets: + let.switch() + + for let in lets: + self.assertTrue(let.dead) + # When using run(), we leave the run() as the greenlet dies, + # and there's no context "underneath". When not using run(), + # gr_context still reflects the context the greenlet was + # running in. 
+ if propagate_by == 'run': + self.assertIsNone(let.gr_context) + else: + self.assertIsNotNone(let.gr_context) + + + if propagate_by == "share": + self.assertEqual(counts, {0: 1, 1: 1, 2: 1, 3: 1, 4: 6}) + else: + self.assertEqual(set(counts.values()), set([2])) + + def test_context_propagated_by_context_run(self): + self._new_ctx_run(self._test_context, "run") + + def test_context_propagated_by_setting_attribute(self): + self._new_ctx_run(self._test_context, "set") + + def test_context_not_propagated(self): + self._new_ctx_run(self._test_context, None) + + def test_context_shared(self): + self._new_ctx_run(self._test_context, "share") + + def test_break_ctxvars(self): + let1 = greenlet(copy_context().run) + let2 = greenlet(copy_context().run) + let1.switch(getcurrent().switch) + let2.switch(getcurrent().switch) + # Since let2 entered the current context and let1 exits its own, the + # interpreter emits: + # RuntimeError: cannot exit context: thread state references a different context object + let1.switch() + + def test_not_broken_if_using_attribute_instead_of_context_run(self): + let1 = greenlet(getcurrent().switch) + let2 = greenlet(getcurrent().switch) + let1.gr_context = copy_context() + let2.gr_context = copy_context() + let1.switch() + let2.switch() + let1.switch() + let2.switch() + + def test_context_assignment_while_running(self): + # pylint:disable=too-many-statements + ID_VAR.set(None) + + def target(): + self.assertIsNone(ID_VAR.get()) + self.assertIsNone(gr.gr_context) + + # Context is created on first use + ID_VAR.set(1) + self.assertIsInstance(gr.gr_context, Context) + self.assertEqual(ID_VAR.get(), 1) + self.assertEqual(gr.gr_context[ID_VAR], 1) + + # Clearing the context makes it get re-created as another + # empty context when next used + old_context = gr.gr_context + gr.gr_context = None # assign None while running + self.assertIsNone(ID_VAR.get()) + self.assertIsNone(gr.gr_context) + ID_VAR.set(2) + self.assertIsInstance(gr.gr_context, Context) + self.assertEqual(ID_VAR.get(), 2) + self.assertEqual(gr.gr_context[ID_VAR], 2) + + new_context = gr.gr_context + getcurrent().parent.switch((old_context, new_context)) + # parent switches us back to old_context + + self.assertEqual(ID_VAR.get(), 1) + gr.gr_context = new_context # assign non-None while running + self.assertEqual(ID_VAR.get(), 2) + + getcurrent().parent.switch() + # parent switches us back to no context + self.assertIsNone(ID_VAR.get()) + self.assertIsNone(gr.gr_context) + gr.gr_context = old_context + self.assertEqual(ID_VAR.get(), 1) + + getcurrent().parent.switch() + # parent switches us back to no context + self.assertIsNone(ID_VAR.get()) + self.assertIsNone(gr.gr_context) + + gr = greenlet(target) + + with self.assertRaisesRegex(AttributeError, "can't delete context attribute"): + del gr.gr_context + + self.assertIsNone(gr.gr_context) + old_context, new_context = gr.switch() + self.assertIs(new_context, gr.gr_context) + self.assertEqual(old_context[ID_VAR], 1) + self.assertEqual(new_context[ID_VAR], 2) + self.assertEqual(new_context.run(ID_VAR.get), 2) + gr.gr_context = old_context # assign non-None while suspended + gr.switch() + self.assertIs(gr.gr_context, new_context) + gr.gr_context = None # assign None while suspended + gr.switch() + self.assertIs(gr.gr_context, old_context) + gr.gr_context = None + gr.switch() + self.assertIsNone(gr.gr_context) + + # Make sure there are no reference leaks + gr = None + gc.collect() + self.assertEqual(sys.getrefcount(old_context), 2) + 
self.assertEqual(sys.getrefcount(new_context), 2) + + def test_context_assignment_different_thread(self): + import threading + VAR_VAR.set(None) + ctx = Context() + + is_running = threading.Event() + should_suspend = threading.Event() + did_suspend = threading.Event() + should_exit = threading.Event() + holder = [] + + def greenlet_in_thread_fn(): + VAR_VAR.set(1) + is_running.set() + should_suspend.wait(10) + VAR_VAR.set(2) + getcurrent().parent.switch() + holder.append(VAR_VAR.get()) + + def thread_fn(): + gr = greenlet(greenlet_in_thread_fn) + gr.gr_context = ctx + holder.append(gr) + gr.switch() + did_suspend.set() + should_exit.wait(10) + gr.switch() + del gr + greenlet() # trigger cleanup + + thread = threading.Thread(target=thread_fn, daemon=True) + thread.start() + is_running.wait(10) + gr = holder[0] + + # Can't access or modify context if the greenlet is running + # in a different thread + with self.assertRaisesRegex(ValueError, "running in a different"): + getattr(gr, 'gr_context') + with self.assertRaisesRegex(ValueError, "running in a different"): + gr.gr_context = None + + should_suspend.set() + did_suspend.wait(10) + + # OK to access and modify context if greenlet is suspended + self.assertIs(gr.gr_context, ctx) + self.assertEqual(gr.gr_context[VAR_VAR], 2) + gr.gr_context = None + + should_exit.set() + thread.join(10) + + self.assertEqual(holder, [gr, None]) + + # Context can still be accessed/modified when greenlet is dead: + self.assertIsNone(gr.gr_context) + gr.gr_context = ctx + self.assertIs(gr.gr_context, ctx) + + # Otherwise we leak greenlets on some platforms. + # XXX: Should be able to do this automatically + del holder[:] + gr = None + thread = None + + def test_context_assignment_wrong_type(self): + g = greenlet() + with self.assertRaisesRegex(TypeError, + "greenlet context must be a contextvars.Context or None"): + g.gr_context = self + + +@skipIf(Context is not None, "ContextVar supported") +class NoContextVarsTests(TestCase): + def test_contextvars_errors(self): + let1 = greenlet(getcurrent().switch) + self.assertFalse(hasattr(let1, 'gr_context')) + with self.assertRaises(AttributeError): + getattr(let1, 'gr_context') + + with self.assertRaises(AttributeError): + let1.gr_context = None + + let1.switch() + + with self.assertRaises(AttributeError): + getattr(let1, 'gr_context') + + with self.assertRaises(AttributeError): + let1.gr_context = None + + del let1 + + +if __name__ == '__main__': + unittest.main() diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/test_cpp.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_cpp.py new file mode 100644 index 00000000..2d0cc9c9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_cpp.py @@ -0,0 +1,73 @@ +from __future__ import print_function +from __future__ import absolute_import + +import subprocess +import unittest + +import greenlet +from . import _test_extension_cpp +from . import TestCase +from . 
import WIN + +class CPPTests(TestCase): + def test_exception_switch(self): + greenlets = [] + for i in range(4): + g = greenlet.greenlet(_test_extension_cpp.test_exception_switch) + g.switch(i) + greenlets.append(g) + for i, g in enumerate(greenlets): + self.assertEqual(g.switch(), i) + + def _do_test_unhandled_exception(self, target): + import os + import sys + script = os.path.join( + os.path.dirname(__file__), + 'fail_cpp_exception.py', + ) + args = [sys.executable, script, target.__name__ if not isinstance(target, str) else target] + __traceback_info__ = args + with self.assertRaises(subprocess.CalledProcessError) as exc: + subprocess.check_output( + args, + encoding='utf-8', + stderr=subprocess.STDOUT + ) + + ex = exc.exception + expected_exit = self.get_expected_returncodes_for_aborted_process() + self.assertIn(ex.returncode, expected_exit) + self.assertIn('fail_cpp_exception is running', ex.output) + return ex.output + + + def test_unhandled_nonstd_exception_aborts(self): + # verify that plain unhandled throw aborts + self._do_test_unhandled_exception(_test_extension_cpp.test_exception_throw_nonstd) + + def test_unhandled_std_exception_aborts(self): + # verify that plain unhandled throw aborts + self._do_test_unhandled_exception(_test_extension_cpp.test_exception_throw_std) + + @unittest.skipIf(WIN, "XXX: This does not crash on Windows") + # Meaning the exception is getting lost somewhere... + def test_unhandled_std_exception_as_greenlet_function_aborts(self): + # verify that plain unhandled throw aborts + output = self._do_test_unhandled_exception('run_as_greenlet_target') + self.assertIn( + # We really expect this to be prefixed with "greenlet: Unhandled C++ exception:" + # as added by our handler for std::exception (see TUserGreenlet.cpp), but + # that's not correct everywhere --- our handler never runs before std::terminate + # gets called (for example, on arm32). + 'Thrown from an extension.', + output + ) + + def test_unhandled_exception_in_greenlet_aborts(self): + # verify that unhandled throw called in greenlet aborts too + self._do_test_unhandled_exception('run_unhandled_exception_in_greenlet_aborts') + + +if __name__ == '__main__': + unittest.main() diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/test_extension_interface.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_extension_interface.py new file mode 100644 index 00000000..34b66567 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_extension_interface.py @@ -0,0 +1,115 @@ +from __future__ import print_function +from __future__ import absolute_import + +import sys + +import greenlet +from . import _test_extension +from . import TestCase + +# pylint:disable=c-extension-no-member + +class CAPITests(TestCase): + def test_switch(self): + self.assertEqual( + 50, _test_extension.test_switch(greenlet.greenlet(lambda: 50))) + + def test_switch_kwargs(self): + def adder(x, y): + return x * y + g = greenlet.greenlet(adder) + self.assertEqual(6, _test_extension.test_switch_kwargs(g, x=3, y=2)) + + def test_setparent(self): + # pylint:disable=disallowed-name + def foo(): + def bar(): + greenlet.getcurrent().parent.switch() + + # This final switch should go back to the main greenlet, since + # the test_setparent() function in the C extension should have + # reparented this greenlet. 
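+            # (Presumably via the PyGreenlet_SetParent C API, the C-level
+            # equivalent of assigning ``g.parent`` from Python.)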
+ greenlet.getcurrent().parent.switch() + raise AssertionError("Should never have reached this code") + child = greenlet.greenlet(bar) + child.switch() + greenlet.getcurrent().parent.switch(child) + greenlet.getcurrent().parent.throw( + AssertionError("Should never reach this code")) + foo_child = greenlet.greenlet(foo).switch() + self.assertEqual(None, _test_extension.test_setparent(foo_child)) + + def test_getcurrent(self): + _test_extension.test_getcurrent() + + def test_new_greenlet(self): + self.assertEqual(-15, _test_extension.test_new_greenlet(lambda: -15)) + + def test_raise_greenlet_dead(self): + self.assertRaises( + greenlet.GreenletExit, _test_extension.test_raise_dead_greenlet) + + def test_raise_greenlet_error(self): + self.assertRaises( + greenlet.error, _test_extension.test_raise_greenlet_error) + + def test_throw(self): + seen = [] + + def foo(): # pylint:disable=disallowed-name + try: + greenlet.getcurrent().parent.switch() + except ValueError: + seen.append(sys.exc_info()[1]) + except greenlet.GreenletExit: + raise AssertionError + g = greenlet.greenlet(foo) + g.switch() + _test_extension.test_throw(g) + self.assertEqual(len(seen), 1) + self.assertTrue( + isinstance(seen[0], ValueError), + "ValueError was not raised in foo()") + self.assertEqual( + str(seen[0]), + 'take that sucka!', + "message doesn't match") + + def test_non_traceback_param(self): + with self.assertRaises(TypeError) as exc: + _test_extension.test_throw_exact( + greenlet.getcurrent(), + Exception, + Exception(), + self + ) + self.assertEqual(str(exc.exception), + "throw() third argument must be a traceback object") + + def test_instance_of_wrong_type(self): + with self.assertRaises(TypeError) as exc: + _test_extension.test_throw_exact( + greenlet.getcurrent(), + Exception(), + BaseException(), + None, + ) + + self.assertEqual(str(exc.exception), + "instance exception may not have a separate value") + + def test_not_throwable(self): + with self.assertRaises(TypeError) as exc: + _test_extension.test_throw_exact( + greenlet.getcurrent(), + "abc", + None, + None, + ) + self.assertEqual(str(exc.exception), + "exceptions must be classes, or instances, not str") + + +if __name__ == '__main__': + import unittest + unittest.main() diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/test_gc.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_gc.py new file mode 100644 index 00000000..994addb9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_gc.py @@ -0,0 +1,86 @@ +import gc + +import weakref + +import greenlet + + +from . import TestCase +from .leakcheck import fails_leakcheck +# These only work with greenlet gc support +# which is no longer optional. 
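+# (GREENLET_USE_GC is a constant the module exports; in modern releases
+# it is always true, so this assert documents an assumption rather than
+# guarding a real configuration choice.)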
+assert greenlet.GREENLET_USE_GC + +class GCTests(TestCase): + def test_dead_circular_ref(self): + o = weakref.ref(greenlet.greenlet(greenlet.getcurrent).switch()) + gc.collect() + if o() is not None: + import sys + print("O IS NOT NONE.", sys.getrefcount(o())) + self.assertIsNone(o()) + self.assertFalse(gc.garbage, gc.garbage) + + def test_circular_greenlet(self): + class circular_greenlet(greenlet.greenlet): + self = None + o = circular_greenlet() + o.self = o + o = weakref.ref(o) + gc.collect() + self.assertIsNone(o()) + self.assertFalse(gc.garbage, gc.garbage) + + def test_inactive_ref(self): + class inactive_greenlet(greenlet.greenlet): + def __init__(self): + greenlet.greenlet.__init__(self, run=self.run) + + def run(self): + pass + o = inactive_greenlet() + o = weakref.ref(o) + gc.collect() + self.assertIsNone(o()) + self.assertFalse(gc.garbage, gc.garbage) + + @fails_leakcheck + def test_finalizer_crash(self): + # This test is designed to crash when active greenlets + # are made garbage collectable, until the underlying + # problem is resolved. How does it work: + # - order of object creation is important + # - array is created first, so it is moved to unreachable first + # - we create a cycle between a greenlet and this array + # - we create an object that participates in gc, is only + # referenced by a greenlet, and would corrupt gc lists + # on destruction, the easiest is to use an object with + # a finalizer + # - because array is the first object in unreachable it is + # cleared first, which causes all references to greenlet + # to disappear and causes greenlet to be destroyed, but since + # it is still live it causes a switch during gc, which causes + # an object with finalizer to be destroyed, which causes stack + # corruption and then a crash + + class object_with_finalizer(object): + def __del__(self): + pass + array = [] + parent = greenlet.getcurrent() + def greenlet_body(): + greenlet.getcurrent().object = object_with_finalizer() + try: + parent.switch() + except greenlet.GreenletExit: + print("Got greenlet exit!") + finally: + del greenlet.getcurrent().object + g = greenlet.greenlet(greenlet_body) + g.array = array + array.append(g) + g.switch() + del array + del g + greenlet.getcurrent() + gc.collect() diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/test_generator.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_generator.py new file mode 100644 index 00000000..ca4a644b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_generator.py @@ -0,0 +1,59 @@ + +from greenlet import greenlet + +from . 
import TestCase + +class genlet(greenlet): + parent = None + def __init__(self, *args, **kwds): + self.args = args + self.kwds = kwds + + def run(self): + fn, = self.fn + fn(*self.args, **self.kwds) + + def __iter__(self): + return self + + def __next__(self): + self.parent = greenlet.getcurrent() + result = self.switch() + if self: + return result + + raise StopIteration + + next = __next__ + + +def Yield(value): + g = greenlet.getcurrent() + while not isinstance(g, genlet): + if g is None: + raise RuntimeError('yield outside a genlet') + g = g.parent + g.parent.switch(value) + + +def generator(func): + class Generator(genlet): + fn = (func,) + return Generator + +# ____________________________________________________________ + + +class GeneratorTests(TestCase): + def test_generator(self): + seen = [] + + def g(n): + for i in range(n): + seen.append(i) + Yield(i) + g = generator(g) + for _ in range(3): + for j in g(5): + seen.append(j) + self.assertEqual(seen, 3 * [0, 0, 1, 1, 2, 2, 3, 3, 4, 4]) diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/test_generator_nested.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_generator_nested.py new file mode 100644 index 00000000..8d752a63 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_generator_nested.py @@ -0,0 +1,168 @@ + +from greenlet import greenlet +from . import TestCase +from .leakcheck import fails_leakcheck + +class genlet(greenlet): + parent = None + def __init__(self, *args, **kwds): + self.args = args + self.kwds = kwds + self.child = None + + def run(self): + # Note the function is packed in a tuple + # to avoid creating a bound method for it. + fn, = self.fn + fn(*self.args, **self.kwds) + + def __iter__(self): + return self + + def set_child(self, child): + self.child = child + + def __next__(self): + if self.child: + child = self.child + while child.child: + tmp = child + child = child.child + tmp.child = None + + result = child.switch() + else: + self.parent = greenlet.getcurrent() + result = self.switch() + + if self: + return result + + raise StopIteration + + next = __next__ + +def Yield(value, level=1): + g = greenlet.getcurrent() + + while level != 0: + if not isinstance(g, genlet): + raise RuntimeError('yield outside a genlet') + if level > 1: + g.parent.set_child(g) + g = g.parent + level -= 1 + + g.switch(value) + + +def Genlet(func): + class TheGenlet(genlet): + fn = (func,) + return TheGenlet + +# ____________________________________________________________ + + +def g1(n, seen): + for i in range(n): + seen.append(i + 1) + yield i + + +def g2(n, seen): + for i in range(n): + seen.append(i + 1) + Yield(i) + +g2 = Genlet(g2) + + +def nested(i): + Yield(i) + + +def g3(n, seen): + for i in range(n): + seen.append(i + 1) + nested(i) +g3 = Genlet(g3) + + +def a(n): + if n == 0: + return + for ii in ax(n - 1): + Yield(ii) + Yield(n) +ax = Genlet(a) + + +def perms(l): + if len(l) > 1: + for e in l: + # No syntactical sugar for generator expressions + x = [Yield([e] + p) for p in perms([x for x in l if x != e])] + assert x + else: + Yield(l) +perms = Genlet(perms) + + +def gr1(n): + for ii in range(1, n): + Yield(ii) + Yield(ii * ii, 2) + +gr1 = Genlet(gr1) + + +def gr2(n, seen): + for ii in gr1(n): + seen.append(ii) + +gr2 = Genlet(gr2) + + +class NestedGeneratorTests(TestCase): + def test_layered_genlets(self): + seen = [] + for ii in gr2(5, seen): + seen.append(ii) + self.assertEqual(seen, [1, 1, 2, 4, 3, 9, 4, 16]) + + @fails_leakcheck + def test_permutations(self): + 
gen_perms = perms(list(range(4))) + permutations = list(gen_perms) + self.assertEqual(len(permutations), 4 * 3 * 2 * 1) + self.assertIn([0, 1, 2, 3], permutations) + self.assertIn([3, 2, 1, 0], permutations) + res = [] + for ii in zip(perms(list(range(4))), perms(list(range(3)))): + res.append(ii) + self.assertEqual( + res, + [([0, 1, 2, 3], [0, 1, 2]), ([0, 1, 3, 2], [0, 2, 1]), + ([0, 2, 1, 3], [1, 0, 2]), ([0, 2, 3, 1], [1, 2, 0]), + ([0, 3, 1, 2], [2, 0, 1]), ([0, 3, 2, 1], [2, 1, 0])]) + # XXX Test to make sure we are working as a generator expression + + def test_genlet_simple(self): + for g in g1, g2, g3: + seen = [] + for _ in range(3): + for j in g(5, seen): + seen.append(j) + self.assertEqual(seen, 3 * [1, 0, 2, 1, 3, 2, 4, 3, 5, 4]) + + def test_genlet_bad(self): + try: + Yield(10) + except RuntimeError: + pass + + def test_nested_genlets(self): + seen = [] + for ii in ax(5): + seen.append(ii) diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/test_greenlet.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_greenlet.py new file mode 100644 index 00000000..c4aabea7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_greenlet.py @@ -0,0 +1,1324 @@ +import gc +import sys +import time +import threading +import unittest + +from abc import ABCMeta +from abc import abstractmethod + +import greenlet +from greenlet import greenlet as RawGreenlet +from . import TestCase +from . import RUNNING_ON_MANYLINUX +from . import PY313 +from .leakcheck import fails_leakcheck + + +# We manually manage locks in many tests +# pylint:disable=consider-using-with +# pylint:disable=too-many-public-methods +# This module is quite large. +# TODO: Refactor into separate test files. For example, +# put all the regression tests that used to produce +# crashes in test_greenlet_no_crash; put tests that DO deliberately crash +# the interpreter into test_greenlet_crash. +# pylint:disable=too-many-lines + +class SomeError(Exception): + pass + + +def fmain(seen): + try: + greenlet.getcurrent().parent.switch() + except: + seen.append(sys.exc_info()[0]) + raise + raise SomeError + + +def send_exception(g, exc): + # note: send_exception(g, exc) can be now done with g.throw(exc). + # the purpose of this test is to explicitly check the propagation rules. 
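+    # The rule being exercised: a greenlet that dies with an unhandled
+    # exception propagates that exception to its parent. ``crasher`` below
+    # runs in a fresh greenlet whose parent is ``g``, so the raise surfaces
+    # in ``g`` rather than in the caller of send_exception().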
+ def crasher(exc): + raise exc + g1 = RawGreenlet(crasher, parent=g) + g1.switch(exc) + + +class TestGreenlet(TestCase): + + def _do_simple_test(self): + lst = [] + + def f(): + lst.append(1) + greenlet.getcurrent().parent.switch() + lst.append(3) + g = RawGreenlet(f) + lst.append(0) + g.switch() + lst.append(2) + g.switch() + lst.append(4) + self.assertEqual(lst, list(range(5))) + + def test_simple(self): + self._do_simple_test() + + def test_switch_no_run_raises_AttributeError(self): + g = RawGreenlet() + with self.assertRaises(AttributeError) as exc: + g.switch() + + self.assertIn("run", str(exc.exception)) + + def test_throw_no_run_raises_AttributeError(self): + g = RawGreenlet() + with self.assertRaises(AttributeError) as exc: + g.throw(SomeError) + + self.assertIn("run", str(exc.exception)) + + def test_parent_equals_None(self): + g = RawGreenlet(parent=None) + self.assertIsNotNone(g) + self.assertIs(g.parent, greenlet.getcurrent()) + + def test_run_equals_None(self): + g = RawGreenlet(run=None) + self.assertIsNotNone(g) + self.assertIsNone(g.run) + + def test_two_children(self): + lst = [] + + def f(): + lst.append(1) + greenlet.getcurrent().parent.switch() + lst.extend([1, 1]) + g = RawGreenlet(f) + h = RawGreenlet(f) + g.switch() + self.assertEqual(len(lst), 1) + h.switch() + self.assertEqual(len(lst), 2) + h.switch() + self.assertEqual(len(lst), 4) + self.assertEqual(h.dead, True) + g.switch() + self.assertEqual(len(lst), 6) + self.assertEqual(g.dead, True) + + def test_two_recursive_children(self): + lst = [] + + def f(): + lst.append('b') + greenlet.getcurrent().parent.switch() + + def g(): + lst.append('a') + g = RawGreenlet(f) + g.switch() + lst.append('c') + + g = RawGreenlet(g) + self.assertEqual(sys.getrefcount(g), 2) + g.switch() + self.assertEqual(lst, ['a', 'b', 'c']) + # Just the one in this frame, plus the one on the stack we pass to the function + self.assertEqual(sys.getrefcount(g), 2) + + def test_threads(self): + success = [] + + def f(): + self._do_simple_test() + success.append(True) + ths = [threading.Thread(target=f) for i in range(10)] + for th in ths: + th.start() + for th in ths: + th.join(10) + self.assertEqual(len(success), len(ths)) + + def test_exception(self): + seen = [] + g1 = RawGreenlet(fmain) + g2 = RawGreenlet(fmain) + g1.switch(seen) + g2.switch(seen) + g2.parent = g1 + + self.assertEqual(seen, []) + #with self.assertRaises(SomeError): + # p("***Switching back") + # g2.switch() + # Creating this as a bound method can reveal bugs that + # are hidden on newer versions of Python that avoid creating + # bound methods for direct expressions; IOW, don't use the `with` + # form! 
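+        # That is, prefer
+        #     self.assertRaises(SomeError, g2.switch)
+        # over
+        #     with self.assertRaises(SomeError):
+        #         g2.switch()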
+ self.assertRaises(SomeError, g2.switch) + self.assertEqual(seen, [SomeError]) + + value = g2.switch() + self.assertEqual(value, ()) + self.assertEqual(seen, [SomeError]) + + value = g2.switch(25) + self.assertEqual(value, 25) + self.assertEqual(seen, [SomeError]) + + + def test_send_exception(self): + seen = [] + g1 = RawGreenlet(fmain) + g1.switch(seen) + self.assertRaises(KeyError, send_exception, g1, KeyError) + self.assertEqual(seen, [KeyError]) + + def test_dealloc(self): + seen = [] + g1 = RawGreenlet(fmain) + g2 = RawGreenlet(fmain) + g1.switch(seen) + g2.switch(seen) + self.assertEqual(seen, []) + del g1 + gc.collect() + self.assertEqual(seen, [greenlet.GreenletExit]) + del g2 + gc.collect() + self.assertEqual(seen, [greenlet.GreenletExit, greenlet.GreenletExit]) + + def test_dealloc_catches_GreenletExit_throws_other(self): + def run(): + try: + greenlet.getcurrent().parent.switch() + except greenlet.GreenletExit: + raise SomeError from None + + g = RawGreenlet(run) + g.switch() + # Destroying the only reference to the greenlet causes it + # to get GreenletExit; when it in turn raises, even though we're the parent + # we don't get the exception, it just gets printed. + # When we run on 3.8 only, we can use sys.unraisablehook + oldstderr = sys.stderr + from io import StringIO + stderr = sys.stderr = StringIO() + try: + del g + finally: + sys.stderr = oldstderr + + v = stderr.getvalue() + self.assertIn("Exception", v) + self.assertIn('ignored', v) + self.assertIn("SomeError", v) + + + @unittest.skipIf( + PY313 and RUNNING_ON_MANYLINUX, + "Sometimes flaky (getting one GreenletExit in the second list)" + # Probably due to funky timing interactions? + # TODO: FIXME Make that work. + ) + + def test_dealloc_other_thread(self): + seen = [] + someref = [] + + bg_glet_created_running_and_no_longer_ref_in_bg = threading.Event() + fg_ref_released = threading.Event() + bg_should_be_clear = threading.Event() + ok_to_exit_bg_thread = threading.Event() + + def f(): + g1 = RawGreenlet(fmain) + g1.switch(seen) + someref.append(g1) + del g1 + gc.collect() + + bg_glet_created_running_and_no_longer_ref_in_bg.set() + fg_ref_released.wait(3) + + RawGreenlet() # trigger release + bg_should_be_clear.set() + ok_to_exit_bg_thread.wait(3) + RawGreenlet() # One more time + + t = threading.Thread(target=f) + t.start() + bg_glet_created_running_and_no_longer_ref_in_bg.wait(10) + + self.assertEqual(seen, []) + self.assertEqual(len(someref), 1) + del someref[:] + gc.collect() + # g1 is not released immediately because it's from another thread + self.assertEqual(seen, []) + fg_ref_released.set() + bg_should_be_clear.wait(3) + try: + self.assertEqual(seen, [greenlet.GreenletExit]) + finally: + ok_to_exit_bg_thread.set() + t.join(10) + del seen[:] + del someref[:] + + def test_frame(self): + def f1(): + f = sys._getframe(0) # pylint:disable=protected-access + self.assertEqual(f.f_back, None) + greenlet.getcurrent().parent.switch(f) + return "meaning of life" + g = RawGreenlet(f1) + frame = g.switch() + self.assertTrue(frame is g.gr_frame) + self.assertTrue(g) + + from_g = g.switch() + self.assertFalse(g) + self.assertEqual(from_g, 'meaning of life') + self.assertEqual(g.gr_frame, None) + + def test_thread_bug(self): + def runner(x): + g = RawGreenlet(lambda: time.sleep(x)) + g.switch() + t1 = threading.Thread(target=runner, args=(0.2,)) + t2 = threading.Thread(target=runner, args=(0.3,)) + t1.start() + t2.start() + t1.join(10) + t2.join(10) + + def test_switch_kwargs(self): + def run(a, b): + self.assertEqual(a, 4) 
+ self.assertEqual(b, 2) + return 42 + x = RawGreenlet(run).switch(a=4, b=2) + self.assertEqual(x, 42) + + def test_switch_kwargs_to_parent(self): + def run(x): + greenlet.getcurrent().parent.switch(x=x) + greenlet.getcurrent().parent.switch(2, x=3) + return x, x ** 2 + g = RawGreenlet(run) + self.assertEqual({'x': 3}, g.switch(3)) + self.assertEqual(((2,), {'x': 3}), g.switch()) + self.assertEqual((3, 9), g.switch()) + + def test_switch_to_another_thread(self): + data = {} + created_event = threading.Event() + done_event = threading.Event() + + def run(): + data['g'] = RawGreenlet(lambda: None) + created_event.set() + done_event.wait(10) + thread = threading.Thread(target=run) + thread.start() + created_event.wait(10) + with self.assertRaises(greenlet.error): + data['g'].switch() + done_event.set() + thread.join(10) + # XXX: Should handle this automatically + data.clear() + + def test_exc_state(self): + def f(): + try: + raise ValueError('fun') + except: # pylint:disable=bare-except + exc_info = sys.exc_info() + RawGreenlet(h).switch() + self.assertEqual(exc_info, sys.exc_info()) + + def h(): + self.assertEqual(sys.exc_info(), (None, None, None)) + + RawGreenlet(f).switch() + + def test_instance_dict(self): + def f(): + greenlet.getcurrent().test = 42 + def deldict(g): + del g.__dict__ + def setdict(g, value): + g.__dict__ = value + g = RawGreenlet(f) + self.assertEqual(g.__dict__, {}) + g.switch() + self.assertEqual(g.test, 42) + self.assertEqual(g.__dict__, {'test': 42}) + g.__dict__ = g.__dict__ + self.assertEqual(g.__dict__, {'test': 42}) + self.assertRaises(TypeError, deldict, g) + self.assertRaises(TypeError, setdict, g, 42) + + def test_running_greenlet_has_no_run(self): + has_run = [] + def func(): + has_run.append( + hasattr(greenlet.getcurrent(), 'run') + ) + + g = RawGreenlet(func) + g.switch() + self.assertEqual(has_run, [False]) + + def test_deepcopy(self): + import copy + self.assertRaises(TypeError, copy.copy, RawGreenlet()) + self.assertRaises(TypeError, copy.deepcopy, RawGreenlet()) + + def test_parent_restored_on_kill(self): + hub = RawGreenlet(lambda: None) + main = greenlet.getcurrent() + result = [] + def worker(): + try: + # Wait to be killed by going back to the test. + main.switch() + except greenlet.GreenletExit: + # Resurrect and switch to parent + result.append(greenlet.getcurrent().parent) + result.append(greenlet.getcurrent()) + hub.switch() + g = RawGreenlet(worker, parent=hub) + g.switch() + # delete the only reference, thereby raising GreenletExit + del g + self.assertTrue(result) + self.assertIs(result[0], main) + self.assertIs(result[1].parent, hub) + # Delete them, thereby breaking the cycle between the greenlet + # and the frame, which otherwise would never be collectable + # XXX: We should be able to automatically fix this. 
+ del result[:] + hub = None + main = None + + def test_parent_return_failure(self): + # No run causes AttributeError on switch + g1 = RawGreenlet() + # Greenlet that implicitly switches to parent + g2 = RawGreenlet(lambda: None, parent=g1) + # AttributeError should propagate to us, no fatal errors + with self.assertRaises(AttributeError): + g2.switch() + + def test_throw_exception_not_lost(self): + class mygreenlet(RawGreenlet): + def __getattribute__(self, name): + try: + raise Exception # pylint:disable=broad-exception-raised + except: # pylint:disable=bare-except + pass + return RawGreenlet.__getattribute__(self, name) + g = mygreenlet(lambda: None) + self.assertRaises(SomeError, g.throw, SomeError()) + + @fails_leakcheck + def _do_test_throw_to_dead_thread_doesnt_crash(self, wait_for_cleanup=False): + result = [] + def worker(): + greenlet.getcurrent().parent.switch() + + def creator(): + g = RawGreenlet(worker) + g.switch() + result.append(g) + if wait_for_cleanup: + # Let this greenlet eventually be cleaned up. + g.switch() + greenlet.getcurrent() + t = threading.Thread(target=creator) + t.start() + t.join(10) + del t + # But, depending on the operating system, the thread + # deallocator may not actually have run yet! So we can't be + # sure about the error message unless we wait. + if wait_for_cleanup: + self.wait_for_pending_cleanups() + with self.assertRaises(greenlet.error) as exc: + result[0].throw(SomeError) + + if not wait_for_cleanup: + s = str(exc.exception) + self.assertTrue( + s == "cannot switch to a different thread (which happens to have exited)" + or 'Cannot switch' in s + ) + else: + self.assertEqual( + str(exc.exception), + "cannot switch to a different thread (which happens to have exited)", + ) + + if hasattr(result[0].gr_frame, 'clear'): + # The frame is actually executing (it thinks), we can't clear it. + with self.assertRaises(RuntimeError): + result[0].gr_frame.clear() + # Unfortunately, this doesn't actually clear the references, they're in the + # fast local array. + if not wait_for_cleanup: + # f_locals has no clear method in Python 3.13 + if hasattr(result[0].gr_frame.f_locals, 'clear'): + result[0].gr_frame.f_locals.clear() + else: + self.assertIsNone(result[0].gr_frame) + + del creator + worker = None + del result[:] + # XXX: we ought to be able to automatically fix this. 
+ # See issue 252 + self.expect_greenlet_leak = True # direct us not to wait for it to go away + + @fails_leakcheck + def test_throw_to_dead_thread_doesnt_crash(self): + self._do_test_throw_to_dead_thread_doesnt_crash() + + def test_throw_to_dead_thread_doesnt_crash_wait(self): + self._do_test_throw_to_dead_thread_doesnt_crash(True) + + @fails_leakcheck + def test_recursive_startup(self): + class convoluted(RawGreenlet): + def __init__(self): + RawGreenlet.__init__(self) + self.count = 0 + def __getattribute__(self, name): + if name == 'run' and self.count == 0: + self.count = 1 + self.switch(43) + return RawGreenlet.__getattribute__(self, name) + def run(self, value): + while True: + self.parent.switch(value) + g = convoluted() + self.assertEqual(g.switch(42), 43) + # Exits the running greenlet, otherwise it leaks + # XXX: We should be able to automatically fix this + #g.throw(greenlet.GreenletExit) + #del g + self.expect_greenlet_leak = True + + def test_threaded_updatecurrent(self): + # released when main thread should execute + lock1 = threading.Lock() + lock1.acquire() + # released when another thread should execute + lock2 = threading.Lock() + lock2.acquire() + class finalized(object): + def __del__(self): + # happens while in green_updatecurrent() in main greenlet + # should be very careful not to accidentally call it again + # at the same time we must make sure another thread executes + lock2.release() + lock1.acquire() + # now ts_current belongs to another thread + def deallocator(): + greenlet.getcurrent().parent.switch() + def fthread(): + lock2.acquire() + greenlet.getcurrent() + del g[0] + lock1.release() + lock2.acquire() + greenlet.getcurrent() + lock1.release() + main = greenlet.getcurrent() + g = [RawGreenlet(deallocator)] + g[0].bomb = finalized() + g[0].switch() + t = threading.Thread(target=fthread) + t.start() + # let another thread grab ts_current and deallocate g[0] + lock2.release() + lock1.acquire() + # this is the corner stone + # getcurrent() will notice that ts_current belongs to another thread + # and start the update process, which would notice that g[0] should + # be deallocated, and that will execute an object's finalizer. Now, + # that object will let another thread run so it can grab ts_current + # again, which would likely crash the interpreter if there's no + # check for this case at the end of green_updatecurrent(). This test + # passes if getcurrent() returns correct result, but it's likely + # to randomly crash if it's not anyway. + self.assertEqual(greenlet.getcurrent(), main) + # wait for another thread to complete, just in case + t.join(10) + + def test_dealloc_switch_args_not_lost(self): + seen = [] + def worker(): + # wait for the value + value = greenlet.getcurrent().parent.switch() + # delete all references to ourself + del worker[0] + initiator.parent = greenlet.getcurrent().parent + # switch to main with the value, but because + # ts_current is the last reference to us we + # return here immediately, where we resurrect ourself. 
+ try: + greenlet.getcurrent().parent.switch(value) + finally: + seen.append(greenlet.getcurrent()) + def initiator(): + return 42 # implicitly falls thru to parent + + worker = [RawGreenlet(worker)] + + worker[0].switch() # prime worker + initiator = RawGreenlet(initiator, worker[0]) + value = initiator.switch() + self.assertTrue(seen) + self.assertEqual(value, 42) + + def test_tuple_subclass(self): + # The point of this test is to see what happens when a custom + # tuple subclass is used as an object passed directly to the C + # function ``green_switch``; part of ``green_switch`` checks + # the ``len()`` of the ``args`` tuple, and that can call back + # into Python. Here, when it calls back into Python, we + # recursively enter ``green_switch`` again. + + # This test is really only relevant on Python 2. The builtin + # `apply` function directly passes the given args tuple object + # to the underlying function, whereas the Python 3 version + # unpacks and repacks into an actual tuple. This could still + # happen using the C API on Python 3 though. We should write a + # builtin version of apply() ourself. + def _apply(func, a, k): + func(*a, **k) + + class mytuple(tuple): + def __len__(self): + greenlet.getcurrent().switch() + return tuple.__len__(self) + args = mytuple() + kwargs = dict(a=42) + def switchapply(): + _apply(greenlet.getcurrent().parent.switch, args, kwargs) + g = RawGreenlet(switchapply) + self.assertEqual(g.switch(), kwargs) + + def test_abstract_subclasses(self): + AbstractSubclass = ABCMeta( + 'AbstractSubclass', + (RawGreenlet,), + {'run': abstractmethod(lambda self: None)}) + + class BadSubclass(AbstractSubclass): + pass + + class GoodSubclass(AbstractSubclass): + def run(self): + pass + + GoodSubclass() # should not raise + self.assertRaises(TypeError, BadSubclass) + + def test_implicit_parent_with_threads(self): + if not gc.isenabled(): + return # cannot test with disabled gc + N = gc.get_threshold()[0] + if N < 50: + return # cannot test with such a small N + def attempt(): + lock1 = threading.Lock() + lock1.acquire() + lock2 = threading.Lock() + lock2.acquire() + recycled = [False] + def another_thread(): + lock1.acquire() # wait for gc + greenlet.getcurrent() # update ts_current + lock2.release() # release gc + t = threading.Thread(target=another_thread) + t.start() + class gc_callback(object): + def __del__(self): + lock1.release() + lock2.acquire() + recycled[0] = True + class garbage(object): + def __init__(self): + self.cycle = self + self.callback = gc_callback() + l = [] + x = range(N*2) + current = greenlet.getcurrent() + g = garbage() + for _ in x: + g = None # lose reference to garbage + if recycled[0]: + # gc callback called prematurely + t.join(10) + return False + last = RawGreenlet() + if recycled[0]: + break # yes! gc called in green_new + l.append(last) # increase allocation counter + else: + # gc callback not called when expected + gc.collect() + if recycled[0]: + t.join(10) + return False + self.assertEqual(last.parent, current) + for g in l: + self.assertEqual(g.parent, current) + return True + for _ in range(5): + if attempt(): + break + + def test_issue_245_reference_counting_subclass_no_threads(self): + # https://github.com/python-greenlet/greenlet/issues/245 + # Before the fix, this crashed pretty reliably on + # Python 3.10, at least on macOS; but much less reliably on other + # interpreters (memory layout must have changed). + # The threaded test crashed more reliably on more interpreters. 
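+        # A note on the measurement used below (standard CPython behavior,
+        # not greenlet-specific): ``sys.getrefcount(x)`` reports one more
+        # reference than the code itself holds, because the argument to the
+        # call is a temporary reference, e.g.::
+        #
+        #     x = object()
+        #     sys.getrefcount(x)  # 2: ``x`` plus the call's own argument
+        #
+        # The assertions below only compare before/after counts, so that
+        # constant bias cancels out.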
+        from greenlet import getcurrent
+        from greenlet import GreenletExit
+
+        class Greenlet(RawGreenlet):
+            pass
+
+        initial_refs = sys.getrefcount(Greenlet)
+        # This has to be an instance variable because
+        # Python 2 raises a SyntaxError if we delete a local
+        # variable referenced in an inner scope.
+        self.glets = [] # pylint:disable=attribute-defined-outside-init
+
+        def greenlet_main():
+            try:
+                getcurrent().parent.switch()
+            except GreenletExit:
+                self.glets.append(getcurrent())
+
+        # Before the fix, this is where the crash happened.
+        for _ in range(10):
+            Greenlet(greenlet_main).switch()
+
+        del self.glets
+        self.assertEqual(sys.getrefcount(Greenlet), initial_refs)
+
+    @unittest.skipIf(
+        PY313 and RUNNING_ON_MANYLINUX,
+        "The manylinux images appear to hang on this test on 3.13rc2"
+        # Or perhaps I just got tired of waiting for the 450s timeout.
+        # Still, it shouldn't take anywhere near that long. Does not reproduce in
+        # Ubuntu images, on macOS or Windows.
+    )
+    def test_issue_245_reference_counting_subclass_threads(self):
+        # https://github.com/python-greenlet/greenlet/issues/245
+        from threading import Thread
+        from threading import Event
+
+        from greenlet import getcurrent
+
+        class MyGreenlet(RawGreenlet):
+            pass
+
+        glets = []
+        ref_cleared = Event()
+
+        def greenlet_main():
+            getcurrent().parent.switch()
+
+        def thread_main(greenlet_running_event):
+            mine = MyGreenlet(greenlet_main)
+            glets.append(mine)
+            # The greenlets being deleted must be active
+            mine.switch()
+            # Don't keep any reference to it in this thread
+            del mine
+            # Let main know we published our greenlet.
+            greenlet_running_event.set()
+            # Wait for main to let us know the references are
+            # gone and the greenlet objects are no longer reachable
+            ref_cleared.wait(10)
+            # The creating thread must call getcurrent() (or a few other
+            # greenlet APIs) because that's when the thread-local list of dead
+            # greenlets gets cleared.
+            getcurrent()
+
+        # We start with 3 references to the subclass:
+        # - This module
+        # - Its __mro__
+        # - The __subclasses__ attribute of greenlet
+        # - (If we call gc.get_referents(), we find four entries, including
+        #   some other tuple ``(greenlet)`` that I'm not sure about but must be part
+        #   of the machinery.)
+        #
+        # On Python 3.10 it's often enough to just run 3 threads; on Python 2.7,
+        # more threads are needed, and the results are still
+        # non-deterministic. Presumably the memory layouts are different.
+        initial_refs = sys.getrefcount(MyGreenlet)
+        thread_ready_events = []
+        for _ in range(
+                initial_refs + 45
+        ):
+            event = Event()
+            thread = Thread(target=thread_main, args=(event,))
+            thread_ready_events.append(event)
+            thread.start()
+
+
+        for done_event in thread_ready_events:
+            done_event.wait(10)
+
+
+        del glets[:]
+        ref_cleared.set()
+        # Let any other thread run; it will crash the interpreter
+        # if not fixed (or silently corrupt memory and we possibly crash
+        # later).
+        self.wait_for_pending_cleanups()
+        self.assertEqual(sys.getrefcount(MyGreenlet), initial_refs)
+
+    def test_falling_off_end_switches_to_unstarted_parent_raises_error(self):
+        def no_args():
+            return 13
+
+        parent_never_started = RawGreenlet(no_args)
+
+        def leaf():
+            return 42
+
+        child = RawGreenlet(leaf, parent_never_started)
+
+        # Because the run function takes no arguments (but will be passed
+        # the child's return value)
+        with self.assertRaises(TypeError):
+            child.switch()
+
+    def test_falling_off_end_switches_to_unstarted_parent_works(self):
+        def one_arg(x):
+            return (x, 24)
+
+        parent_never_started = RawGreenlet(one_arg)
+
+        def leaf():
+            return 42
+
+        child = RawGreenlet(leaf, parent_never_started)
+
+        result = child.switch()
+        self.assertEqual(result, (42, 24))
+
+    def test_switch_to_dead_greenlet_with_unstarted_perverse_parent(self):
+        class Parent(RawGreenlet):
+            def __getattribute__(self, name):
+                if name == 'run':
+                    raise SomeError
+
+
+        parent_never_started = Parent()
+        seen = []
+        child = RawGreenlet(lambda: seen.append(42), parent_never_started)
+        # Because we automatically start the parent when the child is
+        # finished
+        with self.assertRaises(SomeError):
+            child.switch()
+
+        self.assertEqual(seen, [42])
+
+        with self.assertRaises(SomeError):
+            child.switch()
+        self.assertEqual(seen, [42])
+
+    def test_switch_to_dead_greenlet_reparent(self):
+        seen = []
+        parent_never_started = RawGreenlet(lambda: seen.append(24))
+        child = RawGreenlet(lambda: seen.append(42))
+
+        child.switch()
+        self.assertEqual(seen, [42])
+
+        child.parent = parent_never_started
+        # This actually is the same as switching to the parent.
+        result = child.switch()
+        self.assertIsNone(result)
+        self.assertEqual(seen, [42, 24])
+
+    def test_can_access_f_back_of_suspended_greenlet(self):
+        # This tests our frame rewriting to work around Python 3.12+ having
+        # some interpreter frames on the C stack. It will crash in the absence
+        # of that logic.
+        main = greenlet.getcurrent()
+
+        def outer():
+            inner()
+
+        def inner():
+            main.switch(sys._getframe(0))
+
+        hub = RawGreenlet(outer)
+        # start it
+        hub.switch()
+
+        # start another greenlet to make sure we aren't relying on
+        # anything in `hub` still being on the C stack
+        unrelated = RawGreenlet(lambda: None)
+        unrelated.switch()
+
+        # now it is suspended
+        self.assertIsNotNone(hub.gr_frame)
+        self.assertEqual(hub.gr_frame.f_code.co_name, "inner")
+        self.assertIsNotNone(hub.gr_frame.f_back)
+        self.assertEqual(hub.gr_frame.f_back.f_code.co_name, "outer")
+        # The next line is what would crash
+        self.assertIsNone(hub.gr_frame.f_back.f_back)
+
+    def test_get_stack_with_nested_c_calls(self):
+        from functools import partial
+        from . import _test_extension_cpp
+
+        def recurse(v):
+            if v > 0:
+                return v * _test_extension_cpp.test_call(partial(recurse, v - 1))
+            return greenlet.getcurrent().parent.switch()
+
+        gr = RawGreenlet(recurse)
+        gr.switch(5)
+        frame = gr.gr_frame
+        for i in range(5):
+            self.assertEqual(frame.f_locals["v"], i)
+            frame = frame.f_back
+        self.assertEqual(frame.f_locals["v"], 5)
+        self.assertIsNone(frame.f_back)
+        self.assertEqual(gr.switch(10), 1200) # 1200 = 5! * 10
+
+    def test_frames_always_exposed(self):
+        # On Python 3.12 this will crash if we don't set the
+        # gr_frames_always_exposed attribute.
More background: + # https://github.com/python-greenlet/greenlet/issues/388 + main = greenlet.getcurrent() + + def outer(): + inner(sys._getframe(0)) + + def inner(frame): + main.switch(frame) + + gr = RawGreenlet(outer) + frame = gr.switch() + + # Do something else to clobber the part of the C stack used by `gr`, + # so we can't skate by on "it just happened to still be there" + unrelated = RawGreenlet(lambda: None) + unrelated.switch() + + self.assertEqual(frame.f_code.co_name, "outer") + # The next line crashes on 3.12 if we haven't exposed the frames. + self.assertIsNone(frame.f_back) + + +class TestGreenletSetParentErrors(TestCase): + def test_threaded_reparent(self): + data = {} + created_event = threading.Event() + done_event = threading.Event() + + def run(): + data['g'] = RawGreenlet(lambda: None) + created_event.set() + done_event.wait(10) + + def blank(): + greenlet.getcurrent().parent.switch() + + thread = threading.Thread(target=run) + thread.start() + created_event.wait(10) + g = RawGreenlet(blank) + g.switch() + with self.assertRaises(ValueError) as exc: + g.parent = data['g'] + done_event.set() + thread.join(10) + + self.assertEqual(str(exc.exception), "parent cannot be on a different thread") + + def test_unexpected_reparenting(self): + another = [] + def worker(): + g = RawGreenlet(lambda: None) + another.append(g) + g.switch() + t = threading.Thread(target=worker) + t.start() + t.join(10) + # The first time we switch (running g_initialstub(), which is + # when we look up the run attribute) we attempt to change the + # parent to one from another thread (which also happens to be + # dead). ``g_initialstub()`` should detect this and raise a + # greenlet error. + # + # EXCEPT: With the fix for #252, this is actually detected + # sooner, when setting the parent itself. Prior to that fix, + # the main greenlet from the background thread kept a valid + # value for ``run_info``, and appeared to be a valid parent + # until we actually started the greenlet. But now that it's + # cleared, this test is catching whether ``green_setparent`` + # can detect the dead thread. + # + # Further refactoring once again changes this back to a greenlet.error + # + # We need to wait for the cleanup to happen, but we're + # deliberately leaking a main greenlet here. + self.wait_for_pending_cleanups(initial_main_greenlets=self.main_greenlets_before_test + 1) + + class convoluted(RawGreenlet): + def __getattribute__(self, name): + if name == 'run': + self.parent = another[0] # pylint:disable=attribute-defined-outside-init + return RawGreenlet.__getattribute__(self, name) + g = convoluted(lambda: None) + with self.assertRaises(greenlet.error) as exc: + g.switch() + self.assertEqual(str(exc.exception), + "cannot switch to a different thread (which happens to have exited)") + del another[:] + + def test_unexpected_reparenting_thread_running(self): + # Like ``test_unexpected_reparenting``, except the background thread is + # actually still alive. 
+ another = [] + switched_to_greenlet = threading.Event() + keep_main_alive = threading.Event() + def worker(): + g = RawGreenlet(lambda: None) + another.append(g) + g.switch() + switched_to_greenlet.set() + keep_main_alive.wait(10) + class convoluted(RawGreenlet): + def __getattribute__(self, name): + if name == 'run': + self.parent = another[0] # pylint:disable=attribute-defined-outside-init + return RawGreenlet.__getattribute__(self, name) + + t = threading.Thread(target=worker) + t.start() + + switched_to_greenlet.wait(10) + try: + g = convoluted(lambda: None) + + with self.assertRaises(greenlet.error) as exc: + g.switch() + self.assertIn("Cannot switch to a different thread", str(exc.exception)) + self.assertIn("Expected", str(exc.exception)) + self.assertIn("Current", str(exc.exception)) + finally: + keep_main_alive.set() + t.join(10) + # XXX: Should handle this automatically. + del another[:] + + def test_cannot_delete_parent(self): + worker = RawGreenlet(lambda: None) + self.assertIs(worker.parent, greenlet.getcurrent()) + + with self.assertRaises(AttributeError) as exc: + del worker.parent + self.assertEqual(str(exc.exception), "can't delete attribute") + + def test_cannot_delete_parent_of_main(self): + with self.assertRaises(AttributeError) as exc: + del greenlet.getcurrent().parent + self.assertEqual(str(exc.exception), "can't delete attribute") + + + def test_main_greenlet_parent_is_none(self): + # assuming we're in a main greenlet here. + self.assertIsNone(greenlet.getcurrent().parent) + + def test_set_parent_wrong_types(self): + def bg(): + # Go back to main. + greenlet.getcurrent().parent.switch() + + def check(glet): + for p in None, 1, self, "42": + with self.assertRaises(TypeError) as exc: + glet.parent = p + + self.assertEqual( + str(exc.exception), + "GreenletChecker: Expected any type of greenlet, not " + type(p).__name__) + + # First, not running + g = RawGreenlet(bg) + self.assertFalse(g) + check(g) + + # Then when running. + g.switch() + self.assertTrue(g) + check(g) + + # Let it finish + g.switch() + + + def test_trivial_cycle(self): + glet = RawGreenlet(lambda: None) + with self.assertRaises(ValueError) as exc: + glet.parent = glet + self.assertEqual(str(exc.exception), "cyclic parent chain") + + def test_trivial_cycle_main(self): + # This used to produce a ValueError, but we catch it earlier than that now. 
+ with self.assertRaises(AttributeError) as exc: + greenlet.getcurrent().parent = greenlet.getcurrent() + self.assertEqual(str(exc.exception), "cannot set the parent of a main greenlet") + + def test_deeper_cycle(self): + g1 = RawGreenlet(lambda: None) + g2 = RawGreenlet(lambda: None) + g3 = RawGreenlet(lambda: None) + + g1.parent = g2 + g2.parent = g3 + with self.assertRaises(ValueError) as exc: + g3.parent = g1 + self.assertEqual(str(exc.exception), "cyclic parent chain") + + +class TestRepr(TestCase): + + def assertEndsWith(self, got, suffix): + self.assertTrue(got.endswith(suffix), (got, suffix)) + + def test_main_while_running(self): + r = repr(greenlet.getcurrent()) + self.assertEndsWith(r, " current active started main>") + + def test_main_in_background(self): + main = greenlet.getcurrent() + def run(): + return repr(main) + + g = RawGreenlet(run) + r = g.switch() + self.assertEndsWith(r, ' suspended active started main>') + + def test_initial(self): + r = repr(RawGreenlet()) + self.assertEndsWith(r, ' pending>') + + def test_main_from_other_thread(self): + main = greenlet.getcurrent() + + class T(threading.Thread): + original_main = thread_main = None + main_glet = None + def run(self): + self.original_main = repr(main) + self.main_glet = greenlet.getcurrent() + self.thread_main = repr(self.main_glet) + + t = T() + t.start() + t.join(10) + + self.assertEndsWith(t.original_main, ' suspended active started main>') + self.assertEndsWith(t.thread_main, ' current active started main>') + # give the machinery time to notice the death of the thread, + # and clean it up. Note that we don't use + # ``expect_greenlet_leak`` or wait_for_pending_cleanups, + # because at this point we know we have an extra greenlet + # still reachable. + for _ in range(3): + time.sleep(0.001) + + # In the past, main greenlets, even from dead threads, never + # really appear dead. We have fixed that, and we also report + # that the thread is dead in the repr. (Do this multiple times + # to make sure that we don't self-modify and forget our state + # in the C++ code). + for _ in range(3): + self.assertTrue(t.main_glet.dead) + r = repr(t.main_glet) + self.assertEndsWith(r, ' (thread exited) dead>') + + def test_dead(self): + g = RawGreenlet(lambda: None) + g.switch() + self.assertEndsWith(repr(g), ' dead>') + self.assertNotIn('suspended', repr(g)) + self.assertNotIn('started', repr(g)) + self.assertNotIn('active', repr(g)) + + def test_formatting_produces_native_str(self): + # https://github.com/python-greenlet/greenlet/issues/218 + # %s formatting on Python 2 was producing unicode, not str. + + g_dead = RawGreenlet(lambda: None) + g_not_started = RawGreenlet(lambda: None) + g_cur = greenlet.getcurrent() + + for g in g_dead, g_not_started, g_cur: + + self.assertIsInstance( + '%s' % (g,), + str + ) + self.assertIsInstance( + '%r' % (g,), + str, + ) + + +class TestMainGreenlet(TestCase): + # Tests some implementation details, and relies on some + # implementation details. 
+ + def _check_current_is_main(self): + # implementation detail + assert 'main' in repr(greenlet.getcurrent()) + + t = type(greenlet.getcurrent()) + assert 'main' not in repr(t) + return t + + def test_main_greenlet_type_can_be_subclassed(self): + main_type = self._check_current_is_main() + subclass = type('subclass', (main_type,), {}) + self.assertIsNotNone(subclass) + + def test_main_greenlet_is_greenlet(self): + self._check_current_is_main() + self.assertIsInstance(greenlet.getcurrent(), RawGreenlet) + + + +class TestBrokenGreenlets(TestCase): + # Tests for things that used to, or still do, terminate the interpreter. + # This often means doing unsavory things. + + def test_failed_to_initialstub(self): + def func(): + raise AssertionError("Never get here") + + + g = greenlet._greenlet.UnswitchableGreenlet(func) + g.force_switch_error = True + + with self.assertRaisesRegex(SystemError, + "Failed to switch stacks into a greenlet for the first time."): + g.switch() + + def test_failed_to_switch_into_running(self): + runs = [] + def func(): + runs.append(1) + greenlet.getcurrent().parent.switch() + runs.append(2) + greenlet.getcurrent().parent.switch() + runs.append(3) # pragma: no cover + + g = greenlet._greenlet.UnswitchableGreenlet(func) + g.switch() + self.assertEqual(runs, [1]) + g.switch() + self.assertEqual(runs, [1, 2]) + g.force_switch_error = True + + with self.assertRaisesRegex(SystemError, + "Failed to switch stacks into a running greenlet."): + g.switch() + + # If we stopped here, we would fail the leakcheck, because we've left + # the ``inner_bootstrap()`` C frame and its descendents hanging around, + # which have a bunch of Python references. They'll never get cleaned up + # if we don't let the greenlet finish. + g.force_switch_error = False + g.switch() + self.assertEqual(runs, [1, 2, 3]) + + def test_failed_to_slp_switch_into_running(self): + ex = self.assertScriptRaises('fail_slp_switch.py') + + self.assertIn('fail_slp_switch is running', ex.output) + self.assertIn(ex.returncode, self.get_expected_returncodes_for_aborted_process()) + + def test_reentrant_switch_two_greenlets(self): + # Before we started capturing the arguments in g_switch_finish, this could crash. + output = self.run_script('fail_switch_two_greenlets.py') + self.assertIn('In g1_run', output) + self.assertIn('TRACE', output) + self.assertIn('LEAVE TRACE', output) + self.assertIn('Falling off end of main', output) + self.assertIn('Falling off end of g1_run', output) + self.assertIn('Falling off end of g2', output) + + def test_reentrant_switch_three_greenlets(self): + # On debug builds of greenlet, this used to crash with an assertion error; + # on non-debug versions, it ran fine (which it should not do!). + # Now it always crashes correctly with a TypeError + ex = self.assertScriptRaises('fail_switch_three_greenlets.py', exitcodes=(1,)) + + self.assertIn('TypeError', ex.output) + self.assertIn('positional arguments', ex.output) + + def test_reentrant_switch_three_greenlets2(self): + # This actually passed on debug and non-debug builds. It + # should probably have been triggering some debug assertions + # but it didn't. + # + # I think the fixes for the above test also kicked in here. 
+        output = self.run_script('fail_switch_three_greenlets2.py')
+        self.assertIn(
+            "RESULTS: [('trace', 'switch'), "
+            "('trace', 'switch'), ('g2 arg', 'g2 from tracefunc'), "
+            "('trace', 'switch'), ('main g1', 'from g2_run'), ('trace', 'switch'), "
+            "('g1 arg', 'g1 from main'), ('trace', 'switch'), ('main g2', 'from g1_run'), "
+            "('trace', 'switch'), ('g1 from parent', 'g1 from main 2'), ('trace', 'switch'), "
+            "('main g1.2', 'g1 done'), ('trace', 'switch'), ('g2 from parent', ()), "
+            "('trace', 'switch'), ('main g2.2', 'g2 done')]",
+            output
+        )
+
+    def test_reentrant_switch_GreenletAlreadyStartedInPython(self):
+        output = self.run_script('fail_initialstub_already_started.py')
+
+        self.assertIn(
+            "RESULTS: ['Begin C', 'Switch to b from B.__getattribute__ in C', "
+            "('Begin B', ()), '_B_run switching to main', ('main from c', 'From B'), "
+            "'B.__getattribute__ back from main in C', ('Begin A', (None,)), "
+            "('A dead?', True, 'B dead?', True, 'C dead?', False), "
+            "'C done', ('main from c.2', None)]",
+            output
+        )
+
+    def test_reentrant_switch_run_callable_has_del(self):
+        output = self.run_script('fail_clearing_run_switches.py')
+        self.assertIn(
+            "RESULTS ["
+            "('G.__getattribute__', 'run'), ('RunCallable', '__del__'), "
+            "('main: g.switch()', 'from RunCallable'), ('run_func', 'enter')"
+            "]",
+            output
+        )
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/test_greenlet_trash.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_greenlet_trash.py
new file mode 100644
index 00000000..c1fc1374
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_greenlet_trash.py
@@ -0,0 +1,187 @@
+# -*- coding: utf-8 -*-
+"""
+Tests for greenlets interacting with the CPython trash can API.
+
+The CPython trash can API is not designed to be re-entered from a
+single thread. But this can happen using greenlets, if something
+during the object deallocation process switches greenlets, and this second
+greenlet then causes the trash can to get entered again. Here, we do this
+very explicitly, but in other cases (like gevent) it could be arbitrarily more
+complicated: for example, a weakref callback might try to acquire a lock that's
+already held by another greenlet; that would allow a greenlet switch to occur.
+
+See https://github.com/gevent/gevent/issues/1909
+
+This test is fragile and relies on details of the CPython
+implementation (like most of the rest of this package):
+
+    - We enter the trashcan and deferred deallocation after
+      ``_PyTrash_UNWIND_LEVEL`` calls. This constant, defined in
+      CPython's object.c, is generally 50. That's basically how many objects are required to
+      get us into the deferred deallocation situation.
+
+    - The test fails by hitting an ``assert()`` in object.c; if the
+      build doesn't enable assert, we don't catch this.
+
+    - If the test fails in that way, the interpreter crashes.
+"""
+from __future__ import print_function, absolute_import, division
+
+import unittest
+
+
+class TestTrashCanReEnter(unittest.TestCase):
+
+    def test_it(self):
+        try:
+            # pylint:disable-next=no-name-in-module
+            from greenlet._greenlet import get_tstate_trash_delete_nesting # pylint:disable=unused-import
+        except ImportError:
+            import sys
+            # Python 3.13 no longer has "trash delete nesting" (it has "delete later" instead)
+            assert sys.version_info[:2] >= (3, 13)
+            self.skipTest("get_tstate_trash_delete_nesting is not available.")
+
+        # Try several times to trigger it, because it isn't 100%
+        # reliable.
+        for _ in range(10):
+            self.check_it()
+
+    def check_it(self): # pylint:disable=too-many-statements
+        import greenlet
+        from greenlet._greenlet import get_tstate_trash_delete_nesting # pylint:disable=no-name-in-module
+        main = greenlet.getcurrent()
+
+        assert get_tstate_trash_delete_nesting() == 0
+
+        # We expect to be in deferred deallocation after this many
+        # deallocations have occurred. TODO: I wish we had a better way to do
+        # this --- that was before get_tstate_trash_delete_nesting; perhaps
+        # we can use that API to do better?
+        TRASH_UNWIND_LEVEL = 50
+        # How many objects to put in a container; it's the container that
+        # queues objects for deferred deallocation.
+        OBJECTS_PER_CONTAINER = 500
+
+        class Dealloc: # define the class here because we alter class variables each time we run.
+            """
+            An object with a ``__del__`` method. When it starts getting deallocated
+            from a deferred trash can run, it switches greenlets, allocates more objects
+            which then also go in the trash can. If we don't save state appropriately,
+            nesting gets out of order and we can crash the interpreter.
+            """
+
+            #: Has our deallocation actually run and switched greenlets?
+            #: When it does, this will be set to the current greenlet. This should
+            #: be happening in the main greenlet, so we check that down below.
+            SPAWNED = False
+
+            #: Has the background greenlet run?
+            BG_RAN = False
+
+            BG_GLET = None
+
+            #: How many of these things have ever been allocated.
+            CREATED = 0
+
+            #: How many of these things have ever been deallocated.
+            DESTROYED = 0
+
+            #: How many were destroyed not in the main greenlet. There should always
+            #: be some.
+            #: If the test is broken or things change in the trashcan implementation,
+            #: this may not be correct.
+            DESTROYED_BG = 0
+
+            def __init__(self, sequence_number):
+                """
+                :param sequence_number: The ordinal of this object during
+                   one particular creation run. This is used to detect (guess, really)
+                   when we have entered the trash can's deferred deallocation.
+                """
+                self.i = sequence_number
+                Dealloc.CREATED += 1
+
+            def __del__(self):
+                if self.i == TRASH_UNWIND_LEVEL and not self.SPAWNED:
+                    Dealloc.SPAWNED = greenlet.getcurrent()
+                    other = Dealloc.BG_GLET = greenlet.greenlet(background_greenlet)
+                    x = other.switch()
+                    assert x == 42
+                    # It's important that we don't switch back to the greenlet;
+                    # we leave it hanging there in an incomplete state. But we don't let it
+                    # get collected, either. If we complete it now, while we're still
+                    # in the scope of the initial trash can, things work out and we
+                    # don't see the problem. We need this greenlet to complete
+                    # at some point in the future, after we've exited this trash can invocation.
+                    del other
+                elif self.i == 40 and greenlet.getcurrent() is not main:
+                    Dealloc.BG_RAN = True
+                    try:
+                        main.switch(42)
+                    except greenlet.GreenletExit as ex:
+                        # We expect this; all references to us go away
+                        # while we're still running, and we need to finish deleting
+                        # ourself.
+                        Dealloc.BG_RAN = type(ex)
+                        del ex
+
+                # Record the fact that we're dead last of all. This ensures that
+                # we actually get returned too.
+                Dealloc.DESTROYED += 1
+                if greenlet.getcurrent() is not main:
+                    Dealloc.DESTROYED_BG += 1
+
+
+        def background_greenlet():
+            # We go through a second function, instead of
+            # directly calling ``make_some()``, so that we have complete
+            # control over when these objects are destroyed: we need them
+            # to be destroyed in the context of the background greenlet.
+            t = make_some()
+            del t # Trigger deletion.
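+        # A sketch of the mechanism being exercised (illustrative comment
+        # only, not executed): deallocating a deeply nested container
+        # recurses in C, and once the recursion passes
+        # ``_PyTrash_UNWIND_LEVEL`` (50) frames, CPython stops recursing and
+        # queues the remaining objects for deferred deallocation::
+        #
+        #     t = ()
+        #     for i in range(500):
+        #         t = (object(), t)  # 500 levels of nesting
+        #     del t  # deallocations past ~50 levels deep are deferred
+        #
+        # ``make_some()`` below builds exactly such a container, out of
+        # ``Dealloc`` instances whose finalizers switch greenlets.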
+
+        def make_some():
+            t = ()
+            i = OBJECTS_PER_CONTAINER
+            while i:
+                # Nest the tuples; it's the recursion that gets us
+                # into trash.
+                t = (Dealloc(i), t)
+                i -= 1
+            return t
+
+
+        some = make_some()
+        self.assertEqual(Dealloc.CREATED, OBJECTS_PER_CONTAINER)
+        self.assertEqual(Dealloc.DESTROYED, 0)
+
+        # If we're going to crash, it should be on the following line.
+        # We only crash if ``assert()`` is enabled, of course.
+        del some
+
+        # For non-debug builds of CPython, we won't crash. The best we can do is check
+        # the nesting level explicitly.
+        self.assertEqual(0, get_tstate_trash_delete_nesting())
+
+        # Discard this, raising GreenletExit into where it is waiting.
+        Dealloc.BG_GLET = None
+        # The nesting level is still the same.
+        self.assertEqual(0, get_tstate_trash_delete_nesting())
+
+        # We definitely cleaned some up in the background.
+        self.assertGreater(Dealloc.DESTROYED_BG, 0)
+
+        # Make sure all the cleanups happened.
+        self.assertIs(Dealloc.SPAWNED, main)
+        self.assertTrue(Dealloc.BG_RAN)
+        self.assertEqual(Dealloc.BG_RAN, greenlet.GreenletExit)
+        self.assertEqual(Dealloc.CREATED, Dealloc.DESTROYED)
+        self.assertEqual(Dealloc.CREATED, OBJECTS_PER_CONTAINER * 2)
+
+        import gc
+        gc.collect()
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/test_leaks.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_leaks.py
new file mode 100644
index 00000000..ed1fa717
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_leaks.py
@@ -0,0 +1,443 @@
+# -*- coding: utf-8 -*-
+"""
+Testing scenarios that may have leaked.
+"""
+from __future__ import print_function, absolute_import, division
+
+import sys
+import gc
+
+import time
+import weakref
+import threading
+
+
+import greenlet
+from . import TestCase
+from .leakcheck import fails_leakcheck
+from .leakcheck import ignores_leakcheck
+from .leakcheck import RUNNING_ON_MANYLINUX
+
+# pylint:disable=protected-access
+
+assert greenlet.GREENLET_USE_GC # Option to disable this was removed in 1.0
+
+class HasFinalizerTracksInstances(object):
+    EXTANT_INSTANCES = set()
+    def __init__(self, msg):
+        self.msg = sys.intern(msg)
+        self.EXTANT_INSTANCES.add(id(self))
+    def __del__(self):
+        self.EXTANT_INSTANCES.remove(id(self))
+    def __repr__(self):
+        return "<HasFinalizerTracksInstances at 0x%x %r>" % (
+            id(self), self.msg
+        )
+    @classmethod
+    def reset(cls):
+        cls.EXTANT_INSTANCES.clear()
+
+
+class TestLeaks(TestCase):
+
+    def test_arg_refs(self):
+        args = ('a', 'b', 'c')
+        refcount_before = sys.getrefcount(args)
+        # pylint:disable=unnecessary-lambda
+        g = greenlet.greenlet(
+            lambda *args: greenlet.getcurrent().parent.switch(*args))
+        for _ in range(100):
+            g.switch(*args)
+        self.assertEqual(sys.getrefcount(args), refcount_before)
+
+    def test_kwarg_refs(self):
+        kwargs = {}
+        # pylint:disable=unnecessary-lambda
+        g = greenlet.greenlet(
+            lambda **kwargs: greenlet.getcurrent().parent.switch(**kwargs))
+        for _ in range(100):
+            g.switch(**kwargs)
+        self.assertEqual(sys.getrefcount(kwargs), 2)
+
+
+    @staticmethod
+    def __recycle_threads():
+        # By introducing a thread that does sleep we allow other threads,
+        # that have triggered their __block condition, but did not have a
+        # chance to deallocate their thread state yet, to finally do so.
+        # The way it works is by requiring a GIL switch (different thread),
+        # which does a GIL release (sleep), which might do a GIL switch
+        # to finished threads and allow them to clean up.
+ def worker(): + time.sleep(0.001) + t = threading.Thread(target=worker) + t.start() + time.sleep(0.001) + t.join(10) + + def test_threaded_leak(self): + gg = [] + def worker(): + # only main greenlet present + gg.append(weakref.ref(greenlet.getcurrent())) + for _ in range(2): + t = threading.Thread(target=worker) + t.start() + t.join(10) + del t + greenlet.getcurrent() # update ts_current + self.__recycle_threads() + greenlet.getcurrent() # update ts_current + gc.collect() + greenlet.getcurrent() # update ts_current + for g in gg: + self.assertIsNone(g()) + + def test_threaded_adv_leak(self): + gg = [] + def worker(): + # main and additional *finished* greenlets + ll = greenlet.getcurrent().ll = [] + def additional(): + ll.append(greenlet.getcurrent()) + for _ in range(2): + greenlet.greenlet(additional).switch() + gg.append(weakref.ref(greenlet.getcurrent())) + for _ in range(2): + t = threading.Thread(target=worker) + t.start() + t.join(10) + del t + greenlet.getcurrent() # update ts_current + self.__recycle_threads() + greenlet.getcurrent() # update ts_current + gc.collect() + greenlet.getcurrent() # update ts_current + for g in gg: + self.assertIsNone(g()) + + def assertClocksUsed(self): + used = greenlet._greenlet.get_clocks_used_doing_optional_cleanup() + self.assertGreaterEqual(used, 0) + # we don't lose the value + greenlet._greenlet.enable_optional_cleanup(True) + used2 = greenlet._greenlet.get_clocks_used_doing_optional_cleanup() + self.assertEqual(used, used2) + self.assertGreater(greenlet._greenlet.CLOCKS_PER_SEC, 1) + + def _check_issue251(self, + manually_collect_background=True, + explicit_reference_to_switch=False): + # See https://github.com/python-greenlet/greenlet/issues/251 + # Killing a greenlet (probably not the main one) + # in one thread from another thread would + # result in leaking a list (the ts_delkey list). + # We no longer use lists to hold that stuff, though. + + # For the test to be valid, even empty lists have to be tracked by the + # GC + + assert gc.is_tracked([]) + HasFinalizerTracksInstances.reset() + greenlet.getcurrent() + greenlets_before = self.count_objects(greenlet.greenlet, exact_kind=False) + + background_glet_running = threading.Event() + background_glet_killed = threading.Event() + background_greenlets = [] + + # XXX: Switching this to a greenlet subclass that overrides + # run results in all callers failing the leaktest; that + # greenlet instance is leaked. There's a bound method for + # run() living on the stack of the greenlet in g_initialstub, + # and since we don't manually switch back to the background + # greenlet to let it "fall off the end" and exit the + # g_initialstub function, it never gets cleaned up. Making the + # garbage collector aware of this bound method (making it an + # attribute of the greenlet structure and traversing into it) + # doesn't help, for some reason. + def background_greenlet(): + # Throw control back to the main greenlet. 
+ jd = HasFinalizerTracksInstances("DELETING STACK OBJECT") + greenlet._greenlet.set_thread_local( + 'test_leaks_key', + HasFinalizerTracksInstances("DELETING THREAD STATE")) + # Explicitly keeping 'switch' in a local variable + # breaks this test in all versions + if explicit_reference_to_switch: + s = greenlet.getcurrent().parent.switch + s([jd]) + else: + greenlet.getcurrent().parent.switch([jd]) + + bg_main_wrefs = [] + + def background_thread(): + glet = greenlet.greenlet(background_greenlet) + bg_main_wrefs.append(weakref.ref(glet.parent)) + + background_greenlets.append(glet) + glet.switch() # Be sure it's active. + # Control is ours again. + del glet # Delete one reference from the thread it runs in. + background_glet_running.set() + background_glet_killed.wait(10) + + # To trigger the background collection of the dead + # greenlet, thus clearing out the contents of the list, we + # need to run some APIs. See issue 252. + if manually_collect_background: + greenlet.getcurrent() + + + t = threading.Thread(target=background_thread) + t.start() + background_glet_running.wait(10) + greenlet.getcurrent() + lists_before = self.count_objects(list, exact_kind=True) + + assert len(background_greenlets) == 1 + self.assertFalse(background_greenlets[0].dead) + # Delete the last reference to the background greenlet + # from a different thread. This puts it in the background thread's + # ts_delkey list. + del background_greenlets[:] + background_glet_killed.set() + + # Now wait for the background thread to die. + t.join(10) + del t + # As part of the fix for 252, we need to cycle the ceval.c + # interpreter loop to be sure it has had a chance to process + # the pending call. + self.wait_for_pending_cleanups() + + lists_after = self.count_objects(list, exact_kind=True) + greenlets_after = self.count_objects(greenlet.greenlet, exact_kind=False) + + # On 2.7, we observe that lists_after is smaller than + # lists_before. No idea what lists got cleaned up. All the + # Python 3 versions match exactly. + self.assertLessEqual(lists_after, lists_before) + # On versions after 3.6, we've successfully cleaned up the + # greenlet references thanks to the internal "vectorcall" + # protocol; prior to that, there is a reference path through + # the ``greenlet.switch`` method still on the stack that we + # can't reach to clean up. The C code goes through terrific + # lengths to clean that up. + if not explicit_reference_to_switch \ + and greenlet._greenlet.get_clocks_used_doing_optional_cleanup() is not None: + # If cleanup was disabled, though, we may not find it. + self.assertEqual(greenlets_after, greenlets_before) + if manually_collect_background: + # TODO: Figure out how to make this work! + # The one on the stack is still leaking somehow + # in the non-manually-collect state. + self.assertEqual(HasFinalizerTracksInstances.EXTANT_INSTANCES, set()) + else: + # The explicit reference prevents us from collecting it + # and it isn't always found by the GC either for some + # reason. The entire frame is leaked somehow, on some + # platforms (e.g., MacPorts builds of Python (all + # versions!)), but not on other platforms (the linux and + # windows builds on GitHub actions and Appveyor). So we'd + # like to write a test that proves that the main greenlet + # sticks around, and we can on my machine (macOS 11.6, + # MacPorts builds of everything) but we can't write that + # same test on other platforms. However, hopefully iteration + # done by leakcheck will find it. 
+            pass
+
+        if greenlet._greenlet.get_clocks_used_doing_optional_cleanup() is not None:
+            self.assertClocksUsed()
+
+    def test_issue251_killing_cross_thread_leaks_list(self):
+        self._check_issue251()
+
+    def test_issue251_with_cleanup_disabled(self):
+        greenlet._greenlet.enable_optional_cleanup(False)
+        try:
+            self._check_issue251()
+        finally:
+            greenlet._greenlet.enable_optional_cleanup(True)
+
+    @fails_leakcheck
+    def test_issue251_issue252_need_to_collect_in_background(self):
+        # Between greenlet 1.1.2 and the next version, this was still
+        # failing because the leak of the list still exists when we
+        # don't call a greenlet API before exiting the thread. The
+        # proximate cause is that neither of the two greenlets from
+        # the background thread are actually being destroyed, even
+        # though the GC is in fact visiting both objects. It's not
+        # clear where that leak is. For some reason the thread-local
+        # dict holding it isn't being cleaned up.
+        #
+        # The leak, I think, is in the CPython internal function that
+        # calls into green_switch(). The argument tuple is still on
+        # the C stack somewhere and can't be reached? That doesn't
+        # make sense, because the tuple should be collectable when
+        # this object goes away.
+        #
+        # Note that this test sometimes spuriously passes on Linux,
+        # for some reason, but I've never seen it pass on macOS.
+        self._check_issue251(manually_collect_background=False)
+
+    @fails_leakcheck
+    def test_issue251_issue252_need_to_collect_in_background_cleanup_disabled(self):
+        self.expect_greenlet_leak = True
+        greenlet._greenlet.enable_optional_cleanup(False)
+        try:
+            self._check_issue251(manually_collect_background=False)
+        finally:
+            greenlet._greenlet.enable_optional_cleanup(True)
+
+    @fails_leakcheck
+    def test_issue251_issue252_explicit_reference_not_collectable(self):
+        self._check_issue251(
+            manually_collect_background=False,
+            explicit_reference_to_switch=True)
+
+    UNTRACK_ATTEMPTS = 100
+
+    def _only_test_some_versions(self):
+        # We're only looking for this problem specifically on 3.11,
+        # and this set of tests is relatively fragile, depending on
+        # OS and memory management details. So we want to run it on 3.11+
+        # (obviously) but not every older 3.x version in order to reduce
+        # false negatives. At the moment, those false results seem to have
+        # resolved, so we are actually running this on 3.8+.
+        assert sys.version_info[0] >= 3
+        if sys.version_info[:2] < (3, 8):
+            self.skipTest('Only observed on 3.11')
+        if RUNNING_ON_MANYLINUX:
+            self.skipTest("Slow and not worth repeating here")
+
+    @ignores_leakcheck
+    # Because we're just trying to track raw memory, not objects, and running
+    # the leakcheck makes an already slow test slower.
+    def test_untracked_memory_doesnt_increase(self):
+        # See https://github.com/gevent/gevent/issues/1924
+        # and https://github.com/python-greenlet/greenlet/issues/328
+        self._only_test_some_versions()
+        def f():
+            return 1
+
+        ITER = 10000
+        def run_it():
+            for _ in range(ITER):
+                greenlet.greenlet(f).switch()
+
+        # Establish baseline
+        for _ in range(3):
+            run_it()
+
+        # uss: (Linux, macOS, Windows): aka "Unique Set Size", this is
+        # the memory which is unique to a process and which would be
+        # freed if the process was terminated right now.
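+        # A sketch of how USS can be read, assuming psutil is available
+        # (psutil is an assumption of this aside, not an import of this
+        # file; ``get_process_uss`` on this package's TestCase is expected
+        # to boil down to something similar)::
+        #
+        #     import psutil
+        #     uss = psutil.Process().memory_full_info().uss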
+ uss_before = self.get_process_uss() + + for count in range(self.UNTRACK_ATTEMPTS): + uss_before = max(uss_before, self.get_process_uss()) + run_it() + + uss_after = self.get_process_uss() + if uss_after <= uss_before and count > 1: + break + + self.assertLessEqual(uss_after, uss_before) + + def _check_untracked_memory_thread(self, deallocate_in_thread=True): + self._only_test_some_versions() + # Like the above test, but what if there are a bunch of + # unfinished greenlets in a thread that dies? + # Does it matter if we deallocate in the thread or not? + EXIT_COUNT = [0] + + def f(): + try: + greenlet.getcurrent().parent.switch() + except greenlet.GreenletExit: + EXIT_COUNT[0] += 1 + raise + return 1 + + ITER = 10000 + def run_it(): + glets = [] + for _ in range(ITER): + # Greenlet starts, switches back to us. + # We keep a strong reference to the greenlet though so it doesn't + # get a GreenletExit exception. + g = greenlet.greenlet(f) + glets.append(g) + g.switch() + + return glets + + test = self + + class ThreadFunc: + uss_before = uss_after = 0 + glets = () + ITER = 2 + def __call__(self): + self.uss_before = test.get_process_uss() + + for _ in range(self.ITER): + self.glets += tuple(run_it()) + + for g in self.glets: + test.assertIn('suspended active', str(g)) + # Drop them. + if deallocate_in_thread: + self.glets = () + self.uss_after = test.get_process_uss() + + # Establish baseline + uss_before = uss_after = None + for count in range(self.UNTRACK_ATTEMPTS): + EXIT_COUNT[0] = 0 + thread_func = ThreadFunc() + t = threading.Thread(target=thread_func) + t.start() + t.join(30) + self.assertFalse(t.is_alive()) + + if uss_before is None: + uss_before = thread_func.uss_before + + uss_before = max(uss_before, thread_func.uss_before) + if deallocate_in_thread: + self.assertEqual(thread_func.glets, ()) + self.assertEqual(EXIT_COUNT[0], ITER * thread_func.ITER) + + del thread_func # Deallocate the greenlets; but this won't raise into them + del t + if not deallocate_in_thread: + self.assertEqual(EXIT_COUNT[0], 0) + if deallocate_in_thread: + self.wait_for_pending_cleanups() + + uss_after = self.get_process_uss() + # See if we achieve a non-growth state at some point. Break when we do. + if uss_after <= uss_before and count > 1: + break + + self.wait_for_pending_cleanups() + uss_after = self.get_process_uss() + self.assertLessEqual(uss_after, uss_before, "after attempts %d" % (count,)) + + @ignores_leakcheck + # Because we're just trying to track raw memory, not objects, and running + # the leakcheck makes an already slow test slower. + def test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_thread(self): + self._check_untracked_memory_thread(deallocate_in_thread=True) + + @ignores_leakcheck + # Because the main greenlets from the background threads do not exit in a timely fashion, + # we fail the object-based leakchecks. + def test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_main(self): + self._check_untracked_memory_thread(deallocate_in_thread=False) + +if __name__ == '__main__': + __import__('unittest').main() diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/test_stack_saved.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_stack_saved.py new file mode 100644 index 00000000..b362bf95 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_stack_saved.py @@ -0,0 +1,19 @@ +import greenlet +from . 
import TestCase + + +class Test(TestCase): + + def test_stack_saved(self): + main = greenlet.getcurrent() + self.assertEqual(main._stack_saved, 0) + + def func(): + main.switch(main._stack_saved) + + g = greenlet.greenlet(func) + x = g.switch() + self.assertGreater(x, 0) + self.assertGreater(g._stack_saved, 0) + g.switch() + self.assertEqual(g._stack_saved, 0) diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/test_throw.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_throw.py new file mode 100644 index 00000000..f4f9a140 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_throw.py @@ -0,0 +1,128 @@ +import sys + + +from greenlet import greenlet +from . import TestCase + +def switch(*args): + return greenlet.getcurrent().parent.switch(*args) + + +class ThrowTests(TestCase): + def test_class(self): + def f(): + try: + switch("ok") + except RuntimeError: + switch("ok") + return + switch("fail") + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError) + self.assertEqual(res, "ok") + + def test_val(self): + def f(): + try: + switch("ok") + except RuntimeError: + val = sys.exc_info()[1] + if str(val) == "ciao": + switch("ok") + return + switch("fail") + + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError("ciao")) + self.assertEqual(res, "ok") + + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError, "ciao") + self.assertEqual(res, "ok") + + def test_kill(self): + def f(): + switch("ok") + switch("fail") + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw() + self.assertTrue(isinstance(res, greenlet.GreenletExit)) + self.assertTrue(g.dead) + res = g.throw() # immediately eaten by the already-dead greenlet + self.assertTrue(isinstance(res, greenlet.GreenletExit)) + + def test_throw_goes_to_original_parent(self): + main = greenlet.getcurrent() + + def f1(): + try: + main.switch("f1 ready to catch") + except IndexError: + return "caught" + return "normal exit" + + def f2(): + main.switch("from f2") + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + with self.assertRaises(IndexError): + g2.throw(IndexError) + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + res = g1.switch() + self.assertEqual(res, "f1 ready to catch") + res = g2.throw(IndexError) + self.assertEqual(res, "caught") + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + res = g1.switch() + self.assertEqual(res, "f1 ready to catch") + res = g2.switch() + self.assertEqual(res, "from f2") + res = g2.throw(IndexError) + self.assertEqual(res, "caught") + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) + + def test_non_traceback_param(self): + with self.assertRaises(TypeError) as exc: + greenlet.getcurrent().throw( + Exception, + Exception(), + self + ) + self.assertEqual(str(exc.exception), + "throw() third argument must be a traceback object") + + def test_instance_of_wrong_type(self): + with self.assertRaises(TypeError) as exc: + greenlet.getcurrent().throw( + Exception(), + BaseException() + ) + + self.assertEqual(str(exc.exception), + "instance exception may not have a separate value") + + def test_not_throwable(self): + with self.assertRaises(TypeError) as exc: + greenlet.getcurrent().throw( + "abc" + ) + self.assertEqual(str(exc.exception), + "exceptions must be classes, or instances, not str") diff --git 
a/.venv/lib/python3.12/site-packages/greenlet/tests/test_tracing.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_tracing.py new file mode 100644 index 00000000..c044d4b6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_tracing.py @@ -0,0 +1,291 @@ +from __future__ import print_function +import sys +import greenlet +import unittest + +from . import TestCase +from . import PY312 + +# https://discuss.python.org/t/cpython-3-12-greenlet-and-tracing-profiling-how-to-not-crash-and-get-correct-results/33144/2 +DEBUG_BUILD_PY312 = ( + PY312 and hasattr(sys, 'gettotalrefcount'), + "Broken on debug builds of Python 3.12" +) + +class SomeError(Exception): + pass + +class GreenletTracer(object): + oldtrace = None + + def __init__(self, error_on_trace=False): + self.actions = [] + self.error_on_trace = error_on_trace + + def __call__(self, *args): + self.actions.append(args) + if self.error_on_trace: + raise SomeError + + def __enter__(self): + self.oldtrace = greenlet.settrace(self) + return self.actions + + def __exit__(self, *args): + greenlet.settrace(self.oldtrace) + + +class TestGreenletTracing(TestCase): + """ + Tests of ``greenlet.settrace()`` + """ + + def test_a_greenlet_tracing(self): + main = greenlet.getcurrent() + def dummy(): + pass + def dummyexc(): + raise SomeError() + + with GreenletTracer() as actions: + g1 = greenlet.greenlet(dummy) + g1.switch() + g2 = greenlet.greenlet(dummyexc) + self.assertRaises(SomeError, g2.switch) + + self.assertEqual(actions, [ + ('switch', (main, g1)), + ('switch', (g1, main)), + ('switch', (main, g2)), + ('throw', (g2, main)), + ]) + + def test_b_exception_disables_tracing(self): + main = greenlet.getcurrent() + def dummy(): + main.switch() + g = greenlet.greenlet(dummy) + g.switch() + with GreenletTracer(error_on_trace=True) as actions: + self.assertRaises(SomeError, g.switch) + self.assertEqual(greenlet.gettrace(), None) + + self.assertEqual(actions, [ + ('switch', (main, g)), + ]) + + def test_set_same_tracer_twice(self): + # https://github.com/python-greenlet/greenlet/issues/332 + # Our logic in asserting that the tracefunction should + # gain a reference was incorrect if the same tracefunction was set + # twice. + tracer = GreenletTracer() + with tracer: + greenlet.settrace(tracer) + + +class PythonTracer(object): + oldtrace = None + + def __init__(self): + self.actions = [] + + def __call__(self, frame, event, arg): + # Record the co_name so we have an idea what function we're in. + self.actions.append((event, frame.f_code.co_name)) + + def __enter__(self): + self.oldtrace = sys.setprofile(self) + return self.actions + + def __exit__(self, *args): + sys.setprofile(self.oldtrace) + +def tpt_callback(): + return 42 + +class TestPythonTracing(TestCase): + """ + Tests of the interaction of ``sys.settrace()`` + with greenlet facilities. + + NOTE: Most of this is probably CPython specific. 
+ """ + + maxDiff = None + + def test_trace_events_trivial(self): + with PythonTracer() as actions: + tpt_callback() + # If we use the sys.settrace instead of setprofile, we get + # this: + + # self.assertEqual(actions, [ + # ('call', 'tpt_callback'), + # ('call', '__exit__'), + # ]) + + self.assertEqual(actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + def _trace_switch(self, glet): + with PythonTracer() as actions: + glet.switch() + return actions + + def _check_trace_events_func_already_set(self, glet): + actions = self._trace_switch(glet) + self.assertEqual(actions, [ + ('return', '__enter__'), + ('c_call', '_trace_switch'), + ('call', 'run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('return', 'run'), + ('c_return', '_trace_switch'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + def test_trace_events_into_greenlet_func_already_set(self): + def run(): + return tpt_callback() + + self._check_trace_events_func_already_set(greenlet.greenlet(run)) + + def test_trace_events_into_greenlet_subclass_already_set(self): + class X(greenlet.greenlet): + def run(self): + return tpt_callback() + self._check_trace_events_func_already_set(X()) + + def _check_trace_events_from_greenlet_sets_profiler(self, g, tracer): + g.switch() + tpt_callback() + tracer.__exit__() + self.assertEqual(tracer.actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('return', 'run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + + def test_trace_events_from_greenlet_func_sets_profiler(self): + tracer = PythonTracer() + def run(): + tracer.__enter__() + return tpt_callback() + + self._check_trace_events_from_greenlet_sets_profiler(greenlet.greenlet(run), + tracer) + + def test_trace_events_from_greenlet_subclass_sets_profiler(self): + tracer = PythonTracer() + class X(greenlet.greenlet): + def run(self): + tracer.__enter__() + return tpt_callback() + + self._check_trace_events_from_greenlet_sets_profiler(X(), tracer) + + @unittest.skipIf(*DEBUG_BUILD_PY312) + def test_trace_events_multiple_greenlets_switching(self): + tracer = PythonTracer() + + g1 = None + g2 = None + + def g1_run(): + tracer.__enter__() + tpt_callback() + g2.switch() + tpt_callback() + return 42 + + def g2_run(): + tpt_callback() + tracer.__exit__() + tpt_callback() + g1.switch() + + g1 = greenlet.greenlet(g1_run) + g2 = greenlet.greenlet(g2_run) + + x = g1.switch() + self.assertEqual(x, 42) + tpt_callback() # ensure not in the trace + self.assertEqual(tracer.actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('c_call', 'g1_run'), + ('call', 'g2_run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + @unittest.skipIf(*DEBUG_BUILD_PY312) + def test_trace_events_multiple_greenlets_switching_siblings(self): + # Like the first version, but get both greenlets running first + # as "siblings" and then establish the tracing. 
+ tracer = PythonTracer() + + g1 = None + g2 = None + + def g1_run(): + greenlet.getcurrent().parent.switch() + tracer.__enter__() + tpt_callback() + g2.switch() + tpt_callback() + return 42 + + def g2_run(): + greenlet.getcurrent().parent.switch() + + tpt_callback() + tracer.__exit__() + tpt_callback() + g1.switch() + + g1 = greenlet.greenlet(g1_run) + g2 = greenlet.greenlet(g2_run) + + # Start g1 + g1.switch() + # And it immediately returns control to us. + # Start g2 + g2.switch() + # Which also returns. Now kick off the real part of the + # test. + x = g1.switch() + self.assertEqual(x, 42) + + tpt_callback() # ensure not in the trace + self.assertEqual(tracer.actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('c_call', 'g1_run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + +if __name__ == '__main__': + unittest.main() diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/test_version.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_version.py new file mode 100644 index 00000000..96c17cf1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_version.py @@ -0,0 +1,41 @@ +#! /usr/bin/env python +from __future__ import absolute_import +from __future__ import print_function + +import sys +import os +from unittest import TestCase as NonLeakingTestCase + +import greenlet + +# No reason to run this multiple times under leakchecks, +# it doesn't do anything. +class VersionTests(NonLeakingTestCase): + def test_version(self): + def find_dominating_file(name): + if os.path.exists(name): + return name + + tried = [] + here = os.path.abspath(os.path.dirname(__file__)) + for i in range(10): + up = ['..'] * i + path = [here] + up + [name] + fname = os.path.join(*path) + fname = os.path.abspath(fname) + tried.append(fname) + if os.path.exists(fname): + return fname + raise AssertionError("Could not find file " + name + "; checked " + str(tried)) + + try: + setup_py = find_dominating_file('setup.py') + except AssertionError as e: + self.skipTest("Unable to find setup.py; must be out of tree. " + str(e)) + + + invoke_setup = "%s %s --version" % (sys.executable, setup_py) + with os.popen(invoke_setup) as f: + sversion = f.read().strip() + + self.assertEqual(sversion, greenlet.__version__) diff --git a/.venv/lib/python3.12/site-packages/greenlet/tests/test_weakref.py b/.venv/lib/python3.12/site-packages/greenlet/tests/test_weakref.py new file mode 100644 index 00000000..05a38a7f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/greenlet/tests/test_weakref.py @@ -0,0 +1,35 @@ +import gc +import weakref + + +import greenlet +from .
import TestCase + +class WeakRefTests(TestCase): + def test_dead_weakref(self): + def _dead_greenlet(): + g = greenlet.greenlet(lambda: None) + g.switch() + return g + o = weakref.ref(_dead_greenlet()) + gc.collect() + self.assertEqual(o(), None) + + def test_inactive_weakref(self): + o = weakref.ref(greenlet.greenlet()) + gc.collect() + self.assertEqual(o(), None) + + def test_dealloc_weakref(self): + seen = [] + def worker(): + try: + greenlet.getcurrent().parent.switch() + finally: + seen.append(g()) + g = greenlet.greenlet(worker) + g.switch() + g2 = greenlet.greenlet(lambda: None, g) + g = weakref.ref(g2) + g2 = None + self.assertEqual(seen, [None]) diff --git a/.venv/lib/python3.12/site-packages/psycopg2/__init__.py b/.venv/lib/python3.12/site-packages/psycopg2/__init__.py new file mode 100644 index 00000000..59a89386 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2/__init__.py @@ -0,0 +1,126 @@ +"""A Python driver for PostgreSQL + +psycopg is a PostgreSQL_ database adapter for the Python_ programming +language. This is version 2, a complete rewrite of the original code to +provide new-style classes for connection and cursor objects and other sweet +candies. Like the original, psycopg 2 was written with the aim of being very +small and fast, and stable as a rock. + +Homepage: https://psycopg.org/ + +.. _PostgreSQL: https://www.postgresql.org/ +.. _Python: https://www.python.org/ + +:Groups: + * `Connections creation`: connect + * `Value objects constructors`: Binary, Date, DateFromTicks, Time, + TimeFromTicks, Timestamp, TimestampFromTicks +""" +# psycopg/__init__.py - initialization of the psycopg module +# +# Copyright (C) 2003-2019 Federico Di Gregorio +# Copyright (C) 2020-2021 The Psycopg Team +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# Import modules needed by _psycopg to allow tools like py2exe to do +# their work without bothering about the module dependencies. + +# Note: the first internal import should be _psycopg, otherwise the real cause +# of a failed loading of the C module may get hidden, see +# https://archives.postgresql.org/psycopg/2011-02/msg00044.php + +# Import the DBAPI-2.0 stuff into top-level module. + +from psycopg2._psycopg import ( # noqa + BINARY, NUMBER, STRING, DATETIME, ROWID, + + Binary, Date, Time, Timestamp, + DateFromTicks, TimeFromTicks, TimestampFromTicks, + + Error, Warning, DataError, DatabaseError, ProgrammingError, IntegrityError, + InterfaceError, InternalError, NotSupportedError, OperationalError, + + _connect, apilevel, threadsafety, paramstyle, + __version__, __libpq_version__, +) + + +# Register default adapters. 
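The first of these registrations, mapping tuple to SQL_IN, is what enables the common IN idiom; a brief sketch, assuming an open cursor `cur` and a hypothetical table:

    cur.execute("SELECT * FROM users WHERE id IN %s", ((1, 2, 3),))
    # The inner tuple is adapted by SQL_IN and rendered as (1, 2, 3).
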
+ +from psycopg2 import extensions as _ext +_ext.register_adapter(tuple, _ext.SQL_IN) +_ext.register_adapter(type(None), _ext.NoneAdapter) + +# Register the Decimal adapter here instead of in the C layer. +# This way a new class is registered for each sub-interpreter. +# See ticket #52 +from decimal import Decimal # noqa +from psycopg2._psycopg import Decimal as Adapter # noqa +_ext.register_adapter(Decimal, Adapter) +del Decimal, Adapter + + +def connect(dsn=None, connection_factory=None, cursor_factory=None, **kwargs): + """ + Create a new database connection. + + The connection parameters can be specified as a string: + + conn = psycopg2.connect("dbname=test user=postgres password=secret") + + or using a set of keyword arguments: + + conn = psycopg2.connect(database="test", user="postgres", password="secret") + + Or as a mix of both. The basic connection parameters are: + + - *dbname*: the database name + - *database*: the database name (only as keyword argument) + - *user*: user name used to authenticate + - *password*: password used to authenticate + - *host*: database host address (defaults to UNIX socket if not provided) + - *port*: connection port number (defaults to 5432 if not provided) + + Using the *connection_factory* parameter a different class or connections + factory can be specified. It should be a callable object taking a dsn + argument. + + Using the *cursor_factory* parameter, a new default cursor factory will be + used by cursor(). + + Using *async*=True an asynchronous connection will be created. *async_* is + a valid alias (for Python versions where ``async`` is a keyword). + + Any other keyword parameter will be passed to the underlying client + library: the list of supported parameters depends on the library version. + + """ + kwasync = {} + if 'async' in kwargs: + kwasync['async'] = kwargs.pop('async') + if 'async_' in kwargs: + kwasync['async_'] = kwargs.pop('async_') + + dsn = _ext.make_dsn(dsn, **kwargs) + conn = _connect(dsn, connection_factory=connection_factory, **kwasync) + if cursor_factory is not None: + conn.cursor_factory = cursor_factory + + return conn diff --git a/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..88a41804 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/_ipaddress.cpython-312.pyc b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/_ipaddress.cpython-312.pyc new file mode 100644 index 00000000..f1520cf8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/_ipaddress.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/_json.cpython-312.pyc b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/_json.cpython-312.pyc new file mode 100644 index 00000000..dd051cee Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/_json.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/_range.cpython-312.pyc b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/_range.cpython-312.pyc new file mode 100644 index 00000000..2d0af11d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/_range.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/errorcodes.cpython-312.pyc b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/errorcodes.cpython-312.pyc new file mode 100644 index 00000000..5aed031a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/errorcodes.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/errors.cpython-312.pyc b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/errors.cpython-312.pyc new file mode 100644 index 00000000..2cc5e46c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/errors.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/extensions.cpython-312.pyc b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/extensions.cpython-312.pyc new file mode 100644 index 00000000..b2a806eb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/extensions.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/extras.cpython-312.pyc b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/extras.cpython-312.pyc new file mode 100644 index 00000000..66714aa6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/extras.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/pool.cpython-312.pyc b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/pool.cpython-312.pyc new file mode 100644 index 00000000..26d77368 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/pool.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/sql.cpython-312.pyc b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/sql.cpython-312.pyc new file mode 100644 index 00000000..dd25eaae Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/sql.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/tz.cpython-312.pyc b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/tz.cpython-312.pyc new file mode 100644 index 00000000..7b5f1fc5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2/__pycache__/tz.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2/_ipaddress.py b/.venv/lib/python3.12/site-packages/psycopg2/_ipaddress.py new file mode 100644 index 00000000..d38566c8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2/_ipaddress.py @@ -0,0 +1,90 @@ +"""Implementation of the ipaddress-based network types adaptation +""" + +# psycopg/_ipaddress.py - Ipaddress-based network types adaptation +# +# Copyright (C) 2016-2019 Daniele Varrazzo +# Copyright (C) 2020-2021 The Psycopg Team +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL.
+# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +from psycopg2.extensions import ( + new_type, new_array_type, register_type, register_adapter, QuotedString) + +# The module is imported on register_ipaddress +ipaddress = None + +# The typecasters are created only once +_casters = None + + +def register_ipaddress(conn_or_curs=None): + """ + Register conversion support between `ipaddress` objects and `network types`__. + + :param conn_or_curs: the scope in which to register the type casters. + If `!None` register them globally. + + After the function is called, PostgreSQL :sql:`inet` values will be + converted into `~ipaddress.IPv4Interface` or `~ipaddress.IPv6Interface` + objects, :sql:`cidr` values into `~ipaddress.IPv4Network` or + `~ipaddress.IPv6Network`. + + .. __: https://www.postgresql.org/docs/current/static/datatype-net-types.html + """ + global ipaddress + import ipaddress + + global _casters + if _casters is None: + _casters = _make_casters() + + for c in _casters: + register_type(c, conn_or_curs) + + for t in [ipaddress.IPv4Interface, ipaddress.IPv6Interface, + ipaddress.IPv4Network, ipaddress.IPv6Network]: + register_adapter(t, adapt_ipaddress) + + +def _make_casters(): + inet = new_type((869,), 'INET', cast_interface) + ainet = new_array_type((1041,), 'INET[]', inet) + + cidr = new_type((650,), 'CIDR', cast_network) + acidr = new_array_type((651,), 'CIDR[]', cidr) + + return [inet, ainet, cidr, acidr] + + +def cast_interface(s, cur=None): + if s is None: + return None + # The Py2 version forced the use of unicode; keep casting to str. + return ipaddress.ip_interface(str(s)) + + +def cast_network(s, cur=None): + if s is None: + return None + return ipaddress.ip_network(str(s)) + + +def adapt_ipaddress(obj): + return QuotedString(str(obj)) diff --git a/.venv/lib/python3.12/site-packages/psycopg2/_json.py b/.venv/lib/python3.12/site-packages/psycopg2/_json.py new file mode 100644 index 00000000..95024223 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2/_json.py @@ -0,0 +1,199 @@ +"""Implementation of the JSON adaptation objects + +This module exists to avoid a circular import problem: psycopg2.extras depends +on psycopg2.extensions, so the default JSON typecasters can't be created in +extensions by importing register_json from extras. +""" + +# psycopg/_json.py - Implementation of the JSON adaptation objects +# +# Copyright (C) 2012-2019 Daniele Varrazzo +# Copyright (C) 2020-2021 The Psycopg Team +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details.
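Before the implementation, a quick usage sketch of the Json wrapper and register_json defined below; the connection string and table are hypothetical:

    import psycopg2
    from psycopg2.extras import Json

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    cur = conn.cursor()
    cur.execute("INSERT INTO docs (payload) VALUES (%s)", [Json({'a': 1})])
    # On modern PostgreSQL, json/jsonb values are typecast back to Python
    # objects automatically via the default typecasters created below.
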
+ +import json + +from psycopg2._psycopg import ISQLQuote, QuotedString +from psycopg2._psycopg import new_type, new_array_type, register_type + + +# oids from PostgreSQL 9.2 +JSON_OID = 114 +JSONARRAY_OID = 199 + +# oids from PostgreSQL 9.4 +JSONB_OID = 3802 +JSONBARRAY_OID = 3807 + + +class Json: + """ + An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to + the :sql:`json` data type. + + `!Json` can be used to wrap any object supported by the provided *dumps* + function. If none is provided, the standard :py:func:`json.dumps()` is + used. + + """ + def __init__(self, adapted, dumps=None): + self.adapted = adapted + self._conn = None + self._dumps = dumps or json.dumps + + def __conform__(self, proto): + if proto is ISQLQuote: + return self + + def dumps(self, obj): + """Serialize *obj* in JSON format. + + The default is to call `!json.dumps()` or the *dumps* function + provided in the constructor. You can override this method to create a + customized JSON wrapper. + """ + return self._dumps(obj) + + def prepare(self, conn): + self._conn = conn + + def getquoted(self): + s = self.dumps(self.adapted) + qs = QuotedString(s) + if self._conn is not None: + qs.prepare(self._conn) + return qs.getquoted() + + def __str__(self): + # getquoted is binary + return self.getquoted().decode('ascii', 'replace') + + +def register_json(conn_or_curs=None, globally=False, loads=None, + oid=None, array_oid=None, name='json'): + """Create and register typecasters converting the :sql:`json` type to Python objects. + + :param conn_or_curs: a connection or cursor used to find the :sql:`json` + and :sql:`json[]` oids; the typecasters are registered in a scope + limited to this object, unless *globally* is set to `!True`. It can be + `!None` if the oids are provided + :param globally: if `!False` register the typecasters only on + *conn_or_curs*, otherwise register them globally + :param loads: the function used to parse the data into a Python object. If + `!None` use `!json.loads()`, where `!json` is the module chosen + according to the Python version (see above) + :param oid: the OID of the :sql:`json` type if known; if not, it will be + queried on *conn_or_curs* + :param array_oid: the OID of the :sql:`json[]` array type if known; + if not, it will be queried on *conn_or_curs* + :param name: the name of the data type to look for in *conn_or_curs* + + The connection or cursor passed to the function will be used to query the + database and look for the OID of the :sql:`json` type (or an alternative + type if *name* is provided). No query is performed if *oid* and *array_oid* + are provided. Raise `~psycopg2.ProgrammingError` if the type is not found. + + """ + if oid is None: + oid, array_oid = _get_json_oids(conn_or_curs, name) + + JSON, JSONARRAY = _create_json_typecasters( + oid, array_oid, loads=loads, name=name.upper()) + + register_type(JSON, not globally and conn_or_curs or None) + + if JSONARRAY is not None: + register_type(JSONARRAY, not globally and conn_or_curs or None) + + return JSON, JSONARRAY + + +def register_default_json(conn_or_curs=None, globally=False, loads=None): + """ + Create and register :sql:`json` typecasters for PostgreSQL 9.2 and following. + + Since PostgreSQL 9.2, :sql:`json` is a builtin type, so its oid is known + and fixed. This function allows specifying a customized *loads* function + for the default :sql:`json` type without querying the database. + All the parameters have the same meaning as in `register_json()`.
+ """ + return register_json(conn_or_curs=conn_or_curs, globally=globally, + loads=loads, oid=JSON_OID, array_oid=JSONARRAY_OID) + + +def register_default_jsonb(conn_or_curs=None, globally=False, loads=None): + """ + Create and register :sql:`jsonb` typecasters for PostgreSQL 9.4 and following. + + As in `register_default_json()`, the function allows to register a + customized *loads* function for the :sql:`jsonb` type at its known oid for + PostgreSQL 9.4 and following versions. All the parameters have the same + meaning of `register_json()`. + """ + return register_json(conn_or_curs=conn_or_curs, globally=globally, + loads=loads, oid=JSONB_OID, array_oid=JSONBARRAY_OID, name='jsonb') + + +def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'): + """Create typecasters for json data type.""" + if loads is None: + loads = json.loads + + def typecast_json(s, cur): + if s is None: + return None + return loads(s) + + JSON = new_type((oid, ), name, typecast_json) + if array_oid is not None: + JSONARRAY = new_array_type((array_oid, ), f"{name}ARRAY", JSON) + else: + JSONARRAY = None + + return JSON, JSONARRAY + + +def _get_json_oids(conn_or_curs, name='json'): + # lazy imports + from psycopg2.extensions import STATUS_IN_TRANSACTION + from psycopg2.extras import _solve_conn_curs + + conn, curs = _solve_conn_curs(conn_or_curs) + + # Store the transaction status of the connection to revert it after use + conn_status = conn.status + + # column typarray not available before PG 8.3 + typarray = conn.info.server_version >= 80300 and "typarray" or "NULL" + + # get the oid for the hstore + curs.execute( + "SELECT t.oid, %s FROM pg_type t WHERE t.typname = %%s;" + % typarray, (name,)) + r = curs.fetchone() + + # revert the status of the connection as before the command + if conn_status != STATUS_IN_TRANSACTION and not conn.autocommit: + conn.rollback() + + if not r: + raise conn.ProgrammingError(f"{name} data type not found") + + return r diff --git a/.venv/lib/python3.12/site-packages/psycopg2/_psycopg.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/psycopg2/_psycopg.cpython-312-x86_64-linux-gnu.so new file mode 100644 index 00000000..49b9ec09 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2/_psycopg.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2/_range.py b/.venv/lib/python3.12/site-packages/psycopg2/_range.py new file mode 100644 index 00000000..64bae073 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2/_range.py @@ -0,0 +1,554 @@ +"""Implementation of the Range type and adaptation + +""" + +# psycopg/_range.py - Implementation of the Range type and adaptation +# +# Copyright (C) 2012-2019 Daniele Varrazzo +# Copyright (C) 2020-2021 The Psycopg Team +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. 
+# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import re + +from psycopg2._psycopg import ProgrammingError, InterfaceError +from psycopg2.extensions import ISQLQuote, adapt, register_adapter +from psycopg2.extensions import new_type, new_array_type, register_type + + +class Range: + """Python representation for a PostgreSQL |range|_ type. + + :param lower: lower bound for the range. `!None` means unbound + :param upper: upper bound for the range. `!None` means unbound + :param bounds: one of the literal strings ``()``, ``[)``, ``(]``, ``[]``, + representing whether the lower or upper bounds are included + :param empty: if `!True`, the range is empty + + """ + __slots__ = ('_lower', '_upper', '_bounds') + + def __init__(self, lower=None, upper=None, bounds='[)', empty=False): + if not empty: + if bounds not in ('[)', '(]', '()', '[]'): + raise ValueError(f"bound flags not valid: {bounds!r}") + + self._lower = lower + self._upper = upper + self._bounds = bounds + else: + self._lower = self._upper = self._bounds = None + + def __repr__(self): + if self._bounds is None: + return f"{self.__class__.__name__}(empty=True)" + else: + return "{}({!r}, {!r}, {!r})".format(self.__class__.__name__, + self._lower, self._upper, self._bounds) + + def __str__(self): + if self._bounds is None: + return 'empty' + + items = [ + self._bounds[0], + str(self._lower), + ', ', + str(self._upper), + self._bounds[1] + ] + return ''.join(items) + + @property + def lower(self): + """The lower bound of the range. `!None` if empty or unbound.""" + return self._lower + + @property + def upper(self): + """The upper bound of the range. 
`!None` if empty or unbound.""" + return self._upper + + @property + def isempty(self): + """`!True` if the range is empty.""" + return self._bounds is None + + @property + def lower_inf(self): + """`!True` if the range doesn't have a lower bound.""" + if self._bounds is None: + return False + return self._lower is None + + @property + def upper_inf(self): + """`!True` if the range doesn't have an upper bound.""" + if self._bounds is None: + return False + return self._upper is None + + @property + def lower_inc(self): + """`!True` if the lower bound is included in the range.""" + if self._bounds is None or self._lower is None: + return False + return self._bounds[0] == '[' + + @property + def upper_inc(self): + """`!True` if the upper bound is included in the range.""" + if self._bounds is None or self._upper is None: + return False + return self._bounds[1] == ']' + + def __contains__(self, x): + if self._bounds is None: + return False + + if self._lower is not None: + if self._bounds[0] == '[': + if x < self._lower: + return False + else: + if x <= self._lower: + return False + + if self._upper is not None: + if self._bounds[1] == ']': + if x > self._upper: + return False + else: + if x >= self._upper: + return False + + return True + + def __bool__(self): + return self._bounds is not None + + def __eq__(self, other): + if not isinstance(other, Range): + return False + return (self._lower == other._lower + and self._upper == other._upper + and self._bounds == other._bounds) + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash((self._lower, self._upper, self._bounds)) + + # as the postgres docs describe for the server-side stuff, + # ordering is rather arbitrary, but will remain stable + # and consistent. + + def __lt__(self, other): + if not isinstance(other, Range): + return NotImplemented + for attr in ('_lower', '_upper', '_bounds'): + self_value = getattr(self, attr) + other_value = getattr(other, attr) + if self_value == other_value: + pass + elif self_value is None: + return True + elif other_value is None: + return False + else: + return self_value < other_value + return False + + def __le__(self, other): + if self == other: + return True + else: + return self.__lt__(other) + + def __gt__(self, other): + if isinstance(other, Range): + return other.__lt__(self) + else: + return NotImplemented + + def __ge__(self, other): + if self == other: + return True + else: + return self.__gt__(other) + + def __getstate__(self): + return {slot: getattr(self, slot) + for slot in self.__slots__ if hasattr(self, slot)} + + def __setstate__(self, state): + for slot, value in state.items(): + setattr(self, slot, value) + + +def register_range(pgrange, pyrange, conn_or_curs, globally=False): + """Create and register an adapter and the typecasters to convert between + a PostgreSQL |range|_ type and a PostgreSQL `Range` subclass. + + :param pgrange: the name of the PostgreSQL |range| type. 
Can be + schema-qualified + :param pyrange: a `Range` strict subclass, or just a name to give to a new + class + :param conn_or_curs: a connection or cursor used to find the oid of the + range and its subtype; the typecaster is registered in a scope limited + to this object, unless *globally* is set to `!True` + :param globally: if `!False` (default) register the typecaster only on + *conn_or_curs*, otherwise register it globally + :return: `RangeCaster` instance responsible for the conversion + + If a string is passed to *pyrange*, a new `Range` subclass is created + with that name and will be available as the `~RangeCaster.range` attribute + of the returned `RangeCaster` object. + + The function queries the database on *conn_or_curs* to inspect the + *pgrange* type and raises `~psycopg2.ProgrammingError` if the type is not + found. If querying the database is not advisable, use the + `RangeCaster` class directly and register the adapter and typecasters using + the provided functions. + + """ + caster = RangeCaster._from_db(pgrange, pyrange, conn_or_curs) + caster._register(not globally and conn_or_curs or None) + return caster + + +class RangeAdapter: + """`ISQLQuote` adapter for `Range` subclasses. + + This is an abstract class: concrete classes must set a `name` class + attribute or override `getquoted()`. + """ + name = None + + def __init__(self, adapted): + self.adapted = adapted + + def __conform__(self, proto): + if proto is ISQLQuote: + return self + + def prepare(self, conn): + self._conn = conn + + def getquoted(self): + if self.name is None: + raise NotImplementedError( + 'RangeAdapter must be subclassed overriding its name ' + 'or the getquoted() method') + + r = self.adapted + if r.isempty: + return b"'empty'::" + self.name.encode('utf8') + + if r.lower is not None: + a = adapt(r.lower) + if hasattr(a, 'prepare'): + a.prepare(self._conn) + lower = a.getquoted() + else: + lower = b'NULL' + + if r.upper is not None: + a = adapt(r.upper) + if hasattr(a, 'prepare'): + a.prepare(self._conn) + upper = a.getquoted() + else: + upper = b'NULL' + + return self.name.encode('utf8') + b'(' + lower + b', ' + upper \ + + b", '" + r._bounds.encode('utf8') + b"')" + + +class RangeCaster: + """Helper class to convert between `Range` and PostgreSQL range types. + + Objects of this class are usually created by `register_range()`. Manual + creation could be useful if querying the database is not advisable: in + this case the oids must be provided. + """ + def __init__(self, pgrange, pyrange, oid, subtype_oid, array_oid=None): + self.subtype_oid = subtype_oid + self._create_ranges(pgrange, pyrange) + + name = self.adapter.name or self.adapter.__class__.__name__ + + self.typecaster = new_type((oid,), name, self.parse) + + if array_oid is not None: + self.array_typecaster = new_array_type( + (array_oid,), name + "ARRAY", self.typecaster) + else: + self.array_typecaster = None + + def _create_ranges(self, pgrange, pyrange): + """Create Range and RangeAdapter classes if needed.""" + # if given a string, create a new RangeAdapter concrete type (with a + # name); otherwise take it as an adapter. Passing an adapter should be + # considered an implementation detail and is not documented. It is + # currently used for the numeric ranges.
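To make the dispatch implemented below concrete, the two accepted call shapes for register_range, sketched with hypothetical type and class names (`conn` is assumed to be an open connection):

    from psycopg2.extras import register_range

    # pyrange given as a string: a new Range subclass named 'FloatRange'
    # is created and exposed as caster.range.
    caster = register_range('myschema.floatrange', 'FloatRange', conn)
    FloatRange = caster.range

    # pyrange given as an existing strict Range subclass is used as-is.
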
+ self.adapter = None + if isinstance(pgrange, str): + self.adapter = type(pgrange, (RangeAdapter,), {}) + self.adapter.name = pgrange + else: + try: + if issubclass(pgrange, RangeAdapter) \ + and pgrange is not RangeAdapter: + self.adapter = pgrange + except TypeError: + pass + + if self.adapter is None: + raise TypeError( + 'pgrange must be a string or a RangeAdapter strict subclass') + + self.range = None + try: + if isinstance(pyrange, str): + self.range = type(pyrange, (Range,), {}) + if issubclass(pyrange, Range) and pyrange is not Range: + self.range = pyrange + except TypeError: + pass + + if self.range is None: + raise TypeError( + 'pyrange must be a type or a Range strict subclass') + + @classmethod + def _from_db(cls, name, pyrange, conn_or_curs): + """Return a `RangeCaster` instance for the type *name*. + + Raise `ProgrammingError` if the type is not found. + """ + from psycopg2.extensions import STATUS_IN_TRANSACTION + from psycopg2.extras import _solve_conn_curs + conn, curs = _solve_conn_curs(conn_or_curs) + + if conn.info.server_version < 90200: + raise ProgrammingError("range types not available in version %s" + % conn.info.server_version) + + # Store the transaction status of the connection to revert it after use + conn_status = conn.status + + # Use the correct schema + if '.' in name: + schema, tname = name.split('.', 1) + else: + tname = name + schema = 'public' + + # get the type oid and attributes + curs.execute("""\ +select rngtypid, rngsubtype, typarray +from pg_range r +join pg_type t on t.oid = rngtypid +join pg_namespace ns on ns.oid = typnamespace +where typname = %s and ns.nspname = %s; +""", (tname, schema)) + rec = curs.fetchone() + + if not rec: + # The above algorithm doesn't work for customized search_path + # (#1487) The implementation below works better, but, to guarantee + # backwards compatibility, use it only if the original one failed. + try: + savepoint = False + # Because we executed statements earlier, we are either INTRANS + # or we are IDLE only if the transaction is autocommit, in + # which case we don't need the savepoint anyway. + if conn.status == STATUS_IN_TRANSACTION: + curs.execute("SAVEPOINT register_type") + savepoint = True + + curs.execute("""\ +SELECT rngtypid, rngsubtype, typarray, typname, nspname +from pg_range r +join pg_type t on t.oid = rngtypid +join pg_namespace ns on ns.oid = typnamespace +WHERE t.oid = %s::regtype +""", (name, )) + except ProgrammingError: + pass + else: + rec = curs.fetchone() + if rec: + tname, schema = rec[3:] + finally: + if savepoint: + curs.execute("ROLLBACK TO SAVEPOINT register_type") + + # revert the status of the connection as before the command + if conn_status != STATUS_IN_TRANSACTION and not conn.autocommit: + conn.rollback() + + if not rec: + raise ProgrammingError( + f"PostgreSQL range '{name}' not found") + + type, subtype, array = rec[:3] + + return RangeCaster(name, pyrange, + oid=type, subtype_oid=subtype, array_oid=array) + + _re_range = re.compile(r""" + ( \(|\[ ) # lower bound flag + (?: # lower bound: + " ( (?: [^"] | "")* ) " # - a quoted string + | ( [^",]+ ) # - or an unquoted string + )? # - or empty (not captured) + , + (?: # upper bound: + " ( (?: [^"] | "")* ) " # - a quoted string + | ( [^"\)\]]+ ) # - or an unquoted string + )?
# - or empty (not captured) + ( \)|\] ) # upper bound flag + """, re.VERBOSE) + + _re_undouble = re.compile(r'(["\\])\1') + + def parse(self, s, cur=None): + if s is None: + return None + + if s == 'empty': + return self.range(empty=True) + + m = self._re_range.match(s) + if m is None: + raise InterfaceError(f"failed to parse range: '{s}'") + + lower = m.group(3) + if lower is None: + lower = m.group(2) + if lower is not None: + lower = self._re_undouble.sub(r"\1", lower) + + upper = m.group(5) + if upper is None: + upper = m.group(4) + if upper is not None: + upper = self._re_undouble.sub(r"\1", upper) + + if cur is not None: + lower = cur.cast(self.subtype_oid, lower) + upper = cur.cast(self.subtype_oid, upper) + + bounds = m.group(1) + m.group(6) + + return self.range(lower, upper, bounds) + + def _register(self, scope=None): + register_type(self.typecaster, scope) + if self.array_typecaster is not None: + register_type(self.array_typecaster, scope) + + register_adapter(self.range, self.adapter) + + +class NumericRange(Range): + """A `Range` suitable to pass Python numeric types to a PostgreSQL range. + + PostgreSQL types :sql:`int4range`, :sql:`int8range`, :sql:`numrange` are + cast into `!NumericRange` instances. + """ + pass + + +class DateRange(Range): + """Represents :sql:`daterange` values.""" + pass + + +class DateTimeRange(Range): + """Represents :sql:`tsrange` values.""" + pass + + +class DateTimeTZRange(Range): + """Represents :sql:`tstzrange` values.""" + pass + + +# Special adaptation for NumericRange. Allows passing numeric ranges +# regardless of whether the bounds are ints or floats, or what size the ints +# are, distinctions which are pointless in the Python world. On the way back, +# numeric ranges are not cast to NumericRange itself, but only to its +# subclasses + +class NumberRangeAdapter(RangeAdapter): + """Adapt a range if the subtype doesn't need quotes.""" + def getquoted(self): + r = self.adapted + if r.isempty: + return b"'empty'" + + if not r.lower_inf: + # not exactly: we are relying on the fact that none of these + # objects is really quoted (they are numbers). Also, I'm lazy and + # not preparing the adapter because I assume encoding doesn't + # matter for these objects. + lower = adapt(r.lower).getquoted().decode('ascii') + else: + lower = '' + + if not r.upper_inf: + upper = adapt(r.upper).getquoted().decode('ascii') + else: + upper = '' + + return (f"'{r._bounds[0]}{lower},{upper}{r._bounds[1]}'").encode('ascii') + + +# TODO: probably won't work with infs, nans and other tricky cases. +register_adapter(NumericRange, NumberRangeAdapter) + +# Globally register typecasters and adapters for builtin range types. + +# note: the adapter is registered more than once, but this is harmless.
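The registrations that follow are what make numeric-range round-trips work; a brief sketch, assuming an open cursor `cur`:

    from psycopg2.extras import NumericRange

    cur.execute("SELECT %s::int8range", (NumericRange(10, 20),))
    r = cur.fetchone()[0]
    # r is a NumericRange again: r.lower == 10, r.upper == 20, bounds '[)'
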
+int4range_caster = RangeCaster(NumberRangeAdapter, NumericRange, + oid=3904, subtype_oid=23, array_oid=3905) +int4range_caster._register() + +int8range_caster = RangeCaster(NumberRangeAdapter, NumericRange, + oid=3926, subtype_oid=20, array_oid=3927) +int8range_caster._register() + +numrange_caster = RangeCaster(NumberRangeAdapter, NumericRange, + oid=3906, subtype_oid=1700, array_oid=3907) +numrange_caster._register() + +daterange_caster = RangeCaster('daterange', DateRange, + oid=3912, subtype_oid=1082, array_oid=3913) +daterange_caster._register() + +tsrange_caster = RangeCaster('tsrange', DateTimeRange, + oid=3908, subtype_oid=1114, array_oid=3909) +tsrange_caster._register() + +tstzrange_caster = RangeCaster('tstzrange', DateTimeTZRange, + oid=3910, subtype_oid=1184, array_oid=3911) +tstzrange_caster._register() diff --git a/.venv/lib/python3.12/site-packages/psycopg2/errorcodes.py b/.venv/lib/python3.12/site-packages/psycopg2/errorcodes.py new file mode 100644 index 00000000..aa646c46 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2/errorcodes.py @@ -0,0 +1,449 @@ +"""Error codes for PostgreSQL + +This module contains symbolic names for all PostgreSQL error codes. +""" +# psycopg2/errorcodes.py - PostgreSQL error codes +# +# Copyright (C) 2006-2019 Johan Dahlin +# Copyright (C) 2020-2021 The Psycopg Team +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. +# +# Based on: +# +# https://www.postgresql.org/docs/current/static/errcodes-appendix.html +# + + +def lookup(code, _cache={}): + """Lookup an error code or class code and return its symbolic name. + + Raise `KeyError` if the code is not found. + """ + if _cache: + return _cache[code] + + # Generate the lookup map at first usage. + tmp = {} + for k, v in globals().items(): + if isinstance(v, str) and len(v) in (2, 5): + # Strip trailing underscore used to disambiguate duplicate values + tmp[v] = k.rstrip("_") + + assert tmp + + # Atomic update, to avoid race condition on import (bug #382) + _cache.update(tmp) + + return _cache[code] + + +# autogenerated data: do not edit below this point. 
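A typical use of the constants and lookup() defined in this module, sketched with a hypothetical failing query (`cur` is an open cursor):

    import psycopg2
    from psycopg2 import errorcodes

    try:
        cur.execute("SELECT * FROM missing_table")
    except psycopg2.Error as e:
        if e.pgcode == errorcodes.UNDEFINED_TABLE:
            print("no such table")
        print(errorcodes.lookup(e.pgcode))   # 'UNDEFINED_TABLE'
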
+ +# Error classes +CLASS_SUCCESSFUL_COMPLETION = '00' +CLASS_WARNING = '01' +CLASS_NO_DATA = '02' +CLASS_SQL_STATEMENT_NOT_YET_COMPLETE = '03' +CLASS_CONNECTION_EXCEPTION = '08' +CLASS_TRIGGERED_ACTION_EXCEPTION = '09' +CLASS_FEATURE_NOT_SUPPORTED = '0A' +CLASS_INVALID_TRANSACTION_INITIATION = '0B' +CLASS_LOCATOR_EXCEPTION = '0F' +CLASS_INVALID_GRANTOR = '0L' +CLASS_INVALID_ROLE_SPECIFICATION = '0P' +CLASS_DIAGNOSTICS_EXCEPTION = '0Z' +CLASS_CASE_NOT_FOUND = '20' +CLASS_CARDINALITY_VIOLATION = '21' +CLASS_DATA_EXCEPTION = '22' +CLASS_INTEGRITY_CONSTRAINT_VIOLATION = '23' +CLASS_INVALID_CURSOR_STATE = '24' +CLASS_INVALID_TRANSACTION_STATE = '25' +CLASS_INVALID_SQL_STATEMENT_NAME = '26' +CLASS_TRIGGERED_DATA_CHANGE_VIOLATION = '27' +CLASS_INVALID_AUTHORIZATION_SPECIFICATION = '28' +CLASS_DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST = '2B' +CLASS_INVALID_TRANSACTION_TERMINATION = '2D' +CLASS_SQL_ROUTINE_EXCEPTION = '2F' +CLASS_INVALID_CURSOR_NAME = '34' +CLASS_EXTERNAL_ROUTINE_EXCEPTION = '38' +CLASS_EXTERNAL_ROUTINE_INVOCATION_EXCEPTION = '39' +CLASS_SAVEPOINT_EXCEPTION = '3B' +CLASS_INVALID_CATALOG_NAME = '3D' +CLASS_INVALID_SCHEMA_NAME = '3F' +CLASS_TRANSACTION_ROLLBACK = '40' +CLASS_SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION = '42' +CLASS_WITH_CHECK_OPTION_VIOLATION = '44' +CLASS_INSUFFICIENT_RESOURCES = '53' +CLASS_PROGRAM_LIMIT_EXCEEDED = '54' +CLASS_OBJECT_NOT_IN_PREREQUISITE_STATE = '55' +CLASS_OPERATOR_INTERVENTION = '57' +CLASS_SYSTEM_ERROR = '58' +CLASS_SNAPSHOT_FAILURE = '72' +CLASS_CONFIGURATION_FILE_ERROR = 'F0' +CLASS_FOREIGN_DATA_WRAPPER_ERROR = 'HV' +CLASS_PL_PGSQL_ERROR = 'P0' +CLASS_INTERNAL_ERROR = 'XX' + +# Class 00 - Successful Completion +SUCCESSFUL_COMPLETION = '00000' + +# Class 01 - Warning +WARNING = '01000' +NULL_VALUE_ELIMINATED_IN_SET_FUNCTION = '01003' +STRING_DATA_RIGHT_TRUNCATION_ = '01004' +PRIVILEGE_NOT_REVOKED = '01006' +PRIVILEGE_NOT_GRANTED = '01007' +IMPLICIT_ZERO_BIT_PADDING = '01008' +DYNAMIC_RESULT_SETS_RETURNED = '0100C' +DEPRECATED_FEATURE = '01P01' + +# Class 02 - No Data (this is also a warning class per the SQL standard) +NO_DATA = '02000' +NO_ADDITIONAL_DYNAMIC_RESULT_SETS_RETURNED = '02001' + +# Class 03 - SQL Statement Not Yet Complete +SQL_STATEMENT_NOT_YET_COMPLETE = '03000' + +# Class 08 - Connection Exception +CONNECTION_EXCEPTION = '08000' +SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION = '08001' +CONNECTION_DOES_NOT_EXIST = '08003' +SQLSERVER_REJECTED_ESTABLISHMENT_OF_SQLCONNECTION = '08004' +CONNECTION_FAILURE = '08006' +TRANSACTION_RESOLUTION_UNKNOWN = '08007' +PROTOCOL_VIOLATION = '08P01' + +# Class 09 - Triggered Action Exception +TRIGGERED_ACTION_EXCEPTION = '09000' + +# Class 0A - Feature Not Supported +FEATURE_NOT_SUPPORTED = '0A000' + +# Class 0B - Invalid Transaction Initiation +INVALID_TRANSACTION_INITIATION = '0B000' + +# Class 0F - Locator Exception +LOCATOR_EXCEPTION = '0F000' +INVALID_LOCATOR_SPECIFICATION = '0F001' + +# Class 0L - Invalid Grantor +INVALID_GRANTOR = '0L000' +INVALID_GRANT_OPERATION = '0LP01' + +# Class 0P - Invalid Role Specification +INVALID_ROLE_SPECIFICATION = '0P000' + +# Class 0Z - Diagnostics Exception +DIAGNOSTICS_EXCEPTION = '0Z000' +STACKED_DIAGNOSTICS_ACCESSED_WITHOUT_ACTIVE_HANDLER = '0Z002' + +# Class 20 - Case Not Found +CASE_NOT_FOUND = '20000' + +# Class 21 - Cardinality Violation +CARDINALITY_VIOLATION = '21000' + +# Class 22 - Data Exception +DATA_EXCEPTION = '22000' +STRING_DATA_RIGHT_TRUNCATION = '22001' +NULL_VALUE_NO_INDICATOR_PARAMETER = '22002' +NUMERIC_VALUE_OUT_OF_RANGE = '22003' 
+NULL_VALUE_NOT_ALLOWED_ = '22004' +ERROR_IN_ASSIGNMENT = '22005' +INVALID_DATETIME_FORMAT = '22007' +DATETIME_FIELD_OVERFLOW = '22008' +INVALID_TIME_ZONE_DISPLACEMENT_VALUE = '22009' +ESCAPE_CHARACTER_CONFLICT = '2200B' +INVALID_USE_OF_ESCAPE_CHARACTER = '2200C' +INVALID_ESCAPE_OCTET = '2200D' +ZERO_LENGTH_CHARACTER_STRING = '2200F' +MOST_SPECIFIC_TYPE_MISMATCH = '2200G' +SEQUENCE_GENERATOR_LIMIT_EXCEEDED = '2200H' +NOT_AN_XML_DOCUMENT = '2200L' +INVALID_XML_DOCUMENT = '2200M' +INVALID_XML_CONTENT = '2200N' +INVALID_XML_COMMENT = '2200S' +INVALID_XML_PROCESSING_INSTRUCTION = '2200T' +INVALID_INDICATOR_PARAMETER_VALUE = '22010' +SUBSTRING_ERROR = '22011' +DIVISION_BY_ZERO = '22012' +INVALID_PRECEDING_OR_FOLLOWING_SIZE = '22013' +INVALID_ARGUMENT_FOR_NTILE_FUNCTION = '22014' +INTERVAL_FIELD_OVERFLOW = '22015' +INVALID_ARGUMENT_FOR_NTH_VALUE_FUNCTION = '22016' +INVALID_CHARACTER_VALUE_FOR_CAST = '22018' +INVALID_ESCAPE_CHARACTER = '22019' +INVALID_REGULAR_EXPRESSION = '2201B' +INVALID_ARGUMENT_FOR_LOGARITHM = '2201E' +INVALID_ARGUMENT_FOR_POWER_FUNCTION = '2201F' +INVALID_ARGUMENT_FOR_WIDTH_BUCKET_FUNCTION = '2201G' +INVALID_ROW_COUNT_IN_LIMIT_CLAUSE = '2201W' +INVALID_ROW_COUNT_IN_RESULT_OFFSET_CLAUSE = '2201X' +INVALID_LIMIT_VALUE = '22020' +CHARACTER_NOT_IN_REPERTOIRE = '22021' +INDICATOR_OVERFLOW = '22022' +INVALID_PARAMETER_VALUE = '22023' +UNTERMINATED_C_STRING = '22024' +INVALID_ESCAPE_SEQUENCE = '22025' +STRING_DATA_LENGTH_MISMATCH = '22026' +TRIM_ERROR = '22027' +ARRAY_SUBSCRIPT_ERROR = '2202E' +INVALID_TABLESAMPLE_REPEAT = '2202G' +INVALID_TABLESAMPLE_ARGUMENT = '2202H' +DUPLICATE_JSON_OBJECT_KEY_VALUE = '22030' +INVALID_ARGUMENT_FOR_SQL_JSON_DATETIME_FUNCTION = '22031' +INVALID_JSON_TEXT = '22032' +INVALID_SQL_JSON_SUBSCRIPT = '22033' +MORE_THAN_ONE_SQL_JSON_ITEM = '22034' +NO_SQL_JSON_ITEM = '22035' +NON_NUMERIC_SQL_JSON_ITEM = '22036' +NON_UNIQUE_KEYS_IN_A_JSON_OBJECT = '22037' +SINGLETON_SQL_JSON_ITEM_REQUIRED = '22038' +SQL_JSON_ARRAY_NOT_FOUND = '22039' +SQL_JSON_MEMBER_NOT_FOUND = '2203A' +SQL_JSON_NUMBER_NOT_FOUND = '2203B' +SQL_JSON_OBJECT_NOT_FOUND = '2203C' +TOO_MANY_JSON_ARRAY_ELEMENTS = '2203D' +TOO_MANY_JSON_OBJECT_MEMBERS = '2203E' +SQL_JSON_SCALAR_REQUIRED = '2203F' +SQL_JSON_ITEM_CANNOT_BE_CAST_TO_TARGET_TYPE = '2203G' +FLOATING_POINT_EXCEPTION = '22P01' +INVALID_TEXT_REPRESENTATION = '22P02' +INVALID_BINARY_REPRESENTATION = '22P03' +BAD_COPY_FILE_FORMAT = '22P04' +UNTRANSLATABLE_CHARACTER = '22P05' +NONSTANDARD_USE_OF_ESCAPE_CHARACTER = '22P06' + +# Class 23 - Integrity Constraint Violation +INTEGRITY_CONSTRAINT_VIOLATION = '23000' +RESTRICT_VIOLATION = '23001' +NOT_NULL_VIOLATION = '23502' +FOREIGN_KEY_VIOLATION = '23503' +UNIQUE_VIOLATION = '23505' +CHECK_VIOLATION = '23514' +EXCLUSION_VIOLATION = '23P01' + +# Class 24 - Invalid Cursor State +INVALID_CURSOR_STATE = '24000' + +# Class 25 - Invalid Transaction State +INVALID_TRANSACTION_STATE = '25000' +ACTIVE_SQL_TRANSACTION = '25001' +BRANCH_TRANSACTION_ALREADY_ACTIVE = '25002' +INAPPROPRIATE_ACCESS_MODE_FOR_BRANCH_TRANSACTION = '25003' +INAPPROPRIATE_ISOLATION_LEVEL_FOR_BRANCH_TRANSACTION = '25004' +NO_ACTIVE_SQL_TRANSACTION_FOR_BRANCH_TRANSACTION = '25005' +READ_ONLY_SQL_TRANSACTION = '25006' +SCHEMA_AND_DATA_STATEMENT_MIXING_NOT_SUPPORTED = '25007' +HELD_CURSOR_REQUIRES_SAME_ISOLATION_LEVEL = '25008' +NO_ACTIVE_SQL_TRANSACTION = '25P01' +IN_FAILED_SQL_TRANSACTION = '25P02' +IDLE_IN_TRANSACTION_SESSION_TIMEOUT = '25P03' + +# Class 26 - Invalid SQL Statement Name +INVALID_SQL_STATEMENT_NAME = '26000' + +# Class 
27 - Triggered Data Change Violation +TRIGGERED_DATA_CHANGE_VIOLATION = '27000' + +# Class 28 - Invalid Authorization Specification +INVALID_AUTHORIZATION_SPECIFICATION = '28000' +INVALID_PASSWORD = '28P01' + +# Class 2B - Dependent Privilege Descriptors Still Exist +DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST = '2B000' +DEPENDENT_OBJECTS_STILL_EXIST = '2BP01' + +# Class 2D - Invalid Transaction Termination +INVALID_TRANSACTION_TERMINATION = '2D000' + +# Class 2F - SQL Routine Exception +SQL_ROUTINE_EXCEPTION = '2F000' +MODIFYING_SQL_DATA_NOT_PERMITTED_ = '2F002' +PROHIBITED_SQL_STATEMENT_ATTEMPTED_ = '2F003' +READING_SQL_DATA_NOT_PERMITTED_ = '2F004' +FUNCTION_EXECUTED_NO_RETURN_STATEMENT = '2F005' + +# Class 34 - Invalid Cursor Name +INVALID_CURSOR_NAME = '34000' + +# Class 38 - External Routine Exception +EXTERNAL_ROUTINE_EXCEPTION = '38000' +CONTAINING_SQL_NOT_PERMITTED = '38001' +MODIFYING_SQL_DATA_NOT_PERMITTED = '38002' +PROHIBITED_SQL_STATEMENT_ATTEMPTED = '38003' +READING_SQL_DATA_NOT_PERMITTED = '38004' + +# Class 39 - External Routine Invocation Exception +EXTERNAL_ROUTINE_INVOCATION_EXCEPTION = '39000' +INVALID_SQLSTATE_RETURNED = '39001' +NULL_VALUE_NOT_ALLOWED = '39004' +TRIGGER_PROTOCOL_VIOLATED = '39P01' +SRF_PROTOCOL_VIOLATED = '39P02' +EVENT_TRIGGER_PROTOCOL_VIOLATED = '39P03' + +# Class 3B - Savepoint Exception +SAVEPOINT_EXCEPTION = '3B000' +INVALID_SAVEPOINT_SPECIFICATION = '3B001' + +# Class 3D - Invalid Catalog Name +INVALID_CATALOG_NAME = '3D000' + +# Class 3F - Invalid Schema Name +INVALID_SCHEMA_NAME = '3F000' + +# Class 40 - Transaction Rollback +TRANSACTION_ROLLBACK = '40000' +SERIALIZATION_FAILURE = '40001' +TRANSACTION_INTEGRITY_CONSTRAINT_VIOLATION = '40002' +STATEMENT_COMPLETION_UNKNOWN = '40003' +DEADLOCK_DETECTED = '40P01' + +# Class 42 - Syntax Error or Access Rule Violation +SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION = '42000' +INSUFFICIENT_PRIVILEGE = '42501' +SYNTAX_ERROR = '42601' +INVALID_NAME = '42602' +INVALID_COLUMN_DEFINITION = '42611' +NAME_TOO_LONG = '42622' +DUPLICATE_COLUMN = '42701' +AMBIGUOUS_COLUMN = '42702' +UNDEFINED_COLUMN = '42703' +UNDEFINED_OBJECT = '42704' +DUPLICATE_OBJECT = '42710' +DUPLICATE_ALIAS = '42712' +DUPLICATE_FUNCTION = '42723' +AMBIGUOUS_FUNCTION = '42725' +GROUPING_ERROR = '42803' +DATATYPE_MISMATCH = '42804' +WRONG_OBJECT_TYPE = '42809' +INVALID_FOREIGN_KEY = '42830' +CANNOT_COERCE = '42846' +UNDEFINED_FUNCTION = '42883' +GENERATED_ALWAYS = '428C9' +RESERVED_NAME = '42939' +UNDEFINED_TABLE = '42P01' +UNDEFINED_PARAMETER = '42P02' +DUPLICATE_CURSOR = '42P03' +DUPLICATE_DATABASE = '42P04' +DUPLICATE_PREPARED_STATEMENT = '42P05' +DUPLICATE_SCHEMA = '42P06' +DUPLICATE_TABLE = '42P07' +AMBIGUOUS_PARAMETER = '42P08' +AMBIGUOUS_ALIAS = '42P09' +INVALID_COLUMN_REFERENCE = '42P10' +INVALID_CURSOR_DEFINITION = '42P11' +INVALID_DATABASE_DEFINITION = '42P12' +INVALID_FUNCTION_DEFINITION = '42P13' +INVALID_PREPARED_STATEMENT_DEFINITION = '42P14' +INVALID_SCHEMA_DEFINITION = '42P15' +INVALID_TABLE_DEFINITION = '42P16' +INVALID_OBJECT_DEFINITION = '42P17' +INDETERMINATE_DATATYPE = '42P18' +INVALID_RECURSION = '42P19' +WINDOWING_ERROR = '42P20' +COLLATION_MISMATCH = '42P21' +INDETERMINATE_COLLATION = '42P22' + +# Class 44 - WITH CHECK OPTION Violation +WITH_CHECK_OPTION_VIOLATION = '44000' + +# Class 53 - Insufficient Resources +INSUFFICIENT_RESOURCES = '53000' +DISK_FULL = '53100' +OUT_OF_MEMORY = '53200' +TOO_MANY_CONNECTIONS = '53300' +CONFIGURATION_LIMIT_EXCEEDED = '53400' + +# Class 54 - Program Limit Exceeded +PROGRAM_LIMIT_EXCEEDED 
= '54000' +STATEMENT_TOO_COMPLEX = '54001' +TOO_MANY_COLUMNS = '54011' +TOO_MANY_ARGUMENTS = '54023' + +# Class 55 - Object Not In Prerequisite State +OBJECT_NOT_IN_PREREQUISITE_STATE = '55000' +OBJECT_IN_USE = '55006' +CANT_CHANGE_RUNTIME_PARAM = '55P02' +LOCK_NOT_AVAILABLE = '55P03' +UNSAFE_NEW_ENUM_VALUE_USAGE = '55P04' + +# Class 57 - Operator Intervention +OPERATOR_INTERVENTION = '57000' +QUERY_CANCELED = '57014' +ADMIN_SHUTDOWN = '57P01' +CRASH_SHUTDOWN = '57P02' +CANNOT_CONNECT_NOW = '57P03' +DATABASE_DROPPED = '57P04' +IDLE_SESSION_TIMEOUT = '57P05' + +# Class 58 - System Error (errors external to PostgreSQL itself) +SYSTEM_ERROR = '58000' +IO_ERROR = '58030' +UNDEFINED_FILE = '58P01' +DUPLICATE_FILE = '58P02' + +# Class 72 - Snapshot Failure +SNAPSHOT_TOO_OLD = '72000' + +# Class F0 - Configuration File Error +CONFIG_FILE_ERROR = 'F0000' +LOCK_FILE_EXISTS = 'F0001' + +# Class HV - Foreign Data Wrapper Error (SQL/MED) +FDW_ERROR = 'HV000' +FDW_OUT_OF_MEMORY = 'HV001' +FDW_DYNAMIC_PARAMETER_VALUE_NEEDED = 'HV002' +FDW_INVALID_DATA_TYPE = 'HV004' +FDW_COLUMN_NAME_NOT_FOUND = 'HV005' +FDW_INVALID_DATA_TYPE_DESCRIPTORS = 'HV006' +FDW_INVALID_COLUMN_NAME = 'HV007' +FDW_INVALID_COLUMN_NUMBER = 'HV008' +FDW_INVALID_USE_OF_NULL_POINTER = 'HV009' +FDW_INVALID_STRING_FORMAT = 'HV00A' +FDW_INVALID_HANDLE = 'HV00B' +FDW_INVALID_OPTION_INDEX = 'HV00C' +FDW_INVALID_OPTION_NAME = 'HV00D' +FDW_OPTION_NAME_NOT_FOUND = 'HV00J' +FDW_REPLY_HANDLE = 'HV00K' +FDW_UNABLE_TO_CREATE_EXECUTION = 'HV00L' +FDW_UNABLE_TO_CREATE_REPLY = 'HV00M' +FDW_UNABLE_TO_ESTABLISH_CONNECTION = 'HV00N' +FDW_NO_SCHEMAS = 'HV00P' +FDW_SCHEMA_NOT_FOUND = 'HV00Q' +FDW_TABLE_NOT_FOUND = 'HV00R' +FDW_FUNCTION_SEQUENCE_ERROR = 'HV010' +FDW_TOO_MANY_HANDLES = 'HV014' +FDW_INCONSISTENT_DESCRIPTOR_INFORMATION = 'HV021' +FDW_INVALID_ATTRIBUTE_VALUE = 'HV024' +FDW_INVALID_STRING_LENGTH_OR_BUFFER_LENGTH = 'HV090' +FDW_INVALID_DESCRIPTOR_FIELD_IDENTIFIER = 'HV091' + +# Class P0 - PL/pgSQL Error +PLPGSQL_ERROR = 'P0000' +RAISE_EXCEPTION = 'P0001' +NO_DATA_FOUND = 'P0002' +TOO_MANY_ROWS = 'P0003' +ASSERT_FAILURE = 'P0004' + +# Class XX - Internal Error +INTERNAL_ERROR = 'XX000' +DATA_CORRUPTED = 'XX001' +INDEX_CORRUPTED = 'XX002' diff --git a/.venv/lib/python3.12/site-packages/psycopg2/errors.py b/.venv/lib/python3.12/site-packages/psycopg2/errors.py new file mode 100644 index 00000000..e4e47f5b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2/errors.py @@ -0,0 +1,38 @@ +"""Error classes for PostgreSQL error codes +""" + +# psycopg/errors.py - SQLSTATE and DB-API exceptions +# +# Copyright (C) 2018-2019 Daniele Varrazzo +# Copyright (C) 2020-2021 The Psycopg Team +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public +# License for more details. + +# +# NOTE: the exceptions are injected into this module by the C extension. +# + + +def lookup(code): + """Lookup an error code and return its exception class. + + Raise `!KeyError` if the code is not found. + """ + from psycopg2._psycopg import sqlstate_errors # avoid circular import + return sqlstate_errors[code] diff --git a/.venv/lib/python3.12/site-packages/psycopg2/extensions.py b/.venv/lib/python3.12/site-packages/psycopg2/extensions.py new file mode 100644 index 00000000..b938d0ce --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2/extensions.py @@ -0,0 +1,213 @@ +"""psycopg extensions to the DBAPI-2.0 + +This module holds all the extensions to the DBAPI-2.0 provided by psycopg. + +- `connection` -- the new-type inheritable connection class +- `cursor` -- the new-type inheritable cursor class +- `lobject` -- the new-type inheritable large object class +- `adapt()` -- exposes the PEP-246_ compatible adapting mechanism used + by psycopg to adapt Python types to PostgreSQL ones + +.. _PEP-246: https://www.python.org/dev/peps/pep-0246/ +""" +# psycopg/extensions.py - DBAPI-2.0 extensions specific to psycopg +# +# Copyright (C) 2003-2019 Federico Di Gregorio +# Copyright (C) 2020-2021 The Psycopg Team +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details.
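As a sketch of the adaptation mechanism this module exposes, mirroring the pattern from the psycopg2 documentation (the Point class is hypothetical):

    from psycopg2.extensions import adapt, register_adapter, AsIs

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

    def adapt_point(point):
        # Render as a PostgreSQL point literal, adapting each coordinate.
        return AsIs("'(%s, %s)'" % (adapt(point.x), adapt(point.y)))

    register_adapter(Point, adapt_point)
    # Point(1.2, 3.4) can now be passed directly as a query parameter.
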
+
+import re as _re
+
+from psycopg2._psycopg import (  # noqa
+    BINARYARRAY, BOOLEAN, BOOLEANARRAY, BYTES, BYTESARRAY, DATE, DATEARRAY,
+    DATETIMEARRAY, DECIMAL, DECIMALARRAY, FLOAT, FLOATARRAY, INTEGER,
+    INTEGERARRAY, INTERVAL, INTERVALARRAY, LONGINTEGER, LONGINTEGERARRAY,
+    ROWIDARRAY, STRINGARRAY, TIME, TIMEARRAY, UNICODE, UNICODEARRAY,
+    AsIs, Binary, Boolean, Float, Int, QuotedString, )
+
+from psycopg2._psycopg import (  # noqa
+    PYDATE, PYDATETIME, PYDATETIMETZ, PYINTERVAL, PYTIME, PYDATEARRAY,
+    PYDATETIMEARRAY, PYDATETIMETZARRAY, PYINTERVALARRAY, PYTIMEARRAY,
+    DateFromPy, TimeFromPy, TimestampFromPy, IntervalFromPy, )
+
+from psycopg2._psycopg import (  # noqa
+    adapt, adapters, encodings, connection, cursor,
+    lobject, Xid, libpq_version, parse_dsn, quote_ident,
+    string_types, binary_types, new_type, new_array_type, register_type,
+    ISQLQuote, Notify, Diagnostics, Column, ConnectionInfo,
+    QueryCanceledError, TransactionRollbackError,
+    set_wait_callback, get_wait_callback, encrypt_password, )
+
+
+"""Isolation level values."""
+ISOLATION_LEVEL_AUTOCOMMIT = 0
+ISOLATION_LEVEL_READ_UNCOMMITTED = 4
+ISOLATION_LEVEL_READ_COMMITTED = 1
+ISOLATION_LEVEL_REPEATABLE_READ = 2
+ISOLATION_LEVEL_SERIALIZABLE = 3
+ISOLATION_LEVEL_DEFAULT = None
+
+
+"""psycopg connection status values."""
+STATUS_SETUP = 0
+STATUS_READY = 1
+STATUS_BEGIN = 2
+STATUS_SYNC = 3  # currently unused
+STATUS_ASYNC = 4  # currently unused
+STATUS_PREPARED = 5
+
+# This is a useful mnemonic to check if the connection is in a transaction
+STATUS_IN_TRANSACTION = STATUS_BEGIN
+
+
+"""psycopg asynchronous connection polling values"""
+POLL_OK = 0
+POLL_READ = 1
+POLL_WRITE = 2
+POLL_ERROR = 3
+
+
+"""Backend transaction status values."""
+TRANSACTION_STATUS_IDLE = 0
+TRANSACTION_STATUS_ACTIVE = 1
+TRANSACTION_STATUS_INTRANS = 2
+TRANSACTION_STATUS_INERROR = 3
+TRANSACTION_STATUS_UNKNOWN = 4
+
+
+def register_adapter(typ, callable):
+    """Register 'callable' as an ISQLQuote adapter for type 'typ'."""
+    adapters[(typ, ISQLQuote)] = callable
+
+
+# The SQL_IN class is the official adapter for tuples starting from 2.0.6.
+class SQL_IN:
+    """Adapt any iterable to an SQL quotable object."""
+    def __init__(self, seq):
+        self._seq = seq
+        self._conn = None
+
+    def prepare(self, conn):
+        self._conn = conn
+
+    def getquoted(self):
+        # this is the important line: note how every object in the
+        # list is adapted and then how getquoted() is called on it
+        pobjs = [adapt(o) for o in self._seq]
+        if self._conn is not None:
+            for obj in pobjs:
+                if hasattr(obj, 'prepare'):
+                    obj.prepare(self._conn)
+        qobjs = [o.getquoted() for o in pobjs]
+        return b'(' + b', '.join(qobjs) + b')'
+
+    def __str__(self):
+        return str(self.getquoted())
+
+
+class NoneAdapter:
+    """Adapt None to NULL.
+
+    This adapter is not normally used, as a fast path in mogrify uses NULL,
+    but it makes it easier to adapt composite types.
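+
+    A minimal sketch of the behaviour (the adapter ignores the wrapped
+    object and always renders NULL)::
+
+        >>> NoneAdapter(None).getquoted()
+        b'NULL'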
+ """ + def __init__(self, obj): + pass + + def getquoted(self, _null=b"NULL"): + return _null + + +def make_dsn(dsn=None, **kwargs): + """Convert a set of keywords into a connection strings.""" + if dsn is None and not kwargs: + return '' + + # If no kwarg is specified don't mung the dsn, but verify it + if not kwargs: + parse_dsn(dsn) + return dsn + + # Override the dsn with the parameters + if 'database' in kwargs: + if 'dbname' in kwargs: + raise TypeError( + "you can't specify both 'database' and 'dbname' arguments") + kwargs['dbname'] = kwargs.pop('database') + + # Drop the None arguments + kwargs = {k: v for (k, v) in kwargs.items() if v is not None} + + if dsn is not None: + tmp = parse_dsn(dsn) + tmp.update(kwargs) + kwargs = tmp + + dsn = " ".join(["{}={}".format(k, _param_escape(str(v))) + for (k, v) in kwargs.items()]) + + # verify that the returned dsn is valid + parse_dsn(dsn) + + return dsn + + +def _param_escape(s, + re_escape=_re.compile(r"([\\'])"), + re_space=_re.compile(r'\s')): + """ + Apply the escaping rule required by PQconnectdb + """ + if not s: + return "''" + + s = re_escape.sub(r'\\\1', s) + if re_space.search(s): + s = "'" + s + "'" + + return s + + +# Create default json typecasters for PostgreSQL 9.2 oids +from psycopg2._json import register_default_json, register_default_jsonb # noqa + +try: + JSON, JSONARRAY = register_default_json() + JSONB, JSONBARRAY = register_default_jsonb() +except ImportError: + pass + +del register_default_json, register_default_jsonb + + +# Create default Range typecasters +from psycopg2. _range import Range # noqa +del Range + + +# Add the "cleaned" version of the encodings to the key. +# When the encoding is set its name is cleaned up from - and _ and turned +# uppercase, so an encoding not respecting these rules wouldn't be found in the +# encodings keys and would raise an exception with the unicode typecaster +for k, v in list(encodings.items()): + k = k.replace('_', '').replace('-', '').upper() + encodings[k] = v + +del k, v diff --git a/.venv/lib/python3.12/site-packages/psycopg2/extras.py b/.venv/lib/python3.12/site-packages/psycopg2/extras.py new file mode 100644 index 00000000..36e8ef9a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2/extras.py @@ -0,0 +1,1340 @@ +"""Miscellaneous goodies for psycopg2 + +This module is a generic place used to hold little helper functions +and classes until a better place in the distribution is found. +""" +# psycopg/extras.py - miscellaneous extra goodies for psycopg +# +# Copyright (C) 2003-2019 Federico Di Gregorio +# Copyright (C) 2020-2021 The Psycopg Team +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +import os as _os +import time as _time +import re as _re +from collections import namedtuple, OrderedDict + +import logging as _logging + +import psycopg2 +from psycopg2 import extensions as _ext +from .extensions import cursor as _cursor +from .extensions import connection as _connection +from .extensions import adapt as _A, quote_ident +from functools import lru_cache + +from psycopg2._psycopg import ( # noqa + REPLICATION_PHYSICAL, REPLICATION_LOGICAL, + ReplicationConnection as _replicationConnection, + ReplicationCursor as _replicationCursor, + ReplicationMessage) + + +# expose the json adaptation stuff into the module +from psycopg2._json import ( # noqa + json, Json, register_json, register_default_json, register_default_jsonb) + + +# Expose range-related objects +from psycopg2._range import ( # noqa + Range, NumericRange, DateRange, DateTimeRange, DateTimeTZRange, + register_range, RangeAdapter, RangeCaster) + + +# Expose ipaddress-related objects +from psycopg2._ipaddress import register_ipaddress # noqa + + +class DictCursorBase(_cursor): + """Base class for all dict-like cursors.""" + + def __init__(self, *args, **kwargs): + if 'row_factory' in kwargs: + row_factory = kwargs['row_factory'] + del kwargs['row_factory'] + else: + raise NotImplementedError( + "DictCursorBase can't be instantiated without a row factory.") + super().__init__(*args, **kwargs) + self._query_executed = False + self._prefetch = False + self.row_factory = row_factory + + def fetchone(self): + if self._prefetch: + res = super().fetchone() + if self._query_executed: + self._build_index() + if not self._prefetch: + res = super().fetchone() + return res + + def fetchmany(self, size=None): + if self._prefetch: + res = super().fetchmany(size) + if self._query_executed: + self._build_index() + if not self._prefetch: + res = super().fetchmany(size) + return res + + def fetchall(self): + if self._prefetch: + res = super().fetchall() + if self._query_executed: + self._build_index() + if not self._prefetch: + res = super().fetchall() + return res + + def __iter__(self): + try: + if self._prefetch: + res = super().__iter__() + first = next(res) + if self._query_executed: + self._build_index() + if not self._prefetch: + res = super().__iter__() + first = next(res) + + yield first + while True: + yield next(res) + except StopIteration: + return + + +class DictConnection(_connection): + """A connection that uses `DictCursor` automatically.""" + def cursor(self, *args, **kwargs): + kwargs.setdefault('cursor_factory', self.cursor_factory or DictCursor) + return super().cursor(*args, **kwargs) + + +class DictCursor(DictCursorBase): + """A cursor that keeps a list of column name -> index mappings__. + + .. 
+    __: https://docs.python.org/glossary.html#term-mapping
+    """
+
+    def __init__(self, *args, **kwargs):
+        kwargs['row_factory'] = DictRow
+        super().__init__(*args, **kwargs)
+        self._prefetch = True
+
+    def execute(self, query, vars=None):
+        self.index = OrderedDict()
+        self._query_executed = True
+        return super().execute(query, vars)
+
+    def callproc(self, procname, vars=None):
+        self.index = OrderedDict()
+        self._query_executed = True
+        return super().callproc(procname, vars)
+
+    def _build_index(self):
+        if self._query_executed and self.description:
+            for i in range(len(self.description)):
+                self.index[self.description[i][0]] = i
+            self._query_executed = False
+
+
+class DictRow(list):
+    """A row object that allows by-column-name access to data."""
+
+    __slots__ = ('_index',)
+
+    def __init__(self, cursor):
+        self._index = cursor.index
+        self[:] = [None] * len(cursor.description)
+
+    def __getitem__(self, x):
+        if not isinstance(x, (int, slice)):
+            x = self._index[x]
+        return super().__getitem__(x)
+
+    def __setitem__(self, x, v):
+        if not isinstance(x, (int, slice)):
+            x = self._index[x]
+        super().__setitem__(x, v)
+
+    def items(self):
+        g = super().__getitem__
+        return ((n, g(self._index[n])) for n in self._index)
+
+    def keys(self):
+        return iter(self._index)
+
+    def values(self):
+        g = super().__getitem__
+        return (g(self._index[n]) for n in self._index)
+
+    def get(self, x, default=None):
+        try:
+            return self[x]
+        except Exception:
+            return default
+
+    def copy(self):
+        return OrderedDict(self.items())
+
+    def __contains__(self, x):
+        return x in self._index
+
+    def __reduce__(self):
+        # this is apparently useless, but it fixes #1073
+        return super().__reduce__()
+
+    def __getstate__(self):
+        return self[:], self._index.copy()
+
+    def __setstate__(self, data):
+        self[:] = data[0]
+        self._index = data[1]
+
+
+class RealDictConnection(_connection):
+    """A connection that uses `RealDictCursor` automatically."""
+    def cursor(self, *args, **kwargs):
+        kwargs.setdefault('cursor_factory', self.cursor_factory or RealDictCursor)
+        return super().cursor(*args, **kwargs)
+
+
+class RealDictCursor(DictCursorBase):
+    """A cursor that uses a real dict as the base type for rows.
+
+    Note that this cursor is extremely specialized and does not allow
+    the normal access (using integer indices) to fetched data.  If you need
+    to access database rows both as a dictionary and a list, then use
+    the generic `DictCursor` instead of `!RealDictCursor`.
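+
+    Example (a minimal sketch, assuming an open connection ``conn``)::
+
+        >>> cur = conn.cursor(cursor_factory=RealDictCursor)
+        >>> cur.execute("SELECT 1 AS id, 'abc' AS data")
+        >>> cur.fetchone()
+        RealDictRow([('id', 1), ('data', 'abc')])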
+ """ + def __init__(self, *args, **kwargs): + kwargs['row_factory'] = RealDictRow + super().__init__(*args, **kwargs) + + def execute(self, query, vars=None): + self.column_mapping = [] + self._query_executed = True + return super().execute(query, vars) + + def callproc(self, procname, vars=None): + self.column_mapping = [] + self._query_executed = True + return super().callproc(procname, vars) + + def _build_index(self): + if self._query_executed and self.description: + self.column_mapping = [d[0] for d in self.description] + self._query_executed = False + + +class RealDictRow(OrderedDict): + """A `!dict` subclass representing a data record.""" + + def __init__(self, *args, **kwargs): + if args and isinstance(args[0], _cursor): + cursor = args[0] + args = args[1:] + else: + cursor = None + + super().__init__(*args, **kwargs) + + if cursor is not None: + # Required for named cursors + if cursor.description and not cursor.column_mapping: + cursor._build_index() + + # Store the cols mapping in the dict itself until the row is fully + # populated, so we don't need to add attributes to the class + # (hence keeping its maintenance, special pickle support, etc.) + self[RealDictRow] = cursor.column_mapping + + def __setitem__(self, key, value): + if RealDictRow in self: + # We are in the row building phase + mapping = self[RealDictRow] + super().__setitem__(mapping[key], value) + if key == len(mapping) - 1: + # Row building finished + del self[RealDictRow] + return + + super().__setitem__(key, value) + + +class NamedTupleConnection(_connection): + """A connection that uses `NamedTupleCursor` automatically.""" + def cursor(self, *args, **kwargs): + kwargs.setdefault('cursor_factory', self.cursor_factory or NamedTupleCursor) + return super().cursor(*args, **kwargs) + + +class NamedTupleCursor(_cursor): + """A cursor that generates results as `~collections.namedtuple`. + + `!fetch*()` methods will return named tuples instead of regular tuples, so + their elements can be accessed both as regular numeric items as well as + attributes. 
+ + >>> nt_cur = conn.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor) + >>> rec = nt_cur.fetchone() + >>> rec + Record(id=1, num=100, data="abc'def") + >>> rec[1] + 100 + >>> rec.data + "abc'def" + """ + Record = None + MAX_CACHE = 1024 + + def execute(self, query, vars=None): + self.Record = None + return super().execute(query, vars) + + def executemany(self, query, vars): + self.Record = None + return super().executemany(query, vars) + + def callproc(self, procname, vars=None): + self.Record = None + return super().callproc(procname, vars) + + def fetchone(self): + t = super().fetchone() + if t is not None: + nt = self.Record + if nt is None: + nt = self.Record = self._make_nt() + return nt._make(t) + + def fetchmany(self, size=None): + ts = super().fetchmany(size) + nt = self.Record + if nt is None: + nt = self.Record = self._make_nt() + return list(map(nt._make, ts)) + + def fetchall(self): + ts = super().fetchall() + nt = self.Record + if nt is None: + nt = self.Record = self._make_nt() + return list(map(nt._make, ts)) + + def __iter__(self): + try: + it = super().__iter__() + t = next(it) + + nt = self.Record + if nt is None: + nt = self.Record = self._make_nt() + + yield nt._make(t) + + while True: + yield nt._make(next(it)) + except StopIteration: + return + + def _make_nt(self): + key = tuple(d[0] for d in self.description) if self.description else () + return self._cached_make_nt(key) + + @classmethod + def _do_make_nt(cls, key): + fields = [] + for s in key: + s = _re_clean.sub('_', s) + # Python identifier cannot start with numbers, namedtuple fields + # cannot start with underscore. So... + if s[0] == '_' or '0' <= s[0] <= '9': + s = 'f' + s + fields.append(s) + + nt = namedtuple("Record", fields) + return nt + + +@lru_cache(512) +def _cached_make_nt(cls, key): + return cls._do_make_nt(key) + + +# Exposed for testability, and if someone wants to monkeypatch to tweak +# the cache size. +NamedTupleCursor._cached_make_nt = classmethod(_cached_make_nt) + + +class LoggingConnection(_connection): + """A connection that logs all queries to a file or logger__ object. + + .. __: https://docs.python.org/library/logging.html + """ + + def initialize(self, logobj): + """Initialize the connection to log to `!logobj`. + + The `!logobj` parameter can be an open file object or a Logger/LoggerAdapter + instance from the standard logging module. + """ + self._logobj = logobj + if _logging and isinstance( + logobj, (_logging.Logger, _logging.LoggerAdapter)): + self.log = self._logtologger + else: + self.log = self._logtofile + + def filter(self, msg, curs): + """Filter the query before logging it. + + This is the method to overwrite to filter unwanted queries out of the + log or to add some extra data to the output. The default implementation + just does nothing. 
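+
+        Example (a sketch of a hypothetical subclass that truncates logged
+        queries; the 80-byte limit is an arbitrary choice)::
+
+            class ShortLoggingConnection(LoggingConnection):
+                def filter(self, msg, curs):
+                    return msg[:80]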
+ """ + return msg + + def _logtofile(self, msg, curs): + msg = self.filter(msg, curs) + if msg: + if isinstance(msg, bytes): + msg = msg.decode(_ext.encodings[self.encoding], 'replace') + self._logobj.write(msg + _os.linesep) + + def _logtologger(self, msg, curs): + msg = self.filter(msg, curs) + if msg: + self._logobj.debug(msg) + + def _check(self): + if not hasattr(self, '_logobj'): + raise self.ProgrammingError( + "LoggingConnection object has not been initialize()d") + + def cursor(self, *args, **kwargs): + self._check() + kwargs.setdefault('cursor_factory', self.cursor_factory or LoggingCursor) + return super().cursor(*args, **kwargs) + + +class LoggingCursor(_cursor): + """A cursor that logs queries using its connection logging facilities.""" + + def execute(self, query, vars=None): + try: + return super().execute(query, vars) + finally: + self.connection.log(self.query, self) + + def callproc(self, procname, vars=None): + try: + return super().callproc(procname, vars) + finally: + self.connection.log(self.query, self) + + +class MinTimeLoggingConnection(LoggingConnection): + """A connection that logs queries based on execution time. + + This is just an example of how to sub-class `LoggingConnection` to + provide some extra filtering for the logged queries. Both the + `initialize()` and `filter()` methods are overwritten to make sure + that only queries executing for more than ``mintime`` ms are logged. + + Note that this connection uses the specialized cursor + `MinTimeLoggingCursor`. + """ + def initialize(self, logobj, mintime=0): + LoggingConnection.initialize(self, logobj) + self._mintime = mintime + + def filter(self, msg, curs): + t = (_time.time() - curs.timestamp) * 1000 + if t > self._mintime: + if isinstance(msg, bytes): + msg = msg.decode(_ext.encodings[self.encoding], 'replace') + return f"{msg}{_os.linesep} (execution time: {t} ms)" + + def cursor(self, *args, **kwargs): + kwargs.setdefault('cursor_factory', + self.cursor_factory or MinTimeLoggingCursor) + return LoggingConnection.cursor(self, *args, **kwargs) + + +class MinTimeLoggingCursor(LoggingCursor): + """The cursor sub-class companion to `MinTimeLoggingConnection`.""" + + def execute(self, query, vars=None): + self.timestamp = _time.time() + return LoggingCursor.execute(self, query, vars) + + def callproc(self, procname, vars=None): + self.timestamp = _time.time() + return LoggingCursor.callproc(self, procname, vars) + + +class LogicalReplicationConnection(_replicationConnection): + + def __init__(self, *args, **kwargs): + kwargs['replication_type'] = REPLICATION_LOGICAL + super().__init__(*args, **kwargs) + + +class PhysicalReplicationConnection(_replicationConnection): + + def __init__(self, *args, **kwargs): + kwargs['replication_type'] = REPLICATION_PHYSICAL + super().__init__(*args, **kwargs) + + +class StopReplication(Exception): + """ + Exception used to break out of the endless loop in + `~ReplicationCursor.consume_stream()`. + + Subclass of `~exceptions.Exception`. Intentionally *not* inherited from + `~psycopg2.Error` as occurrence of this exception does not indicate an + error. 
+ """ + pass + + +class ReplicationCursor(_replicationCursor): + """A cursor used for communication on replication connections.""" + + def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None): + """Create streaming replication slot.""" + + command = f"CREATE_REPLICATION_SLOT {quote_ident(slot_name, self)} " + + if slot_type is None: + slot_type = self.connection.replication_type + + if slot_type == REPLICATION_LOGICAL: + if output_plugin is None: + raise psycopg2.ProgrammingError( + "output plugin name is required to create " + "logical replication slot") + + command += f"LOGICAL {quote_ident(output_plugin, self)}" + + elif slot_type == REPLICATION_PHYSICAL: + if output_plugin is not None: + raise psycopg2.ProgrammingError( + "cannot specify output plugin name when creating " + "physical replication slot") + + command += "PHYSICAL" + + else: + raise psycopg2.ProgrammingError( + f"unrecognized replication type: {repr(slot_type)}") + + self.execute(command) + + def drop_replication_slot(self, slot_name): + """Drop streaming replication slot.""" + + command = f"DROP_REPLICATION_SLOT {quote_ident(slot_name, self)}" + self.execute(command) + + def start_replication( + self, slot_name=None, slot_type=None, start_lsn=0, + timeline=0, options=None, decode=False, status_interval=10): + """Start replication stream.""" + + command = "START_REPLICATION " + + if slot_type is None: + slot_type = self.connection.replication_type + + if slot_type == REPLICATION_LOGICAL: + if slot_name: + command += f"SLOT {quote_ident(slot_name, self)} " + else: + raise psycopg2.ProgrammingError( + "slot name is required for logical replication") + + command += "LOGICAL " + + elif slot_type == REPLICATION_PHYSICAL: + if slot_name: + command += f"SLOT {quote_ident(slot_name, self)} " + # don't add "PHYSICAL", before 9.4 it was just START_REPLICATION XXX/XXX + + else: + raise psycopg2.ProgrammingError( + f"unrecognized replication type: {repr(slot_type)}") + + if type(start_lsn) is str: + lsn = start_lsn.split('/') + lsn = f"{int(lsn[0], 16):X}/{int(lsn[1], 16):08X}" + else: + lsn = f"{start_lsn >> 32 & 4294967295:X}/{start_lsn & 4294967295:08X}" + + command += lsn + + if timeline != 0: + if slot_type == REPLICATION_LOGICAL: + raise psycopg2.ProgrammingError( + "cannot specify timeline for logical replication") + + command += f" TIMELINE {timeline}" + + if options: + if slot_type == REPLICATION_PHYSICAL: + raise psycopg2.ProgrammingError( + "cannot specify output plugin options for physical replication") + + command += " (" + for k, v in options.items(): + if not command.endswith('('): + command += ", " + command += f"{quote_ident(k, self)} {_A(str(v))}" + command += ")" + + self.start_replication_expert( + command, decode=decode, status_interval=status_interval) + + # allows replication cursors to be used in select.select() directly + def fileno(self): + return self.connection.fileno() + + +# a dbtype and adapter for Python UUID type + +class UUID_adapter: + """Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__. + + .. __: https://docs.python.org/library/uuid.html + .. 
__: https://www.postgresql.org/docs/current/static/datatype-uuid.html + """ + + def __init__(self, uuid): + self._uuid = uuid + + def __conform__(self, proto): + if proto is _ext.ISQLQuote: + return self + + def getquoted(self): + return (f"'{self._uuid}'::uuid").encode('utf8') + + def __str__(self): + return f"'{self._uuid}'::uuid" + + +def register_uuid(oids=None, conn_or_curs=None): + """Create the UUID type and an uuid.UUID adapter. + + :param oids: oid for the PostgreSQL :sql:`uuid` type, or 2-items sequence + with oids of the type and the array. If not specified, use PostgreSQL + standard oids. + :param conn_or_curs: where to register the typecaster. If not specified, + register it globally. + """ + + import uuid + + if not oids: + oid1 = 2950 + oid2 = 2951 + elif isinstance(oids, (list, tuple)): + oid1, oid2 = oids + else: + oid1 = oids + oid2 = 2951 + + _ext.UUID = _ext.new_type((oid1, ), "UUID", + lambda data, cursor: data and uuid.UUID(data) or None) + _ext.UUIDARRAY = _ext.new_array_type((oid2,), "UUID[]", _ext.UUID) + + _ext.register_type(_ext.UUID, conn_or_curs) + _ext.register_type(_ext.UUIDARRAY, conn_or_curs) + _ext.register_adapter(uuid.UUID, UUID_adapter) + + return _ext.UUID + + +# a type, dbtype and adapter for PostgreSQL inet type + +class Inet: + """Wrap a string to allow for correct SQL-quoting of inet values. + + Note that this adapter does NOT check the passed value to make + sure it really is an inet-compatible address but DOES call adapt() + on it to make sure it is impossible to execute an SQL-injection + by passing an evil value to the initializer. + """ + def __init__(self, addr): + self.addr = addr + + def __repr__(self): + return f"{self.__class__.__name__}({self.addr!r})" + + def prepare(self, conn): + self._conn = conn + + def getquoted(self): + obj = _A(self.addr) + if hasattr(obj, 'prepare'): + obj.prepare(self._conn) + return obj.getquoted() + b"::inet" + + def __conform__(self, proto): + if proto is _ext.ISQLQuote: + return self + + def __str__(self): + return str(self.addr) + + +def register_inet(oid=None, conn_or_curs=None): + """Create the INET type and an Inet adapter. + + :param oid: oid for the PostgreSQL :sql:`inet` type, or 2-items sequence + with oids of the type and the array. If not specified, use PostgreSQL + standard oids. + :param conn_or_curs: where to register the typecaster. If not specified, + register it globally. + """ + import warnings + warnings.warn( + "the inet adapter is deprecated, it's not very useful", + DeprecationWarning) + + if not oid: + oid1 = 869 + oid2 = 1041 + elif isinstance(oid, (list, tuple)): + oid1, oid2 = oid + else: + oid1 = oid + oid2 = 1041 + + _ext.INET = _ext.new_type((oid1, ), "INET", + lambda data, cursor: data and Inet(data) or None) + _ext.INETARRAY = _ext.new_array_type((oid2, ), "INETARRAY", _ext.INET) + + _ext.register_type(_ext.INET, conn_or_curs) + _ext.register_type(_ext.INETARRAY, conn_or_curs) + + return _ext.INET + + +def wait_select(conn): + """Wait until a connection or cursor has data available. + + The function is an example of a wait callback to be registered with + `~psycopg2.extensions.set_wait_callback()`. This function uses + :py:func:`~select.select()` to wait for data to become available, and + therefore is able to handle/receive SIGINT/KeyboardInterrupt. 
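+
+    A minimal sketch of registering it (after this, blocking calls such as
+    `~cursor.execute()` can be interrupted with Ctrl-C)::
+
+        >>> import psycopg2.extensions, psycopg2.extras
+        >>> psycopg2.extensions.set_wait_callback(psycopg2.extras.wait_select)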
+ """ + import select + from psycopg2.extensions import POLL_OK, POLL_READ, POLL_WRITE + + while True: + try: + state = conn.poll() + if state == POLL_OK: + break + elif state == POLL_READ: + select.select([conn.fileno()], [], []) + elif state == POLL_WRITE: + select.select([], [conn.fileno()], []) + else: + raise conn.OperationalError(f"bad state from poll: {state}") + except KeyboardInterrupt: + conn.cancel() + # the loop will be broken by a server error + continue + + +def _solve_conn_curs(conn_or_curs): + """Return the connection and a DBAPI cursor from a connection or cursor.""" + if conn_or_curs is None: + raise psycopg2.ProgrammingError("no connection or cursor provided") + + if hasattr(conn_or_curs, 'execute'): + conn = conn_or_curs.connection + curs = conn.cursor(cursor_factory=_cursor) + else: + conn = conn_or_curs + curs = conn.cursor(cursor_factory=_cursor) + + return conn, curs + + +class HstoreAdapter: + """Adapt a Python dict to the hstore syntax.""" + def __init__(self, wrapped): + self.wrapped = wrapped + + def prepare(self, conn): + self.conn = conn + + # use an old-style getquoted implementation if required + if conn.info.server_version < 90000: + self.getquoted = self._getquoted_8 + + def _getquoted_8(self): + """Use the operators available in PG pre-9.0.""" + if not self.wrapped: + return b"''::hstore" + + adapt = _ext.adapt + rv = [] + for k, v in self.wrapped.items(): + k = adapt(k) + k.prepare(self.conn) + k = k.getquoted() + + if v is not None: + v = adapt(v) + v.prepare(self.conn) + v = v.getquoted() + else: + v = b'NULL' + + # XXX this b'ing is painfully inefficient! + rv.append(b"(" + k + b" => " + v + b")") + + return b"(" + b'||'.join(rv) + b")" + + def _getquoted_9(self): + """Use the hstore(text[], text[]) function.""" + if not self.wrapped: + return b"''::hstore" + + k = _ext.adapt(list(self.wrapped.keys())) + k.prepare(self.conn) + v = _ext.adapt(list(self.wrapped.values())) + v.prepare(self.conn) + return b"hstore(" + k.getquoted() + b", " + v.getquoted() + b")" + + getquoted = _getquoted_9 + + _re_hstore = _re.compile(r""" + # hstore key: + # a string of normal or escaped chars + "((?: [^"\\] | \\. )*)" + \s*=>\s* # hstore value + (?: + NULL # the value can be null - not catched + # or a quoted string like the key + | "((?: [^"\\] | \\. )*)" + ) + (?:\s*,\s*|$) # pairs separated by comma or end of string. + """, _re.VERBOSE) + + @classmethod + def parse(self, s, cur, _bsdec=_re.compile(r"\\(.)")): + """Parse an hstore representation in a Python string. + + The hstore is represented as something like:: + + "a"=>"1", "b"=>"2" + + with backslash-escaped strings. + """ + if s is None: + return None + + rv = {} + start = 0 + for m in self._re_hstore.finditer(s): + if m is None or m.start() != start: + raise psycopg2.InterfaceError( + f"error parsing hstore pair at char {start}") + k = _bsdec.sub(r'\1', m.group(1)) + v = m.group(2) + if v is not None: + v = _bsdec.sub(r'\1', v) + + rv[k] = v + start = m.end() + + if start < len(s): + raise psycopg2.InterfaceError( + f"error parsing hstore: unparsed data after char {start}") + + return rv + + @classmethod + def parse_unicode(self, s, cur): + """Parse an hstore returning unicode keys and values.""" + if s is None: + return None + + s = s.decode(_ext.encodings[cur.connection.encoding]) + return self.parse(s, cur) + + @classmethod + def get_oids(self, conn_or_curs): + """Return the lists of OID of the hstore and hstore[] types. 
+ """ + conn, curs = _solve_conn_curs(conn_or_curs) + + # Store the transaction status of the connection to revert it after use + conn_status = conn.status + + # column typarray not available before PG 8.3 + typarray = conn.info.server_version >= 80300 and "typarray" or "NULL" + + rv0, rv1 = [], [] + + # get the oid for the hstore + curs.execute(f"""SELECT t.oid, {typarray} +FROM pg_type t JOIN pg_namespace ns + ON typnamespace = ns.oid +WHERE typname = 'hstore'; +""") + for oids in curs: + rv0.append(oids[0]) + rv1.append(oids[1]) + + # revert the status of the connection as before the command + if (conn_status != _ext.STATUS_IN_TRANSACTION + and not conn.autocommit): + conn.rollback() + + return tuple(rv0), tuple(rv1) + + +def register_hstore(conn_or_curs, globally=False, unicode=False, + oid=None, array_oid=None): + r"""Register adapter and typecaster for `!dict`\-\ |hstore| conversions. + + :param conn_or_curs: a connection or cursor: the typecaster will be + registered only on this object unless *globally* is set to `!True` + :param globally: register the adapter globally, not only on *conn_or_curs* + :param unicode: if `!True`, keys and values returned from the database + will be `!unicode` instead of `!str`. The option is not available on + Python 3 + :param oid: the OID of the |hstore| type if known. If not, it will be + queried on *conn_or_curs*. + :param array_oid: the OID of the |hstore| array type if known. If not, it + will be queried on *conn_or_curs*. + + The connection or cursor passed to the function will be used to query the + database and look for the OID of the |hstore| type (which may be different + across databases). If querying is not desirable (e.g. with + :ref:`asynchronous connections `) you may specify it in the + *oid* parameter, which can be found using a query such as :sql:`SELECT + 'hstore'::regtype::oid`. Analogously you can obtain a value for *array_oid* + using a query such as :sql:`SELECT 'hstore[]'::regtype::oid`. + + Note that, when passing a dictionary from Python to the database, both + strings and unicode keys and values are supported. Dictionaries returned + from the database have keys/values according to the *unicode* parameter. + + The |hstore| contrib module must be already installed in the database + (executing the ``hstore.sql`` script in your ``contrib`` directory). + Raise `~psycopg2.ProgrammingError` if the type is not found. + """ + if oid is None: + oid = HstoreAdapter.get_oids(conn_or_curs) + if oid is None or not oid[0]: + raise psycopg2.ProgrammingError( + "hstore type not found in the database. " + "please install it from your 'contrib/hstore.sql' file") + else: + array_oid = oid[1] + oid = oid[0] + + if isinstance(oid, int): + oid = (oid,) + + if array_oid is not None: + if isinstance(array_oid, int): + array_oid = (array_oid,) + else: + array_oid = tuple([x for x in array_oid if x]) + + # create and register the typecaster + HSTORE = _ext.new_type(oid, "HSTORE", HstoreAdapter.parse) + _ext.register_type(HSTORE, not globally and conn_or_curs or None) + _ext.register_adapter(dict, HstoreAdapter) + + if array_oid: + HSTOREARRAY = _ext.new_array_type(array_oid, "HSTOREARRAY", HSTORE) + _ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None) + + +class CompositeCaster: + """Helps conversion of a PostgreSQL composite type into a Python object. + + The class is usually created by the `register_composite()` function. 
+    You may want to create and register instances of the class manually if
+    querying the database at registration time is not desirable (such as when
+    using :ref:`asynchronous connections `).
+
+    """
+    def __init__(self, name, oid, attrs, array_oid=None, schema=None):
+        self.name = name
+        self.schema = schema
+        self.oid = oid
+        self.array_oid = array_oid
+
+        self.attnames = [a[0] for a in attrs]
+        self.atttypes = [a[1] for a in attrs]
+        self._create_type(name, self.attnames)
+        self.typecaster = _ext.new_type((oid,), name, self.parse)
+        if array_oid:
+            self.array_typecaster = _ext.new_array_type(
+                (array_oid,), f"{name}ARRAY", self.typecaster)
+        else:
+            self.array_typecaster = None
+
+    def parse(self, s, curs):
+        if s is None:
+            return None
+
+        tokens = self.tokenize(s)
+        if len(tokens) != len(self.atttypes):
+            raise psycopg2.DataError(
+                "expecting %d components for the type %s, %d found instead" %
+                (len(self.atttypes), self.name, len(tokens)))
+
+        values = [curs.cast(oid, token)
+                  for oid, token in zip(self.atttypes, tokens)]
+
+        return self.make(values)
+
+    def make(self, values):
+        """Return a new Python object representing the data being cast.
+
+        *values* is the list of attributes, already cast into their Python
+        representation.
+
+        You can subclass this method to :ref:`customize the composite cast
+        `.
+        """
+
+        return self._ctor(values)
+
+    _re_tokenize = _re.compile(r"""
+  \(? ([,)])                        # an empty token, representing NULL
+| \(? " ((?: [^"] | "")*) " [,)]    # or a quoted string
+| \(? ([^",)]+) [,)]                # or an unquoted string
+    """, _re.VERBOSE)
+
+    _re_undouble = _re.compile(r'(["\\])\1')
+
+    @classmethod
+    def tokenize(self, s):
+        rv = []
+        for m in self._re_tokenize.finditer(s):
+            if m is None:
+                raise psycopg2.InterfaceError(f"can't parse type: {s!r}")
+            if m.group(1) is not None:
+                rv.append(None)
+            elif m.group(2) is not None:
+                rv.append(self._re_undouble.sub(r"\1", m.group(2)))
+            else:
+                rv.append(m.group(3))
+
+        return rv
+
+    def _create_type(self, name, attnames):
+        name = _re_clean.sub('_', name)
+        self.type = namedtuple(name, attnames)
+        self._ctor = self.type._make
+
+    @classmethod
+    def _from_db(self, name, conn_or_curs):
+        """Return a `CompositeCaster` instance for the type *name*.
+
+        Raise `ProgrammingError` if the type is not found.
+        """
+        conn, curs = _solve_conn_curs(conn_or_curs)
+
+        # Store the transaction status of the connection to revert it after use
+        conn_status = conn.status
+
+        # Use the correct schema
+        if '.' in name:
+            schema, tname = name.split('.', 1)
+        else:
+            tname = name
+            schema = 'public'
+
+        # column typarray not available before PG 8.3
+        typarray = conn.info.server_version >= 80300 and "typarray" or "NULL"
+
+        # get the type oid and attributes
+        curs.execute("""\
+SELECT t.oid, %s, attname, atttypid
+FROM pg_type t
+JOIN pg_namespace ns ON typnamespace = ns.oid
+JOIN pg_attribute a ON attrelid = typrelid
+WHERE typname = %%s AND nspname = %%s
+    AND attnum > 0 AND NOT attisdropped
+ORDER BY attnum;
+""" % typarray, (tname, schema))
+
+        recs = curs.fetchall()
+
+        if not recs:
+            # The above algorithm doesn't work for customized search_path
+            # (#1487). The implementation below works better, but, to guarantee
+            # backwards compatibility, use it only if the original one failed.
+            try:
+                savepoint = False
+                # Because we executed statements earlier, we are either INTRANS
+                # or we are IDLE only if the transaction is autocommit, in
+                # which case we don't need the savepoint anyway.
+ if conn.status == _ext.STATUS_IN_TRANSACTION: + curs.execute("SAVEPOINT register_type") + savepoint = True + + curs.execute("""\ +SELECT t.oid, %s, attname, atttypid, typname, nspname +FROM pg_type t +JOIN pg_namespace ns ON typnamespace = ns.oid +JOIN pg_attribute a ON attrelid = typrelid +WHERE t.oid = %%s::regtype + AND attnum > 0 AND NOT attisdropped +ORDER BY attnum; +""" % typarray, (name, )) + except psycopg2.ProgrammingError: + pass + else: + recs = curs.fetchall() + if recs: + tname = recs[0][4] + schema = recs[0][5] + finally: + if savepoint: + curs.execute("ROLLBACK TO SAVEPOINT register_type") + + # revert the status of the connection as before the command + if conn_status != _ext.STATUS_IN_TRANSACTION and not conn.autocommit: + conn.rollback() + + if not recs: + raise psycopg2.ProgrammingError( + f"PostgreSQL type '{name}' not found") + + type_oid = recs[0][0] + array_oid = recs[0][1] + type_attrs = [(r[2], r[3]) for r in recs] + + return self(tname, type_oid, type_attrs, + array_oid=array_oid, schema=schema) + + +def register_composite(name, conn_or_curs, globally=False, factory=None): + """Register a typecaster to convert a composite type into a tuple. + + :param name: the name of a PostgreSQL composite type, e.g. created using + the |CREATE TYPE|_ command + :param conn_or_curs: a connection or cursor used to find the type oid and + components; the typecaster is registered in a scope limited to this + object, unless *globally* is set to `!True` + :param globally: if `!False` (default) register the typecaster only on + *conn_or_curs*, otherwise register it globally + :param factory: if specified it should be a `CompositeCaster` subclass: use + it to :ref:`customize how to cast composite types ` + :return: the registered `CompositeCaster` or *factory* instance + responsible for the conversion + """ + if factory is None: + factory = CompositeCaster + + caster = factory._from_db(name, conn_or_curs) + _ext.register_type(caster.typecaster, not globally and conn_or_curs or None) + + if caster.array_typecaster is not None: + _ext.register_type( + caster.array_typecaster, not globally and conn_or_curs or None) + + return caster + + +def _paginate(seq, page_size): + """Consume an iterable and return it in chunks. + + Every chunk is at most `page_size`. Never return an empty chunk. + """ + page = [] + it = iter(seq) + while True: + try: + for i in range(page_size): + page.append(next(it)) + yield page + page = [] + except StopIteration: + if page: + yield page + return + + +def execute_batch(cur, sql, argslist, page_size=100): + r"""Execute groups of statements in fewer server roundtrips. + + Execute *sql* several times, against all parameters set (sequences or + mappings) found in *argslist*. + + The function is semantically similar to + + .. parsed-literal:: + + *cur*\.\ `~cursor.executemany`\ (\ *sql*\ , *argslist*\ ) + + but has a different implementation: Psycopg will join the statements into + fewer multi-statement commands, each one containing at most *page_size* + statements, resulting in a reduced number of server roundtrips. + + After the execution of the function the `cursor.rowcount` property will + **not** contain a total result. + + """ + for page in _paginate(argslist, page_size=page_size): + sqls = [cur.mogrify(sql, args) for args in page] + cur.execute(b";".join(sqls)) + + +def execute_values(cur, sql, argslist, template=None, page_size=100, fetch=False): + '''Execute a statement using :sql:`VALUES` with a sequence of parameters. 
+
+    :param cur: the cursor to use to execute the query.
+
+    :param sql: the query to execute. It must contain a single ``%s``
+        placeholder, which will be replaced by a `VALUES list`__.
+        Example: ``"INSERT INTO mytable (id, f1, f2) VALUES %s"``.
+
+    :param argslist: sequence of sequences or dictionaries with the arguments
+        to send to the query. The type and content must be consistent with
+        *template*.
+
+    :param template: the snippet to merge to every item in *argslist* to
+        compose the query.
+
+        - If the *argslist* items are sequences it should contain positional
+          placeholders (e.g. ``"(%s, %s, %s)"``, or ``"(%s, %s, 42)"`` if there
+          are constant values...).
+
+        - If the *argslist* items are mappings it should contain named
+          placeholders (e.g. ``"(%(id)s, %(f1)s, 42)"``).
+
+        If not specified, assume the arguments are sequences and use a simple
+        positional template (i.e. ``(%s, %s, ...)``), with the number of
+        placeholders sniffed by the first element in *argslist*.
+
+    :param page_size: maximum number of *argslist* items to include in every
+        statement. If there are more items the function will execute more than
+        one statement.
+
+    :param fetch: if `!True` return the query results into a list (like in a
+        `~cursor.fetchall()`). Useful for queries with :sql:`RETURNING`
+        clause.
+
+    .. __: https://www.postgresql.org/docs/current/static/queries-values.html
+
+    After the execution of the function the `cursor.rowcount` property will
+    **not** contain a total result.
+
+    While :sql:`INSERT` is an obvious candidate for this function it is
+    possible to use it with other statements, for example::
+
+        >>> cur.execute(
+        ...     "create table test (id int primary key, v1 int, v2 int)")
+
+        >>> execute_values(cur,
+        ...     "INSERT INTO test (id, v1, v2) VALUES %s",
+        ...     [(1, 2, 3), (4, 5, 6), (7, 8, 9)])
+
+        >>> execute_values(cur,
+        ...     """UPDATE test SET v1 = data.v1 FROM (VALUES %s) AS data (id, v1)
+        ...     WHERE test.id = data.id""",
+        ...     [(1, 20), (4, 50)])
+
+        >>> cur.execute("select * from test order by id")
+        >>> cur.fetchall()
+        [(1, 20, 3), (4, 50, 6), (7, 8, 9)]
+
+    '''
+    from psycopg2.sql import Composable
+    if isinstance(sql, Composable):
+        sql = sql.as_string(cur)
+
+    # we can't just use sql % vals because vals is bytes: if sql is bytes
+    # there will be some decoding error because of stupid codec used, and Py3
+    # doesn't implement % on bytes.
+    if not isinstance(sql, bytes):
+        sql = sql.encode(_ext.encodings[cur.connection.encoding])
+    pre, post = _split_sql(sql)
+
+    result = [] if fetch else None
+    for page in _paginate(argslist, page_size=page_size):
+        if template is None:
+            template = b'(' + b','.join([b'%s'] * len(page[0])) + b')'
+        parts = pre[:]
+        for args in page:
+            parts.append(cur.mogrify(template, args))
+            parts.append(b',')
+        parts[-1:] = post
+        cur.execute(b''.join(parts))
+        if fetch:
+            result.extend(cur.fetchall())
+
+    return result
+
+
+def _split_sql(sql):
+    """Split *sql* on a single ``%s`` placeholder.
+
+    Split on the %s, perform %% replacement and return pre, post lists of
+    snippets.
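+
+    A minimal sketch of the result::
+
+        >>> _split_sql(b"insert into t (a) values %s returning a")
+        ([b'insert into t (a) values '], [b' returning a'])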
+ """ + curr = pre = [] + post = [] + tokens = _re.split(br'(%.)', sql) + for token in tokens: + if len(token) != 2 or token[:1] != b'%': + curr.append(token) + continue + + if token[1:] == b's': + if curr is pre: + curr = post + else: + raise ValueError( + "the query contains more than one '%s' placeholder") + elif token[1:] == b'%': + curr.append(b'%') + else: + raise ValueError("unsupported format character: '%s'" + % token[1:].decode('ascii', 'replace')) + + if curr is pre: + raise ValueError("the query doesn't contain any '%s' placeholder") + + return pre, post + + +# ascii except alnum and underscore +_re_clean = _re.compile( + '[' + _re.escape(' !"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~') + ']') diff --git a/.venv/lib/python3.12/site-packages/psycopg2/pool.py b/.venv/lib/python3.12/site-packages/psycopg2/pool.py new file mode 100644 index 00000000..9d67d68e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2/pool.py @@ -0,0 +1,187 @@ +"""Connection pooling for psycopg2 + +This module implements thread-safe (and not) connection pools. +""" +# psycopg/pool.py - pooling code for psycopg +# +# Copyright (C) 2003-2019 Federico Di Gregorio +# Copyright (C) 2020-2021 The Psycopg Team +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import psycopg2 +from psycopg2 import extensions as _ext + + +class PoolError(psycopg2.Error): + pass + + +class AbstractConnectionPool: + """Generic key-based pooling code.""" + + def __init__(self, minconn, maxconn, *args, **kwargs): + """Initialize the connection pool. + + New 'minconn' connections are created immediately calling 'connfunc' + with given parameters. The connection pool will support a maximum of + about 'maxconn' connections. 
+ """ + self.minconn = int(minconn) + self.maxconn = int(maxconn) + self.closed = False + + self._args = args + self._kwargs = kwargs + + self._pool = [] + self._used = {} + self._rused = {} # id(conn) -> key map + self._keys = 0 + + for i in range(self.minconn): + self._connect() + + def _connect(self, key=None): + """Create a new connection and assign it to 'key' if not None.""" + conn = psycopg2.connect(*self._args, **self._kwargs) + if key is not None: + self._used[key] = conn + self._rused[id(conn)] = key + else: + self._pool.append(conn) + return conn + + def _getkey(self): + """Return a new unique key.""" + self._keys += 1 + return self._keys + + def _getconn(self, key=None): + """Get a free connection and assign it to 'key' if not None.""" + if self.closed: + raise PoolError("connection pool is closed") + if key is None: + key = self._getkey() + + if key in self._used: + return self._used[key] + + if self._pool: + self._used[key] = conn = self._pool.pop() + self._rused[id(conn)] = key + return conn + else: + if len(self._used) == self.maxconn: + raise PoolError("connection pool exhausted") + return self._connect(key) + + def _putconn(self, conn, key=None, close=False): + """Put away a connection.""" + if self.closed: + raise PoolError("connection pool is closed") + + if key is None: + key = self._rused.get(id(conn)) + if key is None: + raise PoolError("trying to put unkeyed connection") + + if len(self._pool) < self.minconn and not close: + # Return the connection into a consistent state before putting + # it back into the pool + if not conn.closed: + status = conn.info.transaction_status + if status == _ext.TRANSACTION_STATUS_UNKNOWN: + # server connection lost + conn.close() + elif status != _ext.TRANSACTION_STATUS_IDLE: + # connection in error or in transaction + conn.rollback() + self._pool.append(conn) + else: + # regular idle connection + self._pool.append(conn) + # If the connection is closed, we just discard it. + else: + conn.close() + + # here we check for the presence of key because it can happen that a + # thread tries to put back a connection after a call to close + if not self.closed or key in self._used: + del self._used[key] + del self._rused[id(conn)] + + def _closeall(self): + """Close all connections. + + Note that this can lead to some code fail badly when trying to use + an already closed connection. If you call .closeall() make sure + your code can deal with it. 
+ """ + if self.closed: + raise PoolError("connection pool is closed") + for conn in self._pool + list(self._used.values()): + try: + conn.close() + except Exception: + pass + self.closed = True + + +class SimpleConnectionPool(AbstractConnectionPool): + """A connection pool that can't be shared across different threads.""" + + getconn = AbstractConnectionPool._getconn + putconn = AbstractConnectionPool._putconn + closeall = AbstractConnectionPool._closeall + + +class ThreadedConnectionPool(AbstractConnectionPool): + """A connection pool that works with the threading module.""" + + def __init__(self, minconn, maxconn, *args, **kwargs): + """Initialize the threading lock.""" + import threading + AbstractConnectionPool.__init__( + self, minconn, maxconn, *args, **kwargs) + self._lock = threading.Lock() + + def getconn(self, key=None): + """Get a free connection and assign it to 'key' if not None.""" + self._lock.acquire() + try: + return self._getconn(key) + finally: + self._lock.release() + + def putconn(self, conn=None, key=None, close=False): + """Put away an unused connection.""" + self._lock.acquire() + try: + self._putconn(conn, key, close) + finally: + self._lock.release() + + def closeall(self): + """Close all connections (even the one currently in use.)""" + self._lock.acquire() + try: + self._closeall() + finally: + self._lock.release() diff --git a/.venv/lib/python3.12/site-packages/psycopg2/sql.py b/.venv/lib/python3.12/site-packages/psycopg2/sql.py new file mode 100644 index 00000000..69b352b7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2/sql.py @@ -0,0 +1,455 @@ +"""SQL composition utility module +""" + +# psycopg/sql.py - SQL composition utility module +# +# Copyright (C) 2016-2019 Daniele Varrazzo +# Copyright (C) 2020-2021 The Psycopg Team +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import string + +from psycopg2 import extensions as ext + + +_formatter = string.Formatter() + + +class Composable: + """ + Abstract base class for objects that can be used to compose an SQL string. + + `!Composable` objects can be passed directly to `~cursor.execute()`, + `~cursor.executemany()`, `~cursor.copy_expert()` in place of the query + string. + + `!Composable` objects can be joined using the ``+`` operator: the result + will be a `Composed` instance containing the objects joined. The operator + ``*`` is also supported with an integer argument: the result is a + `!Composed` instance containing the left argument repeated as many times as + requested. 
+ """ + def __init__(self, wrapped): + self._wrapped = wrapped + + def __repr__(self): + return f"{self.__class__.__name__}({self._wrapped!r})" + + def as_string(self, context): + """ + Return the string value of the object. + + :param context: the context to evaluate the string into. + :type context: `connection` or `cursor` + + The method is automatically invoked by `~cursor.execute()`, + `~cursor.executemany()`, `~cursor.copy_expert()` if a `!Composable` is + passed instead of the query string. + """ + raise NotImplementedError + + def __add__(self, other): + if isinstance(other, Composed): + return Composed([self]) + other + if isinstance(other, Composable): + return Composed([self]) + Composed([other]) + else: + return NotImplemented + + def __mul__(self, n): + return Composed([self] * n) + + def __eq__(self, other): + return type(self) is type(other) and self._wrapped == other._wrapped + + def __ne__(self, other): + return not self.__eq__(other) + + +class Composed(Composable): + """ + A `Composable` object made of a sequence of `!Composable`. + + The object is usually created using `!Composable` operators and methods. + However it is possible to create a `!Composed` directly specifying a + sequence of `!Composable` as arguments. + + Example:: + + >>> comp = sql.Composed( + ... [sql.SQL("insert into "), sql.Identifier("table")]) + >>> print(comp.as_string(conn)) + insert into "table" + + `!Composed` objects are iterable (so they can be used in `SQL.join` for + instance). + """ + def __init__(self, seq): + wrapped = [] + for i in seq: + if not isinstance(i, Composable): + raise TypeError( + f"Composed elements must be Composable, got {i!r} instead") + wrapped.append(i) + + super().__init__(wrapped) + + @property + def seq(self): + """The list of the content of the `!Composed`.""" + return list(self._wrapped) + + def as_string(self, context): + rv = [] + for i in self._wrapped: + rv.append(i.as_string(context)) + return ''.join(rv) + + def __iter__(self): + return iter(self._wrapped) + + def __add__(self, other): + if isinstance(other, Composed): + return Composed(self._wrapped + other._wrapped) + if isinstance(other, Composable): + return Composed(self._wrapped + [other]) + else: + return NotImplemented + + def join(self, joiner): + """ + Return a new `!Composed` interposing the *joiner* with the `!Composed` items. + + The *joiner* must be a `SQL` or a string which will be interpreted as + an `SQL`. + + Example:: + + >>> fields = sql.Identifier('foo') + sql.Identifier('bar') # a Composed + >>> print(fields.join(', ').as_string(conn)) + "foo", "bar" + + """ + if isinstance(joiner, str): + joiner = SQL(joiner) + elif not isinstance(joiner, SQL): + raise TypeError( + "Composed.join() argument must be a string or an SQL") + + return joiner.join(self) + + +class SQL(Composable): + """ + A `Composable` representing a snippet of SQL statement. + + `!SQL` exposes `join()` and `format()` methods useful to create a template + where to merge variable parts of a query (for instance field or table + names). + + The *string* doesn't undergo any form of escaping, so it is not suitable to + represent variable identifiers or values: you should only use it to pass + constant strings representing templates or snippets of SQL statements; use + other objects such as `Identifier` or `Literal` to represent variable + parts. + + Example:: + + >>> query = sql.SQL("select {0} from {1}").format( + ... sql.SQL(', ').join([sql.Identifier('foo'), sql.Identifier('bar')]), + ... 
sql.Identifier('table')) + >>> print(query.as_string(conn)) + select "foo", "bar" from "table" + """ + def __init__(self, string): + if not isinstance(string, str): + raise TypeError("SQL values must be strings") + super().__init__(string) + + @property + def string(self): + """The string wrapped by the `!SQL` object.""" + return self._wrapped + + def as_string(self, context): + return self._wrapped + + def format(self, *args, **kwargs): + """ + Merge `Composable` objects into a template. + + :param `Composable` args: parameters to replace to numbered + (``{0}``, ``{1}``) or auto-numbered (``{}``) placeholders + :param `Composable` kwargs: parameters to replace to named (``{name}``) + placeholders + :return: the union of the `!SQL` string with placeholders replaced + :rtype: `Composed` + + The method is similar to the Python `str.format()` method: the string + template supports auto-numbered (``{}``), numbered (``{0}``, + ``{1}``...), and named placeholders (``{name}``), with positional + arguments replacing the numbered placeholders and keywords replacing + the named ones. However placeholder modifiers (``{0!r}``, ``{0:<10}``) + are not supported. Only `!Composable` objects can be passed to the + template. + + Example:: + + >>> print(sql.SQL("select * from {} where {} = %s") + ... .format(sql.Identifier('people'), sql.Identifier('id')) + ... .as_string(conn)) + select * from "people" where "id" = %s + + >>> print(sql.SQL("select * from {tbl} where {pkey} = %s") + ... .format(tbl=sql.Identifier('people'), pkey=sql.Identifier('id')) + ... .as_string(conn)) + select * from "people" where "id" = %s + + """ + rv = [] + autonum = 0 + for pre, name, spec, conv in _formatter.parse(self._wrapped): + if spec: + raise ValueError("no format specification supported by SQL") + if conv: + raise ValueError("no format conversion supported by SQL") + if pre: + rv.append(SQL(pre)) + + if name is None: + continue + + if name.isdigit(): + if autonum: + raise ValueError( + "cannot switch from automatic field numbering to manual") + rv.append(args[int(name)]) + autonum = None + + elif not name: + if autonum is None: + raise ValueError( + "cannot switch from manual field numbering to automatic") + rv.append(args[autonum]) + autonum += 1 + + else: + rv.append(kwargs[name]) + + return Composed(rv) + + def join(self, seq): + """ + Join a sequence of `Composable`. + + :param seq: the elements to join. + :type seq: iterable of `!Composable` + + Use the `!SQL` object's *string* to separate the elements in *seq*. + Note that `Composed` objects are iterable too, so they can be used as + argument for this method. + + Example:: + + >>> snip = sql.SQL(', ').join( + ... sql.Identifier(n) for n in ['foo', 'bar', 'baz']) + >>> print(snip.as_string(conn)) + "foo", "bar", "baz" + """ + rv = [] + it = iter(seq) + try: + rv.append(next(it)) + except StopIteration: + pass + else: + for i in it: + rv.append(self) + rv.append(i) + + return Composed(rv) + + +class Identifier(Composable): + """ + A `Composable` representing an SQL identifier or a dot-separated sequence. + + Identifiers usually represent names of database objects, such as tables or + fields. PostgreSQL identifiers follow `different rules`__ than SQL string + literals for escaping (e.g. they use double quotes instead of single). + + .. 
__: https://www.postgresql.org/docs/current/static/sql-syntax-lexical.html# \ + SQL-SYNTAX-IDENTIFIERS + + Example:: + + >>> t1 = sql.Identifier("foo") + >>> t2 = sql.Identifier("ba'r") + >>> t3 = sql.Identifier('ba"z') + >>> print(sql.SQL(', ').join([t1, t2, t3]).as_string(conn)) + "foo", "ba'r", "ba""z" + + Multiple strings can be passed to the object to represent a qualified name, + i.e. a dot-separated sequence of identifiers. + + Example:: + + >>> query = sql.SQL("select {} from {}").format( + ... sql.Identifier("table", "field"), + ... sql.Identifier("schema", "table")) + >>> print(query.as_string(conn)) + select "table"."field" from "schema"."table" + + """ + def __init__(self, *strings): + if not strings: + raise TypeError("Identifier cannot be empty") + + for s in strings: + if not isinstance(s, str): + raise TypeError("SQL identifier parts must be strings") + + super().__init__(strings) + + @property + def strings(self): + """A tuple with the strings wrapped by the `Identifier`.""" + return self._wrapped + + @property + def string(self): + """The string wrapped by the `Identifier`. + """ + if len(self._wrapped) == 1: + return self._wrapped[0] + else: + raise AttributeError( + "the Identifier wraps more than one string") + + def __repr__(self): + return f"{self.__class__.__name__}({', '.join(map(repr, self._wrapped))})" + + def as_string(self, context): + return '.'.join(ext.quote_ident(s, context) for s in self._wrapped) + + +class Literal(Composable): + """ + A `Composable` representing an SQL value to include in a query. + + Usually you will want to include placeholders in the query and pass values + as `~cursor.execute()` arguments. If, however, you really really need to + include a literal value in the query, you can use this object. + + The string returned by `!as_string()` follows the normal :ref:`adaptation + rules <python-types-adaptation>` for Python objects. + + Example:: + + >>> s1 = sql.Literal("foo") + >>> s2 = sql.Literal("ba'r") + >>> s3 = sql.Literal(42) + >>> print(sql.SQL(', ').join([s1, s2, s3]).as_string(conn)) + 'foo', 'ba''r', 42 + + """ + @property + def wrapped(self): + """The object wrapped by the `!Literal`.""" + return self._wrapped + + def as_string(self, context): + # is it a connection or cursor? + if isinstance(context, ext.connection): + conn = context + elif isinstance(context, ext.cursor): + conn = context.connection + else: + raise TypeError("context must be a connection or a cursor") + + a = ext.adapt(self._wrapped) + if hasattr(a, 'prepare'): + a.prepare(conn) + + rv = a.getquoted() + if isinstance(rv, bytes): + rv = rv.decode(ext.encodings[conn.encoding]) + + return rv + + +class Placeholder(Composable): + """A `Composable` representing a placeholder for query parameters. + + If the name is specified, generate a named placeholder (e.g. ``%(name)s``), + otherwise generate a positional placeholder (e.g. ``%s``). + + The object is useful to generate SQL queries with a variable number of + arguments. + + Examples:: + + >>> names = ['foo', 'bar', 'baz'] + + >>> q1 = sql.SQL("insert into table ({}) values ({})").format( + ... sql.SQL(', ').join(map(sql.Identifier, names)), + ... sql.SQL(', ').join(sql.Placeholder() * len(names))) + >>> print(q1.as_string(conn)) + insert into table ("foo", "bar", "baz") values (%s, %s, %s) + + >>> q2 = sql.SQL("insert into table ({}) values ({})").format( + ... sql.SQL(', ').join(map(sql.Identifier, names)), + ...
sql.SQL(', ').join(map(sql.Placeholder, names))) + >>> print(q2.as_string(conn)) + insert into table ("foo", "bar", "baz") values (%(foo)s, %(bar)s, %(baz)s) + + """ + + def __init__(self, name=None): + if isinstance(name, str): + if ')' in name: + raise ValueError(f"invalid name: {name!r}") + + elif name is not None: + raise TypeError(f"expected string or None as name, got {name!r}") + + super().__init__(name) + + @property + def name(self): + """The name of the `!Placeholder`.""" + return self._wrapped + + def __repr__(self): + if self._wrapped is None: + return f"{self.__class__.__name__}()" + else: + return f"{self.__class__.__name__}({self._wrapped!r})" + + def as_string(self, context): + if self._wrapped is not None: + return f"%({self._wrapped})s" + else: + return "%s" + + +# Literals +NULL = SQL("NULL") +DEFAULT = SQL("DEFAULT") diff --git a/.venv/lib/python3.12/site-packages/psycopg2/tz.py b/.venv/lib/python3.12/site-packages/psycopg2/tz.py new file mode 100644 index 00000000..d88ca37c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2/tz.py @@ -0,0 +1,158 @@ +"""tzinfo implementations for psycopg2 + +This module holds two different tzinfo implementations that can be used as +the 'tzinfo' argument to datetime constructors, directly passed to psycopg +functions or used to set the .tzinfo_factory attribute in cursors. +""" +# psycopg/tz.py - tzinfo implementation +# +# Copyright (C) 2003-2019 Federico Di Gregorio +# Copyright (C) 2020-2021 The Psycopg Team +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import datetime +import time + +ZERO = datetime.timedelta(0) + + +class FixedOffsetTimezone(datetime.tzinfo): + """Fixed offset in minutes east from UTC. + + This is exactly the implementation__ found in Python 2.3.x documentation, + with a small change to the `!__init__()` method to allow for pickling + and a default name in the form ``sHH:MM`` (``s`` is the sign.). + + The implementation also caches instances. During creation, if a + FixedOffsetTimezone instance has previously been created with the same + offset and name that instance will be returned. This saves memory and + improves comparability. + + .. versionchanged:: 2.9 + + The constructor can take either a timedelta or a number of minutes of + offset. Previously only minutes were supported. + + .. 
__: https://docs.python.org/library/datetime.html + """ + _name = None + _offset = ZERO + + _cache = {} + + def __init__(self, offset=None, name=None): + if offset is not None: + if not isinstance(offset, datetime.timedelta): + offset = datetime.timedelta(minutes=offset) + self._offset = offset + if name is not None: + self._name = name + + def __new__(cls, offset=None, name=None): + """Return a suitable instance created earlier if it exists + """ + key = (offset, name) + try: + return cls._cache[key] + except KeyError: + tz = super().__new__(cls, offset, name) + cls._cache[key] = tz + return tz + + def __repr__(self): + return "psycopg2.tz.FixedOffsetTimezone(offset=%r, name=%r)" \ + % (self._offset, self._name) + + def __eq__(self, other): + if isinstance(other, FixedOffsetTimezone): + return self._offset == other._offset + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, FixedOffsetTimezone): + return self._offset != other._offset + else: + return NotImplemented + + def __getinitargs__(self): + return self._offset, self._name + + def utcoffset(self, dt): + return self._offset + + def tzname(self, dt): + if self._name is not None: + return self._name + + minutes, seconds = divmod(self._offset.total_seconds(), 60) + hours, minutes = divmod(minutes, 60) + rv = "%+03d" % hours + if minutes or seconds: + rv += ":%02d" % minutes + if seconds: + rv += ":%02d" % seconds + + return rv + + def dst(self, dt): + return ZERO + + +STDOFFSET = datetime.timedelta(seconds=-time.timezone) +if time.daylight: + DSTOFFSET = datetime.timedelta(seconds=-time.altzone) +else: + DSTOFFSET = STDOFFSET +DSTDIFF = DSTOFFSET - STDOFFSET + + +class LocalTimezone(datetime.tzinfo): + """Platform idea of local timezone. + + This is the exact implementation from the Python 2.3 documentation. + """ + def utcoffset(self, dt): + if self._isdst(dt): + return DSTOFFSET + else: + return STDOFFSET + + def dst(self, dt): + if self._isdst(dt): + return DSTDIFF + else: + return ZERO + + def tzname(self, dt): + return time.tzname[self._isdst(dt)] + + def _isdst(self, dt): + tt = (dt.year, dt.month, dt.day, + dt.hour, dt.minute, dt.second, + dt.weekday(), 0, -1) + stamp = time.mktime(tt) + tt = time.localtime(stamp) + return tt.tm_isdst > 0 + + +LOCAL = LocalTimezone() + +# TODO: pre-generate some interesting time zones? diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/LICENSE b/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/LICENSE new file mode 100644 index 00000000..9029e70f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/LICENSE @@ -0,0 +1,49 @@ +psycopg2 and the LGPL +--------------------- + +psycopg2 is free software: you can redistribute it and/or modify it +under the terms of the GNU Lesser General Public License as published +by the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +psycopg2 is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +License for more details. 
+ +In addition, as a special exception, the copyright holders give +permission to link this program with the OpenSSL library (or with +modified versions of OpenSSL that use the same license as OpenSSL), +and distribute linked combinations including the two. + +You must obey the GNU Lesser General Public License in all respects for +all of the code used other than OpenSSL. If you modify file(s) with this +exception, you may extend this exception to your version of the file(s), +but you are not obligated to do so. If you do not wish to do so, delete +this exception statement from your version. If you delete this exception +statement from all source files in the program, then also delete it here. + +You should have received a copy of the GNU Lesser General Public License +along with psycopg2 (see the doc/ directory). +If not, see <https://www.gnu.org/licenses/>. + + +Alternative licenses +-------------------- + +The following BSD-like license applies (at your option) to the files following +the pattern ``psycopg/adapter*.{h,c}`` and ``psycopg/microprotocol*.{h,c}``: + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this + software in a product, an acknowledgment in the product documentation + would be appreciated but is not required. + + 2. Altered source versions must be plainly marked as such, and must not + be misrepresented as being the original software. + + 3. This notice may not be removed or altered from any source distribution. diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/METADATA b/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/METADATA new file mode 100644 index 00000000..724e6c1f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/METADATA @@ -0,0 +1,110 @@ +Metadata-Version: 2.1 +Name: psycopg2-binary +Version: 2.9.9 +Summary: psycopg2 - Python-PostgreSQL Database Adapter +Home-page: https://psycopg.org/ +Author: Federico Di Gregorio +Author-email: fog@initd.org +Maintainer: Daniele Varrazzo +Maintainer-email: daniele.varrazzo@gmail.com +License: LGPL with exceptions +Project-URL: Homepage, https://psycopg.org/ +Project-URL: Documentation, https://www.psycopg.org/docs/ +Project-URL: Code, https://github.com/psycopg/psycopg2 +Project-URL: Issue Tracker, https://github.com/psycopg/psycopg2/issues +Project-URL: Download, https://pypi.org/project/psycopg2/ +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: C +Classifier: Programming Language :: SQL +Classifier: Topic :: Database +Classifier: Topic :: Database :: Front-Ends +Classifier:
Topic :: Software Development +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: Unix +Requires-Python: >=3.7 +License-File: LICENSE + +Psycopg is the most popular PostgreSQL database adapter for the Python +programming language. Its main features are the complete implementation of +the Python DB API 2.0 specification and the thread safety (several threads can +share the same connection). It was designed for heavily multi-threaded +applications that create and destroy lots of cursors and make a large number +of concurrent "INSERT"s or "UPDATE"s. + +Psycopg 2 is mostly implemented in C as a libpq wrapper, resulting in being +both efficient and secure. It features client-side and server-side cursors, +asynchronous communication and notifications, "COPY TO/COPY FROM" support. +Many Python types are supported out-of-the-box and adapted to matching +PostgreSQL data types; adaptation can be extended and customized thanks to a +flexible objects adaptation system. + +Psycopg 2 is both Unicode and Python 3 friendly. + + +Documentation +------------- + +Documentation is included in the ``doc`` directory and is `available online`__. + +.. __: https://www.psycopg.org/docs/ + +For any other resource (source code repository, bug tracker, mailing list) +please check the `project homepage`__. + +.. __: https://psycopg.org/ + + +Installation +------------ + +Building Psycopg requires a few prerequisites (a C compiler, some development +packages): please check the install_ and the faq_ documents in the ``doc`` dir +or online for the details. + +If prerequisites are met, you can install psycopg like any other Python +package, using ``pip`` to download it from PyPI_:: + + $ pip install psycopg2 + +or using ``setup.py`` if you have downloaded the source package locally:: + + $ python setup.py build + $ sudo python setup.py install + +You can also obtain a stand-alone package, not requiring a compiler or +external libraries, by installing the `psycopg2-binary`_ package from PyPI:: + + $ pip install psycopg2-binary + +The binary package is a practical choice for development and testing but in +production it is advised to use the package built from sources. + +.. _PyPI: https://pypi.org/project/psycopg2/ +.. _psycopg2-binary: https://pypi.org/project/psycopg2-binary/ +.. _install: https://www.psycopg.org/docs/install.html#install-from-source +.. _faq: https://www.psycopg.org/docs/faq.html#faq-compile + +:Linux/OSX: |gh-actions| +:Windows: |appveyor| + +.. |gh-actions| image:: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml/badge.svg + :target: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml + :alt: Linux and OSX build status + +.. 
|appveyor| image:: https://ci.appveyor.com/api/projects/status/github/psycopg/psycopg2?branch=master&svg=true + :target: https://ci.appveyor.com/project/psycopg/psycopg2/branch/master + :alt: Windows build status diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/RECORD b/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/RECORD new file mode 100644 index 00000000..65e11eaa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/RECORD @@ -0,0 +1,44 @@ +psycopg2/__init__.py,sha256=9mo5Qd0uWHiEBx2CdogGos2kNqtlNNGzbtYlGC0hWS8,4768 +psycopg2/__pycache__/__init__.cpython-312.pyc,, +psycopg2/__pycache__/_ipaddress.cpython-312.pyc,, +psycopg2/__pycache__/_json.cpython-312.pyc,, +psycopg2/__pycache__/_range.cpython-312.pyc,, +psycopg2/__pycache__/errorcodes.cpython-312.pyc,, +psycopg2/__pycache__/errors.cpython-312.pyc,, +psycopg2/__pycache__/extensions.cpython-312.pyc,, +psycopg2/__pycache__/extras.cpython-312.pyc,, +psycopg2/__pycache__/pool.cpython-312.pyc,, +psycopg2/__pycache__/sql.cpython-312.pyc,, +psycopg2/__pycache__/tz.cpython-312.pyc,, +psycopg2/_ipaddress.py,sha256=jkuyhLgqUGRBcLNWDM8QJysV6q1Npc_RYH4_kE7JZPU,2922 +psycopg2/_json.py,sha256=XPn4PnzbTg1Dcqz7n1JMv5dKhB5VFV6834GEtxSawt0,7153 +psycopg2/_psycopg.cpython-312-x86_64-linux-gnu.so,sha256=Y_MtTA7BiSenx2ulSd3tYwfiMjdXdyK-brB_A7-kKD8,339145 +psycopg2/_range.py,sha256=sXeenGraJEEw2I3mc8RlmNivy2jMg7zWoanDes2Ywp8,18494 +psycopg2/errorcodes.py,sha256=jb1SkuGq5zJT7F99GFAUi3VQH8GbsB7zRHiLsAWAU0Q,14362 +psycopg2/errors.py,sha256=aAS4dJyTg1bsDzJDCRQAMB_s7zv-Q4yB6Yvih26I-0M,1425 +psycopg2/extensions.py,sha256=CG0kG5vL8Ot503UGlDXXJJFdFWLg4HE2_c1-lLOLc8M,6797 +psycopg2/extras.py,sha256=oBfrdvtWn8ITxc3x-h2h6IwHUsWdVqCdf4Gphb0JqY8,44215 +psycopg2/pool.py,sha256=UGEt8IdP3xNc2PGYNlG4sQvg8nhf4aeCnz39hTR0H8I,6316 +psycopg2/sql.py,sha256=OcFEAmpe2aMfrx0MEk4Lx00XvXXJCmvllaOVbJY-yoE,14779 +psycopg2/tz.py,sha256=r95kK7eGSpOYr_luCyYsznHMzjl52sLjsnSPXkXLzRI,4870 +psycopg2_binary-2.9.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +psycopg2_binary-2.9.9.dist-info/LICENSE,sha256=lhS4XfyacsWyyjMUTB1-HtOxwpdFnZ-yimpXYsLo1xs,2238 +psycopg2_binary-2.9.9.dist-info/METADATA,sha256=vkxMt-2J7iReUtyq2SN4AY4BrHDgiz8csUjacUUYWVk,4445 +psycopg2_binary-2.9.9.dist-info/RECORD,, +psycopg2_binary-2.9.9.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +psycopg2_binary-2.9.9.dist-info/WHEEL,sha256=4ZiCdXIWMxJyEClivrQv1QAHZpQh8kVYU92_ZAVwaok,152 +psycopg2_binary-2.9.9.dist-info/top_level.txt,sha256=7dHGpLqQ3w-vGmGEVn-7uK90qU9fyrGdWWi7S-gTcnM,9 +psycopg2_binary.libs/libcom_err-2abe824b.so.2.1,sha256=VCbctU3QHJ7t2gXiF58ORxFOi0ilNP_p6UkW55Rxslc,17497 +psycopg2_binary.libs/libcrypto-0628e7d4.so.1.1,sha256=iNCZwhYYZg5Gc5zN14JOY0gUyelRkm3wD9A-0kbL6SA,3133185 +psycopg2_binary.libs/libgssapi_krb5-497db0c6.so.2.2,sha256=KnSwMw7pcygbJvjr5KzvDr-e6ZxraEl8-RUf_2xMNOE,345209 +psycopg2_binary.libs/libk5crypto-b1f99d5c.so.3.1,sha256=mETlAJ5wpq0vsitYcwaBD-Knsbn2uZItqhx4ujRm3ic,219953 +psycopg2_binary.libs/libkeyutils-dfe70bd6.so.1.5,sha256=wp5BsDz0st_7-0lglG4rQvgsDKXVPSMdPw_Fl7onRIg,17913 +psycopg2_binary.libs/libkrb5-fcafa220.so.3.3,sha256=sqq1KP9MqyFE5c4BskasCfV0oHKlP_Y-qB1rspsmuPE,1018953 +psycopg2_binary.libs/libkrb5support-d0bcff84.so.0.1,sha256=anH1fXSP73m05zbVNIh1VF0KIk-okotdYqPPJkf8EJ8,76873 +psycopg2_binary.libs/liblber-5a1d5ae1.so.2.0.200,sha256=hfC4ohbSIRZ9kJRuaT4PlfOEogZXpgLlY_FgaMNaoYc,60977 
+psycopg2_binary.libs/libldap-5d2ff197.so.2.0.200,sha256=ho65rEV6AhnLA0mo-TKB9TcUROR8-uymbfEAGkAcpwQ,447329 +psycopg2_binary.libs/libpcre-9513aab5.so.1.2.0,sha256=Au2oUOBJMWVtivgfUXG_902L7BVT09hcPTLX_F7-iGQ,406817 +psycopg2_binary.libs/libpq-e8a033dd.so.5.16,sha256=io69ZDoOBgCMoVj2aGl1-aovIrAOzg2YxumgJeq1iQ8,370777 +psycopg2_binary.libs/libsasl2-883649fd.so.3.0.0,sha256=GC8C1eR02yJ82oOrrHQT1DHUh8bAGv0M10HhQM7cDzo,119217 +psycopg2_binary.libs/libselinux-0922c95c.so.1,sha256=1PqOf7Ot2WCmgyWlnJaUJErqMhP9c5pQgVywZ8SWVlQ,178337 +psycopg2_binary.libs/libssl-3e69114b.so.1.1,sha256=FJ2ccBmBNGXrf07x0GVrPwIORu0BPRHyt_tLogu5jjA,646065 diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/REQUESTED b/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/WHEEL new file mode 100644 index 00000000..d1b3f1da --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.2) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_17_x86_64 +Tag: cp312-cp312-manylinux2014_x86_64 + diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/top_level.txt new file mode 100644 index 00000000..658130bb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/psycopg2_binary-2.9.9.dist-info/top_level.txt @@ -0,0 +1 @@ +psycopg2 diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libcom_err-2abe824b.so.2.1 b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libcom_err-2abe824b.so.2.1 new file mode 100755 index 00000000..76ea28d0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libcom_err-2abe824b.so.2.1 differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libcrypto-0628e7d4.so.1.1 b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libcrypto-0628e7d4.so.1.1 new file mode 100755 index 00000000..34fea43c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libcrypto-0628e7d4.so.1.1 differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libgssapi_krb5-497db0c6.so.2.2 b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libgssapi_krb5-497db0c6.so.2.2 new file mode 100755 index 00000000..8254ea40 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libgssapi_krb5-497db0c6.so.2.2 differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libk5crypto-b1f99d5c.so.3.1 b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libk5crypto-b1f99d5c.so.3.1 new file mode 100755 index 00000000..cc955025 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libk5crypto-b1f99d5c.so.3.1 differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libkeyutils-dfe70bd6.so.1.5 b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libkeyutils-dfe70bd6.so.1.5 new file mode 100755 index 00000000..2070ec60 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libkeyutils-dfe70bd6.so.1.5 differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libkrb5-fcafa220.so.3.3 
b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libkrb5-fcafa220.so.3.3 new file mode 100755 index 00000000..8f041a1d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libkrb5-fcafa220.so.3.3 differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libkrb5support-d0bcff84.so.0.1 b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libkrb5support-d0bcff84.so.0.1 new file mode 100755 index 00000000..da58cde4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libkrb5support-d0bcff84.so.0.1 differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/liblber-5a1d5ae1.so.2.0.200 b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/liblber-5a1d5ae1.so.2.0.200 new file mode 100755 index 00000000..7884bd30 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/liblber-5a1d5ae1.so.2.0.200 differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libldap-5d2ff197.so.2.0.200 b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libldap-5d2ff197.so.2.0.200 new file mode 100755 index 00000000..3780b756 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libldap-5d2ff197.so.2.0.200 differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libpcre-9513aab5.so.1.2.0 b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libpcre-9513aab5.so.1.2.0 new file mode 100755 index 00000000..ffd000a6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libpcre-9513aab5.so.1.2.0 differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libpq-e8a033dd.so.5.16 b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libpq-e8a033dd.so.5.16 new file mode 100755 index 00000000..0c52cfae Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libpq-e8a033dd.so.5.16 differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libsasl2-883649fd.so.3.0.0 b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libsasl2-883649fd.so.3.0.0 new file mode 100755 index 00000000..37c37621 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libsasl2-883649fd.so.3.0.0 differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libselinux-0922c95c.so.1 b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libselinux-0922c95c.so.1 new file mode 100755 index 00000000..366e9a81 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libselinux-0922c95c.so.1 differ diff --git a/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libssl-3e69114b.so.1.1 b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libssl-3e69114b.so.1.1 new file mode 100755 index 00000000..b1fd77e7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/psycopg2_binary.libs/libssl-3e69114b.so.1.1 differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/__init__.py new file mode 100644 index 00000000..d28421a9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/__init__.py @@ -0,0 +1,294 @@ +# __init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +from typing import Any + 
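+# The ``import name as name`` form used for the re-exports below follows +# the PEP 484 convention for explicit re-export: repeating the name tells +# strict type checkers (e.g. mypy with --no-implicit-reexport) that these +# imports are intentional public API rather than incidental, unused imports.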
+from . import util as _util +from .engine import AdaptedConnection as AdaptedConnection +from .engine import BaseRow as BaseRow +from .engine import BindTyping as BindTyping +from .engine import ChunkedIteratorResult as ChunkedIteratorResult +from .engine import Compiled as Compiled +from .engine import Connection as Connection +from .engine import create_engine as create_engine +from .engine import create_mock_engine as create_mock_engine +from .engine import create_pool_from_url as create_pool_from_url +from .engine import CreateEnginePlugin as CreateEnginePlugin +from .engine import CursorResult as CursorResult +from .engine import Dialect as Dialect +from .engine import Engine as Engine +from .engine import engine_from_config as engine_from_config +from .engine import ExceptionContext as ExceptionContext +from .engine import ExecutionContext as ExecutionContext +from .engine import FrozenResult as FrozenResult +from .engine import Inspector as Inspector +from .engine import IteratorResult as IteratorResult +from .engine import make_url as make_url +from .engine import MappingResult as MappingResult +from .engine import MergedResult as MergedResult +from .engine import NestedTransaction as NestedTransaction +from .engine import Result as Result +from .engine import result_tuple as result_tuple +from .engine import ResultProxy as ResultProxy +from .engine import RootTransaction as RootTransaction +from .engine import Row as Row +from .engine import RowMapping as RowMapping +from .engine import ScalarResult as ScalarResult +from .engine import Transaction as Transaction +from .engine import TwoPhaseTransaction as TwoPhaseTransaction +from .engine import TypeCompiler as TypeCompiler +from .engine import URL as URL +from .inspection import inspect as inspect +from .pool import AssertionPool as AssertionPool +from .pool import AsyncAdaptedQueuePool as AsyncAdaptedQueuePool +from .pool import ( + FallbackAsyncAdaptedQueuePool as FallbackAsyncAdaptedQueuePool, +) +from .pool import NullPool as NullPool +from .pool import Pool as Pool +from .pool import PoolProxiedConnection as PoolProxiedConnection +from .pool import PoolResetState as PoolResetState +from .pool import QueuePool as QueuePool +from .pool import SingletonThreadPool as SingletonThreadPool +from .pool import StaticPool as StaticPool +from .schema import BaseDDLElement as BaseDDLElement +from .schema import BLANK_SCHEMA as BLANK_SCHEMA +from .schema import CheckConstraint as CheckConstraint +from .schema import Column as Column +from .schema import ColumnDefault as ColumnDefault +from .schema import Computed as Computed +from .schema import Constraint as Constraint +from .schema import DDL as DDL +from .schema import DDLElement as DDLElement +from .schema import DefaultClause as DefaultClause +from .schema import ExecutableDDLElement as ExecutableDDLElement +from .schema import FetchedValue as FetchedValue +from .schema import ForeignKey as ForeignKey +from .schema import ForeignKeyConstraint as ForeignKeyConstraint +from .schema import Identity as Identity +from .schema import Index as Index +from .schema import insert_sentinel as insert_sentinel +from .schema import MetaData as MetaData +from .schema import PrimaryKeyConstraint as PrimaryKeyConstraint +from .schema import Sequence as Sequence +from .schema import Table as Table +from .schema import UniqueConstraint as UniqueConstraint +from .sql import ColumnExpressionArgument as ColumnExpressionArgument +from .sql import NotNullable as NotNullable +from .sql import Nullable as 
Nullable +from .sql import SelectLabelStyle as SelectLabelStyle +from .sql.expression import Alias as Alias +from .sql.expression import alias as alias +from .sql.expression import AliasedReturnsRows as AliasedReturnsRows +from .sql.expression import all_ as all_ +from .sql.expression import and_ as and_ +from .sql.expression import any_ as any_ +from .sql.expression import asc as asc +from .sql.expression import between as between +from .sql.expression import BinaryExpression as BinaryExpression +from .sql.expression import bindparam as bindparam +from .sql.expression import BindParameter as BindParameter +from .sql.expression import bitwise_not as bitwise_not +from .sql.expression import BooleanClauseList as BooleanClauseList +from .sql.expression import CacheKey as CacheKey +from .sql.expression import Case as Case +from .sql.expression import case as case +from .sql.expression import Cast as Cast +from .sql.expression import cast as cast +from .sql.expression import ClauseElement as ClauseElement +from .sql.expression import ClauseList as ClauseList +from .sql.expression import collate as collate +from .sql.expression import CollectionAggregate as CollectionAggregate +from .sql.expression import column as column +from .sql.expression import ColumnClause as ColumnClause +from .sql.expression import ColumnCollection as ColumnCollection +from .sql.expression import ColumnElement as ColumnElement +from .sql.expression import ColumnOperators as ColumnOperators +from .sql.expression import CompoundSelect as CompoundSelect +from .sql.expression import CTE as CTE +from .sql.expression import cte as cte +from .sql.expression import custom_op as custom_op +from .sql.expression import Delete as Delete +from .sql.expression import delete as delete +from .sql.expression import desc as desc +from .sql.expression import distinct as distinct +from .sql.expression import except_ as except_ +from .sql.expression import except_all as except_all +from .sql.expression import Executable as Executable +from .sql.expression import Exists as Exists +from .sql.expression import exists as exists +from .sql.expression import Extract as Extract +from .sql.expression import extract as extract +from .sql.expression import false as false +from .sql.expression import False_ as False_ +from .sql.expression import FromClause as FromClause +from .sql.expression import FromGrouping as FromGrouping +from .sql.expression import func as func +from .sql.expression import funcfilter as funcfilter +from .sql.expression import Function as Function +from .sql.expression import FunctionElement as FunctionElement +from .sql.expression import FunctionFilter as FunctionFilter +from .sql.expression import GenerativeSelect as GenerativeSelect +from .sql.expression import Grouping as Grouping +from .sql.expression import HasCTE as HasCTE +from .sql.expression import HasPrefixes as HasPrefixes +from .sql.expression import HasSuffixes as HasSuffixes +from .sql.expression import Insert as Insert +from .sql.expression import insert as insert +from .sql.expression import intersect as intersect +from .sql.expression import intersect_all as intersect_all +from .sql.expression import Join as Join +from .sql.expression import join as join +from .sql.expression import Label as Label +from .sql.expression import label as label +from .sql.expression import LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT +from .sql.expression import ( + LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY, +) +from .sql.expression import LABEL_STYLE_NONE as 
LABEL_STYLE_NONE +from .sql.expression import ( + LABEL_STYLE_TABLENAME_PLUS_COL as LABEL_STYLE_TABLENAME_PLUS_COL, +) +from .sql.expression import lambda_stmt as lambda_stmt +from .sql.expression import LambdaElement as LambdaElement +from .sql.expression import Lateral as Lateral +from .sql.expression import lateral as lateral +from .sql.expression import literal as literal +from .sql.expression import literal_column as literal_column +from .sql.expression import modifier as modifier +from .sql.expression import not_ as not_ +from .sql.expression import Null as Null +from .sql.expression import null as null +from .sql.expression import nulls_first as nulls_first +from .sql.expression import nulls_last as nulls_last +from .sql.expression import nullsfirst as nullsfirst +from .sql.expression import nullslast as nullslast +from .sql.expression import Operators as Operators +from .sql.expression import or_ as or_ +from .sql.expression import outerjoin as outerjoin +from .sql.expression import outparam as outparam +from .sql.expression import Over as Over +from .sql.expression import over as over +from .sql.expression import quoted_name as quoted_name +from .sql.expression import ReleaseSavepointClause as ReleaseSavepointClause +from .sql.expression import ReturnsRows as ReturnsRows +from .sql.expression import ( + RollbackToSavepointClause as RollbackToSavepointClause, +) +from .sql.expression import SavepointClause as SavepointClause +from .sql.expression import ScalarSelect as ScalarSelect +from .sql.expression import Select as Select +from .sql.expression import select as select +from .sql.expression import Selectable as Selectable +from .sql.expression import SelectBase as SelectBase +from .sql.expression import SQLColumnExpression as SQLColumnExpression +from .sql.expression import StatementLambdaElement as StatementLambdaElement +from .sql.expression import Subquery as Subquery +from .sql.expression import table as table +from .sql.expression import TableClause as TableClause +from .sql.expression import TableSample as TableSample +from .sql.expression import tablesample as tablesample +from .sql.expression import TableValuedAlias as TableValuedAlias +from .sql.expression import text as text +from .sql.expression import TextAsFrom as TextAsFrom +from .sql.expression import TextClause as TextClause +from .sql.expression import TextualSelect as TextualSelect +from .sql.expression import true as true +from .sql.expression import True_ as True_ +from .sql.expression import try_cast as try_cast +from .sql.expression import TryCast as TryCast +from .sql.expression import Tuple as Tuple +from .sql.expression import tuple_ as tuple_ +from .sql.expression import type_coerce as type_coerce +from .sql.expression import TypeClause as TypeClause +from .sql.expression import TypeCoerce as TypeCoerce +from .sql.expression import UnaryExpression as UnaryExpression +from .sql.expression import union as union +from .sql.expression import union_all as union_all +from .sql.expression import Update as Update +from .sql.expression import update as update +from .sql.expression import UpdateBase as UpdateBase +from .sql.expression import Values as Values +from .sql.expression import values as values +from .sql.expression import ValuesBase as ValuesBase +from .sql.expression import Visitable as Visitable +from .sql.expression import within_group as within_group +from .sql.expression import WithinGroup as WithinGroup +from .types import ARRAY as ARRAY +from .types import BIGINT as BIGINT +from .types import 
BigInteger as BigInteger +from .types import BINARY as BINARY +from .types import BLOB as BLOB +from .types import BOOLEAN as BOOLEAN +from .types import Boolean as Boolean +from .types import CHAR as CHAR +from .types import CLOB as CLOB +from .types import DATE as DATE +from .types import Date as Date +from .types import DATETIME as DATETIME +from .types import DateTime as DateTime +from .types import DECIMAL as DECIMAL +from .types import DOUBLE as DOUBLE +from .types import Double as Double +from .types import DOUBLE_PRECISION as DOUBLE_PRECISION +from .types import Enum as Enum +from .types import FLOAT as FLOAT +from .types import Float as Float +from .types import INT as INT +from .types import INTEGER as INTEGER +from .types import Integer as Integer +from .types import Interval as Interval +from .types import JSON as JSON +from .types import LargeBinary as LargeBinary +from .types import NCHAR as NCHAR +from .types import NUMERIC as NUMERIC +from .types import Numeric as Numeric +from .types import NVARCHAR as NVARCHAR +from .types import PickleType as PickleType +from .types import REAL as REAL +from .types import SMALLINT as SMALLINT +from .types import SmallInteger as SmallInteger +from .types import String as String +from .types import TEXT as TEXT +from .types import Text as Text +from .types import TIME as TIME +from .types import Time as Time +from .types import TIMESTAMP as TIMESTAMP +from .types import TupleType as TupleType +from .types import TypeDecorator as TypeDecorator +from .types import Unicode as Unicode +from .types import UnicodeText as UnicodeText +from .types import UUID as UUID +from .types import Uuid as Uuid +from .types import VARBINARY as VARBINARY +from .types import VARCHAR as VARCHAR + +__version__ = "2.0.35" + + +def __go(lcls: Any) -> None: + _util.preloaded.import_prefix("sqlalchemy") + + from . import exc + + exc._version_token = "".join(__version__.split(".")[0:2]) + + +__go(locals()) + + +def __getattr__(name: str) -> Any: + if name == "SingleonThreadPool": + _util.warn_deprecated( + "SingleonThreadPool was a typo in the v2 series. 
" + "Please use the correct SingletonThreadPool name.", + "2.0.24", + ) + return SingletonThreadPool + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..6348e0dd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/events.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/events.cpython-312.pyc new file mode 100644 index 00000000..235aef6f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/events.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/exc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/exc.cpython-312.pyc new file mode 100644 index 00000000..b9b5c370 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/exc.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/inspection.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/inspection.cpython-312.pyc new file mode 100644 index 00000000..5bb3a31f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/inspection.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/log.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/log.cpython-312.pyc new file mode 100644 index 00000000..90edfd97 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/log.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/schema.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/schema.cpython-312.pyc new file mode 100644 index 00000000..8e9a688c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/schema.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/types.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/types.cpython-312.pyc new file mode 100644 index 00000000..653701bf Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/__pycache__/types.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__init__.py new file mode 100644 index 00000000..f1cae0b3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__init__.py @@ -0,0 +1,18 @@ +# connectors/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + + +from ..engine.interfaces import Dialect + + +class Connector(Dialect): + """Base class for dialect mixins, for DBAPIs that work + across entirely different database backends. + + Currently the only such mixin is pyodbc. 
+ + """ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..fae2bea5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__pycache__/aioodbc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__pycache__/aioodbc.cpython-312.pyc new file mode 100644 index 00000000..a4b3c67b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__pycache__/aioodbc.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__pycache__/asyncio.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__pycache__/asyncio.cpython-312.pyc new file mode 100644 index 00000000..32640d6d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__pycache__/asyncio.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__pycache__/pyodbc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__pycache__/pyodbc.cpython-312.pyc new file mode 100644 index 00000000..168f8e65 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/__pycache__/pyodbc.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/aioodbc.py b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/aioodbc.py new file mode 100644 index 00000000..3b5c3b49 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/aioodbc.py @@ -0,0 +1,174 @@ +# connectors/aioodbc.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from .asyncio import AsyncAdapt_dbapi_connection +from .asyncio import AsyncAdapt_dbapi_cursor +from .asyncio import AsyncAdapt_dbapi_ss_cursor +from .asyncio import AsyncAdaptFallback_dbapi_connection +from .pyodbc import PyODBCConnector +from .. import pool +from .. 
import util +from ..util.concurrency import await_fallback +from ..util.concurrency import await_only + +if TYPE_CHECKING: + from ..engine.interfaces import ConnectArgsType + from ..engine.url import URL + + +class AsyncAdapt_aioodbc_cursor(AsyncAdapt_dbapi_cursor): + __slots__ = () + + def setinputsizes(self, *inputsizes): + # see https://github.com/aio-libs/aioodbc/issues/451 + return self._cursor._impl.setinputsizes(*inputsizes) + + # how it's supposed to work + # return self.await_(self._cursor.setinputsizes(*inputsizes)) + + +class AsyncAdapt_aioodbc_ss_cursor( + AsyncAdapt_aioodbc_cursor, AsyncAdapt_dbapi_ss_cursor +): + __slots__ = () + + +class AsyncAdapt_aioodbc_connection(AsyncAdapt_dbapi_connection): + _cursor_cls = AsyncAdapt_aioodbc_cursor + _ss_cursor_cls = AsyncAdapt_aioodbc_ss_cursor + __slots__ = () + + @property + def autocommit(self): + return self._connection.autocommit + + @autocommit.setter + def autocommit(self, value): + # https://github.com/aio-libs/aioodbc/issues/448 + # self._connection.autocommit = value + + self._connection._conn.autocommit = value + + def cursor(self, server_side=False): + # aioodbc sets connection=None when closed and just fails with + # AttributeError here. Here we use the same ProgrammingError + + # message that pyodbc uses, so it triggers is_disconnect() as well. + if self._connection.closed: + raise self.dbapi.ProgrammingError( + "Attempt to use a closed connection." + ) + return super().cursor(server_side=server_side) + + def rollback(self): + # aioodbc sets connection=None when closed and just fails with + # AttributeError here. should be a no-op + if not self._connection.closed: + super().rollback() + + def commit(self): + # aioodbc sets connection=None when closed and just fails with + # AttributeError here. should be a no-op + if not self._connection.closed: + super().commit() + + def close(self): + # aioodbc sets connection=None when closed and just fails with + # AttributeError here. 
should be a no-op + if not self._connection.closed: + super().close() + + +class AsyncAdaptFallback_aioodbc_connection( + AsyncAdaptFallback_dbapi_connection, AsyncAdapt_aioodbc_connection +): + __slots__ = () + + +class AsyncAdapt_aioodbc_dbapi: + def __init__(self, aioodbc, pyodbc): + self.aioodbc = aioodbc + self.pyodbc = pyodbc + self.paramstyle = pyodbc.paramstyle + self._init_dbapi_attributes() + self.Cursor = AsyncAdapt_dbapi_cursor + self.version = pyodbc.version + + def _init_dbapi_attributes(self): + for name in ( + "Warning", + "Error", + "InterfaceError", + "DataError", + "DatabaseError", + "OperationalError", + "InterfaceError", + "IntegrityError", + "ProgrammingError", + "InternalError", + "NotSupportedError", + "NUMBER", + "STRING", + "DATETIME", + "BINARY", + "Binary", + "BinaryNull", + "SQL_VARCHAR", + "SQL_WVARCHAR", + ): + setattr(self, name, getattr(self.pyodbc, name)) + + def connect(self, *arg, **kw): + async_fallback = kw.pop("async_fallback", False) + creator_fn = kw.pop("async_creator_fn", self.aioodbc.connect) + + if util.asbool(async_fallback): + return AsyncAdaptFallback_aioodbc_connection( + self, + await_fallback(creator_fn(*arg, **kw)), + ) + else: + return AsyncAdapt_aioodbc_connection( + self, + await_only(creator_fn(*arg, **kw)), + ) + + +class aiodbcConnector(PyODBCConnector): + is_async = True + supports_statement_cache = True + + supports_server_side_cursors = True + + @classmethod + def import_dbapi(cls): + return AsyncAdapt_aioodbc_dbapi( + __import__("aioodbc"), __import__("pyodbc") + ) + + def create_connect_args(self, url: URL) -> ConnectArgsType: + arg, kw = super().create_connect_args(url) + if arg and arg[0]: + kw["dsn"] = arg[0] + + return (), kw + + @classmethod + def get_pool_class(cls, url): + async_fallback = url.query.get("async_fallback", False) + + if util.asbool(async_fallback): + return pool.FallbackAsyncAdaptedQueuePool + else: + return pool.AsyncAdaptedQueuePool + + def get_driver_connection(self, connection): + return connection._connection diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/asyncio.py b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/asyncio.py new file mode 100644 index 00000000..9b19bef7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/asyncio.py @@ -0,0 +1,213 @@ +# connectors/asyncio.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +"""generic asyncio-adapted versions of DBAPI connection and cursor""" + +from __future__ import annotations + +import collections + +from ..engine import AdaptedConnection +from ..util.concurrency import asyncio +from ..util.concurrency import await_fallback +from ..util.concurrency import await_only + + +class AsyncAdapt_dbapi_cursor: + server_side = False + __slots__ = ( + "_adapt_connection", + "_connection", + "await_", + "_cursor", + "_rows", + ) + + def __init__(self, adapt_connection): + self._adapt_connection = adapt_connection + self._connection = adapt_connection._connection + self.await_ = adapt_connection.await_ + + cursor = self._connection.cursor() + self._cursor = self._aenter_cursor(cursor) + + if not self.server_side: + self._rows = collections.deque() + + def _aenter_cursor(self, cursor): + return self.await_(cursor.__aenter__()) + + @property + def description(self): + return self._cursor.description + + @property + def 
rowcount(self): + return self._cursor.rowcount + + @property + def arraysize(self): + return self._cursor.arraysize + + @arraysize.setter + def arraysize(self, value): + self._cursor.arraysize = value + + @property + def lastrowid(self): + return self._cursor.lastrowid + + def close(self): + # note we aren't actually closing the cursor here, + # we are just letting GC do it; see notes in the aiomysql dialect + self._rows.clear() + + def execute(self, operation, parameters=None): + return self.await_(self._execute_async(operation, parameters)) + + def executemany(self, operation, seq_of_parameters): + return self.await_( + self._executemany_async(operation, seq_of_parameters) + ) + + async def _execute_async(self, operation, parameters): + async with self._adapt_connection._execute_mutex: + result = await self._cursor.execute(operation, parameters or ()) + + if self._cursor.description and not self.server_side: + self._rows = collections.deque(await self._cursor.fetchall()) + return result + + async def _executemany_async(self, operation, seq_of_parameters): + async with self._adapt_connection._execute_mutex: + return await self._cursor.executemany(operation, seq_of_parameters) + + def nextset(self): + self.await_(self._cursor.nextset()) + if self._cursor.description and not self.server_side: + self._rows = collections.deque( + self.await_(self._cursor.fetchall()) + ) + + def setinputsizes(self, *inputsizes): + # NOTE: this is overridden in aioodbc for now; + # see https://github.com/aio-libs/aioodbc/issues/451 + + return self.await_(self._cursor.setinputsizes(*inputsizes)) + + def __iter__(self): + while self._rows: + yield self._rows.popleft() + + def fetchone(self): + if self._rows: + return self._rows.popleft() + else: + return None + + def fetchmany(self, size=None): + if size is None: + size = self.arraysize + rr = self._rows + return [rr.popleft() for _ in range(min(size, len(rr)))] + + def fetchall(self): + retval = list(self._rows) + self._rows.clear() + return retval + + +class AsyncAdapt_dbapi_ss_cursor(AsyncAdapt_dbapi_cursor): + __slots__ = () + server_side = True + + def __init__(self, adapt_connection): + self._adapt_connection = adapt_connection + self._connection = adapt_connection._connection + self.await_ = adapt_connection.await_ + + cursor = self._connection.cursor() + + self._cursor = self.await_(cursor.__aenter__()) + + def close(self): + if self._cursor is not None: + self.await_(self._cursor.close()) + self._cursor = None + + def fetchone(self): + return self.await_(self._cursor.fetchone()) + + def fetchmany(self, size=None): + return self.await_(self._cursor.fetchmany(size=size)) + + def fetchall(self): + return self.await_(self._cursor.fetchall()) + + def __iter__(self): + iterator = self._cursor.__aiter__() + while True: + try: + yield self.await_(iterator.__anext__()) + except StopAsyncIteration: + break + + +class AsyncAdapt_dbapi_connection(AdaptedConnection): + _cursor_cls = AsyncAdapt_dbapi_cursor + _ss_cursor_cls = AsyncAdapt_dbapi_ss_cursor + + await_ = staticmethod(await_only) + __slots__ = ("dbapi", "_execute_mutex") + + def __init__(self, dbapi, connection): + self.dbapi = dbapi + self._connection = connection + self._execute_mutex = asyncio.Lock() + + def ping(self, reconnect): + return self.await_(self._connection.ping(reconnect)) + + def add_output_converter(self, *arg, **kw): + self._connection.add_output_converter(*arg, **kw) + + def character_set_name(self): + return self._connection.character_set_name() + + @property + def
autocommit(self): + return self._connection.autocommit + + @autocommit.setter + def autocommit(self, value): + # https://github.com/aio-libs/aioodbc/issues/448 + # self._connection.autocommit = value + + self._connection._conn.autocommit = value + + def cursor(self, server_side=False): + if server_side: + return self._ss_cursor_cls(self) + else: + return self._cursor_cls(self) + + def rollback(self): + self.await_(self._connection.rollback()) + + def commit(self): + self.await_(self._connection.commit()) + + def close(self): + self.await_(self._connection.close()) + + +class AsyncAdaptFallback_dbapi_connection(AsyncAdapt_dbapi_connection): + __slots__ = () + + await_ = staticmethod(await_fallback) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/pyodbc.py b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/pyodbc.py new file mode 100644 index 00000000..f204d80a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/connectors/pyodbc.py @@ -0,0 +1,249 @@ +# connectors/pyodbc.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import re +from types import ModuleType +import typing +from typing import Any +from typing import Dict +from typing import List +from typing import Optional +from typing import Tuple +from typing import Union +from urllib.parse import unquote_plus + +from . import Connector +from .. import ExecutionContext +from .. import pool +from .. import util +from ..engine import ConnectArgsType +from ..engine import Connection +from ..engine import interfaces +from ..engine import URL +from ..sql.type_api import TypeEngine + +if typing.TYPE_CHECKING: + from ..engine.interfaces import IsolationLevel + + +class PyODBCConnector(Connector): + driver = "pyodbc" + + # this is no longer False for pyodbc in general + supports_sane_rowcount_returning = True + supports_sane_multi_rowcount = False + + supports_native_decimal = True + default_paramstyle = "named" + + fast_executemany = False + + # for non-DSN connections, this *may* be used to + # hold the desired driver name + pyodbc_driver_name: Optional[str] = None + + dbapi: ModuleType + + def __init__(self, use_setinputsizes: bool = False, **kw: Any): + super().__init__(**kw) + if use_setinputsizes: + self.bind_typing = interfaces.BindTyping.SETINPUTSIZES + + @classmethod + def import_dbapi(cls) -> ModuleType: + return __import__("pyodbc") + + def create_connect_args(self, url: URL) -> ConnectArgsType: + opts = url.translate_connect_args(username="user") + opts.update(url.query) + + keys = opts + + query = url.query + + connect_args: Dict[str, Any] = {} + connectors: List[str] + + for param in ("ansi", "unicode_results", "autocommit"): + if param in keys: + connect_args[param] = util.asbool(keys.pop(param)) + + if "odbc_connect" in keys: + connectors = [unquote_plus(keys.pop("odbc_connect"))] + else: + + def check_quote(token: str) -> str: + if ";" in str(token) or str(token).startswith("{"): + token = "{%s}" % token.replace("}", "}}") + return token + + keys = {k: check_quote(v) for k, v in keys.items()} + + dsn_connection = "dsn" in keys or ( + "host" in keys and "database" not in keys + ) + if dsn_connection: + connectors = [ + "dsn=%s" % (keys.pop("host", "") or keys.pop("dsn", "")) + ] + else: + port = "" + if "port" in keys and "port" not in query: + port = ",%d" % int(keys.pop("port")) + + 
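# As a sketch (hypothetical URL, not part of this changeset): for + # mssql+pyodbc://user:pw@host:1433/db?driver=ODBC+Driver+17+for+SQL+Server + # the pieces assembled below produce roughly + # "DRIVER={ODBC Driver 17 for SQL Server};Server=host,1433;Database=db;UID=user;PWD=pw" +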
connectors = [] + driver = keys.pop("driver", self.pyodbc_driver_name) + if driver is None and keys: + # note if keys is empty, this is a totally blank URL + util.warn( + "No driver name specified; " + "this is expected by PyODBC when using " + "DSN-less connections" + ) + else: + connectors.append("DRIVER={%s}" % driver) + + connectors.extend( + [ + "Server=%s%s" % (keys.pop("host", ""), port), + "Database=%s" % keys.pop("database", ""), + ] + ) + + user = keys.pop("user", None) + if user: + connectors.append("UID=%s" % user) + pwd = keys.pop("password", "") + if pwd: + connectors.append("PWD=%s" % pwd) + else: + authentication = keys.pop("authentication", None) + if authentication: + connectors.append("Authentication=%s" % authentication) + else: + connectors.append("Trusted_Connection=Yes") + + # if set to 'Yes', the ODBC layer will try to automagically + # convert textual data from your database encoding to your + # client encoding. This should obviously be set to 'No' if + # you query a cp1253 encoded database from a latin1 client... + if "odbc_autotranslate" in keys: + connectors.append( + "AutoTranslate=%s" % keys.pop("odbc_autotranslate") + ) + + connectors.extend(["%s=%s" % (k, v) for k, v in keys.items()]) + + return ((";".join(connectors),), connect_args) + + def is_disconnect( + self, + e: Exception, + connection: Optional[ + Union[pool.PoolProxiedConnection, interfaces.DBAPIConnection] + ], + cursor: Optional[interfaces.DBAPICursor], + ) -> bool: + if isinstance(e, self.dbapi.ProgrammingError): + return "The cursor's connection has been closed." in str( + e + ) or "Attempt to use a closed connection." in str(e) + else: + return False + + def _dbapi_version(self) -> interfaces.VersionInfoType: + if not self.dbapi: + return () + return self._parse_dbapi_version(self.dbapi.version) + + def _parse_dbapi_version(self, vers: str) -> interfaces.VersionInfoType: + m = re.match(r"(?:py.*-)?([\d\.]+)(?:-(\w+))?", vers) + if not m: + return () + vers_tuple: interfaces.VersionInfoType = tuple( + [int(x) for x in m.group(1).split(".")] + ) + if m.group(2): + vers_tuple += (m.group(2),) + return vers_tuple + + def _get_server_version_info( + self, connection: Connection + ) -> interfaces.VersionInfoType: + # NOTE: this function is not reliable, particularly when + # freetds is in use. Implement database-specific server version + # queries. + dbapi_con = connection.connection.dbapi_connection + version: Tuple[Union[int, str], ...] = () + r = re.compile(r"[.\-]") + for n in r.split(dbapi_con.getinfo(self.dbapi.SQL_DBMS_VER)): # type: ignore[union-attr] # noqa: E501 + try: + version += (int(n),) + except ValueError: + pass + return tuple(version) + + def do_set_input_sizes( + self, + cursor: interfaces.DBAPICursor, + list_of_tuples: List[Tuple[str, Any, TypeEngine[Any]]], + context: ExecutionContext, + ) -> None: + # the rules for these types seem a little strange, as you can pass + # non-tuples as well as tuples; however, it seems to assume "0" + # for the subsequent values if you don't pass a tuple, which fails + # for types such as pyodbc.SQL_WLONGVARCHAR, which is the datatype + # that ticket #5649 is targeting. 
+ + # NOTE: as of #6058, this won't be called if the use_setinputsizes + # parameter were not passed to the dialect, or if no types were + # specified in list_of_tuples + + # as of #8177 for 2.0 we assume use_setinputsizes=True and only + # omit the setinputsizes calls for .executemany() with + # fast_executemany=True + + if ( + context.execute_style is interfaces.ExecuteStyle.EXECUTEMANY + and self.fast_executemany + ): + return + + cursor.setinputsizes( + [ + ( + (dbtype, None, None) + if not isinstance(dbtype, tuple) + else dbtype + ) + for key, dbtype, sqltype in list_of_tuples + ] + ) + + def get_isolation_level_values( + self, dbapi_connection: interfaces.DBAPIConnection + ) -> List[IsolationLevel]: + return super().get_isolation_level_values(dbapi_connection) + [ + "AUTOCOMMIT" + ] + + def set_isolation_level( + self, + dbapi_connection: interfaces.DBAPIConnection, + level: IsolationLevel, + ) -> None: + # adjust for ConnectionFairy being present + # allows attribute set e.g. "connection.autocommit = True" + # to work properly + + if level == "AUTOCOMMIT": + dbapi_connection.autocommit = True + else: + dbapi_connection.autocommit = False + super().set_isolation_level(dbapi_connection, level) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/__init__.py new file mode 100644 index 00000000..88a4d903 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/__init__.py @@ -0,0 +1,6 @@ +# cyextension/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..662ebcc8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/collections.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/collections.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 00000000..fef2a8ab Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/collections.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/collections.pyx b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/collections.pyx new file mode 100644 index 00000000..86d24852 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/collections.pyx @@ -0,0 +1,409 @@ +# cyextension/collections.pyx +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +cimport cython +from cpython.long cimport PyLong_FromLongLong +from cpython.set cimport PySet_Add + +from collections.abc import Collection +from itertools import filterfalse + +cdef bint add_not_present(set seen, object item, hashfunc): + hash_value = hashfunc(item) + if hash_value not in seen: + PySet_Add(seen, hash_value) + return True + else: + return False + +cdef list cunique_list(seq, hashfunc=None): + cdef set seen = set() + if not hashfunc: + 
return [x for x in seq if x not in seen and not PySet_Add(seen, x)] + else: + return [x for x in seq if add_not_present(seen, x, hashfunc)] + +def unique_list(seq, hashfunc=None): + return cunique_list(seq, hashfunc) + +cdef class OrderedSet(set): + + cdef list _list + + @classmethod + def __class_getitem__(cls, key): + return cls + + def __init__(self, d=None): + set.__init__(self) + if d is not None: + self._list = cunique_list(d) + set.update(self, self._list) + else: + self._list = [] + + cpdef OrderedSet copy(self): + cdef OrderedSet cp = OrderedSet.__new__(OrderedSet) + cp._list = list(self._list) + set.update(cp, cp._list) + return cp + + @cython.final + cdef OrderedSet _from_list(self, list new_list): + cdef OrderedSet new = OrderedSet.__new__(OrderedSet) + new._list = new_list + set.update(new, new_list) + return new + + def add(self, element): + if element not in self: + self._list.append(element) + PySet_Add(self, element) + + def remove(self, element): + # set.remove will raise if element is not in self + set.remove(self, element) + self._list.remove(element) + + def pop(self): + try: + value = self._list.pop() + except IndexError: + raise KeyError("pop from an empty set") from None + set.remove(self, value) + return value + + def insert(self, Py_ssize_t pos, element): + if element not in self: + self._list.insert(pos, element) + PySet_Add(self, element) + + def discard(self, element): + if element in self: + set.remove(self, element) + self._list.remove(element) + + def clear(self): + set.clear(self) + self._list = [] + + def __getitem__(self, key): + return self._list[key] + + def __iter__(self): + return iter(self._list) + + def __add__(self, other): + return self.union(other) + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._list) + + __str__ = __repr__ + + def update(self, *iterables): + for iterable in iterables: + for e in iterable: + if e not in self: + self._list.append(e) + set.add(self, e) + + def __ior__(self, iterable): + self.update(iterable) + return self + + def union(self, *other): + result = self.copy() + result.update(*other) + return result + + def __or__(self, other): + return self.union(other) + + def intersection(self, *other): + cdef set other_set = set.intersection(self, *other) + return self._from_list([a for a in self._list if a in other_set]) + + def __and__(self, other): + return self.intersection(other) + + def symmetric_difference(self, other): + cdef set other_set + if isinstance(other, set): + other_set = other + collection = other_set + elif isinstance(other, Collection): + collection = other + other_set = set(other) + else: + collection = list(other) + other_set = set(collection) + result = self._from_list([a for a in self._list if a not in other_set]) + result.update(a for a in collection if a not in self) + return result + + def __xor__(self, other): + return self.symmetric_difference(other) + + def difference(self, *other): + cdef set other_set = set.difference(self, *other) + return self._from_list([a for a in self._list if a in other_set]) + + def __sub__(self, other): + return self.difference(other) + + def intersection_update(self, *other): + set.intersection_update(self, *other) + self._list = [a for a in self._list if a in self] + + def __iand__(self, other): + self.intersection_update(other) + return self + + cpdef symmetric_difference_update(self, other): + collection = other if isinstance(other, Collection) else list(other) + set.symmetric_difference_update(self, collection) + self._list = [a for a in 
self._list if a in self] + self._list += [a for a in collection if a in self] + + def __ixor__(self, other): + self.symmetric_difference_update(other) + return self + + def difference_update(self, *other): + set.difference_update(self, *other) + self._list = [a for a in self._list if a in self] + + def __isub__(self, other): + self.difference_update(other) + return self + +cdef object cy_id(object item): + return PyLong_FromLongLong(<long long> <void *>(item)) + +# NOTE: cython 0.x will call __add__, __sub__, etc with the parameter swapped +# instead of the __rmeth__, so they need to check that also self is of the +# correct type. This is fixed in cython 3.x. See: +# https://docs.cython.org/en/latest/src/userguide/special_methods.html#arithmetic-methods +cdef class IdentitySet: + """A set that considers only object id() for uniqueness. + + This strategy has edge cases for builtin types - it's possible to have + two 'foo' strings in one of these sets, for example. Use sparingly. + + """ + + cdef dict _members + + def __init__(self, iterable=None): + self._members = {} + if iterable: + self.update(iterable) + + def add(self, value): + self._members[cy_id(value)] = value + + def __contains__(self, value): + return cy_id(value) in self._members + + cpdef remove(self, value): + del self._members[cy_id(value)] + + def discard(self, value): + try: + self.remove(value) + except KeyError: + pass + + def pop(self): + cdef tuple pair + try: + pair = self._members.popitem() + return pair[1] + except KeyError: + raise KeyError("pop from an empty set") + + def clear(self): + self._members.clear() + + def __eq__(self, other): + cdef IdentitySet other_ + if isinstance(other, IdentitySet): + other_ = other + return self._members == other_._members + else: + return False + + def __ne__(self, other): + cdef IdentitySet other_ + if isinstance(other, IdentitySet): + other_ = other + return self._members != other_._members + else: + return True + + cpdef issubset(self, iterable): + cdef IdentitySet other + if isinstance(iterable, self.__class__): + other = iterable + else: + other = self.__class__(iterable) + + if len(self) > len(other): + return False + for m in filterfalse(other._members.__contains__, self._members): + return False + return True + + def __le__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + return self.issubset(other) + + def __lt__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + return len(self) < len(other) and self.issubset(other) + + cpdef issuperset(self, iterable): + cdef IdentitySet other + if isinstance(iterable, self.__class__): + other = iterable + else: + other = self.__class__(iterable) + + if len(self) < len(other): + return False + for m in filterfalse(self._members.__contains__, other._members): + return False + return True + + def __ge__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + return self.issuperset(other) + + def __gt__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + return len(self) > len(other) and self.issuperset(other) + + cpdef IdentitySet union(self, iterable): + cdef IdentitySet result = self.__class__() + result._members.update(self._members) + result.update(iterable) + return result + + def __or__(self, other): + if not isinstance(other, IdentitySet) or not isinstance(self, IdentitySet): + return NotImplemented + return self.union(other) + + cpdef update(self, iterable): + for obj in iterable: + self._members[cy_id(obj)] = obj + + def __ior__(self, 
other): + if not isinstance(other, IdentitySet): + return NotImplemented + self.update(other) + return self + + cpdef IdentitySet difference(self, iterable): + cdef IdentitySet result = self.__new__(self.__class__) + if isinstance(iterable, self.__class__): + other = (<IdentitySet>iterable)._members + else: + other = {cy_id(obj) for obj in iterable} + result._members = {k: v for k, v in self._members.items() if k not in other} + return result + + def __sub__(self, other): + if not isinstance(other, IdentitySet) or not isinstance(self, IdentitySet): + return NotImplemented + return self.difference(other) + + cpdef difference_update(self, iterable): + cdef IdentitySet other = self.difference(iterable) + self._members = other._members + + def __isub__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + self.difference_update(other) + return self + + cpdef IdentitySet intersection(self, iterable): + cdef IdentitySet result = self.__new__(self.__class__) + if isinstance(iterable, self.__class__): + other = (<IdentitySet>iterable)._members + else: + other = {cy_id(obj) for obj in iterable} + result._members = {k: v for k, v in self._members.items() if k in other} + return result + + def __and__(self, other): + if not isinstance(other, IdentitySet) or not isinstance(self, IdentitySet): + return NotImplemented + return self.intersection(other) + + cpdef intersection_update(self, iterable): + cdef IdentitySet other = self.intersection(iterable) + self._members = other._members + + def __iand__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + self.intersection_update(other) + return self + + cpdef IdentitySet symmetric_difference(self, iterable): + cdef IdentitySet result = self.__new__(self.__class__) + cdef dict other + if isinstance(iterable, self.__class__): + other = (<IdentitySet>iterable)._members + else: + other = {cy_id(obj): obj for obj in iterable} + result._members = {k: v for k, v in self._members.items() if k not in other} + result._members.update( + [(k, v) for k, v in other.items() if k not in self._members] + ) + return result + + def __xor__(self, other): + if not isinstance(other, IdentitySet) or not isinstance(self, IdentitySet): + return NotImplemented + return self.symmetric_difference(other) + + cpdef symmetric_difference_update(self, iterable): + cdef IdentitySet other = self.symmetric_difference(iterable) + self._members = other._members + + def __ixor__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + self.symmetric_difference_update(other) + return self + + cpdef IdentitySet copy(self): + cdef IdentitySet cp = self.__new__(self.__class__) + cp._members = self._members.copy() + return cp + + def __copy__(self): + return self.copy() + + def __len__(self): + return len(self._members) + + def __iter__(self): + return iter(self._members.values()) + + def __hash__(self): + raise TypeError("set objects are unhashable") + + def __repr__(self): + return "%s(%r)" % (type(self).__name__, list(self._members.values())) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/immutabledict.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/immutabledict.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 00000000..2a09aae9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/immutabledict.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/immutabledict.pxd 
b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/immutabledict.pxd new file mode 100644 index 00000000..76f22893 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/immutabledict.pxd @@ -0,0 +1,8 @@ +# cyextension/immutabledict.pxd +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +cdef class immutabledict(dict): + pass diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/immutabledict.pyx b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/immutabledict.pyx new file mode 100644 index 00000000..b37eccc4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/immutabledict.pyx @@ -0,0 +1,133 @@ +# cyextension/immutabledict.pyx +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from cpython.dict cimport PyDict_New, PyDict_Update, PyDict_Size + + +def _readonly_fn(obj): + raise TypeError( + "%s object is immutable and/or readonly" % obj.__class__.__name__) + + +def _immutable_fn(obj): + raise TypeError( + "%s object is immutable" % obj.__class__.__name__) + + +class ReadOnlyContainer: + + __slots__ = () + + def _readonly(self, *a,**kw): + _readonly_fn(self) + + __delitem__ = __setitem__ = __setattr__ = _readonly + + +class ImmutableDictBase(dict): + def _immutable(self, *a,**kw): + _immutable_fn(self) + + @classmethod + def __class_getitem__(cls, key): + return cls + + __delitem__ = __setitem__ = __setattr__ = _immutable + clear = pop = popitem = setdefault = update = _immutable + + +cdef class immutabledict(dict): + def __repr__(self): + return f"immutabledict({dict.__repr__(self)})" + + @classmethod + def __class_getitem__(cls, key): + return cls + + def union(self, *args, **kw): + cdef dict to_merge = None + cdef immutabledict result + cdef Py_ssize_t args_len = len(args) + if args_len > 1: + raise TypeError( + f'union expected at most 1 argument, got {args_len}' + ) + if args_len == 1: + attribute = args[0] + if isinstance(attribute, dict): + to_merge = attribute + if to_merge is None: + to_merge = dict(*args, **kw) + + if PyDict_Size(to_merge) == 0: + return self + + # new + update is faster than immutabledict(self) + result = immutabledict() + PyDict_Update(result, self) + PyDict_Update(result, to_merge) + return result + + def merge_with(self, *other): + cdef immutabledict result = None + cdef object d + cdef bint update = False + if not other: + return self + for d in other: + if d: + if update == False: + update = True + # new + update is faster than immutabledict(self) + result = immutabledict() + PyDict_Update(result, self) + PyDict_Update( + result, (d if isinstance(d, dict) else dict(d)) + ) + + return self if update == False else result + + def copy(self): + return self + + def __reduce__(self): + return immutabledict, (dict(self), ) + + def __delitem__(self, k): + _immutable_fn(self) + + def __setitem__(self, k, v): + _immutable_fn(self) + + def __setattr__(self, k, v): + _immutable_fn(self) + + def clear(self, *args, **kw): + _immutable_fn(self) + + def pop(self, *args, **kw): + _immutable_fn(self) + + def popitem(self, *args, **kw): + _immutable_fn(self) + + def setdefault(self, *args, **kw): + _immutable_fn(self) + + def update(self, *args, **kw): + _immutable_fn(self) + + # PEP 584 + def 
__ior__(self, other): + _immutable_fn(self) + + def __or__(self, other): + return immutabledict(dict.__or__(self, other)) + + def __ror__(self, other): + # NOTE: this is used only in cython 3.x; + # version 0.x will call __or__ with args inversed + return immutabledict(dict.__ror__(self, other)) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/processors.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/processors.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 00000000..297ecb7c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/processors.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/processors.pyx b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/processors.pyx new file mode 100644 index 00000000..3d714569 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/processors.pyx @@ -0,0 +1,68 @@ +# cyextension/processors.pyx +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +import datetime +from datetime import datetime as datetime_cls +from datetime import time as time_cls +from datetime import date as date_cls +import re + +from cpython.object cimport PyObject_Str +from cpython.unicode cimport PyUnicode_AsASCIIString, PyUnicode_Check, PyUnicode_Decode +from libc.stdio cimport sscanf + + +def int_to_boolean(value): + if value is None: + return None + return True if value else False + +def to_str(value): + return PyObject_Str(value) if value is not None else None + +def to_float(value): + return float(value) if value is not None else None + +cdef inline bytes to_bytes(object value, str type_name): + try: + return PyUnicode_AsASCIIString(value) + except Exception as e: + raise ValueError( + f"Couldn't parse {type_name} string '{value!r}' " + "- value is not a string." 
+ ) from e + +def str_to_datetime(value): + if value is not None: + value = datetime_cls.fromisoformat(value) + return value + +def str_to_time(value): + if value is not None: + value = time_cls.fromisoformat(value) + return value + + +def str_to_date(value): + if value is not None: + value = date_cls.fromisoformat(value) + return value + + + +cdef class DecimalResultProcessor: + cdef object type_ + cdef str format_ + + def __cinit__(self, type_, format_): + self.type_ = type_ + self.format_ = format_ + + def process(self, object value): + if value is None: + return None + else: + return self.type_(self.format_ % value) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/resultproxy.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/resultproxy.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 00000000..9231cfc7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/resultproxy.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/resultproxy.pyx b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/resultproxy.pyx new file mode 100644 index 00000000..b6e357a1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/resultproxy.pyx @@ -0,0 +1,102 @@ +# cyextension/resultproxy.pyx +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +import operator + +cdef class BaseRow: + cdef readonly object _parent + cdef readonly dict _key_to_index + cdef readonly tuple _data + + def __init__(self, object parent, object processors, dict key_to_index, object data): + """Row objects are constructed by CursorResult objects.""" + + self._parent = parent + + self._key_to_index = key_to_index + + if processors: + self._data = _apply_processors(processors, data) + else: + self._data = tuple(data) + + def __reduce__(self): + return ( + rowproxy_reconstructor, + (self.__class__, self.__getstate__()), + ) + + def __getstate__(self): + return {"_parent": self._parent, "_data": self._data} + + def __setstate__(self, dict state): + parent = state["_parent"] + self._parent = parent + self._data = state["_data"] + self._key_to_index = parent._key_to_index + + def _values_impl(self): + return list(self) + + def __iter__(self): + return iter(self._data) + + def __len__(self): + return len(self._data) + + def __hash__(self): + return hash(self._data) + + def __getitem__(self, index): + return self._data[index] + + def _get_by_key_impl_mapping(self, key): + return self._get_by_key_impl(key, 0) + + cdef _get_by_key_impl(self, object key, int attr_err): + index = self._key_to_index.get(key) + if index is not None: + return self._data[index] + self._parent._key_not_found(key, attr_err != 0) + + def __getattr__(self, name): + return self._get_by_key_impl(name, 1) + + def _to_tuple_instance(self): + return self._data + + +cdef tuple _apply_processors(proc, data): + res = [] + for i in range(len(proc)): + p = proc[i] + if p is None: + res.append(data[i]) + else: + res.append(p(data[i])) + return tuple(res) + + +def rowproxy_reconstructor(cls, state): + obj = cls.__new__(cls) + obj.__setstate__(state) + return obj + + +cdef int is_contiguous(tuple indexes): + cdef int i + for i in range(1, len(indexes)): + if indexes[i-1] != indexes[i] -1: + return 0 + return 1 + + +def tuplegetter(*indexes): + if 
len(indexes) == 1 or is_contiguous(indexes) != 0: + # slice form is faster but returns a list if input is list + return operator.itemgetter(slice(indexes[0], indexes[-1] + 1)) + else: + return operator.itemgetter(*indexes) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/util.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/util.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 00000000..769065ae Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/util.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/util.pyx b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/util.pyx new file mode 100644 index 00000000..cb17acd6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/cyextension/util.pyx @@ -0,0 +1,91 @@ +# cyextension/util.pyx +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from collections.abc import Mapping + +from sqlalchemy import exc + +cdef tuple _Empty_Tuple = () + +cdef inline bint _mapping_or_tuple(object value): + return isinstance(value, dict) or isinstance(value, tuple) or isinstance(value, Mapping) + +cdef inline bint _check_item(object params) except 0: + cdef object item + cdef bint ret = 1 + if params: + item = params[0] + if not _mapping_or_tuple(item): + ret = 0 + raise exc.ArgumentError( + "List argument must consist only of tuples or dictionaries" + ) + return ret + +def _distill_params_20(object params): + if params is None: + return _Empty_Tuple + elif isinstance(params, list) or isinstance(params, tuple): + _check_item(params) + return params + elif isinstance(params, dict) or isinstance(params, Mapping): + return [params] + else: + raise exc.ArgumentError("mapping or list expected for parameters") + + +def _distill_raw_params(object params): + if params is None: + return _Empty_Tuple + elif isinstance(params, list): + _check_item(params) + return params + elif _mapping_or_tuple(params): + return [params] + else: + raise exc.ArgumentError("mapping or sequence expected for parameters") + +cdef class prefix_anon_map(dict): + def __missing__(self, str key): + cdef str derived + cdef int anonymous_counter + cdef dict self_dict = self + + derived = key.split(" ", 1)[1] + + anonymous_counter = self_dict.get(derived, 1) + self_dict[derived] = anonymous_counter + 1 + value = f"{derived}_{anonymous_counter}" + self_dict[key] = value + return value + + +cdef class cache_anon_map(dict): + cdef int _index + + def __init__(self): + self._index = 0 + + def get_anon(self, obj): + cdef long long idself + cdef str id_ + cdef dict self_dict = self + + idself = id(obj) + if idself in self_dict: + return self_dict[idself], True + else: + id_ = self.__missing__(idself) + return id_, False + + def __missing__(self, key): + cdef str val + cdef dict self_dict = self + + self_dict[key] = val = str(self._index) + self._index += 1 + return val + diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/__init__.py new file mode 100644 index 00000000..7d5cc1c9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/__init__.py @@ -0,0 +1,61 @@ +# dialects/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is 
part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +from typing import Callable +from typing import Optional +from typing import Type +from typing import TYPE_CHECKING + +from .. import util + +if TYPE_CHECKING: + from ..engine.interfaces import Dialect + +__all__ = ("mssql", "mysql", "oracle", "postgresql", "sqlite") + + +def _auto_fn(name: str) -> Optional[Callable[[], Type[Dialect]]]: + """default dialect importer. + + plugs into the :class:`.PluginLoader` + as a first-hit system. + + """ + if "." in name: + dialect, driver = name.split(".") + else: + dialect = name + driver = "base" + + try: + if dialect == "mariadb": + # it's "OK" for us to hardcode here since _auto_fn is already + # hardcoded. if mysql / mariadb etc were third party dialects + # they would just publish all the entrypoints, which would actually + # look much nicer. + module = __import__( + "sqlalchemy.dialects.mysql.mariadb" + ).dialects.mysql.mariadb + return module.loader(driver) # type: ignore + else: + module = __import__("sqlalchemy.dialects.%s" % (dialect,)).dialects + module = getattr(module, dialect) + except ImportError: + return None + + if hasattr(module, driver): + module = getattr(module, driver) + return lambda: module.dialect + else: + return None + + +registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn) + +plugins = util.PluginLoader("sqlalchemy.plugins") diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..d5503fa5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/__pycache__/_typing.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/__pycache__/_typing.cpython-312.pyc new file mode 100644 index 00000000..2b9a73f8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/__pycache__/_typing.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/_typing.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/_typing.py new file mode 100644 index 00000000..9ee6e4bc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/_typing.py @@ -0,0 +1,25 @@ +# dialects/_typing.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from __future__ import annotations + +from typing import Any +from typing import Iterable +from typing import Mapping +from typing import Optional +from typing import Union + +from ..sql._typing import _DDLColumnArgument +from ..sql.elements import DQLDMLClauseElement +from ..sql.schema import ColumnCollectionConstraint +from ..sql.schema import Index + + +_OnConflictConstraintT = Union[str, ColumnCollectionConstraint, Index, None] +_OnConflictIndexElementsT = Optional[Iterable[_DDLColumnArgument]] +_OnConflictIndexWhereT = Optional[DQLDMLClauseElement] +_OnConflictSetT = Optional[Mapping[Any, Any]] +_OnConflictWhereT = Union[DQLDMLClauseElement, str, None] diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__init__.py 
b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__init__.py new file mode 100644 index 00000000..19ab7c42 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__init__.py @@ -0,0 +1,88 @@ +# dialects/mssql/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +from . import aioodbc # noqa +from . import base # noqa +from . import pymssql # noqa +from . import pyodbc # noqa +from .base import BIGINT +from .base import BINARY +from .base import BIT +from .base import CHAR +from .base import DATE +from .base import DATETIME +from .base import DATETIME2 +from .base import DATETIMEOFFSET +from .base import DECIMAL +from .base import DOUBLE_PRECISION +from .base import FLOAT +from .base import IMAGE +from .base import INTEGER +from .base import JSON +from .base import MONEY +from .base import NCHAR +from .base import NTEXT +from .base import NUMERIC +from .base import NVARCHAR +from .base import REAL +from .base import ROWVERSION +from .base import SMALLDATETIME +from .base import SMALLINT +from .base import SMALLMONEY +from .base import SQL_VARIANT +from .base import TEXT +from .base import TIME +from .base import TIMESTAMP +from .base import TINYINT +from .base import UNIQUEIDENTIFIER +from .base import VARBINARY +from .base import VARCHAR +from .base import XML +from ...sql import try_cast + + +base.dialect = dialect = pyodbc.dialect + + +__all__ = ( + "JSON", + "INTEGER", + "BIGINT", + "SMALLINT", + "TINYINT", + "VARCHAR", + "NVARCHAR", + "CHAR", + "NCHAR", + "TEXT", + "NTEXT", + "DECIMAL", + "NUMERIC", + "FLOAT", + "DATETIME", + "DATETIME2", + "DATETIMEOFFSET", + "DATE", + "DOUBLE_PRECISION", + "TIME", + "SMALLDATETIME", + "BINARY", + "VARBINARY", + "BIT", + "REAL", + "IMAGE", + "TIMESTAMP", + "ROWVERSION", + "MONEY", + "SMALLMONEY", + "UNIQUEIDENTIFIER", + "SQL_VARIANT", + "XML", + "dialect", + "try_cast", +) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..6cdbdb1b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/aioodbc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/aioodbc.cpython-312.pyc new file mode 100644 index 00000000..8f3142f6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/aioodbc.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/base.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/base.cpython-312.pyc new file mode 100644 index 00000000..76b3650d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/base.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-312.pyc new file mode 100644 index 00000000..e18a2452 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/json.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/json.cpython-312.pyc new file mode 100644 index 00000000..06dae2c9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/json.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/provision.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/provision.cpython-312.pyc new file mode 100644 index 00000000..9129511d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/provision.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-312.pyc new file mode 100644 index 00000000..3e600b5d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-312.pyc new file mode 100644 index 00000000..23e719a1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/aioodbc.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/aioodbc.py new file mode 100644 index 00000000..65945d97 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/aioodbc.py @@ -0,0 +1,64 @@ +# dialects/mssql/aioodbc.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors +r""" +.. dialect:: mssql+aioodbc + :name: aioodbc + :dbapi: aioodbc + :connectstring: mssql+aioodbc://<username>:<password>@<dsnname> + :url: https://pypi.org/project/aioodbc/ + + +Support for the SQL Server database in asyncio style, using the aioodbc +driver which itself is a thread-wrapper around pyodbc. + +.. versionadded:: 2.0.23 Added the mssql+aioodbc dialect which builds + on top of the pyodbc and general aio* dialect architecture. + +Using a special asyncio mediation layer, the aioodbc dialect is usable +as the backend for the :ref:`SQLAlchemy asyncio <asyncio_toplevel>` +extension package. + +Most behaviors and caveats for this driver are the same as those of the +pyodbc dialect used on SQL Server; see :ref:`mssql_pyodbc` for general +background. + +This dialect should normally be used only with the +:func:`_asyncio.create_async_engine` engine creation function; connection +styles are otherwise equivalent to those documented in the pyodbc section:: + + from sqlalchemy.ext.asyncio import create_async_engine + engine = create_async_engine( + "mssql+aioodbc://scott:tiger@mssql2017:1433/test?" 
+ "driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes" + ) + + + +""" + +from __future__ import annotations + +from .pyodbc import MSDialect_pyodbc +from .pyodbc import MSExecutionContext_pyodbc +from ...connectors.aioodbc import aiodbcConnector + + +class MSExecutionContext_aioodbc(MSExecutionContext_pyodbc): + def create_server_side_cursor(self): + return self._dbapi_connection.cursor(server_side=True) + + +class MSDialectAsync_aioodbc(aiodbcConnector, MSDialect_pyodbc): + driver = "aioodbc" + + supports_statement_cache = True + + execution_ctx_cls = MSExecutionContext_aioodbc + + +dialect = MSDialectAsync_aioodbc diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/base.py new file mode 100644 index 00000000..ddee9a5a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/base.py @@ -0,0 +1,4011 @@ +# dialects/mssql/base.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +""" +.. dialect:: mssql + :name: Microsoft SQL Server + :full_support: 2017 + :normal_support: 2012+ + :best_effort: 2005+ + +.. _mssql_external_dialects: + +External Dialects +----------------- + +In addition to the above DBAPI layers with native SQLAlchemy support, there +are third-party dialects for other DBAPI layers that are compatible +with SQL Server. See the "External Dialects" list on the +:ref:`dialect_toplevel` page. + +.. _mssql_identity: + +Auto Increment Behavior / IDENTITY Columns +------------------------------------------ + +SQL Server provides so-called "auto incrementing" behavior using the +``IDENTITY`` construct, which can be placed on any single integer column in a +table. SQLAlchemy considers ``IDENTITY`` within its default "autoincrement" +behavior for an integer primary key column, described at +:paramref:`_schema.Column.autoincrement`. This means that by default, +the first integer primary key column in a :class:`_schema.Table` will be +considered to be the identity column - unless it is associated with a +:class:`.Sequence` - and will generate DDL as such:: + + from sqlalchemy import Table, MetaData, Column, Integer + + m = MetaData() + t = Table('t', m, + Column('id', Integer, primary_key=True), + Column('x', Integer)) + m.create_all(engine) + +The above example will generate DDL as: + +.. sourcecode:: sql + + CREATE TABLE t ( + id INTEGER NOT NULL IDENTITY, + x INTEGER NULL, + PRIMARY KEY (id) + ) + +For the case where this default generation of ``IDENTITY`` is not desired, +specify ``False`` for the :paramref:`_schema.Column.autoincrement` flag, +on the first integer primary key column:: + + m = MetaData() + t = Table('t', m, + Column('id', Integer, primary_key=True, autoincrement=False), + Column('x', Integer)) + m.create_all(engine) + +To add the ``IDENTITY`` keyword to a non-primary key column, specify +``True`` for the :paramref:`_schema.Column.autoincrement` flag on the desired +:class:`_schema.Column` object, and ensure that +:paramref:`_schema.Column.autoincrement` +is set to ``False`` on any integer primary key column:: + + m = MetaData() + t = Table('t', m, + Column('id', Integer, primary_key=True, autoincrement=False), + Column('x', Integer, autoincrement=True)) + m.create_all(engine) + +.. 
versionchanged:: 1.4 Added the :class:`_schema.Identity` construct + in a :class:`_schema.Column` to specify the start and increment + parameters of an IDENTITY. These replace + the use of the :class:`.Sequence` object in order to specify these values. + +.. deprecated:: 1.4 + + The ``mssql_identity_start`` and ``mssql_identity_increment`` parameters + to :class:`_schema.Column` are deprecated and should be replaced by + an :class:`_schema.Identity` object. Specifying both ways of configuring + an IDENTITY will result in a compile error. + These options are also no longer returned as part of the + ``dialect_options`` key in :meth:`_reflection.Inspector.get_columns`. + Use the information in the ``identity`` key instead. + +.. deprecated:: 1.3 + + The use of :class:`.Sequence` to specify IDENTITY characteristics is + deprecated and will be removed in a future release. Please use + the :class:`_schema.Identity` object parameters + :paramref:`_schema.Identity.start` and + :paramref:`_schema.Identity.increment`. + +.. versionchanged:: 1.4 Removed the ability to use a :class:`.Sequence` + object to modify IDENTITY characteristics. :class:`.Sequence` objects + now only manipulate true T-SQL SEQUENCE types. + +.. note:: + + There can only be one IDENTITY column on the table. When using + ``autoincrement=True`` to enable the IDENTITY keyword, SQLAlchemy does not + guard against multiple columns specifying the option simultaneously. The + SQL Server database will instead reject the ``CREATE TABLE`` statement. + +.. note:: + + An INSERT statement which attempts to provide a value for a column that is + marked with IDENTITY will be rejected by SQL Server. In order for the + value to be accepted, a session-level option "SET IDENTITY_INSERT" must be + enabled. The SQLAlchemy SQL Server dialect will perform this operation + automatically when using a core :class:`_expression.Insert` + construct; if the + execution specifies a value for the IDENTITY column, the "IDENTITY_INSERT" + option will be enabled for the span of that statement's invocation. However, + this scenario is not high performing and should not be relied upon for + normal use. If a table doesn't actually require IDENTITY behavior in its + integer primary key column, the keyword should be disabled when creating + the table by ensuring that ``autoincrement=False`` is set. + +Controlling "Start" and "Increment" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Specific control over the "start" and "increment" values for +the ``IDENTITY`` generator is provided using the +:paramref:`_schema.Identity.start` and :paramref:`_schema.Identity.increment` +parameters passed to the :class:`_schema.Identity` object:: + + from sqlalchemy import Table, Integer, Column, Identity + + test = Table( + 'test', metadata, + Column( + 'id', + Integer, + Identity(start=100, increment=10), + primary_key=True + ), + Column('name', String(20)) + ) + +The CREATE TABLE for the above :class:`_schema.Table` object would be: + +.. sourcecode:: sql + + CREATE TABLE test ( + id INTEGER NOT NULL IDENTITY(100,10) PRIMARY KEY, + name VARCHAR(20) NULL + ) + +.. note:: + + The :class:`_schema.Identity` object supports many other parameters in + addition to ``start`` and ``increment``. These are not supported by + SQL Server and will be ignored when generating the CREATE TABLE DDL. + +.. versionchanged:: 1.3.19 The :class:`_schema.Identity` object is + now used to affect the + ``IDENTITY`` generator for a :class:`_schema.Column` under SQL Server. 
+ Previously, the :class:`.Sequence` object was used. As SQL Server now + supports real sequences as a separate construct, :class:`.Sequence` will be + functional in the normal way starting from SQLAlchemy version 1.4. + + +Using IDENTITY with Non-Integer numeric types +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +SQL Server also allows ``IDENTITY`` to be used with ``NUMERIC`` columns. To +implement this pattern smoothly in SQLAlchemy, the primary datatype of the +column should remain as ``Integer``; however, the underlying implementation +type deployed to the SQL Server database can be specified as ``Numeric`` using +:meth:`.TypeEngine.with_variant`:: + + from sqlalchemy import Column + from sqlalchemy import Integer + from sqlalchemy import Numeric + from sqlalchemy import String + from sqlalchemy.ext.declarative import declarative_base + + Base = declarative_base() + + class TestTable(Base): + __tablename__ = "test" + id = Column( + Integer().with_variant(Numeric(10, 0), "mssql"), + primary_key=True, + autoincrement=True, + ) + name = Column(String) + +In the above example, ``Integer().with_variant()`` provides clear usage +information that accurately describes the intent of the code. The general +restriction that ``autoincrement`` only applies to ``Integer`` is established +at the metadata level and not at the per-dialect level. + +When using the above pattern, the primary key identifier that comes back from +the insertion of a row, which is also the value that would be assigned to an +ORM object such as ``TestTable`` above, will be an instance of ``Decimal()`` +and not ``int`` when using SQL Server. The numeric return type of the +:class:`_types.Numeric` type can be changed to return floats by passing False +to :paramref:`_types.Numeric.asdecimal`. To normalize the return type of the +above ``Numeric(10, 0)`` to return Python ints (which also support "long" +integer values in Python 3), use :class:`_types.TypeDecorator` as follows:: + + from sqlalchemy import TypeDecorator + + class NumericAsInteger(TypeDecorator): + '''normalize floating point return values into ints''' + + impl = Numeric(10, 0, asdecimal=False) + cache_ok = True + + def process_result_value(self, value, dialect): + if value is not None: + value = int(value) + return value + + class TestTable(Base): + __tablename__ = "test" + id = Column( + Integer().with_variant(NumericAsInteger, "mssql"), + primary_key=True, + autoincrement=True, + ) + name = Column(String) + +.. _mssql_insert_behavior: + +INSERT behavior +^^^^^^^^^^^^^^^^ + +Handling of the ``IDENTITY`` column at INSERT time involves two key +techniques. The most common is being able to fetch the "last inserted value" +for a given ``IDENTITY`` column, a process which SQLAlchemy performs +implicitly in many cases, most importantly within the ORM. + +The process for fetching this value has several variants: + +* In the vast majority of cases, RETURNING is used in conjunction with INSERT + statements on SQL Server in order to get newly generated primary key values: + + .. sourcecode:: sql + + INSERT INTO t (x) OUTPUT inserted.id VALUES (?) + + As of SQLAlchemy 2.0, the :ref:`engine_insertmanyvalues` feature is also + used by default to optimize many-row INSERT statements; for SQL Server + the feature takes place for both RETURNING and non-RETURNING + INSERT statements. + + .. versionchanged:: 2.0.10 The :ref:`engine_insertmanyvalues` feature for + SQL Server was temporarily disabled for SQLAlchemy version 2.0.9 due to + issues with row ordering. 
As of 2.0.10 the feature is re-enabled, with + special case handling for the unit of work's requirement for RETURNING to + be ordered. + +* When RETURNING is not available or has been disabled via + ``implicit_returning=False``, either the ``scope_identity()`` function or + the ``@@identity`` variable is used; behavior varies by backend: + + * when using PyODBC, the phrase ``; select scope_identity()`` will be + appended to the end of the INSERT statement; a second result set will be + fetched in order to receive the value. Given a table as:: + + t = Table( + 't', + metadata, + Column('id', Integer, primary_key=True), + Column('x', Integer), + implicit_returning=False + ) + + an INSERT will look like: + + .. sourcecode:: sql + + INSERT INTO t (x) VALUES (?); select scope_identity() + + * Other dialects such as pymssql will call upon + ``SELECT scope_identity() AS lastrowid`` subsequent to an INSERT + statement. If the flag ``use_scope_identity=False`` is passed to + :func:`_sa.create_engine`, + the statement ``SELECT @@identity AS lastrowid`` + is used instead. + +A table that contains an ``IDENTITY`` column will prohibit an INSERT statement +that refers to the identity column explicitly. The SQLAlchemy dialect will +detect when an INSERT construct, created using a core +:func:`_expression.insert` +construct (not a plain string SQL), refers to the identity column, and +in this case will emit ``SET IDENTITY_INSERT ON`` prior to the insert +statement proceeding, and ``SET IDENTITY_INSERT OFF`` subsequent to the +execution. Given this example:: + + m = MetaData() + t = Table('t', m, Column('id', Integer, primary_key=True), + Column('x', Integer)) + m.create_all(engine) + + with engine.begin() as conn: + conn.execute(t.insert(), {'id': 1, 'x':1}, {'id':2, 'x':2}) + +The above column will be created with IDENTITY, however the INSERT statement +we emit is specifying explicit values. In the echo output we can see +how SQLAlchemy handles this: + +.. sourcecode:: sql + + CREATE TABLE t ( + id INTEGER NOT NULL IDENTITY(1,1), + x INTEGER NULL, + PRIMARY KEY (id) + ) + + COMMIT + SET IDENTITY_INSERT t ON + INSERT INTO t (id, x) VALUES (?, ?) + ((1, 1), (2, 2)) + SET IDENTITY_INSERT t OFF + COMMIT + + + +This is an auxiliary use case suitable for testing and bulk insert scenarios. + +SEQUENCE support +---------------- + +The :class:`.Sequence` object creates "real" sequences, i.e., +``CREATE SEQUENCE``: + +.. sourcecode:: pycon+sql + + >>> from sqlalchemy import Sequence + >>> from sqlalchemy.schema import CreateSequence + >>> from sqlalchemy.dialects import mssql + >>> print(CreateSequence(Sequence("my_seq", start=1)).compile(dialect=mssql.dialect())) + {printsql}CREATE SEQUENCE my_seq START WITH 1 + +For integer primary key generation, SQL Server's ``IDENTITY`` construct should +generally be preferred vs. sequence. + +.. tip:: + + The default start value for T-SQL is ``-2**63`` instead of 1 as + in most other SQL databases. Users should explicitly set the + :paramref:`.Sequence.start` to 1 if that's the expected default:: + + seq = Sequence("my_sequence", start=1) + +.. versionadded:: 1.4 added SQL Server support for :class:`.Sequence` + +.. versionchanged:: 2.0 The SQL Server dialect will no longer implicitly + render "START WITH 1" for ``CREATE SEQUENCE``, which was the behavior + first implemented in version 1.4. 
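+ +As a brief illustrative sketch (assuming a SQL Server 2012+ backend where +real sequences are available), a :class:`.Sequence` may also be attached +directly to an integer primary key column, in which case the sequence, rather +than ``IDENTITY``, generates new identifiers:: + + from sqlalchemy import Column, Integer, MetaData, Sequence, Table + + metadata = MetaData() + + seq_table = Table( + "seq_table", + metadata, + # the sequence fires for the primary key; start=1 avoids the + # T-SQL default starting value of -2**63 noted above + Column("id", Integer, Sequence("my_seq", start=1), primary_key=True), + Column("data", Integer), + ) + +With this configuration, CREATE TABLE is preceded by ``CREATE SEQUENCE +my_seq START WITH 1``, and new primary key values are drawn from the +sequence via ``NEXT VALUE FOR``.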
+ +MAX on VARCHAR / NVARCHAR +------------------------- + +SQL Server supports the special string "MAX" within the +:class:`_types.VARCHAR` and :class:`_types.NVARCHAR` datatypes, +to indicate "maximum length possible". The dialect currently handles this as +a length of "None" in the base type, rather than supplying a +dialect-specific version of these types, so that a base type +specified such as ``VARCHAR(None)`` can assume "unlengthed" behavior on +more than one backend without using dialect-specific types. + +To build a SQL Server VARCHAR or NVARCHAR with MAX length, use None:: + + my_table = Table( + 'my_table', metadata, + Column('my_data', VARCHAR(None)), + Column('my_n_data', NVARCHAR(None)) + ) + + +Collation Support +----------------- + +Character collations are supported by the base string types, +specified by the string argument "collation":: + + from sqlalchemy import VARCHAR + Column('login', VARCHAR(32, collation='Latin1_General_CI_AS')) + +When such a column is associated with a :class:`_schema.Table`, the +CREATE TABLE statement for this column will yield:: + + login VARCHAR(32) COLLATE Latin1_General_CI_AS NULL + +LIMIT/OFFSET Support +-------------------- + +MSSQL has added support for LIMIT / OFFSET as of SQL Server 2012, via the +"OFFSET n ROWS" and "FETCH NEXT n ROWS" clauses. SQLAlchemy supports these +syntaxes automatically if SQL Server 2012 or greater is detected. + +.. versionchanged:: 1.4 support added for SQL Server "OFFSET n ROWS" and + "FETCH NEXT n ROWS" syntax. + +For statements that specify only LIMIT and no OFFSET, all versions of SQL +Server support the TOP keyword. This syntax is used for all SQL Server +versions when no OFFSET clause is present. A statement such as:: + + select(some_table).limit(5) + +will render similarly to:: + + SELECT TOP 5 col1, col2.. FROM table + +For versions of SQL Server prior to SQL Server 2012, a statement that uses +LIMIT and OFFSET, or just OFFSET alone, will be rendered using the +``ROW_NUMBER()`` window function. A statement such as:: + + select(some_table).order_by(some_table.c.col3).limit(5).offset(10) + +will render similarly to:: + + SELECT anon_1.col1, anon_1.col2 FROM (SELECT col1, col2, + ROW_NUMBER() OVER (ORDER BY col3) AS + mssql_rn FROM table WHERE t.x = :x_1) AS + anon_1 WHERE mssql_rn > :param_1 AND mssql_rn <= :param_2 + :param_1 + +Note that when using LIMIT and/or OFFSET, whether using the older +or newer SQL Server syntaxes, the statement must have an ORDER BY as well, +else a :class:`.CompileError` is raised. + +.. _mssql_comment_support: + +DDL Comment Support +-------------------- + +Comment support, which includes DDL rendering for attributes such as +:paramref:`_schema.Table.comment` and :paramref:`_schema.Column.comment`, as +well as the ability to reflect these comments, is supported assuming a +supported version of SQL Server is in use. If a non-supported version such as +Azure Synapse is detected at first-connect time (based on the presence +of the ``fn_listextendedproperty`` SQL function), comment support including +rendering and table-comment reflection is disabled, as both features rely upon +SQL Server stored procedures and functions that are not available on all +backend types. + +To force comment support to be on or off, bypassing autodetection, set the +parameter ``supports_comments`` within :func:`_sa.create_engine`:: + + e = create_engine("mssql+pyodbc://u:p@dsn", supports_comments=False) + +.. 
versionadded:: 2.0 Added support for table and column comments for + the SQL Server dialect, including DDL generation and reflection. + +.. _mssql_isolation_level: + +Transaction Isolation Level +--------------------------- + +All SQL Server dialects support setting of transaction isolation level +both via a dialect-specific parameter +:paramref:`_sa.create_engine.isolation_level` +accepted by :func:`_sa.create_engine`, +as well as the :paramref:`.Connection.execution_options.isolation_level` +argument as passed to +:meth:`_engine.Connection.execution_options`. +This feature works by issuing the +command ``SET TRANSACTION ISOLATION LEVEL `` for +each new connection. + +To set isolation level using :func:`_sa.create_engine`:: + + engine = create_engine( + "mssql+pyodbc://scott:tiger@ms_2008", + isolation_level="REPEATABLE READ" + ) + +To set using per-connection execution options:: + + connection = engine.connect() + connection = connection.execution_options( + isolation_level="READ COMMITTED" + ) + +Valid values for ``isolation_level`` include: + +* ``AUTOCOMMIT`` - pyodbc / pymssql-specific +* ``READ COMMITTED`` +* ``READ UNCOMMITTED`` +* ``REPEATABLE READ`` +* ``SERIALIZABLE`` +* ``SNAPSHOT`` - specific to SQL Server + +There are also more options for isolation level configurations, such as +"sub-engine" objects linked to a main :class:`_engine.Engine` which each apply +different isolation level settings. See the discussion at +:ref:`dbapi_autocommit` for background. + +.. seealso:: + + :ref:`dbapi_autocommit` + +.. _mssql_reset_on_return: + +Temporary Table / Resource Reset for Connection Pooling +------------------------------------------------------- + +The :class:`.QueuePool` connection pool implementation used +by the SQLAlchemy :class:`.Engine` object includes +:ref:`reset on return ` behavior that will invoke +the DBAPI ``.rollback()`` method when connections are returned to the pool. +While this rollback will clear out the immediate state used by the previous +transaction, it does not cover a wider range of session-level state, including +temporary tables as well as other server state such as prepared statement +handles and statement caches. An undocumented SQL Server procedure known +as ``sp_reset_connection`` is known to be a workaround for this issue which +will reset most of the session state that builds up on a connection, including +temporary tables. + +To install ``sp_reset_connection`` as the means of performing reset-on-return, +the :meth:`.PoolEvents.reset` event hook may be used, as demonstrated in the +example below. The :paramref:`_sa.create_engine.pool_reset_on_return` parameter +is set to ``None`` so that the custom scheme can replace the default behavior +completely. The custom hook implementation calls ``.rollback()`` in any case, +as it's usually important that the DBAPI's own tracking of commit/rollback +will remain consistent with the state of the transaction:: + + from sqlalchemy import create_engine + from sqlalchemy import event + + mssql_engine = create_engine( + "mssql+pyodbc://scott:tiger^5HHH@mssql2017:1433/test?driver=ODBC+Driver+17+for+SQL+Server", + + # disable default reset-on-return scheme + pool_reset_on_return=None, + ) + + + @event.listens_for(mssql_engine, "reset") + def _reset_mssql(dbapi_connection, connection_record, reset_state): + if not reset_state.terminate_only: + dbapi_connection.execute("{call sys.sp_reset_connection}") + + # so that the DBAPI itself knows that the connection has been + # reset + dbapi_connection.rollback() + +.. 
versionchanged:: 2.0.0b3 Added additional state arguments to + the :meth:`.PoolEvents.reset` event and additionally ensured the event + is invoked for all "reset" occurrences, so that it's appropriate + as a place for custom "reset" handlers. Previous schemes which + use the :meth:`.PoolEvents.checkin` handler remain usable as well. + +.. seealso:: + + :ref:`pool_reset_on_return` - in the :ref:`pooling_toplevel` documentation + +Nullability +----------- +MSSQL has support for three levels of column nullability. The default +nullability allows nulls and is explicit in the CREATE TABLE +construct:: + + name VARCHAR(20) NULL + +If ``nullable=None`` is specified then no specification is made. In +other words the database's configured default is used. This will +render:: + + name VARCHAR(20) + +If ``nullable`` is ``True`` or ``False`` then the column will be +``NULL`` or ``NOT NULL`` respectively. + +Date / Time Handling +-------------------- +DATE and TIME are supported. Bind parameters are converted +to datetime.datetime() objects as required by most MSSQL drivers, +and results are processed from strings if needed. +The DATE and TIME types are not available for MSSQL 2005 and +previous - if a server version below 2008 is detected, DDL +for these types will be issued as DATETIME. + +.. _mssql_large_type_deprecation: + +Large Text/Binary Type Deprecation +---------------------------------- + +Per +`SQL Server 2012/2014 Documentation `_, +the ``NTEXT``, ``TEXT`` and ``IMAGE`` datatypes are to be removed from SQL +Server in a future release. SQLAlchemy normally relates these types to the +:class:`.UnicodeText`, :class:`_expression.TextClause` and +:class:`.LargeBinary` datatypes. + +In order to accommodate this change, a new flag ``deprecate_large_types`` +is added to the dialect, which will be automatically set based on detection +of the server version in use, if not otherwise set by the user. The +behavior of this flag is as follows: + +* When this flag is ``True``, the :class:`.UnicodeText`, + :class:`_expression.TextClause` and + :class:`.LargeBinary` datatypes, when used to render DDL, will render the + types ``NVARCHAR(max)``, ``VARCHAR(max)``, and ``VARBINARY(max)``, + respectively. This is a new behavior as of the addition of this flag. + +* When this flag is ``False``, the :class:`.UnicodeText`, + :class:`_expression.TextClause` and + :class:`.LargeBinary` datatypes, when used to render DDL, will render the + types ``NTEXT``, ``TEXT``, and ``IMAGE``, + respectively. This is the long-standing behavior of these types. + +* The flag begins with the value ``None``, before a database connection is + established. If the dialect is used to render DDL without the flag being + set, it is interpreted the same as ``False``. + +* On first connection, the dialect detects if SQL Server version 2012 or + greater is in use; if the flag is still at ``None``, it sets it to ``True`` + or ``False`` based on whether 2012 or greater is detected. 
+ +* The flag can be set to either ``True`` or ``False`` when the dialect + is created, typically via :func:`_sa.create_engine`:: + + eng = create_engine("mssql+pymssql://user:pass@host/db", + deprecate_large_types=True) + +* Complete control over whether the "old" or "new" types are rendered is + available in all SQLAlchemy versions by using the UPPERCASE type objects + instead: :class:`_types.NVARCHAR`, :class:`_types.VARCHAR`, + :class:`_types.VARBINARY`, :class:`_types.TEXT`, :class:`_mssql.NTEXT`, + :class:`_mssql.IMAGE` + will always remain fixed and always output exactly that + type. + +.. _multipart_schema_names: + +Multipart Schema Names +---------------------- + +SQL Server schemas sometimes require multiple parts to their "schema" +qualifier, that is, including the database name and owner name as separate +tokens, such as ``mydatabase.dbo.some_table``. These multipart names can be set +at once using the :paramref:`_schema.Table.schema` argument of +:class:`_schema.Table`:: + + Table( + "some_table", metadata, + Column("q", String(50)), + schema="mydatabase.dbo" + ) + +When performing operations such as table or component reflection, a schema +argument that contains a dot will be split into separate +"database" and "owner" components in order to correctly query the SQL +Server information schema tables, as these two values are stored separately. +Additionally, when rendering the schema name for DDL or SQL, the two +components will be quoted separately for case sensitive names and other +special characters. Given an argument as below:: + + Table( + "some_table", metadata, + Column("q", String(50)), + schema="MyDataBase.dbo" + ) + +The above schema would be rendered as ``[MyDataBase].dbo``, and also in +reflection, would be reflected using "dbo" as the owner and "MyDataBase" +as the database name. + +To control how the schema name is broken into database / owner, +specify brackets (which in SQL Server are quoting characters) in the name. +Below, the "owner" will be considered as ``MyDataBase.dbo`` and the +"database" will be None:: + + Table( + "some_table", metadata, + Column("q", String(50)), + schema="[MyDataBase.dbo]" + ) + +To individually specify both database and owner name with special characters +or embedded dots, use two sets of brackets:: + + Table( + "some_table", metadata, + Column("q", String(50)), + schema="[MyDataBase.Period].[MyOwner.Dot]" + ) + + +.. versionchanged:: 1.2 the SQL Server dialect now treats brackets as + identifier delimiters splitting the schema into separate database + and owner tokens, to allow dots within either name itself. + +.. _legacy_schema_rendering: + +Legacy Schema Mode +------------------ + +Very old versions of the MSSQL dialect introduced the behavior such that a +schema-qualified table would be auto-aliased when used in a +SELECT statement; given a table:: + + account_table = Table( + 'account', metadata, + Column('id', Integer, primary_key=True), + Column('info', String(100)), + schema="customer_schema" + ) + +this legacy mode of rendering would assume that "customer_schema.account" +would not be accepted by all parts of the SQL statement, as illustrated +below: + +.. 
sourcecode:: pycon+sql
+
+    >>> eng = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=True)
+    >>> print(account_table.select().compile(eng))
+    {printsql}SELECT account_1.id, account_1.info
+    FROM customer_schema.account AS account_1
+
+This mode of behavior is now off by default, as it appears to have served
+no purpose; however in the case that legacy applications rely upon it,
+it is available using the ``legacy_schema_aliasing`` argument to
+:func:`_sa.create_engine` as illustrated above.
+
+.. deprecated:: 1.4
+
+   The ``legacy_schema_aliasing`` flag is now
+   deprecated and will be removed in a future release.
+
+.. _mssql_indexes:
+
+Clustered Index Support
+-----------------------
+
+The MSSQL dialect supports clustered indexes (and primary keys) via the
+``mssql_clustered`` option. This option is available to :class:`.Index`,
+:class:`.UniqueConstraint`, and :class:`.PrimaryKeyConstraint`.
+For indexes this option can be combined with the ``mssql_columnstore`` one
+to create a clustered columnstore index.
+
+To generate a clustered index::
+
+    Index("my_index", table.c.x, mssql_clustered=True)
+
+which renders the index as ``CREATE CLUSTERED INDEX my_index ON table (x)``.
+
+To generate a clustered primary key use::
+
+    Table('my_table', metadata,
+          Column('x', ...),
+          Column('y', ...),
+          PrimaryKeyConstraint("x", "y", mssql_clustered=True))
+
+which will render the table, for example, as::
+
+    CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL,
+                           PRIMARY KEY CLUSTERED (x, y))
+
+Similarly, we can generate a clustered unique constraint using::
+
+    Table('my_table', metadata,
+          Column('x', ...),
+          Column('y', ...),
+          PrimaryKeyConstraint("x"),
+          UniqueConstraint("y", mssql_clustered=True),
+          )
+
+To explicitly request a non-clustered primary key (for example, when
+a separate clustered index is desired), use::
+
+    Table('my_table', metadata,
+          Column('x', ...),
+          Column('y', ...),
+          PrimaryKeyConstraint("x", "y", mssql_clustered=False))
+
+which will render the table, for example, as::
+
+    CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL,
+                           PRIMARY KEY NONCLUSTERED (x, y))
+
+Columnstore Index Support
+-------------------------
+
+The MSSQL dialect supports columnstore indexes via the ``mssql_columnstore``
+option. This option is available to :class:`.Index`. It can be combined with
+the ``mssql_clustered`` option to create a clustered columnstore index.
+
+To generate a columnstore index::
+
+    Index("my_index", table.c.x, mssql_columnstore=True)
+
+which renders the index as ``CREATE COLUMNSTORE INDEX my_index ON table (x)``.
+
+To generate a clustered columnstore index, provide no columns::
+
+    idx = Index("my_index", mssql_clustered=True, mssql_columnstore=True)
+    # required to associate the index with the table
+    table.append_constraint(idx)
+
+The above renders the index as
+``CREATE CLUSTERED COLUMNSTORE INDEX my_index ON table``.
+
+.. versionadded:: 2.0.18
+
+MSSQL-Specific Index Options
+-----------------------------
+
+In addition to clustering, the MSSQL dialect supports other special options
+for :class:`.Index`.
+
+INCLUDE
+^^^^^^^
+
+The ``mssql_include`` option renders INCLUDE(colname) for the given string
+names::
+
+    Index("my_index", table.c.x, mssql_include=['y'])
+
+would render the index as ``CREATE INDEX my_index ON table (x) INCLUDE (y)``
+
+.. 
_mssql_index_where: + +Filtered Indexes +^^^^^^^^^^^^^^^^ + +The ``mssql_where`` option renders WHERE(condition) for the given string +names:: + + Index("my_index", table.c.x, mssql_where=table.c.x > 10) + +would render the index as ``CREATE INDEX my_index ON table (x) WHERE x > 10``. + +.. versionadded:: 1.3.4 + +Index ordering +^^^^^^^^^^^^^^ + +Index ordering is available via functional expressions, such as:: + + Index("my_index", table.c.x.desc()) + +would render the index as ``CREATE INDEX my_index ON table (x DESC)`` + +.. seealso:: + + :ref:`schema_indexes_functional` + +Compatibility Levels +-------------------- +MSSQL supports the notion of setting compatibility levels at the +database level. This allows, for instance, to run a database that +is compatible with SQL2000 while running on a SQL2005 database +server. ``server_version_info`` will always return the database +server version information (in this case SQL2005) and not the +compatibility level information. Because of this, if running under +a backwards compatibility mode SQLAlchemy may attempt to use T-SQL +statements that are unable to be parsed by the database server. + +.. _mssql_triggers: + +Triggers +-------- + +SQLAlchemy by default uses OUTPUT INSERTED to get at newly +generated primary key values via IDENTITY columns or other +server side defaults. MS-SQL does not +allow the usage of OUTPUT INSERTED on tables that have triggers. +To disable the usage of OUTPUT INSERTED on a per-table basis, +specify ``implicit_returning=False`` for each :class:`_schema.Table` +which has triggers:: + + Table('mytable', metadata, + Column('id', Integer, primary_key=True), + # ..., + implicit_returning=False + ) + +Declarative form:: + + class MyClass(Base): + # ... + __table_args__ = {'implicit_returning':False} + + +.. _mssql_rowcount_versioning: + +Rowcount Support / ORM Versioning +--------------------------------- + +The SQL Server drivers may have limited ability to return the number +of rows updated from an UPDATE or DELETE statement. + +As of this writing, the PyODBC driver is not able to return a rowcount when +OUTPUT INSERTED is used. Previous versions of SQLAlchemy therefore had +limitations for features such as the "ORM Versioning" feature that relies upon +accurate rowcounts in order to match version numbers with matched rows. + +SQLAlchemy 2.0 now retrieves the "rowcount" manually for these particular use +cases based on counting the rows that arrived back within RETURNING; so while +the driver still has this limitation, the ORM Versioning feature is no longer +impacted by it. As of SQLAlchemy 2.0.5, ORM versioning has been fully +re-enabled for the pyodbc driver. + +.. versionchanged:: 2.0.5 ORM versioning support is restored for the pyodbc + driver. Previously, a warning would be emitted during ORM flush that + versioning was not supported. + + +Enabling Snapshot Isolation +--------------------------- + +SQL Server has a default transaction +isolation mode that locks entire tables, and causes even mildly concurrent +applications to have long held locks and frequent deadlocks. +Enabling snapshot isolation for the database as a whole is recommended +for modern levels of concurrency support. This is accomplished via the +following ALTER DATABASE commands executed at the SQL prompt:: + + ALTER DATABASE MyDatabase SET ALLOW_SNAPSHOT_ISOLATION ON + + ALTER DATABASE MyDatabase SET READ_COMMITTED_SNAPSHOT ON + +Background on SQL Server snapshot isolation is available at +https://msdn.microsoft.com/en-us/library/ms175095.aspx. 
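+
+These commands can also be emitted from SQLAlchemy itself; a minimal
+sketch, assuming an already-configured :class:`_engine.Engine` named
+``engine`` and a database named ``MyDatabase`` (both names are
+illustrative)::
+
+    from sqlalchemy import text
+
+    with engine.connect().execution_options(
+        isolation_level="AUTOCOMMIT"
+    ) as conn:
+        conn.execute(
+            text("ALTER DATABASE MyDatabase SET ALLOW_SNAPSHOT_ISOLATION ON")
+        )
+        conn.execute(
+            text("ALTER DATABASE MyDatabase SET READ_COMMITTED_SNAPSHOT ON")
+        )
+
+Note that ``SET READ_COMMITTED_SNAPSHOT`` typically requires that no other
+sessions are connected to the database, so these statements are normally run
+from an administrative session rather than from application code.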
+ +""" # noqa + +from __future__ import annotations + +import codecs +import datetime +import operator +import re +from typing import overload +from typing import TYPE_CHECKING +from uuid import UUID as _python_UUID + +from . import information_schema as ischema +from .json import JSON +from .json import JSONIndexType +from .json import JSONPathType +from ... import exc +from ... import Identity +from ... import schema as sa_schema +from ... import Sequence +from ... import sql +from ... import text +from ... import util +from ...engine import cursor as _cursor +from ...engine import default +from ...engine import reflection +from ...engine.reflection import ReflectionDefaults +from ...sql import coercions +from ...sql import compiler +from ...sql import elements +from ...sql import expression +from ...sql import func +from ...sql import quoted_name +from ...sql import roles +from ...sql import sqltypes +from ...sql import try_cast as try_cast # noqa: F401 +from ...sql import util as sql_util +from ...sql._typing import is_sql_compiler +from ...sql.compiler import InsertmanyvaluesSentinelOpts +from ...sql.elements import TryCast as TryCast # noqa: F401 +from ...types import BIGINT +from ...types import BINARY +from ...types import CHAR +from ...types import DATE +from ...types import DATETIME +from ...types import DECIMAL +from ...types import FLOAT +from ...types import INTEGER +from ...types import NCHAR +from ...types import NUMERIC +from ...types import NVARCHAR +from ...types import SMALLINT +from ...types import TEXT +from ...types import VARCHAR +from ...util import update_wrapper +from ...util.typing import Literal + +if TYPE_CHECKING: + from ...sql.dml import DMLState + from ...sql.selectable import TableClause + +# https://sqlserverbuilds.blogspot.com/ +MS_2017_VERSION = (14,) +MS_2016_VERSION = (13,) +MS_2014_VERSION = (12,) +MS_2012_VERSION = (11,) +MS_2008_VERSION = (10,) +MS_2005_VERSION = (9,) +MS_2000_VERSION = (8,) + +RESERVED_WORDS = { + "add", + "all", + "alter", + "and", + "any", + "as", + "asc", + "authorization", + "backup", + "begin", + "between", + "break", + "browse", + "bulk", + "by", + "cascade", + "case", + "check", + "checkpoint", + "close", + "clustered", + "coalesce", + "collate", + "column", + "commit", + "compute", + "constraint", + "contains", + "containstable", + "continue", + "convert", + "create", + "cross", + "current", + "current_date", + "current_time", + "current_timestamp", + "current_user", + "cursor", + "database", + "dbcc", + "deallocate", + "declare", + "default", + "delete", + "deny", + "desc", + "disk", + "distinct", + "distributed", + "double", + "drop", + "dump", + "else", + "end", + "errlvl", + "escape", + "except", + "exec", + "execute", + "exists", + "exit", + "external", + "fetch", + "file", + "fillfactor", + "for", + "foreign", + "freetext", + "freetexttable", + "from", + "full", + "function", + "goto", + "grant", + "group", + "having", + "holdlock", + "identity", + "identity_insert", + "identitycol", + "if", + "in", + "index", + "inner", + "insert", + "intersect", + "into", + "is", + "join", + "key", + "kill", + "left", + "like", + "lineno", + "load", + "merge", + "national", + "nocheck", + "nonclustered", + "not", + "null", + "nullif", + "of", + "off", + "offsets", + "on", + "open", + "opendatasource", + "openquery", + "openrowset", + "openxml", + "option", + "or", + "order", + "outer", + "over", + "percent", + "pivot", + "plan", + "precision", + "primary", + "print", + "proc", + "procedure", + "public", + "raiserror", + "read", + 
"readtext", + "reconfigure", + "references", + "replication", + "restore", + "restrict", + "return", + "revert", + "revoke", + "right", + "rollback", + "rowcount", + "rowguidcol", + "rule", + "save", + "schema", + "securityaudit", + "select", + "session_user", + "set", + "setuser", + "shutdown", + "some", + "statistics", + "system_user", + "table", + "tablesample", + "textsize", + "then", + "to", + "top", + "tran", + "transaction", + "trigger", + "truncate", + "tsequal", + "union", + "unique", + "unpivot", + "update", + "updatetext", + "use", + "user", + "values", + "varying", + "view", + "waitfor", + "when", + "where", + "while", + "with", + "writetext", +} + + +class REAL(sqltypes.REAL): + """the SQL Server REAL datatype.""" + + def __init__(self, **kw): + # REAL is a synonym for FLOAT(24) on SQL server. + # it is only accepted as the word "REAL" in DDL, the numeric + # precision value is not allowed to be present + kw.setdefault("precision", 24) + super().__init__(**kw) + + +class DOUBLE_PRECISION(sqltypes.DOUBLE_PRECISION): + """the SQL Server DOUBLE PRECISION datatype. + + .. versionadded:: 2.0.11 + + """ + + def __init__(self, **kw): + # DOUBLE PRECISION is a synonym for FLOAT(53) on SQL server. + # it is only accepted as the word "DOUBLE PRECISION" in DDL, + # the numeric precision value is not allowed to be present + kw.setdefault("precision", 53) + super().__init__(**kw) + + +class TINYINT(sqltypes.Integer): + __visit_name__ = "TINYINT" + + +# MSSQL DATE/TIME types have varied behavior, sometimes returning +# strings. MSDate/TIME check for everything, and always +# filter bind parameters into datetime objects (required by pyodbc, +# not sure about other dialects). + + +class _MSDate(sqltypes.Date): + def bind_processor(self, dialect): + def process(value): + if type(value) == datetime.date: + return datetime.datetime(value.year, value.month, value.day) + else: + return value + + return process + + _reg = re.compile(r"(\d+)-(\d+)-(\d+)") + + def result_processor(self, dialect, coltype): + def process(value): + if isinstance(value, datetime.datetime): + return value.date() + elif isinstance(value, str): + m = self._reg.match(value) + if not m: + raise ValueError( + "could not parse %r as a date value" % (value,) + ) + return datetime.date(*[int(x or 0) for x in m.groups()]) + else: + return value + + return process + + +class TIME(sqltypes.TIME): + def __init__(self, precision=None, **kwargs): + self.precision = precision + super().__init__() + + __zero_date = datetime.date(1900, 1, 1) + + def bind_processor(self, dialect): + def process(value): + if isinstance(value, datetime.datetime): + value = datetime.datetime.combine( + self.__zero_date, value.time() + ) + elif isinstance(value, datetime.time): + """issue #5339 + per: https://github.com/mkleehammer/pyodbc/wiki/Tips-and-Tricks-by-Database-Platform#time-columns + pass TIME value as string + """ # noqa + value = str(value) + return value + + return process + + _reg = re.compile(r"(\d+):(\d+):(\d+)(?:\.(\d{0,6}))?") + + def result_processor(self, dialect, coltype): + def process(value): + if isinstance(value, datetime.datetime): + return value.time() + elif isinstance(value, str): + m = self._reg.match(value) + if not m: + raise ValueError( + "could not parse %r as a time value" % (value,) + ) + return datetime.time(*[int(x or 0) for x in m.groups()]) + else: + return value + + return process + + +_MSTime = TIME + + +class _BASETIMEIMPL(TIME): + __visit_name__ = "_BASETIMEIMPL" + + +class _DateTimeBase: + def bind_processor(self, 
dialect): + def process(value): + if type(value) == datetime.date: + return datetime.datetime(value.year, value.month, value.day) + else: + return value + + return process + + +class _MSDateTime(_DateTimeBase, sqltypes.DateTime): + pass + + +class SMALLDATETIME(_DateTimeBase, sqltypes.DateTime): + __visit_name__ = "SMALLDATETIME" + + +class DATETIME2(_DateTimeBase, sqltypes.DateTime): + __visit_name__ = "DATETIME2" + + def __init__(self, precision=None, **kw): + super().__init__(**kw) + self.precision = precision + + +class DATETIMEOFFSET(_DateTimeBase, sqltypes.DateTime): + __visit_name__ = "DATETIMEOFFSET" + + def __init__(self, precision=None, **kw): + super().__init__(**kw) + self.precision = precision + + +class _UnicodeLiteral: + def literal_processor(self, dialect): + def process(value): + value = value.replace("'", "''") + + if dialect.identifier_preparer._double_percents: + value = value.replace("%", "%%") + + return "N'%s'" % value + + return process + + +class _MSUnicode(_UnicodeLiteral, sqltypes.Unicode): + pass + + +class _MSUnicodeText(_UnicodeLiteral, sqltypes.UnicodeText): + pass + + +class TIMESTAMP(sqltypes._Binary): + """Implement the SQL Server TIMESTAMP type. + + Note this is **completely different** than the SQL Standard + TIMESTAMP type, which is not supported by SQL Server. It + is a read-only datatype that does not support INSERT of values. + + .. versionadded:: 1.2 + + .. seealso:: + + :class:`_mssql.ROWVERSION` + + """ + + __visit_name__ = "TIMESTAMP" + + # expected by _Binary to be present + length = None + + def __init__(self, convert_int=False): + """Construct a TIMESTAMP or ROWVERSION type. + + :param convert_int: if True, binary integer values will + be converted to integers on read. + + .. versionadded:: 1.2 + + """ + self.convert_int = convert_int + + def result_processor(self, dialect, coltype): + super_ = super().result_processor(dialect, coltype) + if self.convert_int: + + def process(value): + if super_: + value = super_(value) + if value is not None: + # https://stackoverflow.com/a/30403242/34549 + value = int(codecs.encode(value, "hex"), 16) + return value + + return process + else: + return super_ + + +class ROWVERSION(TIMESTAMP): + """Implement the SQL Server ROWVERSION type. + + The ROWVERSION datatype is a SQL Server synonym for the TIMESTAMP + datatype, however current SQL Server documentation suggests using + ROWVERSION for new datatypes going forward. + + The ROWVERSION datatype does **not** reflect (e.g. introspect) from the + database as itself; the returned datatype will be + :class:`_mssql.TIMESTAMP`. + + This is a read-only datatype that does not support INSERT of values. + + .. versionadded:: 1.2 + + .. seealso:: + + :class:`_mssql.TIMESTAMP` + + """ + + __visit_name__ = "ROWVERSION" + + +class NTEXT(sqltypes.UnicodeText): + """MSSQL NTEXT type, for variable-length unicode text up to 2^30 + characters.""" + + __visit_name__ = "NTEXT" + + +class VARBINARY(sqltypes.VARBINARY, sqltypes.LargeBinary): + """The MSSQL VARBINARY type. + + This type adds additional features to the core :class:`_types.VARBINARY` + type, including "deprecate_large_types" mode where + either ``VARBINARY(max)`` or IMAGE is rendered, as well as the SQL + Server ``FILESTREAM`` option. + + .. seealso:: + + :ref:`mssql_large_type_deprecation` + + """ + + __visit_name__ = "VARBINARY" + + def __init__(self, length=None, filestream=False): + """ + Construct a VARBINARY type. 
+ + :param length: optional, a length for the column for use in + DDL statements, for those binary types that accept a length, + such as the MySQL BLOB type. + + :param filestream=False: if True, renders the ``FILESTREAM`` keyword + in the table definition. In this case ``length`` must be ``None`` + or ``'max'``. + + .. versionadded:: 1.4.31 + + """ + + self.filestream = filestream + if self.filestream and length not in (None, "max"): + raise ValueError( + "length must be None or 'max' when setting filestream" + ) + super().__init__(length=length) + + +class IMAGE(sqltypes.LargeBinary): + __visit_name__ = "IMAGE" + + +class XML(sqltypes.Text): + """MSSQL XML type. + + This is a placeholder type for reflection purposes that does not include + any Python-side datatype support. It also does not currently support + additional arguments, such as "CONTENT", "DOCUMENT", + "xml_schema_collection". + + """ + + __visit_name__ = "XML" + + +class BIT(sqltypes.Boolean): + """MSSQL BIT type. + + Both pyodbc and pymssql return values from BIT columns as + Python so just subclass Boolean. + + """ + + __visit_name__ = "BIT" + + +class MONEY(sqltypes.TypeEngine): + __visit_name__ = "MONEY" + + +class SMALLMONEY(sqltypes.TypeEngine): + __visit_name__ = "SMALLMONEY" + + +class MSUUid(sqltypes.Uuid): + def bind_processor(self, dialect): + if self.native_uuid: + # this is currently assuming pyodbc; might not work for + # some other mssql driver + return None + else: + if self.as_uuid: + + def process(value): + if value is not None: + value = value.hex + return value + + return process + else: + + def process(value): + if value is not None: + value = value.replace("-", "").replace("''", "'") + return value + + return process + + def literal_processor(self, dialect): + if self.native_uuid: + + def process(value): + return f"""'{str(value).replace("''", "'")}'""" + + return process + else: + if self.as_uuid: + + def process(value): + return f"""'{value.hex}'""" + + return process + else: + + def process(value): + return f"""'{ + value.replace("-", "").replace("'", "''") + }'""" + + return process + + +class UNIQUEIDENTIFIER(sqltypes.Uuid[sqltypes._UUID_RETURN]): + __visit_name__ = "UNIQUEIDENTIFIER" + + @overload + def __init__( + self: UNIQUEIDENTIFIER[_python_UUID], as_uuid: Literal[True] = ... + ): ... + + @overload + def __init__( + self: UNIQUEIDENTIFIER[str], as_uuid: Literal[False] = ... + ): ... + + def __init__(self, as_uuid: bool = True): + """Construct a :class:`_mssql.UNIQUEIDENTIFIER` type. + + + :param as_uuid=True: if True, values will be interpreted + as Python uuid objects, converting to/from string via the + DBAPI. + + .. versionchanged: 2.0 Added direct "uuid" support to the + :class:`_mssql.UNIQUEIDENTIFIER` datatype; uuid interpretation + defaults to ``True``. + + """ + self.as_uuid = as_uuid + self.native_uuid = True + + +class SQL_VARIANT(sqltypes.TypeEngine): + __visit_name__ = "SQL_VARIANT" + + +# old names. 
+MSDateTime = _MSDateTime +MSDate = _MSDate +MSReal = REAL +MSTinyInteger = TINYINT +MSTime = TIME +MSSmallDateTime = SMALLDATETIME +MSDateTime2 = DATETIME2 +MSDateTimeOffset = DATETIMEOFFSET +MSText = TEXT +MSNText = NTEXT +MSString = VARCHAR +MSNVarchar = NVARCHAR +MSChar = CHAR +MSNChar = NCHAR +MSBinary = BINARY +MSVarBinary = VARBINARY +MSImage = IMAGE +MSBit = BIT +MSMoney = MONEY +MSSmallMoney = SMALLMONEY +MSUniqueIdentifier = UNIQUEIDENTIFIER +MSVariant = SQL_VARIANT + +ischema_names = { + "int": INTEGER, + "bigint": BIGINT, + "smallint": SMALLINT, + "tinyint": TINYINT, + "varchar": VARCHAR, + "nvarchar": NVARCHAR, + "char": CHAR, + "nchar": NCHAR, + "text": TEXT, + "ntext": NTEXT, + "decimal": DECIMAL, + "numeric": NUMERIC, + "float": FLOAT, + "datetime": DATETIME, + "datetime2": DATETIME2, + "datetimeoffset": DATETIMEOFFSET, + "date": DATE, + "time": TIME, + "smalldatetime": SMALLDATETIME, + "binary": BINARY, + "varbinary": VARBINARY, + "bit": BIT, + "real": REAL, + "double precision": DOUBLE_PRECISION, + "image": IMAGE, + "xml": XML, + "timestamp": TIMESTAMP, + "money": MONEY, + "smallmoney": SMALLMONEY, + "uniqueidentifier": UNIQUEIDENTIFIER, + "sql_variant": SQL_VARIANT, +} + + +class MSTypeCompiler(compiler.GenericTypeCompiler): + def _extend(self, spec, type_, length=None): + """Extend a string-type declaration with standard SQL + COLLATE annotations. + + """ + + if getattr(type_, "collation", None): + collation = "COLLATE %s" % type_.collation + else: + collation = None + + if not length: + length = type_.length + + if length: + spec = spec + "(%s)" % length + + return " ".join([c for c in (spec, collation) if c is not None]) + + def visit_double(self, type_, **kw): + return self.visit_DOUBLE_PRECISION(type_, **kw) + + def visit_FLOAT(self, type_, **kw): + precision = getattr(type_, "precision", None) + if precision is None: + return "FLOAT" + else: + return "FLOAT(%(precision)s)" % {"precision": precision} + + def visit_TINYINT(self, type_, **kw): + return "TINYINT" + + def visit_TIME(self, type_, **kw): + precision = getattr(type_, "precision", None) + if precision is not None: + return "TIME(%s)" % precision + else: + return "TIME" + + def visit_TIMESTAMP(self, type_, **kw): + return "TIMESTAMP" + + def visit_ROWVERSION(self, type_, **kw): + return "ROWVERSION" + + def visit_datetime(self, type_, **kw): + if type_.timezone: + return self.visit_DATETIMEOFFSET(type_, **kw) + else: + return self.visit_DATETIME(type_, **kw) + + def visit_DATETIMEOFFSET(self, type_, **kw): + precision = getattr(type_, "precision", None) + if precision is not None: + return "DATETIMEOFFSET(%s)" % type_.precision + else: + return "DATETIMEOFFSET" + + def visit_DATETIME2(self, type_, **kw): + precision = getattr(type_, "precision", None) + if precision is not None: + return "DATETIME2(%s)" % precision + else: + return "DATETIME2" + + def visit_SMALLDATETIME(self, type_, **kw): + return "SMALLDATETIME" + + def visit_unicode(self, type_, **kw): + return self.visit_NVARCHAR(type_, **kw) + + def visit_text(self, type_, **kw): + if self.dialect.deprecate_large_types: + return self.visit_VARCHAR(type_, **kw) + else: + return self.visit_TEXT(type_, **kw) + + def visit_unicode_text(self, type_, **kw): + if self.dialect.deprecate_large_types: + return self.visit_NVARCHAR(type_, **kw) + else: + return self.visit_NTEXT(type_, **kw) + + def visit_NTEXT(self, type_, **kw): + return self._extend("NTEXT", type_) + + def visit_TEXT(self, type_, **kw): + return self._extend("TEXT", type_) + + def 
visit_VARCHAR(self, type_, **kw): + return self._extend("VARCHAR", type_, length=type_.length or "max") + + def visit_CHAR(self, type_, **kw): + return self._extend("CHAR", type_) + + def visit_NCHAR(self, type_, **kw): + return self._extend("NCHAR", type_) + + def visit_NVARCHAR(self, type_, **kw): + return self._extend("NVARCHAR", type_, length=type_.length or "max") + + def visit_date(self, type_, **kw): + if self.dialect.server_version_info < MS_2008_VERSION: + return self.visit_DATETIME(type_, **kw) + else: + return self.visit_DATE(type_, **kw) + + def visit__BASETIMEIMPL(self, type_, **kw): + return self.visit_time(type_, **kw) + + def visit_time(self, type_, **kw): + if self.dialect.server_version_info < MS_2008_VERSION: + return self.visit_DATETIME(type_, **kw) + else: + return self.visit_TIME(type_, **kw) + + def visit_large_binary(self, type_, **kw): + if self.dialect.deprecate_large_types: + return self.visit_VARBINARY(type_, **kw) + else: + return self.visit_IMAGE(type_, **kw) + + def visit_IMAGE(self, type_, **kw): + return "IMAGE" + + def visit_XML(self, type_, **kw): + return "XML" + + def visit_VARBINARY(self, type_, **kw): + text = self._extend("VARBINARY", type_, length=type_.length or "max") + if getattr(type_, "filestream", False): + text += " FILESTREAM" + return text + + def visit_boolean(self, type_, **kw): + return self.visit_BIT(type_) + + def visit_BIT(self, type_, **kw): + return "BIT" + + def visit_JSON(self, type_, **kw): + # this is a bit of a break with SQLAlchemy's convention of + # "UPPERCASE name goes to UPPERCASE type name with no modification" + return self._extend("NVARCHAR", type_, length="max") + + def visit_MONEY(self, type_, **kw): + return "MONEY" + + def visit_SMALLMONEY(self, type_, **kw): + return "SMALLMONEY" + + def visit_uuid(self, type_, **kw): + if type_.native_uuid: + return self.visit_UNIQUEIDENTIFIER(type_, **kw) + else: + return super().visit_uuid(type_, **kw) + + def visit_UNIQUEIDENTIFIER(self, type_, **kw): + return "UNIQUEIDENTIFIER" + + def visit_SQL_VARIANT(self, type_, **kw): + return "SQL_VARIANT" + + +class MSExecutionContext(default.DefaultExecutionContext): + _enable_identity_insert = False + _select_lastrowid = False + _lastrowid = None + + dialect: MSDialect + + def _opt_encode(self, statement): + if self.compiled and self.compiled.schema_translate_map: + rst = self.compiled.preparer._render_schema_translates + statement = rst(statement, self.compiled.schema_translate_map) + + return statement + + def pre_exec(self): + """Activate IDENTITY_INSERT if needed.""" + + if self.isinsert: + if TYPE_CHECKING: + assert is_sql_compiler(self.compiled) + assert isinstance(self.compiled.compile_state, DMLState) + assert isinstance( + self.compiled.compile_state.dml_table, TableClause + ) + + tbl = self.compiled.compile_state.dml_table + id_column = tbl._autoincrement_column + + if id_column is not None and ( + not isinstance(id_column.default, Sequence) + ): + insert_has_identity = True + compile_state = self.compiled.dml_compile_state + self._enable_identity_insert = ( + id_column.key in self.compiled_parameters[0] + ) or ( + compile_state._dict_parameters + and (id_column.key in compile_state._insert_col_keys) + ) + + else: + insert_has_identity = False + self._enable_identity_insert = False + + self._select_lastrowid = ( + not self.compiled.inline + and insert_has_identity + and not self.compiled.effective_returning + and not self._enable_identity_insert + and not self.executemany + ) + + if self._enable_identity_insert: + 
self.root_connection._cursor_execute( + self.cursor, + self._opt_encode( + "SET IDENTITY_INSERT %s ON" + % self.identifier_preparer.format_table(tbl) + ), + (), + self, + ) + + def post_exec(self): + """Disable IDENTITY_INSERT if enabled.""" + + conn = self.root_connection + + if self.isinsert or self.isupdate or self.isdelete: + self._rowcount = self.cursor.rowcount + + if self._select_lastrowid: + if self.dialect.use_scope_identity: + conn._cursor_execute( + self.cursor, + "SELECT scope_identity() AS lastrowid", + (), + self, + ) + else: + conn._cursor_execute( + self.cursor, "SELECT @@identity AS lastrowid", (), self + ) + # fetchall() ensures the cursor is consumed without closing it + row = self.cursor.fetchall()[0] + self._lastrowid = int(row[0]) + + self.cursor_fetch_strategy = _cursor._NO_CURSOR_DML + elif ( + self.compiled is not None + and is_sql_compiler(self.compiled) + and self.compiled.effective_returning + ): + self.cursor_fetch_strategy = ( + _cursor.FullyBufferedCursorFetchStrategy( + self.cursor, + self.cursor.description, + self.cursor.fetchall(), + ) + ) + + if self._enable_identity_insert: + if TYPE_CHECKING: + assert is_sql_compiler(self.compiled) + assert isinstance(self.compiled.compile_state, DMLState) + assert isinstance( + self.compiled.compile_state.dml_table, TableClause + ) + conn._cursor_execute( + self.cursor, + self._opt_encode( + "SET IDENTITY_INSERT %s OFF" + % self.identifier_preparer.format_table( + self.compiled.compile_state.dml_table + ) + ), + (), + self, + ) + + def get_lastrowid(self): + return self._lastrowid + + def handle_dbapi_exception(self, e): + if self._enable_identity_insert: + try: + self.cursor.execute( + self._opt_encode( + "SET IDENTITY_INSERT %s OFF" + % self.identifier_preparer.format_table( + self.compiled.compile_state.dml_table + ) + ) + ) + except Exception: + pass + + def fire_sequence(self, seq, type_): + return self._execute_scalar( + ( + "SELECT NEXT VALUE FOR %s" + % self.identifier_preparer.format_sequence(seq) + ), + type_, + ) + + def get_insert_default(self, column): + if ( + isinstance(column, sa_schema.Column) + and column is column.table._autoincrement_column + and isinstance(column.default, sa_schema.Sequence) + and column.default.optional + ): + return None + return super().get_insert_default(column) + + +class MSSQLCompiler(compiler.SQLCompiler): + returning_precedes_values = True + + extract_map = util.update_copy( + compiler.SQLCompiler.extract_map, + { + "doy": "dayofyear", + "dow": "weekday", + "milliseconds": "millisecond", + "microseconds": "microsecond", + }, + ) + + def __init__(self, *args, **kwargs): + self.tablealiases = {} + super().__init__(*args, **kwargs) + + def _format_frame_clause(self, range_, **kw): + kw["literal_execute"] = True + return super()._format_frame_clause(range_, **kw) + + def _with_legacy_schema_aliasing(fn): + def decorate(self, *arg, **kw): + if self.dialect.legacy_schema_aliasing: + return fn(self, *arg, **kw) + else: + super_ = getattr(super(MSSQLCompiler, self), fn.__name__) + return super_(*arg, **kw) + + return decorate + + def visit_now_func(self, fn, **kw): + return "CURRENT_TIMESTAMP" + + def visit_current_date_func(self, fn, **kw): + return "GETDATE()" + + def visit_length_func(self, fn, **kw): + return "LEN%s" % self.function_argspec(fn, **kw) + + def visit_char_length_func(self, fn, **kw): + return "LEN%s" % self.function_argspec(fn, **kw) + + def visit_aggregate_strings_func(self, fn, **kw): + expr = fn.clauses.clauses[0]._compiler_dispatch(self, **kw) + 
kw["literal_execute"] = True + delimeter = fn.clauses.clauses[1]._compiler_dispatch(self, **kw) + return f"string_agg({expr}, {delimeter})" + + def visit_concat_op_expression_clauselist( + self, clauselist, operator, **kw + ): + return " + ".join(self.process(elem, **kw) for elem in clauselist) + + def visit_concat_op_binary(self, binary, operator, **kw): + return "%s + %s" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + def visit_true(self, expr, **kw): + return "1" + + def visit_false(self, expr, **kw): + return "0" + + def visit_match_op_binary(self, binary, operator, **kw): + return "CONTAINS (%s, %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + def get_select_precolumns(self, select, **kw): + """MS-SQL puts TOP, it's version of LIMIT here""" + + s = super().get_select_precolumns(select, **kw) + + if select._has_row_limiting_clause and self._use_top(select): + # ODBC drivers and possibly others + # don't support bind params in the SELECT clause on SQL Server. + # so have to use literal here. + kw["literal_execute"] = True + s += "TOP %s " % self.process( + self._get_limit_or_fetch(select), **kw + ) + if select._fetch_clause is not None: + if select._fetch_clause_options["percent"]: + s += "PERCENT " + if select._fetch_clause_options["with_ties"]: + s += "WITH TIES " + + return s + + def get_from_hint_text(self, table, text): + return text + + def get_crud_hint_text(self, table, text): + return text + + def _get_limit_or_fetch(self, select): + if select._fetch_clause is None: + return select._limit_clause + else: + return select._fetch_clause + + def _use_top(self, select): + return (select._offset_clause is None) and ( + select._simple_int_clause(select._limit_clause) + or ( + # limit can use TOP with is by itself. fetch only uses TOP + # when it needs to because of PERCENT and/or WITH TIES + # TODO: Why? shouldn't we use TOP always ? + select._simple_int_clause(select._fetch_clause) + and ( + select._fetch_clause_options["percent"] + or select._fetch_clause_options["with_ties"] + ) + ) + ) + + def limit_clause(self, cs, **kwargs): + return "" + + def _check_can_use_fetch_limit(self, select): + # to use ROW_NUMBER(), an ORDER BY is required. + # OFFSET are FETCH are options of the ORDER BY clause + if not select._order_by_clause.clauses: + raise exc.CompileError( + "MSSQL requires an order_by when " + "using an OFFSET or a non-simple " + "LIMIT clause" + ) + + if select._fetch_clause_options is not None and ( + select._fetch_clause_options["percent"] + or select._fetch_clause_options["with_ties"] + ): + raise exc.CompileError( + "MSSQL needs TOP to use PERCENT and/or WITH TIES. " + "Only simple fetch without offset can be used." + ) + + def _row_limit_clause(self, select, **kw): + """MSSQL 2012 supports OFFSET/FETCH operators + Use it instead subquery with row_number + + """ + + if self.dialect._supports_offset_fetch and not self._use_top(select): + self._check_can_use_fetch_limit(select) + + return self.fetch_clause( + select, + fetch_clause=self._get_limit_or_fetch(select), + require_offset=True, + **kw, + ) + + else: + return "" + + def visit_try_cast(self, element, **kw): + return "TRY_CAST (%s AS %s)" % ( + self.process(element.clause, **kw), + self.process(element.typeclause, **kw), + ) + + def translate_select_structure(self, select_stmt, **kwargs): + """Look for ``LIMIT`` and OFFSET in a select statement, and if + so tries to wrap it in a subquery with ``row_number()`` criterion. 
+ MSSQL 2012 and above are excluded + + """ + select = select_stmt + + if ( + select._has_row_limiting_clause + and not self.dialect._supports_offset_fetch + and not self._use_top(select) + and not getattr(select, "_mssql_visit", None) + ): + self._check_can_use_fetch_limit(select) + + _order_by_clauses = [ + sql_util.unwrap_label_reference(elem) + for elem in select._order_by_clause.clauses + ] + + limit_clause = self._get_limit_or_fetch(select) + offset_clause = select._offset_clause + + select = select._generate() + select._mssql_visit = True + select = ( + select.add_columns( + sql.func.ROW_NUMBER() + .over(order_by=_order_by_clauses) + .label("mssql_rn") + ) + .order_by(None) + .alias() + ) + + mssql_rn = sql.column("mssql_rn") + limitselect = sql.select( + *[c for c in select.c if c.key != "mssql_rn"] + ) + if offset_clause is not None: + limitselect = limitselect.where(mssql_rn > offset_clause) + if limit_clause is not None: + limitselect = limitselect.where( + mssql_rn <= (limit_clause + offset_clause) + ) + else: + limitselect = limitselect.where(mssql_rn <= (limit_clause)) + return limitselect + else: + return select + + @_with_legacy_schema_aliasing + def visit_table(self, table, mssql_aliased=False, iscrud=False, **kwargs): + if mssql_aliased is table or iscrud: + return super().visit_table(table, **kwargs) + + # alias schema-qualified tables + alias = self._schema_aliased_table(table) + if alias is not None: + return self.process(alias, mssql_aliased=table, **kwargs) + else: + return super().visit_table(table, **kwargs) + + @_with_legacy_schema_aliasing + def visit_alias(self, alias, **kw): + # translate for schema-qualified table aliases + kw["mssql_aliased"] = alias.element + return super().visit_alias(alias, **kw) + + @_with_legacy_schema_aliasing + def visit_column(self, column, add_to_result_map=None, **kw): + if ( + column.table is not None + and (not self.isupdate and not self.isdelete) + or self.is_subquery() + ): + # translate for schema-qualified table aliases + t = self._schema_aliased_table(column.table) + if t is not None: + converted = elements._corresponding_column_or_error(t, column) + if add_to_result_map is not None: + add_to_result_map( + column.name, + column.name, + (column, column.name, column.key), + column.type, + ) + + return super().visit_column(converted, **kw) + + return super().visit_column( + column, add_to_result_map=add_to_result_map, **kw + ) + + def _schema_aliased_table(self, table): + if getattr(table, "schema", None) is not None: + if table not in self.tablealiases: + self.tablealiases[table] = table.alias() + return self.tablealiases[table] + else: + return None + + def visit_extract(self, extract, **kw): + field = self.extract_map.get(extract.field, extract.field) + return "DATEPART(%s, %s)" % (field, self.process(extract.expr, **kw)) + + def visit_savepoint(self, savepoint_stmt, **kw): + return "SAVE TRANSACTION %s" % self.preparer.format_savepoint( + savepoint_stmt + ) + + def visit_rollback_to_savepoint(self, savepoint_stmt, **kw): + return "ROLLBACK TRANSACTION %s" % self.preparer.format_savepoint( + savepoint_stmt + ) + + def visit_binary(self, binary, **kwargs): + """Move bind parameters to the right-hand side of an operator, where + possible. 
+ + """ + if ( + isinstance(binary.left, expression.BindParameter) + and binary.operator == operator.eq + and not isinstance(binary.right, expression.BindParameter) + ): + return self.process( + expression.BinaryExpression( + binary.right, binary.left, binary.operator + ), + **kwargs, + ) + return super().visit_binary(binary, **kwargs) + + def returning_clause( + self, stmt, returning_cols, *, populate_result_map, **kw + ): + # SQL server returning clause requires that the columns refer to + # the virtual table names "inserted" or "deleted". Here, we make + # a simple alias of our table with that name, and then adapt the + # columns we have from the list of RETURNING columns to that new name + # so that they render as "inserted." / "deleted.". + + if stmt.is_insert or stmt.is_update: + target = stmt.table.alias("inserted") + elif stmt.is_delete: + target = stmt.table.alias("deleted") + else: + assert False, "expected Insert, Update or Delete statement" + + adapter = sql_util.ClauseAdapter(target) + + # adapter.traverse() takes a column from our target table and returns + # the one that is linked to the "inserted" / "deleted" tables. So in + # order to retrieve these values back from the result (e.g. like + # row[column]), tell the compiler to also add the original unadapted + # column to the result map. Before #4877, these were (unknowingly) + # falling back using string name matching in the result set which + # necessarily used an expensive KeyError in order to match. + + columns = [ + self._label_returning_column( + stmt, + adapter.traverse(column), + populate_result_map, + {"result_map_targets": (column,)}, + fallback_label_name=fallback_label_name, + column_is_repeated=repeated, + name=name, + proxy_name=proxy_name, + **kw, + ) + for ( + name, + proxy_name, + fallback_label_name, + column, + repeated, + ) in stmt._generate_columns_plus_names( + True, cols=expression._select_iterables(returning_cols) + ) + ] + + return "OUTPUT " + ", ".join(columns) + + def get_cte_preamble(self, recursive): + # SQL Server finds it too inconvenient to accept + # an entirely optional, SQL standard specified, + # "RECURSIVE" word with their "WITH", + # so here we go + return "WITH" + + def label_select_column(self, select, column, asfrom): + if isinstance(column, expression.Function): + return column.label(None) + else: + return super().label_select_column(select, column, asfrom) + + def for_update_clause(self, select, **kw): + # "FOR UPDATE" is only allowed on "DECLARE CURSOR" which + # SQLAlchemy doesn't use + return "" + + def order_by_clause(self, select, **kw): + # MSSQL only allows ORDER BY in subqueries if there is a LIMIT: + # "The ORDER BY clause is invalid in views, inline functions, + # derived tables, subqueries, and common table expressions, + # unless TOP, OFFSET or FOR XML is also specified." + if ( + self.is_subquery() + and not self._use_top(select) + and ( + select._offset is None + or not self.dialect._supports_offset_fetch + ) + ): + # avoid processing the order by clause if we won't end up + # using it, because we don't want all the bind params tacked + # onto the positional list if that is what the dbapi requires + return "" + + order_by = self.process(select._order_by_clause, **kw) + + if order_by: + return " ORDER BY " + order_by + else: + return "" + + def update_from_clause( + self, update_stmt, from_table, extra_froms, from_hints, **kw + ): + """Render the UPDATE..FROM clause specific to MSSQL. 
+ + In MSSQL, if the UPDATE statement involves an alias of the table to + be updated, then the table itself must be added to the FROM list as + well. Otherwise, it is optional. Here, we add it regardless. + + """ + return "FROM " + ", ".join( + t._compiler_dispatch(self, asfrom=True, fromhints=from_hints, **kw) + for t in [from_table] + extra_froms + ) + + def delete_table_clause(self, delete_stmt, from_table, extra_froms, **kw): + """If we have extra froms make sure we render any alias as hint.""" + ashint = False + if extra_froms: + ashint = True + return from_table._compiler_dispatch( + self, asfrom=True, iscrud=True, ashint=ashint, **kw + ) + + def delete_extra_from_clause( + self, delete_stmt, from_table, extra_froms, from_hints, **kw + ): + """Render the DELETE .. FROM clause specific to MSSQL. + + Yes, it has the FROM keyword twice. + + """ + return "FROM " + ", ".join( + t._compiler_dispatch(self, asfrom=True, fromhints=from_hints, **kw) + for t in [from_table] + extra_froms + ) + + def visit_empty_set_expr(self, type_, **kw): + return "SELECT 1 WHERE 1!=1" + + def visit_is_distinct_from_binary(self, binary, operator, **kw): + return "NOT EXISTS (SELECT %s INTERSECT SELECT %s)" % ( + self.process(binary.left), + self.process(binary.right), + ) + + def visit_is_not_distinct_from_binary(self, binary, operator, **kw): + return "EXISTS (SELECT %s INTERSECT SELECT %s)" % ( + self.process(binary.left), + self.process(binary.right), + ) + + def _render_json_extract_from_binary(self, binary, operator, **kw): + # note we are intentionally calling upon the process() calls in the + # order in which they appear in the SQL String as this is used + # by positional parameter rendering + + if binary.type._type_affinity is sqltypes.JSON: + return "JSON_QUERY(%s, %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + # as with other dialects, start with an explicit test for NULL + case_expression = "CASE JSON_VALUE(%s, %s) WHEN NULL THEN NULL" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + if binary.type._type_affinity is sqltypes.Integer: + type_expression = "ELSE CAST(JSON_VALUE(%s, %s) AS INTEGER)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + elif binary.type._type_affinity is sqltypes.Numeric: + type_expression = "ELSE CAST(JSON_VALUE(%s, %s) AS %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ( + "FLOAT" + if isinstance(binary.type, sqltypes.Float) + else "NUMERIC(%s, %s)" + % (binary.type.precision, binary.type.scale) + ), + ) + elif binary.type._type_affinity is sqltypes.Boolean: + # the NULL handling is particularly weird with boolean, so + # explicitly return numeric (BIT) constants + type_expression = ( + "WHEN 'true' THEN 1 WHEN 'false' THEN 0 ELSE NULL" + ) + elif binary.type._type_affinity is sqltypes.String: + # TODO: does this comment (from mysql) apply to here, too? + # this fails with a JSON value that's a four byte unicode + # string. 
SQLite has the same problem at the moment + type_expression = "ELSE JSON_VALUE(%s, %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + else: + # other affinity....this is not expected right now + type_expression = "ELSE JSON_QUERY(%s, %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + return case_expression + " " + type_expression + " END" + + def visit_json_getitem_op_binary(self, binary, operator, **kw): + return self._render_json_extract_from_binary(binary, operator, **kw) + + def visit_json_path_getitem_op_binary(self, binary, operator, **kw): + return self._render_json_extract_from_binary(binary, operator, **kw) + + def visit_sequence(self, seq, **kw): + return "NEXT VALUE FOR %s" % self.preparer.format_sequence(seq) + + +class MSSQLStrictCompiler(MSSQLCompiler): + """A subclass of MSSQLCompiler which disables the usage of bind + parameters where not allowed natively by MS-SQL. + + A dialect may use this compiler on a platform where native + binds are used. + + """ + + ansi_bind_rules = True + + def visit_in_op_binary(self, binary, operator, **kw): + kw["literal_execute"] = True + return "%s IN %s" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + def visit_not_in_op_binary(self, binary, operator, **kw): + kw["literal_execute"] = True + return "%s NOT IN %s" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + def render_literal_value(self, value, type_): + """ + For date and datetime values, convert to a string + format acceptable to MSSQL. That seems to be the + so-called ODBC canonical date format which looks + like this: + + yyyy-mm-dd hh:mi:ss.mmm(24h) + + For other data types, call the base class implementation. + """ + # datetime and date are both subclasses of datetime.date + if issubclass(type(value), datetime.date): + # SQL Server wants single quotes around the date string. + return "'" + str(value) + "'" + else: + return super().render_literal_value(value, type_) + + +class MSDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): + colspec = self.preparer.format_column(column) + + # type is not accepted in a computed column + if column.computed is not None: + colspec += " " + self.process(column.computed) + else: + colspec += " " + self.dialect.type_compiler_instance.process( + column.type, type_expression=column + ) + + if column.nullable is not None: + if ( + not column.nullable + or column.primary_key + or isinstance(column.default, sa_schema.Sequence) + or column.autoincrement is True + or column.identity + ): + colspec += " NOT NULL" + elif column.computed is None: + # don't specify "NULL" for computed columns + colspec += " NULL" + + if column.table is None: + raise exc.CompileError( + "mssql requires Table-bound columns " + "in order to generate DDL" + ) + + d_opt = column.dialect_options["mssql"] + start = d_opt["identity_start"] + increment = d_opt["identity_increment"] + if start is not None or increment is not None: + if column.identity: + raise exc.CompileError( + "Cannot specify options 'mssql_identity_start' and/or " + "'mssql_identity_increment' while also using the " + "'Identity' construct." + ) + util.warn_deprecated( + "The dialect options 'mssql_identity_start' and " + "'mssql_identity_increment' are deprecated. 
" + "Use the 'Identity' object instead.", + "1.4", + ) + + if column.identity: + colspec += self.process(column.identity, **kwargs) + elif ( + column is column.table._autoincrement_column + or column.autoincrement is True + ) and ( + not isinstance(column.default, Sequence) or column.default.optional + ): + colspec += self.process(Identity(start=start, increment=increment)) + else: + default = self.get_column_default_string(column) + if default is not None: + colspec += " DEFAULT " + default + + return colspec + + def visit_create_index(self, create, include_schema=False, **kw): + index = create.element + self._verify_index_table(index) + preparer = self.preparer + text = "CREATE " + if index.unique: + text += "UNIQUE " + + # handle clustering option + clustered = index.dialect_options["mssql"]["clustered"] + if clustered is not None: + if clustered: + text += "CLUSTERED " + else: + text += "NONCLUSTERED " + + # handle columnstore option (has no negative value) + columnstore = index.dialect_options["mssql"]["columnstore"] + if columnstore: + text += "COLUMNSTORE " + + text += "INDEX %s ON %s" % ( + self._prepared_index_name(index, include_schema=include_schema), + preparer.format_table(index.table), + ) + + # in some case mssql allows indexes with no columns defined + if len(index.expressions) > 0: + text += " (%s)" % ", ".join( + self.sql_compiler.process( + expr, include_table=False, literal_binds=True + ) + for expr in index.expressions + ) + + # handle other included columns + if index.dialect_options["mssql"]["include"]: + inclusions = [ + index.table.c[col] if isinstance(col, str) else col + for col in index.dialect_options["mssql"]["include"] + ] + + text += " INCLUDE (%s)" % ", ".join( + [preparer.quote(c.name) for c in inclusions] + ) + + whereclause = index.dialect_options["mssql"]["where"] + + if whereclause is not None: + whereclause = coercions.expect( + roles.DDLExpressionRole, whereclause + ) + + where_compiled = self.sql_compiler.process( + whereclause, include_table=False, literal_binds=True + ) + text += " WHERE " + where_compiled + + return text + + def visit_drop_index(self, drop, **kw): + return "\nDROP INDEX %s ON %s" % ( + self._prepared_index_name(drop.element, include_schema=False), + self.preparer.format_table(drop.element.table), + ) + + def visit_primary_key_constraint(self, constraint, **kw): + if len(constraint) == 0: + return "" + text = "" + if constraint.name is not None: + text += "CONSTRAINT %s " % self.preparer.format_constraint( + constraint + ) + text += "PRIMARY KEY " + + clustered = constraint.dialect_options["mssql"]["clustered"] + if clustered is not None: + if clustered: + text += "CLUSTERED " + else: + text += "NONCLUSTERED " + + text += "(%s)" % ", ".join( + self.preparer.quote(c.name) for c in constraint + ) + text += self.define_constraint_deferrability(constraint) + return text + + def visit_unique_constraint(self, constraint, **kw): + if len(constraint) == 0: + return "" + text = "" + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name + text += "UNIQUE %s" % self.define_unique_constraint_distinct( + constraint, **kw + ) + clustered = constraint.dialect_options["mssql"]["clustered"] + if clustered is not None: + if clustered: + text += "CLUSTERED " + else: + text += "NONCLUSTERED " + + text += "(%s)" % ", ".join( + self.preparer.quote(c.name) for c in constraint + ) + text += self.define_constraint_deferrability(constraint) 
+ return text + + def visit_computed_column(self, generated, **kw): + text = "AS (%s)" % self.sql_compiler.process( + generated.sqltext, include_table=False, literal_binds=True + ) + # explicitly check for True|False since None means server default + if generated.persisted is True: + text += " PERSISTED" + return text + + def visit_set_table_comment(self, create, **kw): + schema = self.preparer.schema_for_object(create.element) + schema_name = schema if schema else self.dialect.default_schema_name + return ( + "execute sp_addextendedproperty 'MS_Description', " + "{}, 'schema', {}, 'table', {}".format( + self.sql_compiler.render_literal_value( + create.element.comment, sqltypes.NVARCHAR() + ), + self.preparer.quote_schema(schema_name), + self.preparer.format_table(create.element, use_schema=False), + ) + ) + + def visit_drop_table_comment(self, drop, **kw): + schema = self.preparer.schema_for_object(drop.element) + schema_name = schema if schema else self.dialect.default_schema_name + return ( + "execute sp_dropextendedproperty 'MS_Description', 'schema', " + "{}, 'table', {}".format( + self.preparer.quote_schema(schema_name), + self.preparer.format_table(drop.element, use_schema=False), + ) + ) + + def visit_set_column_comment(self, create, **kw): + schema = self.preparer.schema_for_object(create.element.table) + schema_name = schema if schema else self.dialect.default_schema_name + return ( + "execute sp_addextendedproperty 'MS_Description', " + "{}, 'schema', {}, 'table', {}, 'column', {}".format( + self.sql_compiler.render_literal_value( + create.element.comment, sqltypes.NVARCHAR() + ), + self.preparer.quote_schema(schema_name), + self.preparer.format_table( + create.element.table, use_schema=False + ), + self.preparer.format_column(create.element), + ) + ) + + def visit_drop_column_comment(self, drop, **kw): + schema = self.preparer.schema_for_object(drop.element.table) + schema_name = schema if schema else self.dialect.default_schema_name + return ( + "execute sp_dropextendedproperty 'MS_Description', 'schema', " + "{}, 'table', {}, 'column', {}".format( + self.preparer.quote_schema(schema_name), + self.preparer.format_table( + drop.element.table, use_schema=False + ), + self.preparer.format_column(drop.element), + ) + ) + + def visit_create_sequence(self, create, **kw): + prefix = None + if create.element.data_type is not None: + data_type = create.element.data_type + prefix = " AS %s" % self.type_compiler.process(data_type) + return super().visit_create_sequence(create, prefix=prefix, **kw) + + def visit_identity_column(self, identity, **kw): + text = " IDENTITY" + if identity.start is not None or identity.increment is not None: + start = 1 if identity.start is None else identity.start + increment = 1 if identity.increment is None else identity.increment + text += "(%s,%s)" % (start, increment) + return text + + +class MSIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words = RESERVED_WORDS + + def __init__(self, dialect): + super().__init__( + dialect, + initial_quote="[", + final_quote="]", + quote_case_sensitive_collations=False, + ) + + def _escape_identifier(self, value): + return value.replace("]", "]]") + + def _unescape_identifier(self, value): + return value.replace("]]", "]") + + def quote_schema(self, schema, force=None): + """Prepare a quoted table and schema name.""" + + # need to re-implement the deprecation warning entirely + if force is not None: + # not using the util.deprecated_params() decorator in this + # case because of the additional function 
call overhead on this + # very performance-critical spot. + util.warn_deprecated( + "The IdentifierPreparer.quote_schema.force parameter is " + "deprecated and will be removed in a future release. This " + "flag has no effect on the behavior of the " + "IdentifierPreparer.quote method; please refer to " + "quoted_name().", + version="1.3", + ) + + dbname, owner = _schema_elements(schema) + if dbname: + result = "%s.%s" % (self.quote(dbname), self.quote(owner)) + elif owner: + result = self.quote(owner) + else: + result = "" + return result + + +def _db_plus_owner_listing(fn): + def wrap(dialect, connection, schema=None, **kw): + dbname, owner = _owner_plus_db(dialect, schema) + return _switch_db( + dbname, + connection, + fn, + dialect, + connection, + dbname, + owner, + schema, + **kw, + ) + + return update_wrapper(wrap, fn) + + +def _db_plus_owner(fn): + def wrap(dialect, connection, tablename, schema=None, **kw): + dbname, owner = _owner_plus_db(dialect, schema) + return _switch_db( + dbname, + connection, + fn, + dialect, + connection, + tablename, + dbname, + owner, + schema, + **kw, + ) + + return update_wrapper(wrap, fn) + + +def _switch_db(dbname, connection, fn, *arg, **kw): + if dbname: + current_db = connection.exec_driver_sql("select db_name()").scalar() + if current_db != dbname: + connection.exec_driver_sql( + "use %s" % connection.dialect.identifier_preparer.quote(dbname) + ) + try: + return fn(*arg, **kw) + finally: + if dbname and current_db != dbname: + connection.exec_driver_sql( + "use %s" + % connection.dialect.identifier_preparer.quote(current_db) + ) + + +def _owner_plus_db(dialect, schema): + if not schema: + return None, dialect.default_schema_name + else: + return _schema_elements(schema) + + +_memoized_schema = util.LRUCache() + + +def _schema_elements(schema): + if isinstance(schema, quoted_name) and schema.quote: + return None, schema + + if schema in _memoized_schema: + return _memoized_schema[schema] + + # tests for this function are in: + # test/dialect/mssql/test_reflection.py -> + # OwnerPlusDBTest.test_owner_database_pairs + # test/dialect/mssql/test_compiler.py -> test_force_schema_* + # test/dialect/mssql/test_compiler.py -> test_schema_many_tokens_* + # + + if schema.startswith("__[SCHEMA_"): + return None, schema + + push = [] + symbol = "" + bracket = False + has_brackets = False + for token in re.split(r"(\[|\]|\.)", schema): + if not token: + continue + if token == "[": + bracket = True + has_brackets = True + elif token == "]": + bracket = False + elif not bracket and token == ".": + if has_brackets: + push.append("[%s]" % symbol) + else: + push.append(symbol) + symbol = "" + has_brackets = False + else: + symbol += token + if symbol: + push.append(symbol) + if len(push) > 1: + dbname, owner = ".".join(push[0:-1]), push[-1] + + # test for internal brackets + if re.match(r".*\].*\[.*", dbname[1:-1]): + dbname = quoted_name(dbname, quote=False) + else: + dbname = dbname.lstrip("[").rstrip("]") + + elif len(push): + dbname, owner = None, push[0] + else: + dbname, owner = None, None + + _memoized_schema[schema] = dbname, owner + return dbname, owner + + +class MSDialect(default.DefaultDialect): + # will assume it's at least mssql2005 + name = "mssql" + supports_statement_cache = True + supports_default_values = True + supports_empty_insert = False + favor_returning_over_lastrowid = True + + returns_native_bytes = True + + supports_comments = True + supports_default_metavalue = False + """dialect supports INSERT... 
VALUES (DEFAULT) syntax -
+    SQL Server **does** support this, but **not** for the IDENTITY column,
+    so we can't turn this on.
+
+    """
+
+    # supports_native_uuid is partial here, so we implement our
+    # own impl type
+
+    execution_ctx_cls = MSExecutionContext
+    use_scope_identity = True
+    max_identifier_length = 128
+    schema_name = "dbo"
+
+    insert_returning = True
+    update_returning = True
+    delete_returning = True
+    update_returning_multifrom = True
+    delete_returning_multifrom = True
+
+    colspecs = {
+        sqltypes.DateTime: _MSDateTime,
+        sqltypes.Date: _MSDate,
+        sqltypes.JSON: JSON,
+        sqltypes.JSON.JSONIndexType: JSONIndexType,
+        sqltypes.JSON.JSONPathType: JSONPathType,
+        sqltypes.Time: _BASETIMEIMPL,
+        sqltypes.Unicode: _MSUnicode,
+        sqltypes.UnicodeText: _MSUnicodeText,
+        DATETIMEOFFSET: DATETIMEOFFSET,
+        DATETIME2: DATETIME2,
+        SMALLDATETIME: SMALLDATETIME,
+        DATETIME: DATETIME,
+        sqltypes.Uuid: MSUUid,
+    }
+
+    engine_config_types = default.DefaultDialect.engine_config_types.union(
+        {"legacy_schema_aliasing": util.asbool}
+    )
+
+    ischema_names = ischema_names
+
+    supports_sequences = True
+    sequences_optional = True
+    # This default is actually used for autoincrement, where an identity
+    # that starts with 1 is used.
+    # for sequences, T-SQL's actual default is -9223372036854775808
+    default_sequence_base = 1
+
+    supports_native_boolean = False
+    non_native_boolean_check_constraint = False
+    supports_unicode_binds = True
+    postfetch_lastrowid = True
+
+    # may be changed at server inspection time for older SQL Server versions
+    supports_multivalues_insert = True
+
+    use_insertmanyvalues = True
+
+    # note pyodbc will set this to False if fast_executemany is set,
+    # as of SQLAlchemy 2.0.9
+    use_insertmanyvalues_wo_returning = True
+
+    insertmanyvalues_implicit_sentinel = (
+        InsertmanyvaluesSentinelOpts.AUTOINCREMENT
+        | InsertmanyvaluesSentinelOpts.IDENTITY
+        | InsertmanyvaluesSentinelOpts.USE_INSERT_FROM_SELECT
+    )
+
+    # "The incoming request has too many parameters. The server supports a "
+    # "maximum of 2100 parameters."
+    # in fact you can have 2099 parameters.
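+    # (the "insertmanyvalues" feature consults this cap when computing
+    # batch sizes, so a single rendered INSERT statement stays under the
+    # server's parameter limit)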
+ insertmanyvalues_max_parameters = 2099 + + _supports_offset_fetch = False + _supports_nvarchar_max = False + + legacy_schema_aliasing = False + + server_version_info = () + + statement_compiler = MSSQLCompiler + ddl_compiler = MSDDLCompiler + type_compiler_cls = MSTypeCompiler + preparer = MSIdentifierPreparer + + construct_arguments = [ + (sa_schema.PrimaryKeyConstraint, {"clustered": None}), + (sa_schema.UniqueConstraint, {"clustered": None}), + ( + sa_schema.Index, + { + "clustered": None, + "include": None, + "where": None, + "columnstore": None, + }, + ), + ( + sa_schema.Column, + {"identity_start": None, "identity_increment": None}, + ), + ] + + def __init__( + self, + query_timeout=None, + use_scope_identity=True, + schema_name="dbo", + deprecate_large_types=None, + supports_comments=None, + json_serializer=None, + json_deserializer=None, + legacy_schema_aliasing=None, + ignore_no_transaction_on_rollback=False, + **opts, + ): + self.query_timeout = int(query_timeout or 0) + self.schema_name = schema_name + + self.use_scope_identity = use_scope_identity + self.deprecate_large_types = deprecate_large_types + self.ignore_no_transaction_on_rollback = ( + ignore_no_transaction_on_rollback + ) + self._user_defined_supports_comments = uds = supports_comments + if uds is not None: + self.supports_comments = uds + + if legacy_schema_aliasing is not None: + util.warn_deprecated( + "The legacy_schema_aliasing parameter is " + "deprecated and will be removed in a future release.", + "1.4", + ) + self.legacy_schema_aliasing = legacy_schema_aliasing + + super().__init__(**opts) + + self._json_serializer = json_serializer + self._json_deserializer = json_deserializer + + def do_savepoint(self, connection, name): + # give the DBAPI a push + connection.exec_driver_sql("IF @@TRANCOUNT = 0 BEGIN TRANSACTION") + super().do_savepoint(connection, name) + + def do_release_savepoint(self, connection, name): + # SQL Server does not support RELEASE SAVEPOINT + pass + + def do_rollback(self, dbapi_connection): + try: + super().do_rollback(dbapi_connection) + except self.dbapi.ProgrammingError as e: + if self.ignore_no_transaction_on_rollback and re.match( + r".*\b111214\b", str(e) + ): + util.warn( + "ProgrammingError 111214 " + "'No corresponding transaction found.' " + "has been suppressed via " + "ignore_no_transaction_on_rollback=True" + ) + else: + raise + + _isolation_lookup = { + "SERIALIZABLE", + "READ UNCOMMITTED", + "READ COMMITTED", + "REPEATABLE READ", + "SNAPSHOT", + } + + def get_isolation_level_values(self, dbapi_connection): + return list(self._isolation_lookup) + + def set_isolation_level(self, dbapi_connection, level): + cursor = dbapi_connection.cursor() + cursor.execute(f"SET TRANSACTION ISOLATION LEVEL {level}") + cursor.close() + if level == "SNAPSHOT": + dbapi_connection.commit() + + def get_isolation_level(self, dbapi_connection): + cursor = dbapi_connection.cursor() + view_name = "sys.system_views" + try: + cursor.execute( + ( + "SELECT name FROM {} WHERE name IN " + "('dm_exec_sessions', 'dm_pdw_nodes_exec_sessions')" + ).format(view_name) + ) + row = cursor.fetchone() + if not row: + raise NotImplementedError( + "Can't fetch isolation level on this particular " + "SQL Server version." 
+ ) + + view_name = f"sys.{row[0]}" + + cursor.execute( + """ + SELECT CASE transaction_isolation_level + WHEN 0 THEN NULL + WHEN 1 THEN 'READ UNCOMMITTED' + WHEN 2 THEN 'READ COMMITTED' + WHEN 3 THEN 'REPEATABLE READ' + WHEN 4 THEN 'SERIALIZABLE' + WHEN 5 THEN 'SNAPSHOT' END + AS TRANSACTION_ISOLATION_LEVEL + FROM {} + where session_id = @@SPID + """.format( + view_name + ) + ) + except self.dbapi.Error as err: + raise NotImplementedError( + "Can't fetch isolation level; encountered error {} when " + 'attempting to query the "{}" view.'.format(err, view_name) + ) from err + else: + row = cursor.fetchone() + return row[0].upper() + finally: + cursor.close() + + def initialize(self, connection): + super().initialize(connection) + self._setup_version_attributes() + self._setup_supports_nvarchar_max(connection) + self._setup_supports_comments(connection) + + def _setup_version_attributes(self): + if self.server_version_info[0] not in list(range(8, 17)): + util.warn( + "Unrecognized server version info '%s'. Some SQL Server " + "features may not function properly." + % ".".join(str(x) for x in self.server_version_info) + ) + + if self.server_version_info >= MS_2008_VERSION: + self.supports_multivalues_insert = True + else: + self.supports_multivalues_insert = False + + if self.deprecate_large_types is None: + self.deprecate_large_types = ( + self.server_version_info >= MS_2012_VERSION + ) + + self._supports_offset_fetch = ( + self.server_version_info and self.server_version_info[0] >= 11 + ) + + def _setup_supports_nvarchar_max(self, connection): + try: + connection.scalar( + sql.text("SELECT CAST('test max support' AS NVARCHAR(max))") + ) + except exc.DBAPIError: + self._supports_nvarchar_max = False + else: + self._supports_nvarchar_max = True + + def _setup_supports_comments(self, connection): + if self._user_defined_supports_comments is not None: + return + + try: + connection.scalar( + sql.text( + "SELECT 1 FROM fn_listextendedproperty" + "(default, default, default, default, " + "default, default, default)" + ) + ) + except exc.DBAPIError: + self.supports_comments = False + else: + self.supports_comments = True + + def _get_default_schema_name(self, connection): + query = sql.text("SELECT schema_name()") + default_schema_name = connection.scalar(query) + if default_schema_name is not None: + # guard against the case where the default_schema_name is being + # fed back into a table reflection function. 
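+            # (quote=True marks the value as pre-quoted, so a schema name
+            # containing dots is not re-split into a database/owner pair
+            # by _schema_elements())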
+ return quoted_name(default_schema_name, quote=True) + else: + return self.schema_name + + @_db_plus_owner + def has_table(self, connection, tablename, dbname, owner, schema, **kw): + self._ensure_has_table_connection(connection) + + return self._internal_has_table(connection, tablename, owner, **kw) + + @reflection.cache + @_db_plus_owner + def has_sequence( + self, connection, sequencename, dbname, owner, schema, **kw + ): + sequences = ischema.sequences + + s = sql.select(sequences.c.sequence_name).where( + sequences.c.sequence_name == sequencename + ) + + if owner: + s = s.where(sequences.c.sequence_schema == owner) + + c = connection.execute(s) + + return c.first() is not None + + @reflection.cache + @_db_plus_owner_listing + def get_sequence_names(self, connection, dbname, owner, schema, **kw): + sequences = ischema.sequences + + s = sql.select(sequences.c.sequence_name) + if owner: + s = s.where(sequences.c.sequence_schema == owner) + + c = connection.execute(s) + + return [row[0] for row in c] + + @reflection.cache + def get_schema_names(self, connection, **kw): + s = sql.select(ischema.schemata.c.schema_name).order_by( + ischema.schemata.c.schema_name + ) + schema_names = [r[0] for r in connection.execute(s)] + return schema_names + + @reflection.cache + @_db_plus_owner_listing + def get_table_names(self, connection, dbname, owner, schema, **kw): + tables = ischema.tables + s = ( + sql.select(tables.c.table_name) + .where( + sql.and_( + tables.c.table_schema == owner, + tables.c.table_type == "BASE TABLE", + ) + ) + .order_by(tables.c.table_name) + ) + table_names = [r[0] for r in connection.execute(s)] + return table_names + + @reflection.cache + @_db_plus_owner_listing + def get_view_names(self, connection, dbname, owner, schema, **kw): + tables = ischema.tables + s = ( + sql.select(tables.c.table_name) + .where( + sql.and_( + tables.c.table_schema == owner, + tables.c.table_type == "VIEW", + ) + ) + .order_by(tables.c.table_name) + ) + view_names = [r[0] for r in connection.execute(s)] + return view_names + + @reflection.cache + def _internal_has_table(self, connection, tablename, owner, **kw): + if tablename.startswith("#"): # temporary table + # mssql does not support temporary views + # SQL Error [4103] [S0001]: "#v": Temporary views are not allowed + return bool( + connection.scalar( + # U filters on user tables only. 
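+                    # object_id() resolves to NULL when no such object
+                    # exists, so scalar() returns None and bool() is False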
+                    text("SELECT object_id(:table_name, 'U')"),
+                    {"table_name": f"tempdb.dbo.[{tablename}]"},
+                )
+            )
+        else:
+            tables = ischema.tables
+
+            s = sql.select(tables.c.table_name).where(
+                sql.and_(
+                    sql.or_(
+                        tables.c.table_type == "BASE TABLE",
+                        tables.c.table_type == "VIEW",
+                    ),
+                    tables.c.table_name == tablename,
+                )
+            )
+
+            if owner:
+                s = s.where(tables.c.table_schema == owner)
+
+            c = connection.execute(s)
+
+            return c.first() is not None
+
+    def _default_or_error(self, connection, tablename, owner, method, **kw):
+        # TODO: try to avoid having to run a separate query here
+        if self._internal_has_table(connection, tablename, owner, **kw):
+            return method()
+        else:
+            raise exc.NoSuchTableError(f"{owner}.{tablename}")
+
+    @reflection.cache
+    @_db_plus_owner
+    def get_indexes(self, connection, tablename, dbname, owner, schema, **kw):
+        filter_definition = (
+            "ind.filter_definition"
+            if self.server_version_info >= MS_2008_VERSION
+            else "NULL as filter_definition"
+        )
+        rp = connection.execution_options(future_result=True).execute(
+            sql.text(
+                f"""
+select
+    ind.index_id,
+    ind.is_unique,
+    ind.name,
+    ind.type,
+    {filter_definition}
+from
+    sys.indexes as ind
+join sys.tables as tab on
+    ind.object_id = tab.object_id
+join sys.schemas as sch on
+    sch.schema_id = tab.schema_id
+where
+    tab.name = :tabname
+    and sch.name = :schname
+    and ind.is_primary_key = 0
+    and ind.type != 0
+order by
+    ind.name
+            """
+            )
+            .bindparams(
+                sql.bindparam("tabname", tablename, ischema.CoerceUnicode()),
+                sql.bindparam("schname", owner, ischema.CoerceUnicode()),
+            )
+            .columns(name=sqltypes.Unicode())
+        )
+        indexes = {}
+        for row in rp.mappings():
+            indexes[row["index_id"]] = current = {
+                "name": row["name"],
+                "unique": row["is_unique"] == 1,
+                "column_names": [],
+                "include_columns": [],
+                "dialect_options": {},
+            }
+
+            do = current["dialect_options"]
+            index_type = row["type"]
+            if index_type in {1, 2}:
+                do["mssql_clustered"] = index_type == 1
+            if index_type in {5, 6}:
+                do["mssql_clustered"] = index_type == 5
+                do["mssql_columnstore"] = True
+            if row["filter_definition"] is not None:
+                do["mssql_where"] = row["filter_definition"]
+
+        rp = connection.execution_options(future_result=True).execute(
+            sql.text(
+                """
+select
+    ind_col.index_id,
+    col.name,
+    ind_col.is_included_column
+from
+    sys.columns as col
+join sys.tables as tab on
+    tab.object_id = col.object_id
+join sys.index_columns as ind_col on
+    ind_col.column_id = col.column_id
+    and ind_col.object_id = tab.object_id
+join sys.schemas as sch on
+    sch.schema_id = tab.schema_id
+where
+    tab.name = :tabname
+    and sch.name = :schname
+            """
+            )
+            .bindparams(
+                sql.bindparam("tabname", tablename, ischema.CoerceUnicode()),
+                sql.bindparam("schname", owner, ischema.CoerceUnicode()),
+            )
+            .columns(name=sqltypes.Unicode())
+        )
+        for row in rp.mappings():
+            if row["index_id"] not in indexes:
+                continue
+            index_def = indexes[row["index_id"]]
+            is_colstore = index_def["dialect_options"].get("mssql_columnstore")
+            is_clustered = index_def["dialect_options"].get("mssql_clustered")
+            if not (is_colstore and is_clustered):
+                # a clustered columnstore index includes all columns but does
+                # not want them in the index definition
+                if row["is_included_column"] and not is_colstore:
+                    # a nonclustered columnstore index reports that it
+                    # includes columns, but requires that they be listed
+                    # as normal columns
+                    index_def["include_columns"].append(row["name"])
+                else:
+                    index_def["column_names"].append(row["name"])
+        for index_info in indexes.values():
+            # NOTE: 
"root level" include_columns is legacy, now part of + # dialect_options (issue #7382) + index_info["dialect_options"]["mssql_include"] = index_info[ + "include_columns" + ] + + if indexes: + return list(indexes.values()) + else: + return self._default_or_error( + connection, tablename, owner, ReflectionDefaults.indexes, **kw + ) + + @reflection.cache + @_db_plus_owner + def get_view_definition( + self, connection, viewname, dbname, owner, schema, **kw + ): + view_def = connection.execute( + sql.text( + "select mod.definition " + "from sys.sql_modules as mod " + "join sys.views as views on mod.object_id = views.object_id " + "join sys.schemas as sch on views.schema_id = sch.schema_id " + "where views.name=:viewname and sch.name=:schname" + ).bindparams( + sql.bindparam("viewname", viewname, ischema.CoerceUnicode()), + sql.bindparam("schname", owner, ischema.CoerceUnicode()), + ) + ).scalar() + if view_def: + return view_def + else: + raise exc.NoSuchTableError(f"{owner}.{viewname}") + + @reflection.cache + def get_table_comment(self, connection, table_name, schema=None, **kw): + if not self.supports_comments: + raise NotImplementedError( + "Can't get table comments on current SQL Server version in use" + ) + + schema_name = schema if schema else self.default_schema_name + COMMENT_SQL = """ + SELECT cast(com.value as nvarchar(max)) + FROM fn_listextendedproperty('MS_Description', + 'schema', :schema, 'table', :table, NULL, NULL + ) as com; + """ + + comment = connection.execute( + sql.text(COMMENT_SQL).bindparams( + sql.bindparam("schema", schema_name, ischema.CoerceUnicode()), + sql.bindparam("table", table_name, ischema.CoerceUnicode()), + ) + ).scalar() + if comment: + return {"text": comment} + else: + return self._default_or_error( + connection, + table_name, + None, + ReflectionDefaults.table_comment, + **kw, + ) + + def _temp_table_name_like_pattern(self, tablename): + # LIKE uses '%' to match zero or more characters and '_' to match any + # single character. We want to match literal underscores, so T-SQL + # requires that we enclose them in square brackets. + return tablename + ( + ("[_][_][_]%") if not tablename.startswith("##") else "" + ) + + def _get_internal_temp_table_name(self, connection, tablename): + # it's likely that schema is always "dbo", but since we can + # get it here, let's get it. + # see https://stackoverflow.com/questions/8311959/ + # specifying-schema-for-temporary-tables + + try: + return connection.execute( + sql.text( + "select table_schema, table_name " + "from tempdb.information_schema.tables " + "where table_name like :p1" + ), + {"p1": self._temp_table_name_like_pattern(tablename)}, + ).one() + except exc.MultipleResultsFound as me: + raise exc.UnreflectableTableError( + "Found more than one temporary table named '%s' in tempdb " + "at this time. Cannot reliably resolve that name to its " + "internal table name." % tablename + ) from me + except exc.NoResultFound as ne: + raise exc.NoSuchTableError( + "Unable to find a temporary table named '%s' in tempdb." 
+ % tablename + ) from ne + + @reflection.cache + @_db_plus_owner + def get_columns(self, connection, tablename, dbname, owner, schema, **kw): + is_temp_table = tablename.startswith("#") + if is_temp_table: + owner, tablename = self._get_internal_temp_table_name( + connection, tablename + ) + + columns = ischema.mssql_temp_table_columns + else: + columns = ischema.columns + + computed_cols = ischema.computed_columns + identity_cols = ischema.identity_columns + if owner: + whereclause = sql.and_( + columns.c.table_name == tablename, + columns.c.table_schema == owner, + ) + full_name = columns.c.table_schema + "." + columns.c.table_name + else: + whereclause = columns.c.table_name == tablename + full_name = columns.c.table_name + + if self._supports_nvarchar_max: + computed_definition = computed_cols.c.definition + else: + # tds_version 4.2 does not support NVARCHAR(MAX) + computed_definition = sql.cast( + computed_cols.c.definition, NVARCHAR(4000) + ) + + object_id = func.object_id(full_name) + + s = ( + sql.select( + columns.c.column_name, + columns.c.data_type, + columns.c.is_nullable, + columns.c.character_maximum_length, + columns.c.numeric_precision, + columns.c.numeric_scale, + columns.c.column_default, + columns.c.collation_name, + computed_definition, + computed_cols.c.is_persisted, + identity_cols.c.is_identity, + identity_cols.c.seed_value, + identity_cols.c.increment_value, + ischema.extended_properties.c.value.label("comment"), + ) + .select_from(columns) + .outerjoin( + computed_cols, + onclause=sql.and_( + computed_cols.c.object_id == object_id, + computed_cols.c.name + == columns.c.column_name.collate("DATABASE_DEFAULT"), + ), + ) + .outerjoin( + identity_cols, + onclause=sql.and_( + identity_cols.c.object_id == object_id, + identity_cols.c.name + == columns.c.column_name.collate("DATABASE_DEFAULT"), + ), + ) + .outerjoin( + ischema.extended_properties, + onclause=sql.and_( + ischema.extended_properties.c["class"] == 1, + ischema.extended_properties.c.major_id == object_id, + ischema.extended_properties.c.minor_id + == columns.c.ordinal_position, + ischema.extended_properties.c.name == "MS_Description", + ), + ) + .where(whereclause) + .order_by(columns.c.ordinal_position) + ) + + c = connection.execution_options(future_result=True).execute(s) + + cols = [] + for row in c.mappings(): + name = row[columns.c.column_name] + type_ = row[columns.c.data_type] + nullable = row[columns.c.is_nullable] == "YES" + charlen = row[columns.c.character_maximum_length] + numericprec = row[columns.c.numeric_precision] + numericscale = row[columns.c.numeric_scale] + default = row[columns.c.column_default] + collation = row[columns.c.collation_name] + definition = row[computed_definition] + is_persisted = row[computed_cols.c.is_persisted] + is_identity = row[identity_cols.c.is_identity] + identity_start = row[identity_cols.c.seed_value] + identity_increment = row[identity_cols.c.increment_value] + comment = row[ischema.extended_properties.c.value] + + coltype = self.ischema_names.get(type_, None) + + kwargs = {} + if coltype in ( + MSString, + MSChar, + MSNVarchar, + MSNChar, + MSText, + MSNText, + MSBinary, + MSVarBinary, + sqltypes.LargeBinary, + ): + if charlen == -1: + charlen = None + kwargs["length"] = charlen + if collation: + kwargs["collation"] = collation + + if coltype is None: + util.warn( + "Did not recognize type '%s' of column '%s'" + % (type_, name) + ) + coltype = sqltypes.NULLTYPE + else: + if issubclass(coltype, sqltypes.Numeric): + kwargs["precision"] = numericprec + + if not 
issubclass(coltype, sqltypes.Float): + kwargs["scale"] = numericscale + + coltype = coltype(**kwargs) + cdict = { + "name": name, + "type": coltype, + "nullable": nullable, + "default": default, + "autoincrement": is_identity is not None, + "comment": comment, + } + + if definition is not None and is_persisted is not None: + cdict["computed"] = { + "sqltext": definition, + "persisted": is_persisted, + } + + if is_identity is not None: + # identity_start and identity_increment are Decimal or None + if identity_start is None or identity_increment is None: + cdict["identity"] = {} + else: + if isinstance(coltype, sqltypes.BigInteger): + start = int(identity_start) + increment = int(identity_increment) + elif isinstance(coltype, sqltypes.Integer): + start = int(identity_start) + increment = int(identity_increment) + else: + start = identity_start + increment = identity_increment + + cdict["identity"] = { + "start": start, + "increment": increment, + } + + cols.append(cdict) + + if cols: + return cols + else: + return self._default_or_error( + connection, tablename, owner, ReflectionDefaults.columns, **kw + ) + + @reflection.cache + @_db_plus_owner + def get_pk_constraint( + self, connection, tablename, dbname, owner, schema, **kw + ): + pkeys = [] + TC = ischema.constraints + C = ischema.key_constraints.alias("C") + + # Primary key constraints + s = ( + sql.select( + C.c.column_name, + TC.c.constraint_type, + C.c.constraint_name, + func.objectproperty( + func.object_id( + C.c.table_schema + "." + C.c.constraint_name + ), + "CnstIsClustKey", + ).label("is_clustered"), + ) + .where( + sql.and_( + TC.c.constraint_name == C.c.constraint_name, + TC.c.table_schema == C.c.table_schema, + C.c.table_name == tablename, + C.c.table_schema == owner, + ), + ) + .order_by(TC.c.constraint_name, C.c.ordinal_position) + ) + c = connection.execution_options(future_result=True).execute(s) + constraint_name = None + is_clustered = None + for row in c.mappings(): + if "PRIMARY" in row[TC.c.constraint_type.name]: + pkeys.append(row["COLUMN_NAME"]) + if constraint_name is None: + constraint_name = row[C.c.constraint_name.name] + if is_clustered is None: + is_clustered = row["is_clustered"] + if pkeys: + return { + "constrained_columns": pkeys, + "name": constraint_name, + "dialect_options": {"mssql_clustered": is_clustered}, + } + else: + return self._default_or_error( + connection, + tablename, + owner, + ReflectionDefaults.pk_constraint, + **kw, + ) + + @reflection.cache + @_db_plus_owner + def get_foreign_keys( + self, connection, tablename, dbname, owner, schema, **kw + ): + # Foreign key constraints + s = ( + text( + """\ +WITH fk_info AS ( + SELECT + ischema_ref_con.constraint_schema, + ischema_ref_con.constraint_name, + ischema_key_col.ordinal_position, + ischema_key_col.table_schema, + ischema_key_col.table_name, + ischema_ref_con.unique_constraint_schema, + ischema_ref_con.unique_constraint_name, + ischema_ref_con.match_option, + ischema_ref_con.update_rule, + ischema_ref_con.delete_rule, + ischema_key_col.column_name AS constrained_column + FROM + INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS ischema_ref_con + INNER JOIN + INFORMATION_SCHEMA.KEY_COLUMN_USAGE ischema_key_col ON + ischema_key_col.table_schema = ischema_ref_con.constraint_schema + AND ischema_key_col.constraint_name = + ischema_ref_con.constraint_name + WHERE ischema_key_col.table_name = :tablename + AND ischema_key_col.table_schema = :owner +), +constraint_info AS ( + SELECT + ischema_key_col.constraint_schema, + 
ischema_key_col.constraint_name, + ischema_key_col.ordinal_position, + ischema_key_col.table_schema, + ischema_key_col.table_name, + ischema_key_col.column_name + FROM + INFORMATION_SCHEMA.KEY_COLUMN_USAGE ischema_key_col +), +index_info AS ( + SELECT + sys.schemas.name AS index_schema, + sys.indexes.name AS index_name, + sys.index_columns.key_ordinal AS ordinal_position, + sys.schemas.name AS table_schema, + sys.objects.name AS table_name, + sys.columns.name AS column_name + FROM + sys.indexes + INNER JOIN + sys.objects ON + sys.objects.object_id = sys.indexes.object_id + INNER JOIN + sys.schemas ON + sys.schemas.schema_id = sys.objects.schema_id + INNER JOIN + sys.index_columns ON + sys.index_columns.object_id = sys.objects.object_id + AND sys.index_columns.index_id = sys.indexes.index_id + INNER JOIN + sys.columns ON + sys.columns.object_id = sys.indexes.object_id + AND sys.columns.column_id = sys.index_columns.column_id +) + SELECT + fk_info.constraint_schema, + fk_info.constraint_name, + fk_info.ordinal_position, + fk_info.constrained_column, + constraint_info.table_schema AS referred_table_schema, + constraint_info.table_name AS referred_table_name, + constraint_info.column_name AS referred_column, + fk_info.match_option, + fk_info.update_rule, + fk_info.delete_rule + FROM + fk_info INNER JOIN constraint_info ON + constraint_info.constraint_schema = + fk_info.unique_constraint_schema + AND constraint_info.constraint_name = + fk_info.unique_constraint_name + AND constraint_info.ordinal_position = fk_info.ordinal_position + UNION + SELECT + fk_info.constraint_schema, + fk_info.constraint_name, + fk_info.ordinal_position, + fk_info.constrained_column, + index_info.table_schema AS referred_table_schema, + index_info.table_name AS referred_table_name, + index_info.column_name AS referred_column, + fk_info.match_option, + fk_info.update_rule, + fk_info.delete_rule + FROM + fk_info INNER JOIN index_info ON + index_info.index_schema = fk_info.unique_constraint_schema + AND index_info.index_name = fk_info.unique_constraint_name + AND index_info.ordinal_position = fk_info.ordinal_position + + ORDER BY fk_info.constraint_schema, fk_info.constraint_name, + fk_info.ordinal_position +""" + ) + .bindparams( + sql.bindparam("tablename", tablename, ischema.CoerceUnicode()), + sql.bindparam("owner", owner, ischema.CoerceUnicode()), + ) + .columns( + constraint_schema=sqltypes.Unicode(), + constraint_name=sqltypes.Unicode(), + table_schema=sqltypes.Unicode(), + table_name=sqltypes.Unicode(), + constrained_column=sqltypes.Unicode(), + referred_table_schema=sqltypes.Unicode(), + referred_table_name=sqltypes.Unicode(), + referred_column=sqltypes.Unicode(), + ) + ) + + # group rows by constraint ID, to handle multi-column FKs + fkeys = [] + + def fkey_rec(): + return { + "name": None, + "constrained_columns": [], + "referred_schema": None, + "referred_table": None, + "referred_columns": [], + "options": {}, + } + + fkeys = util.defaultdict(fkey_rec) + + for r in connection.execute(s).all(): + ( + _, # constraint schema + rfknm, + _, # ordinal position + scol, + rschema, + rtbl, + rcol, + # TODO: we support match= for foreign keys so + # we can support this also, PG has match=FULL for example + # but this seems to not be a valid value for SQL Server + _, # match rule + fkuprule, + fkdelrule, + ) = r + + rec = fkeys[rfknm] + rec["name"] = rfknm + + if fkuprule != "NO ACTION": + rec["options"]["onupdate"] = fkuprule + + if fkdelrule != "NO ACTION": + rec["options"]["ondelete"] = fkdelrule + + if not 
rec["referred_table"]: + rec["referred_table"] = rtbl + if schema is not None or owner != rschema: + if dbname: + rschema = dbname + "." + rschema + rec["referred_schema"] = rschema + + local_cols, remote_cols = ( + rec["constrained_columns"], + rec["referred_columns"], + ) + + local_cols.append(scol) + remote_cols.append(rcol) + + if fkeys: + return list(fkeys.values()) + else: + return self._default_or_error( + connection, + tablename, + owner, + ReflectionDefaults.foreign_keys, + **kw, + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/information_schema.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/information_schema.py new file mode 100644 index 00000000..0c5f2372 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/information_schema.py @@ -0,0 +1,254 @@ +# dialects/mssql/information_schema.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +from ... import cast +from ... import Column +from ... import MetaData +from ... import Table +from ...ext.compiler import compiles +from ...sql import expression +from ...types import Boolean +from ...types import Integer +from ...types import Numeric +from ...types import NVARCHAR +from ...types import String +from ...types import TypeDecorator +from ...types import Unicode + + +ischema = MetaData() + + +class CoerceUnicode(TypeDecorator): + impl = Unicode + cache_ok = True + + def bind_expression(self, bindvalue): + return _cast_on_2005(bindvalue) + + +class _cast_on_2005(expression.ColumnElement): + def __init__(self, bindvalue): + self.bindvalue = bindvalue + + +@compiles(_cast_on_2005) +def _compile(element, compiler, **kw): + from . 
import base + + if ( + compiler.dialect.server_version_info is None + or compiler.dialect.server_version_info < base.MS_2005_VERSION + ): + return compiler.process(element.bindvalue, **kw) + else: + return compiler.process(cast(element.bindvalue, Unicode), **kw) + + +schemata = Table( + "SCHEMATA", + ischema, + Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"), + Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"), + Column("SCHEMA_OWNER", CoerceUnicode, key="schema_owner"), + schema="INFORMATION_SCHEMA", +) + +tables = Table( + "TABLES", + ischema, + Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"), + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column("TABLE_TYPE", CoerceUnicode, key="table_type"), + schema="INFORMATION_SCHEMA", +) + +columns = Table( + "COLUMNS", + ischema, + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column("COLUMN_NAME", CoerceUnicode, key="column_name"), + Column("IS_NULLABLE", Integer, key="is_nullable"), + Column("DATA_TYPE", String, key="data_type"), + Column("ORDINAL_POSITION", Integer, key="ordinal_position"), + Column( + "CHARACTER_MAXIMUM_LENGTH", Integer, key="character_maximum_length" + ), + Column("NUMERIC_PRECISION", Integer, key="numeric_precision"), + Column("NUMERIC_SCALE", Integer, key="numeric_scale"), + Column("COLUMN_DEFAULT", Integer, key="column_default"), + Column("COLLATION_NAME", String, key="collation_name"), + schema="INFORMATION_SCHEMA", +) + +mssql_temp_table_columns = Table( + "COLUMNS", + ischema, + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column("COLUMN_NAME", CoerceUnicode, key="column_name"), + Column("IS_NULLABLE", Integer, key="is_nullable"), + Column("DATA_TYPE", String, key="data_type"), + Column("ORDINAL_POSITION", Integer, key="ordinal_position"), + Column( + "CHARACTER_MAXIMUM_LENGTH", Integer, key="character_maximum_length" + ), + Column("NUMERIC_PRECISION", Integer, key="numeric_precision"), + Column("NUMERIC_SCALE", Integer, key="numeric_scale"), + Column("COLUMN_DEFAULT", Integer, key="column_default"), + Column("COLLATION_NAME", String, key="collation_name"), + schema="tempdb.INFORMATION_SCHEMA", +) + +constraints = Table( + "TABLE_CONSTRAINTS", + ischema, + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"), + Column("CONSTRAINT_TYPE", CoerceUnicode, key="constraint_type"), + schema="INFORMATION_SCHEMA", +) + +column_constraints = Table( + "CONSTRAINT_COLUMN_USAGE", + ischema, + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column("COLUMN_NAME", CoerceUnicode, key="column_name"), + Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"), + schema="INFORMATION_SCHEMA", +) + +key_constraints = Table( + "KEY_COLUMN_USAGE", + ischema, + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column("COLUMN_NAME", CoerceUnicode, key="column_name"), + Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"), + Column("CONSTRAINT_SCHEMA", CoerceUnicode, key="constraint_schema"), + Column("ORDINAL_POSITION", Integer, key="ordinal_position"), + schema="INFORMATION_SCHEMA", +) + +ref_constraints = Table( + 
"REFERENTIAL_CONSTRAINTS", + ischema, + Column("CONSTRAINT_CATALOG", CoerceUnicode, key="constraint_catalog"), + Column("CONSTRAINT_SCHEMA", CoerceUnicode, key="constraint_schema"), + Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"), + # TODO: is CATLOG misspelled ? + Column( + "UNIQUE_CONSTRAINT_CATLOG", + CoerceUnicode, + key="unique_constraint_catalog", + ), + Column( + "UNIQUE_CONSTRAINT_SCHEMA", + CoerceUnicode, + key="unique_constraint_schema", + ), + Column( + "UNIQUE_CONSTRAINT_NAME", CoerceUnicode, key="unique_constraint_name" + ), + Column("MATCH_OPTION", String, key="match_option"), + Column("UPDATE_RULE", String, key="update_rule"), + Column("DELETE_RULE", String, key="delete_rule"), + schema="INFORMATION_SCHEMA", +) + +views = Table( + "VIEWS", + ischema, + Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"), + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column("VIEW_DEFINITION", CoerceUnicode, key="view_definition"), + Column("CHECK_OPTION", String, key="check_option"), + Column("IS_UPDATABLE", String, key="is_updatable"), + schema="INFORMATION_SCHEMA", +) + +computed_columns = Table( + "computed_columns", + ischema, + Column("object_id", Integer), + Column("name", CoerceUnicode), + Column("is_computed", Boolean), + Column("is_persisted", Boolean), + Column("definition", CoerceUnicode), + schema="sys", +) + +sequences = Table( + "SEQUENCES", + ischema, + Column("SEQUENCE_CATALOG", CoerceUnicode, key="sequence_catalog"), + Column("SEQUENCE_SCHEMA", CoerceUnicode, key="sequence_schema"), + Column("SEQUENCE_NAME", CoerceUnicode, key="sequence_name"), + schema="INFORMATION_SCHEMA", +) + + +class NumericSqlVariant(TypeDecorator): + r"""This type casts sql_variant columns in the identity_columns view + to numeric. This is required because: + + * pyodbc does not support sql_variant + * pymssql under python 2 return the byte representation of the number, + int 1 is returned as "\x01\x00\x00\x00". On python 3 it returns the + correct value as string. + """ + + impl = Unicode + cache_ok = True + + def column_expression(self, colexpr): + return cast(colexpr, Numeric(38, 0)) + + +identity_columns = Table( + "identity_columns", + ischema, + Column("object_id", Integer), + Column("name", CoerceUnicode), + Column("is_identity", Boolean), + Column("seed_value", NumericSqlVariant), + Column("increment_value", NumericSqlVariant), + Column("last_value", NumericSqlVariant), + Column("is_not_for_replication", Boolean), + schema="sys", +) + + +class NVarcharSqlVariant(TypeDecorator): + """This type casts sql_variant columns in the extended_properties view + to nvarchar. 
This is required because pyodbc does not support sql_variant + """ + + impl = Unicode + cache_ok = True + + def column_expression(self, colexpr): + return cast(colexpr, NVARCHAR) + + +extended_properties = Table( + "extended_properties", + ischema, + Column("class", Integer), # TINYINT + Column("class_desc", CoerceUnicode), + Column("major_id", Integer), + Column("minor_id", Integer), + Column("name", CoerceUnicode), + Column("value", NVarcharSqlVariant), + schema="sys", +) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/json.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/json.py new file mode 100644 index 00000000..18bea09d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/json.py @@ -0,0 +1,133 @@ +# dialects/mssql/json.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +from ... import types as sqltypes + +# technically, all the dialect-specific datatypes that don't have any special +# behaviors would be private with names like _MSJson. However, we haven't been +# doing this for mysql.JSON or sqlite.JSON which both have JSON / JSONIndexType +# / JSONPathType in their json.py files, so keep consistent with that +# sub-convention for now. A future change can update them all to be +# package-private at once. + + +class JSON(sqltypes.JSON): + """MSSQL JSON type. + + MSSQL supports JSON-formatted data as of SQL Server 2016. + + The :class:`_mssql.JSON` datatype at the DDL level will represent the + datatype as ``NVARCHAR(max)``, but provides for JSON-level comparison + functions as well as Python coercion behavior. + + :class:`_mssql.JSON` is used automatically whenever the base + :class:`_types.JSON` datatype is used against a SQL Server backend. + + .. seealso:: + + :class:`_types.JSON` - main documentation for the generic + cross-platform JSON datatype. + + The :class:`_mssql.JSON` type supports persistence of JSON values + as well as the core index operations provided by :class:`_types.JSON` + datatype, by adapting the operations to render the ``JSON_VALUE`` + or ``JSON_QUERY`` functions at the database level. + + The SQL Server :class:`_mssql.JSON` type necessarily makes use of the + ``JSON_QUERY`` and ``JSON_VALUE`` functions when querying for elements + of a JSON object. These two functions have a major restriction in that + they are **mutually exclusive** based on the type of object to be returned. + The ``JSON_QUERY`` function **only** returns a JSON dictionary or list, + but not an individual string, numeric, or boolean element; the + ``JSON_VALUE`` function **only** returns an individual string, numeric, + or boolean element. **both functions either return NULL or raise + an error if they are not used against the correct expected value**. + + To handle this awkward requirement, indexed access rules are as follows: + + 1. When extracting a sub element from a JSON that is itself a JSON + dictionary or list, the :meth:`_types.JSON.Comparator.as_json` accessor + should be used:: + + stmt = select( + data_table.c.data["some key"].as_json() + ).where( + data_table.c.data["some key"].as_json() == {"sub": "structure"} + ) + + 2. 
When extracting a sub element from a JSON that is a plain boolean, + string, integer, or float, use the appropriate method among + :meth:`_types.JSON.Comparator.as_boolean`, + :meth:`_types.JSON.Comparator.as_string`, + :meth:`_types.JSON.Comparator.as_integer`, + :meth:`_types.JSON.Comparator.as_float`:: + + stmt = select( + data_table.c.data["some key"].as_string() + ).where( + data_table.c.data["some key"].as_string() == "some string" + ) + + .. versionadded:: 1.4 + + + """ + + # note there was a result processor here that was looking for "number", + # but none of the tests seem to exercise it. + + +# Note: these objects currently match exactly those of MySQL, however since +# these are not generalizable to all JSON implementations, remain separately +# implemented for each dialect. +class _FormatTypeMixin: + def _format_value(self, value): + raise NotImplementedError() + + def bind_processor(self, dialect): + super_proc = self.string_bind_processor(dialect) + + def process(value): + value = self._format_value(value) + if super_proc: + value = super_proc(value) + return value + + return process + + def literal_processor(self, dialect): + super_proc = self.string_literal_processor(dialect) + + def process(value): + value = self._format_value(value) + if super_proc: + value = super_proc(value) + return value + + return process + + +class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType): + def _format_value(self, value): + if isinstance(value, int): + value = "$[%s]" % value + else: + value = '$."%s"' % value + return value + + +class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType): + def _format_value(self, value): + return "$%s" % ( + "".join( + [ + "[%s]" % elem if isinstance(elem, int) else '."%s"' % elem + for elem in value + ] + ) + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/provision.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/provision.py new file mode 100644 index 00000000..1c684b1d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/provision.py @@ -0,0 +1,162 @@ +# dialects/mssql/provision.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +from sqlalchemy import inspect +from sqlalchemy import Integer +from ... import create_engine +from ... 
import exc +from ...schema import Column +from ...schema import DropConstraint +from ...schema import ForeignKeyConstraint +from ...schema import MetaData +from ...schema import Table +from ...testing.provision import create_db +from ...testing.provision import drop_all_schema_objects_pre_tables +from ...testing.provision import drop_db +from ...testing.provision import generate_driver_url +from ...testing.provision import get_temp_table_name +from ...testing.provision import log +from ...testing.provision import normalize_sequence +from ...testing.provision import post_configure_engine +from ...testing.provision import run_reap_dbs +from ...testing.provision import temp_table_keyword_args + + +@post_configure_engine.for_db("mssql") +def post_configure_engine(url, engine, follower_ident): + if engine.driver == "pyodbc": + engine.dialect.dbapi.pooling = False + + +@generate_driver_url.for_db("mssql") +def generate_driver_url(url, driver, query_str): + backend = url.get_backend_name() + + new_url = url.set(drivername="%s+%s" % (backend, driver)) + + if driver not in ("pyodbc", "aioodbc"): + new_url = new_url.set(query="") + + if driver == "aioodbc": + new_url = new_url.update_query_dict({"MARS_Connection": "Yes"}) + + if query_str: + new_url = new_url.update_query_string(query_str) + + try: + new_url.get_dialect() + except exc.NoSuchModuleError: + return None + else: + return new_url + + +@create_db.for_db("mssql") +def _mssql_create_db(cfg, eng, ident): + with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn: + conn.exec_driver_sql("create database %s" % ident) + conn.exec_driver_sql( + "ALTER DATABASE %s SET ALLOW_SNAPSHOT_ISOLATION ON" % ident + ) + conn.exec_driver_sql( + "ALTER DATABASE %s SET READ_COMMITTED_SNAPSHOT ON" % ident + ) + conn.exec_driver_sql("use %s" % ident) + conn.exec_driver_sql("create schema test_schema") + conn.exec_driver_sql("create schema test_schema_2") + + +@drop_db.for_db("mssql") +def _mssql_drop_db(cfg, eng, ident): + with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn: + _mssql_drop_ignore(conn, ident) + + +def _mssql_drop_ignore(conn, ident): + try: + # typically when this happens, we can't KILL the session anyway, + # so let the cleanup process drop the DBs + # for row in conn.exec_driver_sql( + # "select session_id from sys.dm_exec_sessions " + # "where database_id=db_id('%s')" % ident): + # log.info("killing SQL server session %s", row['session_id']) + # conn.exec_driver_sql("kill %s" % row['session_id']) + conn.exec_driver_sql("drop database %s" % ident) + log.info("Reaped db: %s", ident) + return True + except exc.DatabaseError as err: + log.warning("couldn't drop db: %s", err) + return False + + +@run_reap_dbs.for_db("mssql") +def _reap_mssql_dbs(url, idents): + log.info("db reaper connecting to %r", url) + eng = create_engine(url) + with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn: + log.info("identifiers in file: %s", ", ".join(idents)) + + to_reap = conn.exec_driver_sql( + "select d.name from sys.databases as d where name " + "like 'TEST_%' and not exists (select session_id " + "from sys.dm_exec_sessions " + "where database_id=d.database_id)" + ) + all_names = {dbname.lower() for (dbname,) in to_reap} + to_drop = set() + for name in all_names: + if name in idents: + to_drop.add(name) + + dropped = total = 0 + for total, dbname in enumerate(to_drop, 1): + if _mssql_drop_ignore(conn, dbname): + dropped += 1 + log.info( + "Dropped %d out of %d stale databases detected", dropped, 
total + ) + + +@temp_table_keyword_args.for_db("mssql") +def _mssql_temp_table_keyword_args(cfg, eng): + return {} + + +@get_temp_table_name.for_db("mssql") +def _mssql_get_temp_table_name(cfg, eng, base_name): + return "##" + base_name + + +@drop_all_schema_objects_pre_tables.for_db("mssql") +def drop_all_schema_objects_pre_tables(cfg, eng): + with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn: + inspector = inspect(conn) + for schema in (None, "dbo", cfg.test_schema, cfg.test_schema_2): + for tname in inspector.get_table_names(schema=schema): + tb = Table( + tname, + MetaData(), + Column("x", Integer), + Column("y", Integer), + schema=schema, + ) + for fk in inspect(conn).get_foreign_keys(tname, schema=schema): + conn.execute( + DropConstraint( + ForeignKeyConstraint( + [tb.c.x], [tb.c.y], name=fk["name"] + ) + ) + ) + + +@normalize_sequence.for_db("mssql") +def normalize_sequence(cfg, sequence): + if sequence.start is None: + sequence.start = 1 + return sequence diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/pymssql.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/pymssql.py new file mode 100644 index 00000000..c4207987 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/pymssql.py @@ -0,0 +1,126 @@ +# dialects/mssql/pymssql.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +""" +.. dialect:: mssql+pymssql + :name: pymssql + :dbapi: pymssql + :connectstring: mssql+pymssql://:@/?charset=utf8 + +pymssql is a Python module that provides a Python DBAPI interface around +`FreeTDS `_. + +.. versionchanged:: 2.0.5 + + pymssql was restored to SQLAlchemy's continuous integration testing + + +""" # noqa +import re + +from .base import MSDialect +from .base import MSIdentifierPreparer +from ... import types as sqltypes +from ... import util +from ...engine import processors + + +class _MSNumeric_pymssql(sqltypes.Numeric): + def result_processor(self, dialect, type_): + if not self.asdecimal: + return processors.to_float + else: + return sqltypes.Numeric.result_processor(self, dialect, type_) + + +class MSIdentifierPreparer_pymssql(MSIdentifierPreparer): + def __init__(self, dialect): + super().__init__(dialect) + # pymssql has the very unusual behavior that it uses pyformat + # yet does not require that percent signs be doubled + self._double_percents = False + + +class MSDialect_pymssql(MSDialect): + supports_statement_cache = True + supports_native_decimal = True + supports_native_uuid = True + driver = "pymssql" + + preparer = MSIdentifierPreparer_pymssql + + colspecs = util.update_copy( + MSDialect.colspecs, + {sqltypes.Numeric: _MSNumeric_pymssql, sqltypes.Float: sqltypes.Float}, + ) + + @classmethod + def import_dbapi(cls): + module = __import__("pymssql") + # pymmsql < 2.1.1 doesn't have a Binary method. we use string + client_ver = tuple(int(x) for x in module.__version__.split(".")) + if client_ver < (2, 1, 1): + # TODO: monkeypatching here is less than ideal + module.Binary = lambda x: x if hasattr(x, "decode") else str(x) + + if client_ver < (1,): + util.warn( + "The pymssql dialect expects at least " + "the 1.0 series of the pymssql DBAPI." + ) + return module + + def _get_server_version_info(self, connection): + vers = connection.exec_driver_sql("select @@version").scalar() + m = re.match(r"Microsoft .*? 
+        if m:
+            return tuple(int(x) for x in m.group(1, 2, 3, 4))
+        else:
+            return None
+
+    def create_connect_args(self, url):
+        opts = url.translate_connect_args(username="user")
+        opts.update(url.query)
+        port = opts.pop("port", None)
+        if port and "host" in opts:
+            opts["host"] = "%s:%s" % (opts["host"], port)
+        return ([], opts)
+
+    def is_disconnect(self, e, connection, cursor):
+        for msg in (
+            "Adaptive Server connection timed out",
+            "Net-Lib error during Connection reset by peer",
+            "message 20003",  # connection timeout
+            "Error 10054",
+            "Not connected to any MS SQL server",
+            "Connection is closed",
+            "message 20006",  # Write to the server failed
+            "message 20017",  # Unexpected EOF from the server
+            "message 20047",  # DBPROCESS is dead or not enabled
+            "The server failed to resume the transaction",
+        ):
+            if msg in str(e):
+                return True
+        else:
+            return False
+
+    def get_isolation_level_values(self, dbapi_connection):
+        return super().get_isolation_level_values(dbapi_connection) + [
+            "AUTOCOMMIT"
+        ]
+
+    def set_isolation_level(self, dbapi_connection, level):
+        if level == "AUTOCOMMIT":
+            dbapi_connection.autocommit(True)
+        else:
+            dbapi_connection.autocommit(False)
+            super().set_isolation_level(dbapi_connection, level)
+
+
+dialect = MSDialect_pymssql
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/pyodbc.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/pyodbc.py
new file mode 100644
index 00000000..76ea046d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mssql/pyodbc.py
@@ -0,0 +1,745 @@
+# dialects/mssql/pyodbc.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+r"""
+.. dialect:: mssql+pyodbc
+    :name: PyODBC
+    :dbapi: pyodbc
+    :connectstring: mssql+pyodbc://<username>:<password>@<dsnname>
+    :url: https://pypi.org/project/pyodbc/
+
+Connecting to PyODBC
+--------------------
+
+The URL here is to be translated to PyODBC connection strings, as
+detailed in `ConnectionStrings `_.
+
+DSN Connections
+^^^^^^^^^^^^^^^
+
+A DSN connection in ODBC means that a pre-existing ODBC datasource is
+configured on the client machine.  The application then specifies the name
+of this datasource, which encompasses details such as the specific ODBC driver
+in use as well as the network address of the database.  Assuming a datasource
+is configured on the client, a basic DSN-based connection looks like::
+
+    engine = create_engine("mssql+pyodbc://scott:tiger@some_dsn")
+
+The URL above will pass the following connection string to PyODBC::
+
+    DSN=some_dsn;UID=scott;PWD=tiger
+
+If the username and password are omitted, the DSN form will also add
+the ``Trusted_Connection=yes`` directive to the ODBC string.
+
+Hostname Connections
+^^^^^^^^^^^^^^^^^^^^
+
+Hostname-based connections are also supported by pyodbc.  These are often
+easier to use than a DSN and have the additional advantage that the specific
+database name to connect towards may be specified locally in the URL, rather
+than it being fixed as part of a datasource configuration.
+
+When using a hostname connection, the driver name must also be specified in the
+query parameters of the URL.
As these names usually have spaces in them, the +name must be URL encoded which means using plus signs for spaces:: + + engine = create_engine("mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=ODBC+Driver+17+for+SQL+Server") + +The ``driver`` keyword is significant to the pyodbc dialect and must be +specified in lowercase. + +Any other names passed in the query string are passed through in the pyodbc +connect string, such as ``authentication``, ``TrustServerCertificate``, etc. +Multiple keyword arguments must be separated by an ampersand (``&``); these +will be translated to semicolons when the pyodbc connect string is generated +internally:: + + e = create_engine( + "mssql+pyodbc://scott:tiger@mssql2017:1433/test?" + "driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes" + "&authentication=ActiveDirectoryIntegrated" + ) + +The equivalent URL can be constructed using :class:`_sa.engine.URL`:: + + from sqlalchemy.engine import URL + connection_url = URL.create( + "mssql+pyodbc", + username="scott", + password="tiger", + host="mssql2017", + port=1433, + database="test", + query={ + "driver": "ODBC Driver 18 for SQL Server", + "TrustServerCertificate": "yes", + "authentication": "ActiveDirectoryIntegrated", + }, + ) + + +Pass through exact Pyodbc string +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +A PyODBC connection string can also be sent in pyodbc's format directly, as +specified in `the PyODBC documentation +`_, +using the parameter ``odbc_connect``. A :class:`_sa.engine.URL` object +can help make this easier:: + + from sqlalchemy.engine import URL + connection_string = "DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password" + connection_url = URL.create("mssql+pyodbc", query={"odbc_connect": connection_string}) + + engine = create_engine(connection_url) + +.. _mssql_pyodbc_access_tokens: + +Connecting to databases with access tokens +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Some database servers are set up to only accept access tokens for login. For +example, SQL Server allows the use of Azure Active Directory tokens to connect +to databases. This requires creating a credential object using the +``azure-identity`` library. More information about the authentication step can be +found in `Microsoft's documentation +`_. + +After getting an engine, the credentials need to be sent to ``pyodbc.connect`` +each time a connection is requested. One way to do this is to set up an event +listener on the engine that adds the credential token to the dialect's connect +call. This is discussed more generally in :ref:`engines_dynamic_tokens`. For +SQL Server in particular, this is passed as an ODBC connection attribute with +a data structure `described by Microsoft +`_. 
+
+The following code snippet will create an engine that connects to an Azure SQL
+database using Azure credentials::
+
+    import struct
+    from sqlalchemy import create_engine, event
+    from sqlalchemy.engine.url import URL
+    from azure import identity
+
+    SQL_COPT_SS_ACCESS_TOKEN = 1256  # Connection option for access tokens, as defined in msodbcsql.h
+    TOKEN_URL = "https://database.windows.net/"  # The token URL for any Azure SQL database
+
+    connection_string = "mssql+pyodbc://@my-server.database.windows.net/myDb?driver=ODBC+Driver+17+for+SQL+Server"
+
+    engine = create_engine(connection_string)
+
+    azure_credentials = identity.DefaultAzureCredential()
+
+    @event.listens_for(engine, "do_connect")
+    def provide_token(dialect, conn_rec, cargs, cparams):
+        # remove the "Trusted_Connection" parameter that SQLAlchemy adds
+        cargs[0] = cargs[0].replace(";Trusted_Connection=Yes", "")
+
+        # create token credential
+        raw_token = azure_credentials.get_token(TOKEN_URL).token.encode("utf-16-le")
+        token_struct = struct.pack(f"<I{len(raw_token)}s", len(raw_token), raw_token)
+
+        # apply it to keyword arguments
+        cparams["attrs_before"] = {SQL_COPT_SS_ACCESS_TOKEN: token_struct}
+
+.. tip::
+
+    The ``Trusted_Connection`` token is currently added by the SQLAlchemy
+    pyodbc dialect when no username or password is present.  This needs
+    to be removed per Microsoft's `documentation for Azure access tokens `_,
+    stating that a connection string when using an access token must not contain
+    ``UID``, ``PWD``, ``Authentication`` or ``Trusted_Connection`` parameters.
+
+.. _azure_synapse_ignore_no_transaction_on_rollback:
+
+Avoiding transaction-related exceptions on Azure Synapse Analytics
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Azure Synapse Analytics has a significant difference in its transaction
+handling compared to plain SQL Server; in some cases an error within a Synapse
+transaction can cause it to be arbitrarily terminated on the server side, which
+then causes the DBAPI ``.rollback()`` method (as well as ``.commit()``) to
+fail.  The issue prevents the usual DBAPI contract of allowing ``.rollback()``
+to pass silently if no transaction is present as the driver does not expect
+this condition.  The symptom of this failure is an exception with a message
+resembling 'No corresponding transaction found. (111214)' when attempting to
+emit a ``.rollback()`` after an operation had a failure of some kind.
+
+This specific case can be handled by passing ``ignore_no_transaction_on_rollback=True`` to
+the SQL Server dialect via the :func:`_sa.create_engine` function as follows::
+
+    engine = create_engine(connection_url, ignore_no_transaction_on_rollback=True)
+
+Using the above parameter, the dialect will catch ``ProgrammingError``
+exceptions raised during ``connection.rollback()`` and emit a warning
+if the error message contains code ``111214``, however will not raise
+an exception.
+
+.. versionadded:: 1.4.40  Added the
+   ``ignore_no_transaction_on_rollback=True`` parameter.
+
+Enable autocommit for Azure SQL Data Warehouse (DW) connections
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Azure SQL Data Warehouse does not support transactions,
+and that can cause problems with SQLAlchemy's "autobegin" (and implicit
+commit/rollback) behavior.
We can avoid these problems by enabling autocommit +at both the pyodbc and engine levels:: + + connection_url = sa.engine.URL.create( + "mssql+pyodbc", + username="scott", + password="tiger", + host="dw.azure.example.com", + database="mydb", + query={ + "driver": "ODBC Driver 17 for SQL Server", + "autocommit": "True", + }, + ) + + engine = create_engine(connection_url).execution_options( + isolation_level="AUTOCOMMIT" + ) + +Avoiding sending large string parameters as TEXT/NTEXT +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +By default, for historical reasons, Microsoft's ODBC drivers for SQL Server +send long string parameters (greater than 4000 SBCS characters or 2000 Unicode +characters) as TEXT/NTEXT values. TEXT and NTEXT have been deprecated for many +years and are starting to cause compatibility issues with newer versions of +SQL_Server/Azure. For example, see `this +issue `_. + +Starting with ODBC Driver 18 for SQL Server we can override the legacy +behavior and pass long strings as varchar(max)/nvarchar(max) using the +``LongAsMax=Yes`` connection string parameter:: + + connection_url = sa.engine.URL.create( + "mssql+pyodbc", + username="scott", + password="tiger", + host="mssqlserver.example.com", + database="mydb", + query={ + "driver": "ODBC Driver 18 for SQL Server", + "LongAsMax": "Yes", + }, + ) + + +Pyodbc Pooling / connection close behavior +------------------------------------------ + +PyODBC uses internal `pooling +`_ by +default, which means connections will be longer lived than they are within +SQLAlchemy itself. As SQLAlchemy has its own pooling behavior, it is often +preferable to disable this behavior. This behavior can only be disabled +globally at the PyODBC module level, **before** any connections are made:: + + import pyodbc + + pyodbc.pooling = False + + # don't use the engine before pooling is set to False + engine = create_engine("mssql+pyodbc://user:pass@dsn") + +If this variable is left at its default value of ``True``, **the application +will continue to maintain active database connections**, even when the +SQLAlchemy engine itself fully discards a connection or if the engine is +disposed. + +.. seealso:: + + `pooling `_ - + in the PyODBC documentation. + +Driver / Unicode Support +------------------------- + +PyODBC works best with Microsoft ODBC drivers, particularly in the area +of Unicode support on both Python 2 and Python 3. + +Using the FreeTDS ODBC drivers on Linux or OSX with PyODBC is **not** +recommended; there have been historically many Unicode-related issues +in this area, including before Microsoft offered ODBC drivers for Linux +and OSX. Now that Microsoft offers drivers for all platforms, for +PyODBC support these are recommended. FreeTDS remains relevant for +non-ODBC drivers such as pymssql where it works very well. + + +Rowcount Support +---------------- + +Previous limitations with the SQLAlchemy ORM's "versioned rows" feature with +Pyodbc have been resolved as of SQLAlchemy 2.0.5. See the notes at +:ref:`mssql_rowcount_versioning`. + +.. _mssql_pyodbc_fastexecutemany: + +Fast Executemany Mode +--------------------- + +The PyODBC driver includes support for a "fast executemany" mode of execution +which greatly reduces round trips for a DBAPI ``executemany()`` call when using +Microsoft ODBC drivers, for **limited size batches that fit in memory**. The +feature is enabled by setting the attribute ``.fast_executemany`` on the DBAPI +cursor when an executemany call is to be used. 
The SQLAlchemy PyODBC SQL +Server dialect supports this parameter by passing the +``fast_executemany`` parameter to +:func:`_sa.create_engine` , when using the **Microsoft ODBC driver only**:: + + engine = create_engine( + "mssql+pyodbc://scott:tiger@mssql2017:1433/test?driver=ODBC+Driver+17+for+SQL+Server", + fast_executemany=True) + +.. versionchanged:: 2.0.9 - the ``fast_executemany`` parameter now has its + intended effect of this PyODBC feature taking effect for all INSERT + statements that are executed with multiple parameter sets, which don't + include RETURNING. Previously, SQLAlchemy 2.0's :term:`insertmanyvalues` + feature would cause ``fast_executemany`` to not be used in most cases + even if specified. + +.. versionadded:: 1.3 + +.. seealso:: + + `fast executemany `_ + - on github + +.. _mssql_pyodbc_setinputsizes: + +Setinputsizes Support +----------------------- + +As of version 2.0, the pyodbc ``cursor.setinputsizes()`` method is used for +all statement executions, except for ``cursor.executemany()`` calls when +fast_executemany=True where it is not supported (assuming +:ref:`insertmanyvalues ` is kept enabled, +"fastexecutemany" will not take place for INSERT statements in any case). + +The use of ``cursor.setinputsizes()`` can be disabled by passing +``use_setinputsizes=False`` to :func:`_sa.create_engine`. + +When ``use_setinputsizes`` is left at its default of ``True``, the +specific per-type symbols passed to ``cursor.setinputsizes()`` can be +programmatically customized using the :meth:`.DialectEvents.do_setinputsizes` +hook. See that method for usage examples. + +.. versionchanged:: 2.0 The mssql+pyodbc dialect now defaults to using + ``use_setinputsizes=True`` for all statement executions with the exception of + cursor.executemany() calls when fast_executemany=True. The behavior can + be turned off by passing ``use_setinputsizes=False`` to + :func:`_sa.create_engine`. + +""" # noqa + + +import datetime +import decimal +import re +import struct + +from .base import _MSDateTime +from .base import _MSUnicode +from .base import _MSUnicodeText +from .base import BINARY +from .base import DATETIMEOFFSET +from .base import MSDialect +from .base import MSExecutionContext +from .base import VARBINARY +from .json import JSON as _MSJson +from .json import JSONIndexType as _MSJsonIndexType +from .json import JSONPathType as _MSJsonPathType +from ... import exc +from ... import types as sqltypes +from ... import util +from ...connectors.pyodbc import PyODBCConnector +from ...engine import cursor as _cursor + + +class _ms_numeric_pyodbc: + """Turns Decimals with adjusted() < 0 or > 7 into strings. + + The routines here are needed for older pyodbc versions + as well as current mxODBC versions. + + """ + + def bind_processor(self, dialect): + super_process = super().bind_processor(dialect) + + if not dialect._need_decimal_fix: + return super_process + + def process(value): + if self.asdecimal and isinstance(value, decimal.Decimal): + adjusted = value.adjusted() + if adjusted < 0: + return self._small_dec_to_string(value) + elif adjusted > 7: + return self._large_dec_to_string(value) + + if super_process: + return super_process(value) + else: + return value + + return process + + # these routines needed for older versions of pyodbc. + # as of 2.1.8 this logic is integrated. 
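+    # worked examples of what the two helpers below produce:
+    # _small_dec_to_string(Decimal("1E-10")) -> "0.0000000001", and
+    # _large_dec_to_string(Decimal("1.2E+10")) -> "12000000000", so the
+    # bound value never reaches the driver in scientific notation.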
+ + def _small_dec_to_string(self, value): + return "%s0.%s%s" % ( + (value < 0 and "-" or ""), + "0" * (abs(value.adjusted()) - 1), + "".join([str(nint) for nint in value.as_tuple()[1]]), + ) + + def _large_dec_to_string(self, value): + _int = value.as_tuple()[1] + if "E" in str(value): + result = "%s%s%s" % ( + (value < 0 and "-" or ""), + "".join([str(s) for s in _int]), + "0" * (value.adjusted() - (len(_int) - 1)), + ) + else: + if (len(_int) - 1) > value.adjusted(): + result = "%s%s.%s" % ( + (value < 0 and "-" or ""), + "".join([str(s) for s in _int][0 : value.adjusted() + 1]), + "".join([str(s) for s in _int][value.adjusted() + 1 :]), + ) + else: + result = "%s%s" % ( + (value < 0 and "-" or ""), + "".join([str(s) for s in _int][0 : value.adjusted() + 1]), + ) + return result + + +class _MSNumeric_pyodbc(_ms_numeric_pyodbc, sqltypes.Numeric): + pass + + +class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float): + pass + + +class _ms_binary_pyodbc: + """Wraps binary values in dialect-specific Binary wrapper. + If the value is null, return a pyodbc-specific BinaryNull + object to prevent pyODBC [and FreeTDS] from defaulting binary + NULL types to SQLWCHAR and causing implicit conversion errors. + """ + + def bind_processor(self, dialect): + if dialect.dbapi is None: + return None + + DBAPIBinary = dialect.dbapi.Binary + + def process(value): + if value is not None: + return DBAPIBinary(value) + else: + # pyodbc-specific + return dialect.dbapi.BinaryNull + + return process + + +class _ODBCDateTimeBindProcessor: + """Add bind processors to handle datetimeoffset behaviors""" + + has_tz = False + + def bind_processor(self, dialect): + def process(value): + if value is None: + return None + elif isinstance(value, str): + # if a string was passed directly, allow it through + return value + elif not value.tzinfo or (not self.timezone and not self.has_tz): + # for DateTime(timezone=False) + return value + else: + # for DATETIMEOFFSET or DateTime(timezone=True) + # + # Convert to string format required by T-SQL + dto_string = value.strftime("%Y-%m-%d %H:%M:%S.%f %z") + # offset needs a colon, e.g., -0700 -> -07:00 + # "UTC offset in the form (+-)HHMM[SS[.ffffff]]" + # backend currently rejects seconds / fractional seconds + dto_string = re.sub( + r"([\+\-]\d{2})([\d\.]+)$", r"\1:\2", dto_string + ) + return dto_string + + return process + + +class _ODBCDateTime(_ODBCDateTimeBindProcessor, _MSDateTime): + pass + + +class _ODBCDATETIMEOFFSET(_ODBCDateTimeBindProcessor, DATETIMEOFFSET): + has_tz = True + + +class _VARBINARY_pyodbc(_ms_binary_pyodbc, VARBINARY): + pass + + +class _BINARY_pyodbc(_ms_binary_pyodbc, BINARY): + pass + + +class _String_pyodbc(sqltypes.String): + def get_dbapi_type(self, dbapi): + if self.length in (None, "max") or self.length >= 2000: + return (dbapi.SQL_VARCHAR, 0, 0) + else: + return dbapi.SQL_VARCHAR + + +class _Unicode_pyodbc(_MSUnicode): + def get_dbapi_type(self, dbapi): + if self.length in (None, "max") or self.length >= 2000: + return (dbapi.SQL_WVARCHAR, 0, 0) + else: + return dbapi.SQL_WVARCHAR + + +class _UnicodeText_pyodbc(_MSUnicodeText): + def get_dbapi_type(self, dbapi): + if self.length in (None, "max") or self.length >= 2000: + return (dbapi.SQL_WVARCHAR, 0, 0) + else: + return dbapi.SQL_WVARCHAR + + +class _JSON_pyodbc(_MSJson): + def get_dbapi_type(self, dbapi): + return (dbapi.SQL_WVARCHAR, 0, 0) + + +class _JSONIndexType_pyodbc(_MSJsonIndexType): + def get_dbapi_type(self, dbapi): + return dbapi.SQL_WVARCHAR + + +class 
_JSONPathType_pyodbc(_MSJsonPathType): + def get_dbapi_type(self, dbapi): + return dbapi.SQL_WVARCHAR + + +class MSExecutionContext_pyodbc(MSExecutionContext): + _embedded_scope_identity = False + + def pre_exec(self): + """where appropriate, issue "select scope_identity()" in the same + statement. + + Background on why "scope_identity()" is preferable to "@@identity": + https://msdn.microsoft.com/en-us/library/ms190315.aspx + + Background on why we attempt to embed "scope_identity()" into the same + statement as the INSERT: + https://code.google.com/p/pyodbc/wiki/FAQs#How_do_I_retrieve_autogenerated/identity_values? + + """ + + super().pre_exec() + + # don't embed the scope_identity select into an + # "INSERT .. DEFAULT VALUES" + if ( + self._select_lastrowid + and self.dialect.use_scope_identity + and len(self.parameters[0]) + ): + self._embedded_scope_identity = True + + self.statement += "; select scope_identity()" + + def post_exec(self): + if self._embedded_scope_identity: + # Fetch the last inserted id from the manipulated statement + # We may have to skip over a number of result sets with + # no data (due to triggers, etc.) + while True: + try: + # fetchall() ensures the cursor is consumed + # without closing it (FreeTDS particularly) + rows = self.cursor.fetchall() + except self.dialect.dbapi.Error: + # no way around this - nextset() consumes the previous set + # so we need to just keep flipping + self.cursor.nextset() + else: + if not rows: + # async adapter drivers just return None here + self.cursor.nextset() + continue + row = rows[0] + break + + self._lastrowid = int(row[0]) + + self.cursor_fetch_strategy = _cursor._NO_CURSOR_DML + else: + super().post_exec() + + +class MSDialect_pyodbc(PyODBCConnector, MSDialect): + supports_statement_cache = True + + # note this parameter is no longer used by the ORM or default dialect + # see #9414 + supports_sane_rowcount_returning = False + + execution_ctx_cls = MSExecutionContext_pyodbc + + colspecs = util.update_copy( + MSDialect.colspecs, + { + sqltypes.Numeric: _MSNumeric_pyodbc, + sqltypes.Float: _MSFloat_pyodbc, + BINARY: _BINARY_pyodbc, + # support DateTime(timezone=True) + sqltypes.DateTime: _ODBCDateTime, + DATETIMEOFFSET: _ODBCDATETIMEOFFSET, + # SQL Server dialect has a VARBINARY that is just to support + # "deprecate_large_types" w/ VARBINARY(max), but also we must + # handle the usual SQL standard VARBINARY + VARBINARY: _VARBINARY_pyodbc, + sqltypes.VARBINARY: _VARBINARY_pyodbc, + sqltypes.LargeBinary: _VARBINARY_pyodbc, + sqltypes.String: _String_pyodbc, + sqltypes.Unicode: _Unicode_pyodbc, + sqltypes.UnicodeText: _UnicodeText_pyodbc, + sqltypes.JSON: _JSON_pyodbc, + sqltypes.JSON.JSONIndexType: _JSONIndexType_pyodbc, + sqltypes.JSON.JSONPathType: _JSONPathType_pyodbc, + # this excludes Enum from the string/VARCHAR thing for now + # it looks like Enum's adaptation doesn't really support the + # String type itself having a dialect-level impl + sqltypes.Enum: sqltypes.Enum, + }, + ) + + def __init__( + self, + fast_executemany=False, + use_setinputsizes=True, + **params, + ): + super().__init__(use_setinputsizes=use_setinputsizes, **params) + self.use_scope_identity = ( + self.use_scope_identity + and self.dbapi + and hasattr(self.dbapi.Cursor, "nextset") + ) + self._need_decimal_fix = self.dbapi and self._dbapi_version() < ( + 2, + 1, + 8, + ) + self.fast_executemany = fast_executemany + if fast_executemany: + self.use_insertmanyvalues_wo_returning = False + + def _get_server_version_info(self, connection): + try: + # 
"Version of the instance of SQL Server, in the form + # of 'major.minor.build.revision'" + raw = connection.exec_driver_sql( + "SELECT CAST(SERVERPROPERTY('ProductVersion') AS VARCHAR)" + ).scalar() + except exc.DBAPIError: + # SQL Server docs indicate this function isn't present prior to + # 2008. Before we had the VARCHAR cast above, pyodbc would also + # fail on this query. + return super()._get_server_version_info(connection) + else: + version = [] + r = re.compile(r"[.\-]") + for n in r.split(raw): + try: + version.append(int(n)) + except ValueError: + pass + return tuple(version) + + def on_connect(self): + super_ = super().on_connect() + + def on_connect(conn): + if super_ is not None: + super_(conn) + + self._setup_timestampoffset_type(conn) + + return on_connect + + def _setup_timestampoffset_type(self, connection): + # output converter function for datetimeoffset + def _handle_datetimeoffset(dto_value): + tup = struct.unpack("<6hI2h", dto_value) + return datetime.datetime( + tup[0], + tup[1], + tup[2], + tup[3], + tup[4], + tup[5], + tup[6] // 1000, + datetime.timezone( + datetime.timedelta(hours=tup[7], minutes=tup[8]) + ), + ) + + odbc_SQL_SS_TIMESTAMPOFFSET = -155 # as defined in SQLNCLI.h + connection.add_output_converter( + odbc_SQL_SS_TIMESTAMPOFFSET, _handle_datetimeoffset + ) + + def do_executemany(self, cursor, statement, parameters, context=None): + if self.fast_executemany: + cursor.fast_executemany = True + super().do_executemany(cursor, statement, parameters, context=context) + + def is_disconnect(self, e, connection, cursor): + if isinstance(e, self.dbapi.Error): + code = e.args[0] + if code in { + "08S01", + "01000", + "01002", + "08003", + "08007", + "08S02", + "08001", + "HYT00", + "HY010", + "10054", + }: + return True + return super().is_disconnect(e, connection, cursor) + + +dialect = MSDialect_pyodbc diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__init__.py new file mode 100644 index 00000000..60bac874 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__init__.py @@ -0,0 +1,101 @@ +# dialects/mysql/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +from . import aiomysql # noqa +from . import asyncmy # noqa +from . import base # noqa +from . import cymysql # noqa +from . import mariadbconnector # noqa +from . import mysqlconnector # noqa +from . import mysqldb # noqa +from . import pymysql # noqa +from . 
import pyodbc # noqa +from .base import BIGINT +from .base import BINARY +from .base import BIT +from .base import BLOB +from .base import BOOLEAN +from .base import CHAR +from .base import DATE +from .base import DATETIME +from .base import DECIMAL +from .base import DOUBLE +from .base import ENUM +from .base import FLOAT +from .base import INTEGER +from .base import JSON +from .base import LONGBLOB +from .base import LONGTEXT +from .base import MEDIUMBLOB +from .base import MEDIUMINT +from .base import MEDIUMTEXT +from .base import NCHAR +from .base import NUMERIC +from .base import NVARCHAR +from .base import REAL +from .base import SET +from .base import SMALLINT +from .base import TEXT +from .base import TIME +from .base import TIMESTAMP +from .base import TINYBLOB +from .base import TINYINT +from .base import TINYTEXT +from .base import VARBINARY +from .base import VARCHAR +from .base import YEAR +from .dml import Insert +from .dml import insert +from .expression import match +from ...util import compat + +# default dialect +base.dialect = dialect = mysqldb.dialect + +__all__ = ( + "BIGINT", + "BINARY", + "BIT", + "BLOB", + "BOOLEAN", + "CHAR", + "DATE", + "DATETIME", + "DECIMAL", + "DOUBLE", + "ENUM", + "FLOAT", + "INTEGER", + "INTEGER", + "JSON", + "LONGBLOB", + "LONGTEXT", + "MEDIUMBLOB", + "MEDIUMINT", + "MEDIUMTEXT", + "NCHAR", + "NVARCHAR", + "NUMERIC", + "SET", + "SMALLINT", + "REAL", + "TEXT", + "TIME", + "TIMESTAMP", + "TINYBLOB", + "TINYINT", + "TINYTEXT", + "VARBINARY", + "VARCHAR", + "YEAR", + "dialect", + "insert", + "Insert", + "match", +) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..d980cb6a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-312.pyc new file mode 100644 index 00000000..6d635d98 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-312.pyc new file mode 100644 index 00000000..4d68d30c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/base.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/base.cpython-312.pyc new file mode 100644 index 00000000..adbf912b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/base.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-312.pyc new file mode 100644 index 00000000..3de324b4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/dml.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/dml.cpython-312.pyc new file mode 100644 index 00000000..3409ce99 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/dml.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-312.pyc new file mode 100644 index 00000000..d83a4491 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/expression.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/expression.cpython-312.pyc new file mode 100644 index 00000000..8e30b34e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/expression.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/json.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/json.cpython-312.pyc new file mode 100644 index 00000000..8bf2f073 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/json.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-312.pyc new file mode 100644 index 00000000..0989eb9a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-312.pyc new file mode 100644 index 00000000..45796700 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-312.pyc new file mode 100644 index 00000000..a76aa79c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-312.pyc new file mode 100644 index 00000000..bff61093 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/provision.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/provision.cpython-312.pyc new file mode 100644 index 00000000..01bfca0b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/provision.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-312.pyc new file mode 100644 index 00000000..96eb656c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-312.pyc new file mode 100644 index 00000000..d3aa03fa Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-312.pyc new file mode 100644 index 00000000..a61ab7ee Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-312.pyc new file mode 100644 index 00000000..c4584707 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/types.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/types.cpython-312.pyc new file mode 100644 index 00000000..cfe8d446 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__pycache__/types.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/aiomysql.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/aiomysql.py new file mode 100644 index 00000000..45e226b9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/aiomysql.py @@ -0,0 +1,333 @@ +# dialects/mysql/aiomysql.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +r""" +.. dialect:: mysql+aiomysql + :name: aiomysql + :dbapi: aiomysql + :connectstring: mysql+aiomysql://user:password@host:port/dbname[?key=value&key=value...] + :url: https://github.com/aio-libs/aiomysql + +The aiomysql dialect is SQLAlchemy's second Python asyncio dialect. + +Using a special asyncio mediation layer, the aiomysql dialect is usable +as the backend for the :ref:`SQLAlchemy asyncio ` +extension package. + +This dialect should normally be used only with the +:func:`_asyncio.create_async_engine` engine creation function:: + + from sqlalchemy.ext.asyncio import create_async_engine + engine = create_async_engine("mysql+aiomysql://user:pass@hostname/dbname?charset=utf8mb4") + + +""" # noqa +from collections import deque + +from .pymysql import MySQLDialect_pymysql +from ... import pool +from ... 
import util +from ...engine import AdaptedConnection +from ...util.concurrency import asyncio +from ...util.concurrency import await_fallback +from ...util.concurrency import await_only + + +class AsyncAdapt_aiomysql_cursor: + # TODO: base on connectors/asyncio.py + # see #10415 + server_side = False + __slots__ = ( + "_adapt_connection", + "_connection", + "await_", + "_cursor", + "_rows", + ) + + def __init__(self, adapt_connection): + self._adapt_connection = adapt_connection + self._connection = adapt_connection._connection + self.await_ = adapt_connection.await_ + + cursor = self._connection.cursor(adapt_connection.dbapi.Cursor) + + # see https://github.com/aio-libs/aiomysql/issues/543 + self._cursor = self.await_(cursor.__aenter__()) + self._rows = deque() + + @property + def description(self): + return self._cursor.description + + @property + def rowcount(self): + return self._cursor.rowcount + + @property + def arraysize(self): + return self._cursor.arraysize + + @arraysize.setter + def arraysize(self, value): + self._cursor.arraysize = value + + @property + def lastrowid(self): + return self._cursor.lastrowid + + def close(self): + # note we aren't actually closing the cursor here, + # we are just letting GC do it. to allow this to be async + # we would need the Result to change how it does "Safe close cursor". + # MySQL "cursors" don't actually have state to be "closed" besides + # exhausting rows, which we already have done for sync cursor. + # another option would be to emulate aiosqlite dialect and assign + # cursor only if we are doing server side cursor operation. + self._rows.clear() + + def execute(self, operation, parameters=None): + return self.await_(self._execute_async(operation, parameters)) + + def executemany(self, operation, seq_of_parameters): + return self.await_( + self._executemany_async(operation, seq_of_parameters) + ) + + async def _execute_async(self, operation, parameters): + async with self._adapt_connection._execute_mutex: + result = await self._cursor.execute(operation, parameters) + + if not self.server_side: + # aiomysql has a "fake" async result, so we have to pull it out + # of that here since our default result is not async. + # we could just as easily grab "_rows" here and be done with it + # but this is safer. 
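+                # (a deque is used so that fetchone() / fetchmany() below
+                # can pop rows off the front in O(1) without copying)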
+ self._rows = deque(await self._cursor.fetchall()) + return result + + async def _executemany_async(self, operation, seq_of_parameters): + async with self._adapt_connection._execute_mutex: + return await self._cursor.executemany(operation, seq_of_parameters) + + def setinputsizes(self, *inputsizes): + pass + + def __iter__(self): + while self._rows: + yield self._rows.popleft() + + def fetchone(self): + if self._rows: + return self._rows.popleft() + else: + return None + + def fetchmany(self, size=None): + if size is None: + size = self.arraysize + + rr = self._rows + return [rr.popleft() for _ in range(min(size, len(rr)))] + + def fetchall(self): + retval = list(self._rows) + self._rows.clear() + return retval + + +class AsyncAdapt_aiomysql_ss_cursor(AsyncAdapt_aiomysql_cursor): + # TODO: base on connectors/asyncio.py + # see #10415 + __slots__ = () + server_side = True + + def __init__(self, adapt_connection): + self._adapt_connection = adapt_connection + self._connection = adapt_connection._connection + self.await_ = adapt_connection.await_ + + cursor = self._connection.cursor(adapt_connection.dbapi.SSCursor) + + self._cursor = self.await_(cursor.__aenter__()) + + def close(self): + if self._cursor is not None: + self.await_(self._cursor.close()) + self._cursor = None + + def fetchone(self): + return self.await_(self._cursor.fetchone()) + + def fetchmany(self, size=None): + return self.await_(self._cursor.fetchmany(size=size)) + + def fetchall(self): + return self.await_(self._cursor.fetchall()) + + +class AsyncAdapt_aiomysql_connection(AdaptedConnection): + # TODO: base on connectors/asyncio.py + # see #10415 + await_ = staticmethod(await_only) + __slots__ = ("dbapi", "_execute_mutex") + + def __init__(self, dbapi, connection): + self.dbapi = dbapi + self._connection = connection + self._execute_mutex = asyncio.Lock() + + def ping(self, reconnect): + return self.await_(self._connection.ping(reconnect)) + + def character_set_name(self): + return self._connection.character_set_name() + + def autocommit(self, value): + self.await_(self._connection.autocommit(value)) + + def cursor(self, server_side=False): + if server_side: + return AsyncAdapt_aiomysql_ss_cursor(self) + else: + return AsyncAdapt_aiomysql_cursor(self) + + def rollback(self): + self.await_(self._connection.rollback()) + + def commit(self): + self.await_(self._connection.commit()) + + def terminate(self): + # it's not awaitable. 
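+        # (aiomysql's Connection.close() is synchronous; the awaitable
+        # ensure_closed() is what close() below goes through instead)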
+ self._connection.close() + + def close(self) -> None: + self.await_(self._connection.ensure_closed()) + + +class AsyncAdaptFallback_aiomysql_connection(AsyncAdapt_aiomysql_connection): + # TODO: base on connectors/asyncio.py + # see #10415 + __slots__ = () + + await_ = staticmethod(await_fallback) + + +class AsyncAdapt_aiomysql_dbapi: + def __init__(self, aiomysql, pymysql): + self.aiomysql = aiomysql + self.pymysql = pymysql + self.paramstyle = "format" + self._init_dbapi_attributes() + self.Cursor, self.SSCursor = self._init_cursors_subclasses() + + def _init_dbapi_attributes(self): + for name in ( + "Warning", + "Error", + "InterfaceError", + "DataError", + "DatabaseError", + "OperationalError", + "InterfaceError", + "IntegrityError", + "ProgrammingError", + "InternalError", + "NotSupportedError", + ): + setattr(self, name, getattr(self.aiomysql, name)) + + for name in ( + "NUMBER", + "STRING", + "DATETIME", + "BINARY", + "TIMESTAMP", + "Binary", + ): + setattr(self, name, getattr(self.pymysql, name)) + + def connect(self, *arg, **kw): + async_fallback = kw.pop("async_fallback", False) + creator_fn = kw.pop("async_creator_fn", self.aiomysql.connect) + + if util.asbool(async_fallback): + return AsyncAdaptFallback_aiomysql_connection( + self, + await_fallback(creator_fn(*arg, **kw)), + ) + else: + return AsyncAdapt_aiomysql_connection( + self, + await_only(creator_fn(*arg, **kw)), + ) + + def _init_cursors_subclasses(self): + # suppress unconditional warning emitted by aiomysql + class Cursor(self.aiomysql.Cursor): + async def _show_warnings(self, conn): + pass + + class SSCursor(self.aiomysql.SSCursor): + async def _show_warnings(self, conn): + pass + + return Cursor, SSCursor + + +class MySQLDialect_aiomysql(MySQLDialect_pymysql): + driver = "aiomysql" + supports_statement_cache = True + + supports_server_side_cursors = True + _sscursor = AsyncAdapt_aiomysql_ss_cursor + + is_async = True + has_terminate = True + + @classmethod + def import_dbapi(cls): + return AsyncAdapt_aiomysql_dbapi( + __import__("aiomysql"), __import__("pymysql") + ) + + @classmethod + def get_pool_class(cls, url): + async_fallback = url.query.get("async_fallback", False) + + if util.asbool(async_fallback): + return pool.FallbackAsyncAdaptedQueuePool + else: + return pool.AsyncAdaptedQueuePool + + def do_terminate(self, dbapi_connection) -> None: + dbapi_connection.terminate() + + def create_connect_args(self, url): + return super().create_connect_args( + url, _translate_args=dict(username="user", database="db") + ) + + def is_disconnect(self, e, connection, cursor): + if super().is_disconnect(e, connection, cursor): + return True + else: + str_e = str(e).lower() + return "not connected" in str_e + + def _found_rows_client_flag(self): + from pymysql.constants import CLIENT + + return CLIENT.FOUND_ROWS + + def get_driver_connection(self, connection): + return connection._connection + + +dialect = MySQLDialect_aiomysql diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/asyncmy.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/asyncmy.py new file mode 100644 index 00000000..474eb626 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/asyncmy.py @@ -0,0 +1,337 @@ +# dialects/mysql/asyncmy.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +r""" +.. 
dialect:: mysql+asyncmy + :name: asyncmy + :dbapi: asyncmy + :connectstring: mysql+asyncmy://user:password@host:port/dbname[?key=value&key=value...] + :url: https://github.com/long2ice/asyncmy + +Using a special asyncio mediation layer, the asyncmy dialect is usable +as the backend for the :ref:`SQLAlchemy asyncio ` +extension package. + +This dialect should normally be used only with the +:func:`_asyncio.create_async_engine` engine creation function:: + + from sqlalchemy.ext.asyncio import create_async_engine + engine = create_async_engine("mysql+asyncmy://user:pass@hostname/dbname?charset=utf8mb4") + + +""" # noqa +from collections import deque +from contextlib import asynccontextmanager + +from .pymysql import MySQLDialect_pymysql +from ... import pool +from ... import util +from ...engine import AdaptedConnection +from ...util.concurrency import asyncio +from ...util.concurrency import await_fallback +from ...util.concurrency import await_only + + +class AsyncAdapt_asyncmy_cursor: + # TODO: base on connectors/asyncio.py + # see #10415 + server_side = False + __slots__ = ( + "_adapt_connection", + "_connection", + "await_", + "_cursor", + "_rows", + ) + + def __init__(self, adapt_connection): + self._adapt_connection = adapt_connection + self._connection = adapt_connection._connection + self.await_ = adapt_connection.await_ + + cursor = self._connection.cursor() + + self._cursor = self.await_(cursor.__aenter__()) + self._rows = deque() + + @property + def description(self): + return self._cursor.description + + @property + def rowcount(self): + return self._cursor.rowcount + + @property + def arraysize(self): + return self._cursor.arraysize + + @arraysize.setter + def arraysize(self, value): + self._cursor.arraysize = value + + @property + def lastrowid(self): + return self._cursor.lastrowid + + def close(self): + # note we aren't actually closing the cursor here, + # we are just letting GC do it. to allow this to be async + # we would need the Result to change how it does "Safe close cursor". + # MySQL "cursors" don't actually have state to be "closed" besides + # exhausting rows, which we already have done for sync cursor. + # another option would be to emulate aiosqlite dialect and assign + # cursor only if we are doing server side cursor operation. + self._rows.clear() + + def execute(self, operation, parameters=None): + return self.await_(self._execute_async(operation, parameters)) + + def executemany(self, operation, seq_of_parameters): + return self.await_( + self._executemany_async(operation, seq_of_parameters) + ) + + async def _execute_async(self, operation, parameters): + async with self._adapt_connection._mutex_and_adapt_errors(): + if parameters is None: + result = await self._cursor.execute(operation) + else: + result = await self._cursor.execute(operation, parameters) + + if not self.server_side: + # asyncmy has a "fake" async result, so we have to pull it out + # of that here since our default result is not async. + # we could just as easily grab "_rows" here and be done with it + # but this is safer. 
+ self._rows = deque(await self._cursor.fetchall()) + return result + + async def _executemany_async(self, operation, seq_of_parameters): + async with self._adapt_connection._mutex_and_adapt_errors(): + return await self._cursor.executemany(operation, seq_of_parameters) + + def setinputsizes(self, *inputsizes): + pass + + def __iter__(self): + while self._rows: + yield self._rows.popleft() + + def fetchone(self): + if self._rows: + return self._rows.popleft() + else: + return None + + def fetchmany(self, size=None): + if size is None: + size = self.arraysize + + rr = self._rows + return [rr.popleft() for _ in range(min(size, len(rr)))] + + def fetchall(self): + retval = list(self._rows) + self._rows.clear() + return retval + + +class AsyncAdapt_asyncmy_ss_cursor(AsyncAdapt_asyncmy_cursor): + # TODO: base on connectors/asyncio.py + # see #10415 + __slots__ = () + server_side = True + + def __init__(self, adapt_connection): + self._adapt_connection = adapt_connection + self._connection = adapt_connection._connection + self.await_ = adapt_connection.await_ + + cursor = self._connection.cursor( + adapt_connection.dbapi.asyncmy.cursors.SSCursor + ) + + self._cursor = self.await_(cursor.__aenter__()) + + def close(self): + if self._cursor is not None: + self.await_(self._cursor.close()) + self._cursor = None + + def fetchone(self): + return self.await_(self._cursor.fetchone()) + + def fetchmany(self, size=None): + return self.await_(self._cursor.fetchmany(size=size)) + + def fetchall(self): + return self.await_(self._cursor.fetchall()) + + +class AsyncAdapt_asyncmy_connection(AdaptedConnection): + # TODO: base on connectors/asyncio.py + # see #10415 + await_ = staticmethod(await_only) + __slots__ = ("dbapi", "_execute_mutex") + + def __init__(self, dbapi, connection): + self.dbapi = dbapi + self._connection = connection + self._execute_mutex = asyncio.Lock() + + @asynccontextmanager + async def _mutex_and_adapt_errors(self): + async with self._execute_mutex: + try: + yield + except AttributeError: + raise self.dbapi.InternalError( + "network operation failed due to asyncmy attribute error" + ) + + def ping(self, reconnect): + assert not reconnect + return self.await_(self._do_ping()) + + async def _do_ping(self): + async with self._mutex_and_adapt_errors(): + return await self._connection.ping(False) + + def character_set_name(self): + return self._connection.character_set_name() + + def autocommit(self, value): + self.await_(self._connection.autocommit(value)) + + def cursor(self, server_side=False): + if server_side: + return AsyncAdapt_asyncmy_ss_cursor(self) + else: + return AsyncAdapt_asyncmy_cursor(self) + + def rollback(self): + self.await_(self._connection.rollback()) + + def commit(self): + self.await_(self._connection.commit()) + + def terminate(self): + # it's not awaitable. 
+ self._connection.close() + + def close(self) -> None: + self.await_(self._connection.ensure_closed()) + + +class AsyncAdaptFallback_asyncmy_connection(AsyncAdapt_asyncmy_connection): + __slots__ = () + + await_ = staticmethod(await_fallback) + + +def _Binary(x): + """Return x as a binary type.""" + return bytes(x) + + +class AsyncAdapt_asyncmy_dbapi: + def __init__(self, asyncmy): + self.asyncmy = asyncmy + self.paramstyle = "format" + self._init_dbapi_attributes() + + def _init_dbapi_attributes(self): + for name in ( + "Warning", + "Error", + "InterfaceError", + "DataError", + "DatabaseError", + "OperationalError", + "InterfaceError", + "IntegrityError", + "ProgrammingError", + "InternalError", + "NotSupportedError", + ): + setattr(self, name, getattr(self.asyncmy.errors, name)) + + STRING = util.symbol("STRING") + NUMBER = util.symbol("NUMBER") + BINARY = util.symbol("BINARY") + DATETIME = util.symbol("DATETIME") + TIMESTAMP = util.symbol("TIMESTAMP") + Binary = staticmethod(_Binary) + + def connect(self, *arg, **kw): + async_fallback = kw.pop("async_fallback", False) + creator_fn = kw.pop("async_creator_fn", self.asyncmy.connect) + + if util.asbool(async_fallback): + return AsyncAdaptFallback_asyncmy_connection( + self, + await_fallback(creator_fn(*arg, **kw)), + ) + else: + return AsyncAdapt_asyncmy_connection( + self, + await_only(creator_fn(*arg, **kw)), + ) + + +class MySQLDialect_asyncmy(MySQLDialect_pymysql): + driver = "asyncmy" + supports_statement_cache = True + + supports_server_side_cursors = True + _sscursor = AsyncAdapt_asyncmy_ss_cursor + + is_async = True + has_terminate = True + + @classmethod + def import_dbapi(cls): + return AsyncAdapt_asyncmy_dbapi(__import__("asyncmy")) + + @classmethod + def get_pool_class(cls, url): + async_fallback = url.query.get("async_fallback", False) + + if util.asbool(async_fallback): + return pool.FallbackAsyncAdaptedQueuePool + else: + return pool.AsyncAdaptedQueuePool + + def do_terminate(self, dbapi_connection) -> None: + dbapi_connection.terminate() + + def create_connect_args(self, url): + return super().create_connect_args( + url, _translate_args=dict(username="user", database="db") + ) + + def is_disconnect(self, e, connection, cursor): + if super().is_disconnect(e, connection, cursor): + return True + else: + str_e = str(e).lower() + return ( + "not connected" in str_e or "network operation failed" in str_e + ) + + def _found_rows_client_flag(self): + from asyncmy.constants import CLIENT + + return CLIENT.FOUND_ROWS + + def get_driver_connection(self, connection): + return connection._connection + + +dialect = MySQLDialect_asyncmy diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/base.py new file mode 100644 index 00000000..e512b9c7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/base.py @@ -0,0 +1,3449 @@ +# dialects/mysql/base.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +r""" + +.. dialect:: mysql + :name: MySQL / MariaDB + :full_support: 5.6, 5.7, 8.0 / 10.8, 10.9 + :normal_support: 5.6+ / 10+ + :best_effort: 5.0.2+ / 5.0.2+ + +Supported Versions and Features +------------------------------- + +SQLAlchemy supports MySQL starting with version 5.0.2 through modern releases, +as well as all modern versions of MariaDB. 
See the official MySQL +documentation for detailed information about features supported in any given +server release. + +.. versionchanged:: 1.4 minimum MySQL version supported is now 5.0.2. + +MariaDB Support +~~~~~~~~~~~~~~~ + +The MariaDB variant of MySQL retains fundamental compatibility with MySQL's +protocols however the development of these two products continues to diverge. +Within the realm of SQLAlchemy, the two databases have a small number of +syntactical and behavioral differences that SQLAlchemy accommodates automatically. +To connect to a MariaDB database, no changes to the database URL are required:: + + + engine = create_engine("mysql+pymysql://user:pass@some_mariadb/dbname?charset=utf8mb4") + +Upon first connect, the SQLAlchemy dialect employs a +server version detection scheme that determines if the +backing database reports as MariaDB. Based on this flag, the dialect +can make different choices in those of areas where its behavior +must be different. + +.. _mysql_mariadb_only_mode: + +MariaDB-Only Mode +~~~~~~~~~~~~~~~~~ + +The dialect also supports an **optional** "MariaDB-only" mode of connection, which may be +useful for the case where an application makes use of MariaDB-specific features +and is not compatible with a MySQL database. To use this mode of operation, +replace the "mysql" token in the above URL with "mariadb":: + + engine = create_engine("mariadb+pymysql://user:pass@some_mariadb/dbname?charset=utf8mb4") + +The above engine, upon first connect, will raise an error if the server version +detection detects that the backing database is not MariaDB. + +When using an engine with ``"mariadb"`` as the dialect name, **all mysql-specific options +that include the name "mysql" in them are now named with "mariadb"**. This means +options like ``mysql_engine`` should be named ``mariadb_engine``, etc. Both +"mysql" and "mariadb" options can be used simultaneously for applications that +use URLs with both "mysql" and "mariadb" dialects:: + + my_table = Table( + "mytable", + metadata, + Column("id", Integer, primary_key=True), + Column("textdata", String(50)), + mariadb_engine="InnoDB", + mysql_engine="InnoDB", + ) + + Index( + "textdata_ix", + my_table.c.textdata, + mysql_prefix="FULLTEXT", + mariadb_prefix="FULLTEXT", + ) + +Similar behavior will occur when the above structures are reflected, i.e. the +"mariadb" prefix will be present in the option names when the database URL +is based on the "mariadb" name. + +.. versionadded:: 1.4 Added "mariadb" dialect name supporting "MariaDB-only mode" + for the MySQL dialect. + +.. _mysql_connection_timeouts: + +Connection Timeouts and Disconnects +----------------------------------- + +MySQL / MariaDB feature an automatic connection close behavior, for connections that +have been idle for a fixed period of time, defaulting to eight hours. +To circumvent having this issue, use +the :paramref:`_sa.create_engine.pool_recycle` option which ensures that +a connection will be discarded and replaced with a new one if it has been +present in the pool for a fixed number of seconds:: + + engine = create_engine('mysql+mysqldb://...', pool_recycle=3600) + +For more comprehensive disconnect detection of pooled connections, including +accommodation of server restarts and network issues, a pre-ping approach may +be employed. See :ref:`pool_disconnects` for current approaches. + +.. seealso:: + + :ref:`pool_disconnects` - Background on several techniques for dealing + with timed out connections as well as database restarts. + +.. 
_mysql_storage_engines: + +CREATE TABLE arguments including Storage Engines +------------------------------------------------ + +Both MySQL's and MariaDB's CREATE TABLE syntax includes a wide array of special options, +including ``ENGINE``, ``CHARSET``, ``MAX_ROWS``, ``ROW_FORMAT``, +``INSERT_METHOD``, and many more. +To accommodate the rendering of these arguments, specify the form +``mysql_argument_name="value"``. For example, to specify a table with +``ENGINE`` of ``InnoDB``, ``CHARSET`` of ``utf8mb4``, and ``KEY_BLOCK_SIZE`` +of ``1024``:: + + Table('mytable', metadata, + Column('data', String(32)), + mysql_engine='InnoDB', + mysql_charset='utf8mb4', + mysql_key_block_size="1024" + ) + +When supporting :ref:`mysql_mariadb_only_mode` mode, similar keys against +the "mariadb" prefix must be included as well. The values can of course +vary independently so that different settings on MySQL vs. MariaDB may +be maintained:: + + # support both "mysql" and "mariadb-only" engine URLs + + Table('mytable', metadata, + Column('data', String(32)), + + mysql_engine='InnoDB', + mariadb_engine='InnoDB', + + mysql_charset='utf8mb4', + mariadb_charset='utf8', + + mysql_key_block_size="1024" + mariadb_key_block_size="1024" + + ) + +The MySQL / MariaDB dialects will normally transfer any keyword specified as +``mysql_keyword_name`` to be rendered as ``KEYWORD_NAME`` in the +``CREATE TABLE`` statement. A handful of these names will render with a space +instead of an underscore; to support this, the MySQL dialect has awareness of +these particular names, which include ``DATA DIRECTORY`` +(e.g. ``mysql_data_directory``), ``CHARACTER SET`` (e.g. +``mysql_character_set``) and ``INDEX DIRECTORY`` (e.g. +``mysql_index_directory``). + +The most common argument is ``mysql_engine``, which refers to the storage +engine for the table. Historically, MySQL server installations would default +to ``MyISAM`` for this value, although newer versions may be defaulting +to ``InnoDB``. The ``InnoDB`` engine is typically preferred for its support +of transactions and foreign keys. + +A :class:`_schema.Table` +that is created in a MySQL / MariaDB database with a storage engine +of ``MyISAM`` will be essentially non-transactional, meaning any +INSERT/UPDATE/DELETE statement referring to this table will be invoked as +autocommit. It also will have no support for foreign key constraints; while +the ``CREATE TABLE`` statement accepts foreign key options, when using the +``MyISAM`` storage engine these arguments are discarded. Reflecting such a +table will also produce no foreign key constraint information. + +For fully atomic transactions as well as support for foreign key +constraints, all participating ``CREATE TABLE`` statements must specify a +transactional engine, which in the vast majority of cases is ``InnoDB``. + + +Case Sensitivity and Table Reflection +------------------------------------- + +Both MySQL and MariaDB have inconsistent support for case-sensitive identifier +names, basing support on specific details of the underlying +operating system. However, it has been observed that no matter +what case sensitivity behavior is present, the names of tables in +foreign key declarations are *always* received from the database +as all-lower case, making it impossible to accurately reflect a +schema where inter-related tables use mixed-case identifier names. 
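+
+For illustration, a minimal sketch of how the lower-cased names surface
+during reflection (the URL and the mixed-case table names here are
+hypothetical, chosen only for this example)::
+
+    from sqlalchemy import create_engine, inspect
+
+    engine = create_engine("mysql+pymysql://scott:tiger@localhost/test")
+
+    # assumes a table "Child" whose foreign key refers to table "Parent"
+    for fk in inspect(engine).get_foreign_keys("Child"):
+        # the referred table name is typically reported as "parent"
+        print(fk["referred_table"])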
+ +Therefore it is strongly advised that table names be declared as +all lower case both within SQLAlchemy as well as on the MySQL / MariaDB +database itself, especially if database reflection features are +to be used. + +.. _mysql_isolation_level: + +Transaction Isolation Level +--------------------------- + +All MySQL / MariaDB dialects support setting of transaction isolation level both via a +dialect-specific parameter :paramref:`_sa.create_engine.isolation_level` +accepted +by :func:`_sa.create_engine`, as well as the +:paramref:`.Connection.execution_options.isolation_level` argument as passed to +:meth:`_engine.Connection.execution_options`. +This feature works by issuing the +command ``SET SESSION TRANSACTION ISOLATION LEVEL `` for each new +connection. For the special AUTOCOMMIT isolation level, DBAPI-specific +techniques are used. + +To set isolation level using :func:`_sa.create_engine`:: + + engine = create_engine( + "mysql+mysqldb://scott:tiger@localhost/test", + isolation_level="READ UNCOMMITTED" + ) + +To set using per-connection execution options:: + + connection = engine.connect() + connection = connection.execution_options( + isolation_level="READ COMMITTED" + ) + +Valid values for ``isolation_level`` include: + +* ``READ COMMITTED`` +* ``READ UNCOMMITTED`` +* ``REPEATABLE READ`` +* ``SERIALIZABLE`` +* ``AUTOCOMMIT`` + +The special ``AUTOCOMMIT`` value makes use of the various "autocommit" +attributes provided by specific DBAPIs, and is currently supported by +MySQLdb, MySQL-Client, MySQL-Connector Python, and PyMySQL. Using it, +the database connection will return true for the value of +``SELECT @@autocommit;``. + +There are also more options for isolation level configurations, such as +"sub-engine" objects linked to a main :class:`_engine.Engine` which each apply +different isolation level settings. See the discussion at +:ref:`dbapi_autocommit` for background. + +.. seealso:: + + :ref:`dbapi_autocommit` + +AUTO_INCREMENT Behavior +----------------------- + +When creating tables, SQLAlchemy will automatically set ``AUTO_INCREMENT`` on +the first :class:`.Integer` primary key column which is not marked as a +foreign key:: + + >>> t = Table('mytable', metadata, + ... Column('mytable_id', Integer, primary_key=True) + ... ) + >>> t.create() + CREATE TABLE mytable ( + id INTEGER NOT NULL AUTO_INCREMENT, + PRIMARY KEY (id) + ) + +You can disable this behavior by passing ``False`` to the +:paramref:`_schema.Column.autoincrement` argument of :class:`_schema.Column`. +This flag +can also be used to enable auto-increment on a secondary column in a +multi-column key for some storage engines:: + + Table('mytable', metadata, + Column('gid', Integer, primary_key=True, autoincrement=False), + Column('id', Integer, primary_key=True) + ) + +.. _mysql_ss_cursors: + +Server Side Cursors +------------------- + +Server-side cursor support is available for the mysqlclient, PyMySQL, +mariadbconnector dialects and may also be available in others. This makes use +of either the "buffered=True/False" flag if available or by using a class such +as ``MySQLdb.cursors.SSCursor`` or ``pymysql.cursors.SSCursor`` internally. 
+ + +Server side cursors are enabled on a per-statement basis by using the +:paramref:`.Connection.execution_options.stream_results` connection execution +option:: + + with engine.connect() as conn: + result = conn.execution_options(stream_results=True).execute(text("select * from table")) + +Note that some kinds of SQL statements may not be supported with +server side cursors; generally, only SQL statements that return rows should be +used with this option. + +.. deprecated:: 1.4 The dialect-level server_side_cursors flag is deprecated + and will be removed in a future release. Please use the + :paramref:`_engine.Connection.stream_results` execution option for + unbuffered cursor support. + +.. seealso:: + + :ref:`engine_stream_results` + +.. _mysql_unicode: + +Unicode +------- + +Charset Selection +~~~~~~~~~~~~~~~~~ + +Most MySQL / MariaDB DBAPIs offer the option to set the client character set for +a connection. This is typically delivered using the ``charset`` parameter +in the URL, such as:: + + e = create_engine( + "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4") + +This charset is the **client character set** for the connection. Some +MySQL DBAPIs will default this to a value such as ``latin1``, and some +will make use of the ``default-character-set`` setting in the ``my.cnf`` +file as well. Documentation for the DBAPI in use should be consulted +for specific behavior. + +The encoding used for Unicode has traditionally been ``'utf8'``. However, for +MySQL versions 5.5.3 and MariaDB 5.5 on forward, a new MySQL-specific encoding +``'utf8mb4'`` has been introduced, and as of MySQL 8.0 a warning is emitted by +the server if plain ``utf8`` is specified within any server-side directives, +replaced with ``utf8mb3``. The rationale for this new encoding is due to the +fact that MySQL's legacy utf-8 encoding only supports codepoints up to three +bytes instead of four. Therefore, when communicating with a MySQL or MariaDB +database that includes codepoints more than three bytes in size, this new +charset is preferred, if supported by both the database as well as the client +DBAPI, as in:: + + e = create_engine( + "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4") + +All modern DBAPIs should support the ``utf8mb4`` charset. + +In order to use ``utf8mb4`` encoding for a schema that was created with legacy +``utf8``, changes to the MySQL/MariaDB schema and/or server configuration may be +required. + +.. seealso:: + + `The utf8mb4 Character Set \ + `_ - \ + in the MySQL documentation + +.. _mysql_binary_introducer: + +Dealing with Binary Data Warnings and Unicode +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +MySQL versions 5.6, 5.7 and later (not MariaDB at the time of this writing) now +emit a warning when attempting to pass binary data to the database, while a +character set encoding is also in place, when the binary data itself is not +valid for that encoding:: + + default.py:509: Warning: (1300, "Invalid utf8mb4 character string: + 'F9876A'") + cursor.execute(statement, parameters) + +This warning is due to the fact that the MySQL client library is attempting to +interpret the binary string as a unicode object even if a datatype such +as :class:`.LargeBinary` is in use. 
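+
+As a hedged sketch of the kind of statement that can trigger the warning
+(the table name and URL are hypothetical and shown only for illustration)::
+
+    from sqlalchemy import create_engine, text
+
+    engine = create_engine(
+        "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4")
+
+    with engine.begin() as conn:
+        # raw bytes that are not valid utf8mb4; without a binary
+        # introducer the driver may emit warning 1300
+        conn.execute(
+            text("INSERT INTO some_table (data) VALUES (:data)"),
+            {"data": b"\xf9\x87\x6a"},
+        )
+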
To resolve this, the SQL statement requires +a binary "character set introducer" be present before any non-NULL value +that renders like this:: + + INSERT INTO table (data) VALUES (_binary %s) + +These character set introducers are provided by the DBAPI driver, assuming the +use of mysqlclient or PyMySQL (both of which are recommended). Add the query +string parameter ``binary_prefix=true`` to the URL to repair this warning:: + + # mysqlclient + engine = create_engine( + "mysql+mysqldb://scott:tiger@localhost/test?charset=utf8mb4&binary_prefix=true") + + # PyMySQL + engine = create_engine( + "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4&binary_prefix=true") + + +The ``binary_prefix`` flag may or may not be supported by other MySQL drivers. + +SQLAlchemy itself cannot render this ``_binary`` prefix reliably, as it does +not work with the NULL value, which is valid to be sent as a bound parameter. +As the MySQL driver renders parameters directly into the SQL string, it's the +most efficient place for this additional keyword to be passed. + +.. seealso:: + + `Character set introducers `_ - on the MySQL website + + +ANSI Quoting Style +------------------ + +MySQL / MariaDB feature two varieties of identifier "quoting style", one using +backticks and the other using quotes, e.g. ```some_identifier``` vs. +``"some_identifier"``. All MySQL dialects detect which version +is in use by checking the value of :ref:`sql_mode` when a connection is first +established with a particular :class:`_engine.Engine`. +This quoting style comes +into play when rendering table and column names as well as when reflecting +existing database structures. The detection is entirely automatic and +no special configuration is needed to use either quoting style. + + +.. _mysql_sql_mode: + +Changing the sql_mode +--------------------- + +MySQL supports operating in multiple +`Server SQL Modes `_ for +both Servers and Clients. To change the ``sql_mode`` for a given application, a +developer can leverage SQLAlchemy's Events system. + +In the following example, the event system is used to set the ``sql_mode`` on +the ``first_connect`` and ``connect`` events:: + + from sqlalchemy import create_engine, event + + eng = create_engine("mysql+mysqldb://scott:tiger@localhost/test", echo='debug') + + # `insert=True` will ensure this is the very first listener to run + @event.listens_for(eng, "connect", insert=True) + def connect(dbapi_connection, connection_record): + cursor = dbapi_connection.cursor() + cursor.execute("SET sql_mode = 'STRICT_ALL_TABLES'") + + conn = eng.connect() + +In the example illustrated above, the "connect" event will invoke the "SET" +statement on the connection at the moment a particular DBAPI connection is +first created for a given Pool, before the connection is made available to the +connection pool. Additionally, because the function was registered with +``insert=True``, it will be prepended to the internal list of registered +functions. + + +MySQL / MariaDB SQL Extensions +------------------------------ + +Many of the MySQL / MariaDB SQL extensions are handled through SQLAlchemy's generic +function and operator support:: + + table.select(table.c.password==func.md5('plaintext')) + table.select(table.c.username.op('regexp')('^[a-d]')) + +And of course any valid SQL statement can be executed as a string as well. + +Some limited direct support for MySQL / MariaDB extensions to SQL is currently +available. 
+ +* INSERT..ON DUPLICATE KEY UPDATE: See + :ref:`mysql_insert_on_duplicate_key_update` + +* SELECT pragma, use :meth:`_expression.Select.prefix_with` and + :meth:`_query.Query.prefix_with`:: + + select(...).prefix_with(['HIGH_PRIORITY', 'SQL_SMALL_RESULT']) + +* UPDATE with LIMIT:: + + update(..., mysql_limit=10, mariadb_limit=10) + +* optimizer hints, use :meth:`_expression.Select.prefix_with` and + :meth:`_query.Query.prefix_with`:: + + select(...).prefix_with("/*+ NO_RANGE_OPTIMIZATION(t4 PRIMARY) */") + +* index hints, use :meth:`_expression.Select.with_hint` and + :meth:`_query.Query.with_hint`:: + + select(...).with_hint(some_table, "USE INDEX xyz") + +* MATCH operator support:: + + from sqlalchemy.dialects.mysql import match + select(...).where(match(col1, col2, against="some expr").in_boolean_mode()) + + .. seealso:: + + :class:`_mysql.match` + +INSERT/DELETE...RETURNING +------------------------- + +The MariaDB dialect supports 10.5+'s ``INSERT..RETURNING`` and +``DELETE..RETURNING`` (10.0+) syntaxes. ``INSERT..RETURNING`` may be used +automatically in some cases in order to fetch newly generated identifiers in +place of the traditional approach of using ``cursor.lastrowid``, however +``cursor.lastrowid`` is currently still preferred for simple single-statement +cases for its better performance. + +To specify an explicit ``RETURNING`` clause, use the +:meth:`._UpdateBase.returning` method on a per-statement basis:: + + # INSERT..RETURNING + result = connection.execute( + table.insert(). + values(name='foo'). + returning(table.c.col1, table.c.col2) + ) + print(result.all()) + + # DELETE..RETURNING + result = connection.execute( + table.delete(). + where(table.c.name=='foo'). + returning(table.c.col1, table.c.col2) + ) + print(result.all()) + +.. versionadded:: 2.0 Added support for MariaDB RETURNING + +.. _mysql_insert_on_duplicate_key_update: + +INSERT...ON DUPLICATE KEY UPDATE (Upsert) +------------------------------------------ + +MySQL / MariaDB allow "upserts" (update or insert) +of rows into a table via the ``ON DUPLICATE KEY UPDATE`` clause of the +``INSERT`` statement. A candidate row will only be inserted if that row does +not match an existing primary or unique key in the table; otherwise, an UPDATE +will be performed. The statement allows for separate specification of the +values to INSERT versus the values for UPDATE. + +SQLAlchemy provides ``ON DUPLICATE KEY UPDATE`` support via the MySQL-specific +:func:`.mysql.insert()` function, which provides +the generative method :meth:`~.mysql.Insert.on_duplicate_key_update`: + +.. sourcecode:: pycon+sql + + >>> from sqlalchemy.dialects.mysql import insert + + >>> insert_stmt = insert(my_table).values( + ... id='some_existing_id', + ... data='inserted value') + + >>> on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update( + ... data=insert_stmt.inserted.data, + ... status='U' + ... ) + >>> print(on_duplicate_key_stmt) + {printsql}INSERT INTO my_table (id, data) VALUES (%s, %s) + ON DUPLICATE KEY UPDATE data = VALUES(data), status = %s + + +Unlike PostgreSQL's "ON CONFLICT" phrase, the "ON DUPLICATE KEY UPDATE" +phrase will always match on any primary key or unique key, and will always +perform an UPDATE if there's a match; there are no options for it to raise +an error or to skip performing an UPDATE. + +``ON DUPLICATE KEY UPDATE`` is used to perform an update of the already +existing row, using any combination of new values as well as values +from the proposed insertion. 
+These values are normally specified using
+keyword arguments passed to the
+:meth:`_mysql.Insert.on_duplicate_key_update` method,
+given column key values (usually the name of the column, unless it
+specifies :paramref:`_schema.Column.key`) as keys and literal or SQL
+expressions as values:
+
+.. sourcecode:: pycon+sql
+
+    >>> insert_stmt = insert(my_table).values(
+    ...     id='some_existing_id',
+    ...     data='inserted value')
+
+    >>> on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
+    ...     data="some data",
+    ...     updated_at=func.current_timestamp(),
+    ... )
+
+    >>> print(on_duplicate_key_stmt)
+    {printsql}INSERT INTO my_table (id, data) VALUES (%s, %s)
+    ON DUPLICATE KEY UPDATE data = %s, updated_at = CURRENT_TIMESTAMP
+
+In a manner similar to that of :meth:`.UpdateBase.values`, other parameter
+forms are accepted, including a single dictionary:
+
+.. sourcecode:: pycon+sql
+
+    >>> on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
+    ...     {"data": "some data", "updated_at": func.current_timestamp()},
+    ... )
+
+as well as a list of 2-tuples, which will automatically provide
+a parameter-ordered UPDATE statement in a manner similar to that described
+at :ref:`tutorial_parameter_ordered_updates`. Unlike the
+:class:`_expression.Update` object, no special flag is needed to specify the
+intent since the argument form in this context is unambiguous:
+
+.. sourcecode:: pycon+sql
+
+    >>> on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
+    ...     [
+    ...         ("data", "some data"),
+    ...         ("updated_at", func.current_timestamp()),
+    ...     ]
+    ... )
+
+    >>> print(on_duplicate_key_stmt)
+    {printsql}INSERT INTO my_table (id, data) VALUES (%s, %s)
+    ON DUPLICATE KEY UPDATE data = %s, updated_at = CURRENT_TIMESTAMP
+
+.. versionchanged:: 1.3 support for parameter-ordered UPDATE clause within
+   MySQL ON DUPLICATE KEY UPDATE
+
+.. warning::
+
+    The :meth:`_mysql.Insert.on_duplicate_key_update`
+    method does **not** take into
+    account Python-side default UPDATE values or generation functions,
+    e.g. those specified using :paramref:`_schema.Column.onupdate`.
+    These values will not be exercised for an ON DUPLICATE KEY style of UPDATE,
+    unless they are manually specified explicitly in the parameters.
+
+
+In order to refer to the proposed insertion row, the special alias
+:attr:`_mysql.Insert.inserted` is available as an attribute on
+the :class:`_mysql.Insert` object; this object is a
+:class:`_expression.ColumnCollection` which contains all columns of the target
+table:
+
+.. sourcecode:: pycon+sql
+
+    >>> stmt = insert(my_table).values(
+    ...     id='some_id',
+    ...     data='inserted value',
+    ...     author='jlh')
+
+    >>> do_update_stmt = stmt.on_duplicate_key_update(
+    ...     data="updated value",
+    ...     author=stmt.inserted.author
+    ... )
+
+    >>> print(do_update_stmt)
+    {printsql}INSERT INTO my_table (id, data, author) VALUES (%s, %s, %s)
+    ON DUPLICATE KEY UPDATE data = %s, author = VALUES(author)
+
+When rendered, the "inserted" namespace will produce the expression
+``VALUES(<columnname>)``.
+
+.. versionadded:: 1.2 Added support for MySQL ON DUPLICATE KEY UPDATE clause
+
+
+rowcount Support
+----------------
+
+SQLAlchemy standardizes the DBAPI ``cursor.rowcount`` attribute to be the
+usual definition of "number of rows matched by an UPDATE or DELETE" statement.
+This is in contradiction to the default setting on most MySQL DBAPI drivers,
+which is "number of rows actually modified/deleted".
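+
+As a quick illustration (a sketch with a hypothetical table, not output
+from any particular driver), consider an UPDATE that matches a row but
+leaves it unchanged::
+
+    with engine.begin() as conn:
+        # the row already contains data="x"; the drivers' default
+        # semantics would report 0 (no row actually changed), while
+        # "rows matched" semantics report 1
+        result = conn.execute(
+            my_table.update().where(my_table.c.id == 1).values(data="x")
+        )
+        print(result.rowcount)
+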
For this reason, the +SQLAlchemy MySQL dialects always add the ``constants.CLIENT.FOUND_ROWS`` +flag, or whatever is equivalent for the target dialect, upon connection. +This setting is currently hardcoded. + +.. seealso:: + + :attr:`_engine.CursorResult.rowcount` + + +.. _mysql_indexes: + +MySQL / MariaDB- Specific Index Options +----------------------------------------- + +MySQL and MariaDB-specific extensions to the :class:`.Index` construct are available. + +Index Length +~~~~~~~~~~~~~ + +MySQL and MariaDB both provide an option to create index entries with a certain length, where +"length" refers to the number of characters or bytes in each value which will +become part of the index. SQLAlchemy provides this feature via the +``mysql_length`` and/or ``mariadb_length`` parameters:: + + Index('my_index', my_table.c.data, mysql_length=10, mariadb_length=10) + + Index('a_b_idx', my_table.c.a, my_table.c.b, mysql_length={'a': 4, + 'b': 9}) + + Index('a_b_idx', my_table.c.a, my_table.c.b, mariadb_length={'a': 4, + 'b': 9}) + +Prefix lengths are given in characters for nonbinary string types and in bytes +for binary string types. The value passed to the keyword argument *must* be +either an integer (and, thus, specify the same prefix length value for all +columns of the index) or a dict in which keys are column names and values are +prefix length values for corresponding columns. MySQL and MariaDB only allow a +length for a column of an index if it is for a CHAR, VARCHAR, TEXT, BINARY, +VARBINARY and BLOB. + +Index Prefixes +~~~~~~~~~~~~~~ + +MySQL storage engines permit you to specify an index prefix when creating +an index. SQLAlchemy provides this feature via the +``mysql_prefix`` parameter on :class:`.Index`:: + + Index('my_index', my_table.c.data, mysql_prefix='FULLTEXT') + +The value passed to the keyword argument will be simply passed through to the +underlying CREATE INDEX, so it *must* be a valid index prefix for your MySQL +storage engine. + +.. seealso:: + + `CREATE INDEX `_ - MySQL documentation + +Index Types +~~~~~~~~~~~~~ + +Some MySQL storage engines permit you to specify an index type when creating +an index or primary key constraint. SQLAlchemy provides this feature via the +``mysql_using`` parameter on :class:`.Index`:: + + Index('my_index', my_table.c.data, mysql_using='hash', mariadb_using='hash') + +As well as the ``mysql_using`` parameter on :class:`.PrimaryKeyConstraint`:: + + PrimaryKeyConstraint("data", mysql_using='hash', mariadb_using='hash') + +The value passed to the keyword argument will be simply passed through to the +underlying CREATE INDEX or PRIMARY KEY clause, so it *must* be a valid index +type for your MySQL storage engine. + +More information can be found at: + +https://dev.mysql.com/doc/refman/5.0/en/create-index.html + +https://dev.mysql.com/doc/refman/5.0/en/create-table.html + +Index Parsers +~~~~~~~~~~~~~ + +CREATE FULLTEXT INDEX in MySQL also supports a "WITH PARSER" option. This +is available using the keyword argument ``mysql_with_parser``:: + + Index( + 'my_index', my_table.c.data, + mysql_prefix='FULLTEXT', mysql_with_parser="ngram", + mariadb_prefix='FULLTEXT', mariadb_with_parser="ngram", + ) + +.. versionadded:: 1.3 + + +.. _mysql_foreign_keys: + +MySQL / MariaDB Foreign Keys +----------------------------- + +MySQL and MariaDB's behavior regarding foreign keys has some important caveats. 
+ +Foreign Key Arguments to Avoid +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Neither MySQL nor MariaDB support the foreign key arguments "DEFERRABLE", "INITIALLY", +or "MATCH". Using the ``deferrable`` or ``initially`` keyword argument with +:class:`_schema.ForeignKeyConstraint` or :class:`_schema.ForeignKey` +will have the effect of +these keywords being rendered in a DDL expression, which will then raise an +error on MySQL or MariaDB. In order to use these keywords on a foreign key while having +them ignored on a MySQL / MariaDB backend, use a custom compile rule:: + + from sqlalchemy.ext.compiler import compiles + from sqlalchemy.schema import ForeignKeyConstraint + + @compiles(ForeignKeyConstraint, "mysql", "mariadb") + def process(element, compiler, **kw): + element.deferrable = element.initially = None + return compiler.visit_foreign_key_constraint(element, **kw) + +The "MATCH" keyword is in fact more insidious, and is explicitly disallowed +by SQLAlchemy in conjunction with the MySQL or MariaDB backends. This argument is +silently ignored by MySQL / MariaDB, but in addition has the effect of ON UPDATE and ON +DELETE options also being ignored by the backend. Therefore MATCH should +never be used with the MySQL / MariaDB backends; as is the case with DEFERRABLE and +INITIALLY, custom compilation rules can be used to correct a +ForeignKeyConstraint at DDL definition time. + +Reflection of Foreign Key Constraints +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Not all MySQL / MariaDB storage engines support foreign keys. When using the +very common ``MyISAM`` MySQL storage engine, the information loaded by table +reflection will not include foreign keys. For these tables, you may supply a +:class:`~sqlalchemy.ForeignKeyConstraint` at reflection time:: + + Table('mytable', metadata, + ForeignKeyConstraint(['other_id'], ['othertable.other_id']), + autoload_with=engine + ) + +.. seealso:: + + :ref:`mysql_storage_engines` + +.. _mysql_unique_constraints: + +MySQL / MariaDB Unique Constraints and Reflection +---------------------------------------------------- + +SQLAlchemy supports both the :class:`.Index` construct with the +flag ``unique=True``, indicating a UNIQUE index, as well as the +:class:`.UniqueConstraint` construct, representing a UNIQUE constraint. +Both objects/syntaxes are supported by MySQL / MariaDB when emitting DDL to create +these constraints. However, MySQL / MariaDB does not have a unique constraint +construct that is separate from a unique index; that is, the "UNIQUE" +constraint on MySQL / MariaDB is equivalent to creating a "UNIQUE INDEX". + +When reflecting these constructs, the +:meth:`_reflection.Inspector.get_indexes` +and the :meth:`_reflection.Inspector.get_unique_constraints` +methods will **both** +return an entry for a UNIQUE index in MySQL / MariaDB. However, when performing +full table reflection using ``Table(..., autoload_with=engine)``, +the :class:`.UniqueConstraint` construct is +**not** part of the fully reflected :class:`_schema.Table` construct under any +circumstances; this construct is always represented by a :class:`.Index` +with the ``unique=True`` setting present in the :attr:`_schema.Table.indexes` +collection. + + +TIMESTAMP / DATETIME issues +--------------------------- + +.. 
_mysql_timestamp_onupdate: + +Rendering ON UPDATE CURRENT TIMESTAMP for MySQL / MariaDB's explicit_defaults_for_timestamp +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +MySQL / MariaDB have historically expanded the DDL for the :class:`_types.TIMESTAMP` +datatype into the phrase "TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE +CURRENT_TIMESTAMP", which includes non-standard SQL that automatically updates +the column with the current timestamp when an UPDATE occurs, eliminating the +usual need to use a trigger in such a case where server-side update changes are +desired. + +MySQL 5.6 introduced a new flag `explicit_defaults_for_timestamp +`_ which disables the above behavior, +and in MySQL 8 this flag defaults to true, meaning in order to get a MySQL +"on update timestamp" without changing this flag, the above DDL must be +rendered explicitly. Additionally, the same DDL is valid for use of the +``DATETIME`` datatype as well. + +SQLAlchemy's MySQL dialect does not yet have an option to generate +MySQL's "ON UPDATE CURRENT_TIMESTAMP" clause, noting that this is not a general +purpose "ON UPDATE" as there is no such syntax in standard SQL. SQLAlchemy's +:paramref:`_schema.Column.server_onupdate` parameter is currently not related +to this special MySQL behavior. + +To generate this DDL, make use of the :paramref:`_schema.Column.server_default` +parameter and pass a textual clause that also includes the ON UPDATE clause:: + + from sqlalchemy import Table, MetaData, Column, Integer, String, TIMESTAMP + from sqlalchemy import text + + metadata = MetaData() + + mytable = Table( + "mytable", + metadata, + Column('id', Integer, primary_key=True), + Column('data', String(50)), + Column( + 'last_updated', + TIMESTAMP, + server_default=text("CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") + ) + ) + +The same instructions apply to use of the :class:`_types.DateTime` and +:class:`_types.DATETIME` datatypes:: + + from sqlalchemy import DateTime + + mytable = Table( + "mytable", + metadata, + Column('id', Integer, primary_key=True), + Column('data', String(50)), + Column( + 'last_updated', + DateTime, + server_default=text("CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") + ) + ) + + +Even though the :paramref:`_schema.Column.server_onupdate` feature does not +generate this DDL, it still may be desirable to signal to the ORM that this +updated value should be fetched. This syntax looks like the following:: + + from sqlalchemy.schema import FetchedValue + + class MyClass(Base): + __tablename__ = 'mytable' + + id = Column(Integer, primary_key=True) + data = Column(String(50)) + last_updated = Column( + TIMESTAMP, + server_default=text("CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"), + server_onupdate=FetchedValue() + ) + + +.. _mysql_timestamp_null: + +TIMESTAMP Columns and NULL +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +MySQL historically enforces that a column which specifies the +TIMESTAMP datatype implicitly includes a default value of +CURRENT_TIMESTAMP, even though this is not stated, and additionally +sets the column as NOT NULL, the opposite behavior vs. 
that of all +other datatypes:: + + mysql> CREATE TABLE ts_test ( + -> a INTEGER, + -> b INTEGER NOT NULL, + -> c TIMESTAMP, + -> d TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + -> e TIMESTAMP NULL); + Query OK, 0 rows affected (0.03 sec) + + mysql> SHOW CREATE TABLE ts_test; + +---------+----------------------------------------------------- + | Table | Create Table + +---------+----------------------------------------------------- + | ts_test | CREATE TABLE `ts_test` ( + `a` int(11) DEFAULT NULL, + `b` int(11) NOT NULL, + `c` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `d` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `e` timestamp NULL DEFAULT NULL + ) ENGINE=MyISAM DEFAULT CHARSET=latin1 + +Above, we see that an INTEGER column defaults to NULL, unless it is specified +with NOT NULL. But when the column is of type TIMESTAMP, an implicit +default of CURRENT_TIMESTAMP is generated which also coerces the column +to be a NOT NULL, even though we did not specify it as such. + +This behavior of MySQL can be changed on the MySQL side using the +`explicit_defaults_for_timestamp +`_ configuration flag introduced in +MySQL 5.6. With this server setting enabled, TIMESTAMP columns behave like +any other datatype on the MySQL side with regards to defaults and nullability. + +However, to accommodate the vast majority of MySQL databases that do not +specify this new flag, SQLAlchemy emits the "NULL" specifier explicitly with +any TIMESTAMP column that does not specify ``nullable=False``. In order to +accommodate newer databases that specify ``explicit_defaults_for_timestamp``, +SQLAlchemy also emits NOT NULL for TIMESTAMP columns that do specify +``nullable=False``. The following example illustrates:: + + from sqlalchemy import MetaData, Integer, Table, Column, text + from sqlalchemy.dialects.mysql import TIMESTAMP + + m = MetaData() + t = Table('ts_test', m, + Column('a', Integer), + Column('b', Integer, nullable=False), + Column('c', TIMESTAMP), + Column('d', TIMESTAMP, nullable=False) + ) + + + from sqlalchemy import create_engine + e = create_engine("mysql+mysqldb://scott:tiger@localhost/test", echo=True) + m.create_all(e) + +output:: + + CREATE TABLE ts_test ( + a INTEGER, + b INTEGER NOT NULL, + c TIMESTAMP NULL, + d TIMESTAMP NOT NULL + ) + +""" # noqa +from __future__ import annotations + +from array import array as _array +from collections import defaultdict +from itertools import compress +import re +from typing import cast + +from . 
import reflection as _reflection +from .enumerated import ENUM +from .enumerated import SET +from .json import JSON +from .json import JSONIndexType +from .json import JSONPathType +from .reserved_words import RESERVED_WORDS_MARIADB +from .reserved_words import RESERVED_WORDS_MYSQL +from .types import _FloatType +from .types import _IntegerType +from .types import _MatchType +from .types import _NumericType +from .types import _StringType +from .types import BIGINT +from .types import BIT +from .types import CHAR +from .types import DATETIME +from .types import DECIMAL +from .types import DOUBLE +from .types import FLOAT +from .types import INTEGER +from .types import LONGBLOB +from .types import LONGTEXT +from .types import MEDIUMBLOB +from .types import MEDIUMINT +from .types import MEDIUMTEXT +from .types import NCHAR +from .types import NUMERIC +from .types import NVARCHAR +from .types import REAL +from .types import SMALLINT +from .types import TEXT +from .types import TIME +from .types import TIMESTAMP +from .types import TINYBLOB +from .types import TINYINT +from .types import TINYTEXT +from .types import VARCHAR +from .types import YEAR +from ... import exc +from ... import literal_column +from ... import log +from ... import schema as sa_schema +from ... import sql +from ... import util +from ...engine import cursor as _cursor +from ...engine import default +from ...engine import reflection +from ...engine.reflection import ReflectionDefaults +from ...sql import coercions +from ...sql import compiler +from ...sql import elements +from ...sql import functions +from ...sql import operators +from ...sql import roles +from ...sql import sqltypes +from ...sql import util as sql_util +from ...sql import visitors +from ...sql.compiler import InsertmanyvaluesSentinelOpts +from ...sql.compiler import SQLCompiler +from ...sql.schema import SchemaConst +from ...types import BINARY +from ...types import BLOB +from ...types import BOOLEAN +from ...types import DATE +from ...types import UUID +from ...types import VARBINARY +from ...util import topological + + +SET_RE = re.compile( + r"\s*SET\s+(?:(?:GLOBAL|SESSION)\s+)?\w", re.I | re.UNICODE +) + +# old names +MSTime = TIME +MSSet = SET +MSEnum = ENUM +MSLongBlob = LONGBLOB +MSMediumBlob = MEDIUMBLOB +MSTinyBlob = TINYBLOB +MSBlob = BLOB +MSBinary = BINARY +MSVarBinary = VARBINARY +MSNChar = NCHAR +MSNVarChar = NVARCHAR +MSChar = CHAR +MSString = VARCHAR +MSLongText = LONGTEXT +MSMediumText = MEDIUMTEXT +MSTinyText = TINYTEXT +MSText = TEXT +MSYear = YEAR +MSTimeStamp = TIMESTAMP +MSBit = BIT +MSSmallInteger = SMALLINT +MSTinyInteger = TINYINT +MSMediumInteger = MEDIUMINT +MSBigInteger = BIGINT +MSNumeric = NUMERIC +MSDecimal = DECIMAL +MSDouble = DOUBLE +MSReal = REAL +MSFloat = FLOAT +MSInteger = INTEGER + +colspecs = { + _IntegerType: _IntegerType, + _NumericType: _NumericType, + _FloatType: _FloatType, + sqltypes.Numeric: NUMERIC, + sqltypes.Float: FLOAT, + sqltypes.Double: DOUBLE, + sqltypes.Time: TIME, + sqltypes.Enum: ENUM, + sqltypes.MatchType: _MatchType, + sqltypes.JSON: JSON, + sqltypes.JSON.JSONIndexType: JSONIndexType, + sqltypes.JSON.JSONPathType: JSONPathType, +} + +# Everything 3.23 through 5.1 excepting OpenGIS types. 
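+# The ischema_names mapping below associates type names as they are
+# reported by the server during reflection with the dialect's type classes.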
+ischema_names = { + "bigint": BIGINT, + "binary": BINARY, + "bit": BIT, + "blob": BLOB, + "boolean": BOOLEAN, + "char": CHAR, + "date": DATE, + "datetime": DATETIME, + "decimal": DECIMAL, + "double": DOUBLE, + "enum": ENUM, + "fixed": DECIMAL, + "float": FLOAT, + "int": INTEGER, + "integer": INTEGER, + "json": JSON, + "longblob": LONGBLOB, + "longtext": LONGTEXT, + "mediumblob": MEDIUMBLOB, + "mediumint": MEDIUMINT, + "mediumtext": MEDIUMTEXT, + "nchar": NCHAR, + "nvarchar": NVARCHAR, + "numeric": NUMERIC, + "set": SET, + "smallint": SMALLINT, + "text": TEXT, + "time": TIME, + "timestamp": TIMESTAMP, + "tinyblob": TINYBLOB, + "tinyint": TINYINT, + "tinytext": TINYTEXT, + "uuid": UUID, + "varbinary": VARBINARY, + "varchar": VARCHAR, + "year": YEAR, +} + + +class MySQLExecutionContext(default.DefaultExecutionContext): + def post_exec(self): + if ( + self.isdelete + and cast(SQLCompiler, self.compiled).effective_returning + and not self.cursor.description + ): + # All MySQL/mariadb drivers appear to not include + # cursor.description for DELETE..RETURNING with no rows if the + # WHERE criteria is a straight "false" condition such as our EMPTY + # IN condition. manufacture an empty result in this case (issue + # #10505) + # + # taken from cx_Oracle implementation + self.cursor_fetch_strategy = ( + _cursor.FullyBufferedCursorFetchStrategy( + self.cursor, + [ + (entry.keyname, None) + for entry in cast( + SQLCompiler, self.compiled + )._result_columns + ], + [], + ) + ) + + def create_server_side_cursor(self): + if self.dialect.supports_server_side_cursors: + return self._dbapi_connection.cursor(self.dialect._sscursor) + else: + raise NotImplementedError() + + def fire_sequence(self, seq, type_): + return self._execute_scalar( + ( + "select nextval(%s)" + % self.identifier_preparer.format_sequence(seq) + ), + type_, + ) + + +class MySQLCompiler(compiler.SQLCompiler): + render_table_with_column_in_update_from = True + """Overridden from base SQLCompiler value""" + + extract_map = compiler.SQLCompiler.extract_map.copy() + extract_map.update({"milliseconds": "millisecond"}) + + def default_from(self): + """Called when a ``SELECT`` statement has no froms, + and no ``FROM`` clause is to be appended. 
+ + """ + if self.stack: + stmt = self.stack[-1]["selectable"] + if stmt._where_criteria: + return " FROM DUAL" + + return "" + + def visit_random_func(self, fn, **kw): + return "rand%s" % self.function_argspec(fn) + + def visit_rollup_func(self, fn, **kw): + clause = ", ".join( + elem._compiler_dispatch(self, **kw) for elem in fn.clauses + ) + return f"{clause} WITH ROLLUP" + + def visit_aggregate_strings_func(self, fn, **kw): + expr, delimeter = ( + elem._compiler_dispatch(self, **kw) for elem in fn.clauses + ) + return f"group_concat({expr} SEPARATOR {delimeter})" + + def visit_sequence(self, seq, **kw): + return "nextval(%s)" % self.preparer.format_sequence(seq) + + def visit_sysdate_func(self, fn, **kw): + return "SYSDATE()" + + def _render_json_extract_from_binary(self, binary, operator, **kw): + # note we are intentionally calling upon the process() calls in the + # order in which they appear in the SQL String as this is used + # by positional parameter rendering + + if binary.type._type_affinity is sqltypes.JSON: + return "JSON_EXTRACT(%s, %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + # for non-JSON, MySQL doesn't handle JSON null at all so it has to + # be explicit + case_expression = "CASE JSON_EXTRACT(%s, %s) WHEN 'null' THEN NULL" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + if binary.type._type_affinity is sqltypes.Integer: + type_expression = ( + "ELSE CAST(JSON_EXTRACT(%s, %s) AS SIGNED INTEGER)" + % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + ) + elif binary.type._type_affinity is sqltypes.Numeric: + if ( + binary.type.scale is not None + and binary.type.precision is not None + ): + # using DECIMAL here because MySQL does not recognize NUMERIC + type_expression = ( + "ELSE CAST(JSON_EXTRACT(%s, %s) AS DECIMAL(%s, %s))" + % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + binary.type.precision, + binary.type.scale, + ) + ) + else: + # FLOAT / REAL not added in MySQL til 8.0.17 + type_expression = ( + "ELSE JSON_EXTRACT(%s, %s)+0.0000000000000000000000" + % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + ) + elif binary.type._type_affinity is sqltypes.Boolean: + # the NULL handling is particularly weird with boolean, so + # explicitly return true/false constants + type_expression = "WHEN true THEN true ELSE false" + elif binary.type._type_affinity is sqltypes.String: + # (gord): this fails with a JSON value that's a four byte unicode + # string. SQLite has the same problem at the moment + # (zzzeek): I'm not really sure. let's take a look at a test case + # that hits each backend and maybe make a requires rule for it? 
+ type_expression = "ELSE JSON_UNQUOTE(JSON_EXTRACT(%s, %s))" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + else: + # other affinity....this is not expected right now + type_expression = "ELSE JSON_EXTRACT(%s, %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + return case_expression + " " + type_expression + " END" + + def visit_json_getitem_op_binary(self, binary, operator, **kw): + return self._render_json_extract_from_binary(binary, operator, **kw) + + def visit_json_path_getitem_op_binary(self, binary, operator, **kw): + return self._render_json_extract_from_binary(binary, operator, **kw) + + def visit_on_duplicate_key_update(self, on_duplicate, **kw): + statement = self.current_executable + + if on_duplicate._parameter_ordering: + parameter_ordering = [ + coercions.expect(roles.DMLColumnRole, key) + for key in on_duplicate._parameter_ordering + ] + ordered_keys = set(parameter_ordering) + cols = [ + statement.table.c[key] + for key in parameter_ordering + if key in statement.table.c + ] + [c for c in statement.table.c if c.key not in ordered_keys] + else: + cols = statement.table.c + + clauses = [] + + requires_mysql8_alias = statement.select is None and ( + self.dialect._requires_alias_for_on_duplicate_key + ) + + if requires_mysql8_alias: + if statement.table.name.lower() == "new": + _on_dup_alias_name = "new_1" + else: + _on_dup_alias_name = "new" + + # traverses through all table columns to preserve table column order + for column in (col for col in cols if col.key in on_duplicate.update): + val = on_duplicate.update[column.key] + + if coercions._is_literal(val): + val = elements.BindParameter(None, val, type_=column.type) + value_text = self.process(val.self_group(), use_schema=False) + else: + + def replace(obj): + if ( + isinstance(obj, elements.BindParameter) + and obj.type._isnull + ): + obj = obj._clone() + obj.type = column.type + return obj + elif ( + isinstance(obj, elements.ColumnClause) + and obj.table is on_duplicate.inserted_alias + ): + if requires_mysql8_alias: + column_literal_clause = ( + f"{_on_dup_alias_name}." 
+ f"{self.preparer.quote(obj.name)}" + ) + else: + column_literal_clause = ( + f"VALUES({self.preparer.quote(obj.name)})" + ) + return literal_column(column_literal_clause) + else: + # element is not replaced + return None + + val = visitors.replacement_traverse(val, {}, replace) + value_text = self.process(val.self_group(), use_schema=False) + + name_text = self.preparer.quote(column.name) + clauses.append("%s = %s" % (name_text, value_text)) + + non_matching = set(on_duplicate.update) - {c.key for c in cols} + if non_matching: + util.warn( + "Additional column names not matching " + "any column keys in table '%s': %s" + % ( + self.statement.table.name, + (", ".join("'%s'" % c for c in non_matching)), + ) + ) + + if requires_mysql8_alias: + return ( + f"AS {_on_dup_alias_name} " + f"ON DUPLICATE KEY UPDATE {', '.join(clauses)}" + ) + else: + return f"ON DUPLICATE KEY UPDATE {', '.join(clauses)}" + + def visit_concat_op_expression_clauselist( + self, clauselist, operator, **kw + ): + return "concat(%s)" % ( + ", ".join(self.process(elem, **kw) for elem in clauselist.clauses) + ) + + def visit_concat_op_binary(self, binary, operator, **kw): + return "concat(%s, %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + _match_valid_flag_combinations = frozenset( + ( + # (boolean_mode, natural_language, query_expansion) + (False, False, False), + (True, False, False), + (False, True, False), + (False, False, True), + (False, True, True), + ) + ) + + _match_flag_expressions = ( + "IN BOOLEAN MODE", + "IN NATURAL LANGUAGE MODE", + "WITH QUERY EXPANSION", + ) + + def visit_mysql_match(self, element, **kw): + return self.visit_match_op_binary(element, element.operator, **kw) + + def visit_match_op_binary(self, binary, operator, **kw): + """ + Note that `mysql_boolean_mode` is enabled by default because of + backward compatibility + """ + + modifiers = binary.modifiers + + boolean_mode = modifiers.get("mysql_boolean_mode", True) + natural_language = modifiers.get("mysql_natural_language", False) + query_expansion = modifiers.get("mysql_query_expansion", False) + + flag_combination = (boolean_mode, natural_language, query_expansion) + + if flag_combination not in self._match_valid_flag_combinations: + flags = ( + "in_boolean_mode=%s" % boolean_mode, + "in_natural_language_mode=%s" % natural_language, + "with_query_expansion=%s" % query_expansion, + ) + + flags = ", ".join(flags) + + raise exc.CompileError("Invalid MySQL match flags: %s" % flags) + + match_clause = binary.left + match_clause = self.process(match_clause, **kw) + against_clause = self.process(binary.right, **kw) + + if any(flag_combination): + flag_expressions = compress( + self._match_flag_expressions, + flag_combination, + ) + + against_clause = [against_clause] + against_clause.extend(flag_expressions) + + against_clause = " ".join(against_clause) + + return "MATCH (%s) AGAINST (%s)" % (match_clause, against_clause) + + def get_from_hint_text(self, table, text): + return text + + def visit_typeclause(self, typeclause, type_=None, **kw): + if type_ is None: + type_ = typeclause.type.dialect_impl(self.dialect) + if isinstance(type_, sqltypes.TypeDecorator): + return self.visit_typeclause(typeclause, type_.impl, **kw) + elif isinstance(type_, sqltypes.Integer): + if getattr(type_, "unsigned", False): + return "UNSIGNED INTEGER" + else: + return "SIGNED INTEGER" + elif isinstance(type_, sqltypes.TIMESTAMP): + return "DATETIME" + elif isinstance( + type_, + ( + sqltypes.DECIMAL, + sqltypes.DateTime, + 
sqltypes.Date, + sqltypes.Time, + ), + ): + return self.dialect.type_compiler_instance.process(type_) + elif isinstance(type_, sqltypes.String) and not isinstance( + type_, (ENUM, SET) + ): + adapted = CHAR._adapt_string_for_cast(type_) + return self.dialect.type_compiler_instance.process(adapted) + elif isinstance(type_, sqltypes._Binary): + return "BINARY" + elif isinstance(type_, sqltypes.JSON): + return "JSON" + elif isinstance(type_, sqltypes.NUMERIC): + return self.dialect.type_compiler_instance.process(type_).replace( + "NUMERIC", "DECIMAL" + ) + elif ( + isinstance(type_, sqltypes.Float) + and self.dialect._support_float_cast + ): + return self.dialect.type_compiler_instance.process(type_) + else: + return None + + def visit_cast(self, cast, **kw): + type_ = self.process(cast.typeclause) + if type_ is None: + util.warn( + "Datatype %s does not support CAST on MySQL/MariaDb; " + "the CAST will be skipped." + % self.dialect.type_compiler_instance.process( + cast.typeclause.type + ) + ) + return self.process(cast.clause.self_group(), **kw) + + return "CAST(%s AS %s)" % (self.process(cast.clause, **kw), type_) + + def render_literal_value(self, value, type_): + value = super().render_literal_value(value, type_) + if self.dialect._backslash_escapes: + value = value.replace("\\", "\\\\") + return value + + # override native_boolean=False behavior here, as + # MySQL still supports native boolean + def visit_true(self, element, **kw): + return "true" + + def visit_false(self, element, **kw): + return "false" + + def get_select_precolumns(self, select, **kw): + """Add special MySQL keywords in place of DISTINCT. + + .. deprecated:: 1.4 This usage is deprecated. + :meth:`_expression.Select.prefix_with` should be used for special + keywords at the start of a SELECT. + + """ + if isinstance(select._distinct, str): + util.warn_deprecated( + "Sending string values for 'distinct' is deprecated in the " + "MySQL dialect and will be removed in a future release. 
" + "Please use :meth:`.Select.prefix_with` for special keywords " + "at the start of a SELECT statement", + version="1.4", + ) + return select._distinct.upper() + " " + + return super().get_select_precolumns(select, **kw) + + def visit_join(self, join, asfrom=False, from_linter=None, **kwargs): + if from_linter: + from_linter.edges.add((join.left, join.right)) + + if join.full: + join_type = " FULL OUTER JOIN " + elif join.isouter: + join_type = " LEFT OUTER JOIN " + else: + join_type = " INNER JOIN " + + return "".join( + ( + self.process( + join.left, asfrom=True, from_linter=from_linter, **kwargs + ), + join_type, + self.process( + join.right, asfrom=True, from_linter=from_linter, **kwargs + ), + " ON ", + self.process(join.onclause, from_linter=from_linter, **kwargs), + ) + ) + + def for_update_clause(self, select, **kw): + if select._for_update_arg.read: + tmp = " LOCK IN SHARE MODE" + else: + tmp = " FOR UPDATE" + + if select._for_update_arg.of and self.dialect.supports_for_update_of: + tables = util.OrderedSet() + for c in select._for_update_arg.of: + tables.update(sql_util.surface_selectables_only(c)) + + tmp += " OF " + ", ".join( + self.process(table, ashint=True, use_schema=False, **kw) + for table in tables + ) + + if select._for_update_arg.nowait: + tmp += " NOWAIT" + + if select._for_update_arg.skip_locked: + tmp += " SKIP LOCKED" + + return tmp + + def limit_clause(self, select, **kw): + # MySQL supports: + # LIMIT + # LIMIT , + # and in server versions > 3.3: + # LIMIT OFFSET + # The latter is more readable for offsets but we're stuck with the + # former until we can refine dialects by server revision. + + limit_clause, offset_clause = ( + select._limit_clause, + select._offset_clause, + ) + + if limit_clause is None and offset_clause is None: + return "" + elif offset_clause is not None: + # As suggested by the MySQL docs, need to apply an + # artificial limit if one wasn't provided + # https://dev.mysql.com/doc/refman/5.0/en/select.html + if limit_clause is None: + # TODO: remove ?? + # hardwire the upper limit. Currently + # needed consistent with the usage of the upper + # bound as part of MySQL's "syntax" for OFFSET with + # no LIMIT. + return " \n LIMIT %s, %s" % ( + self.process(offset_clause, **kw), + "18446744073709551615", + ) + else: + return " \n LIMIT %s, %s" % ( + self.process(offset_clause, **kw), + self.process(limit_clause, **kw), + ) + else: + # No offset provided, so just use the limit + return " \n LIMIT %s" % (self.process(limit_clause, **kw),) + + def update_limit_clause(self, update_stmt): + limit = update_stmt.kwargs.get("%s_limit" % self.dialect.name, None) + if limit: + return "LIMIT %s" % limit + else: + return None + + def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): + kw["asfrom"] = True + return ", ".join( + t._compiler_dispatch(self, **kw) + for t in [from_table] + list(extra_froms) + ) + + def update_from_clause( + self, update_stmt, from_table, extra_froms, from_hints, **kw + ): + return None + + def delete_table_clause(self, delete_stmt, from_table, extra_froms, **kw): + """If we have extra froms make sure we render any alias as hint.""" + ashint = False + if extra_froms: + ashint = True + return from_table._compiler_dispatch( + self, asfrom=True, iscrud=True, ashint=ashint, **kw + ) + + def delete_extra_from_clause( + self, delete_stmt, from_table, extra_froms, from_hints, **kw + ): + """Render the DELETE .. 
USING clause specific to MySQL.""" + kw["asfrom"] = True + return "USING " + ", ".join( + t._compiler_dispatch(self, fromhints=from_hints, **kw) + for t in [from_table] + extra_froms + ) + + def visit_empty_set_expr(self, element_types, **kw): + return ( + "SELECT %(outer)s FROM (SELECT %(inner)s) " + "as _empty_set WHERE 1!=1" + % { + "inner": ", ".join( + "1 AS _in_%s" % idx + for idx, type_ in enumerate(element_types) + ), + "outer": ", ".join( + "_in_%s" % idx for idx, type_ in enumerate(element_types) + ), + } + ) + + def visit_is_distinct_from_binary(self, binary, operator, **kw): + return "NOT (%s <=> %s)" % ( + self.process(binary.left), + self.process(binary.right), + ) + + def visit_is_not_distinct_from_binary(self, binary, operator, **kw): + return "%s <=> %s" % ( + self.process(binary.left), + self.process(binary.right), + ) + + def _mariadb_regexp_flags(self, flags, pattern, **kw): + return "CONCAT('(?', %s, ')', %s)" % ( + self.render_literal_value(flags, sqltypes.STRINGTYPE), + self.process(pattern, **kw), + ) + + def _regexp_match(self, op_string, binary, operator, **kw): + flags = binary.modifiers["flags"] + if flags is None: + return self._generate_generic_binary(binary, op_string, **kw) + elif self.dialect.is_mariadb: + return "%s%s%s" % ( + self.process(binary.left, **kw), + op_string, + self._mariadb_regexp_flags(flags, binary.right), + ) + else: + text = "REGEXP_LIKE(%s, %s, %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + self.render_literal_value(flags, sqltypes.STRINGTYPE), + ) + if op_string == " NOT REGEXP ": + return "NOT %s" % text + else: + return text + + def visit_regexp_match_op_binary(self, binary, operator, **kw): + return self._regexp_match(" REGEXP ", binary, operator, **kw) + + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): + return self._regexp_match(" NOT REGEXP ", binary, operator, **kw) + + def visit_regexp_replace_op_binary(self, binary, operator, **kw): + flags = binary.modifiers["flags"] + if flags is None: + return "REGEXP_REPLACE(%s, %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + elif self.dialect.is_mariadb: + return "REGEXP_REPLACE(%s, %s, %s)" % ( + self.process(binary.left, **kw), + self._mariadb_regexp_flags(flags, binary.right.clauses[0]), + self.process(binary.right.clauses[1], **kw), + ) + else: + return "REGEXP_REPLACE(%s, %s, %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + self.render_literal_value(flags, sqltypes.STRINGTYPE), + ) + + +class MySQLDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kw): + """Builds column DDL.""" + if ( + self.dialect.is_mariadb is True + and column.computed is not None + and column._user_defined_nullable is SchemaConst.NULL_UNSPECIFIED + ): + column.nullable = True + colspec = [ + self.preparer.format_column(column), + self.dialect.type_compiler_instance.process( + column.type, type_expression=column + ), + ] + + if column.computed is not None: + colspec.append(self.process(column.computed)) + + is_timestamp = isinstance( + column.type._unwrapped_dialect_impl(self.dialect), + sqltypes.TIMESTAMP, + ) + + if not column.nullable: + colspec.append("NOT NULL") + + # see: https://docs.sqlalchemy.org/en/latest/dialects/mysql.html#mysql_timestamp_null # noqa + elif column.nullable and is_timestamp: + colspec.append("NULL") + + comment = column.comment + if comment is not None: + literal = self.sql_compiler.render_literal_value( + comment, 
sqltypes.String() + ) + colspec.append("COMMENT " + literal) + + if ( + column.table is not None + and column is column.table._autoincrement_column + and ( + column.server_default is None + or isinstance(column.server_default, sa_schema.Identity) + ) + and not ( + self.dialect.supports_sequences + and isinstance(column.default, sa_schema.Sequence) + and not column.default.optional + ) + ): + colspec.append("AUTO_INCREMENT") + else: + default = self.get_column_default_string(column) + if default is not None: + colspec.append("DEFAULT " + default) + return " ".join(colspec) + + def post_create_table(self, table): + """Build table-level CREATE options like ENGINE and COLLATE.""" + + table_opts = [] + + opts = { + k[len(self.dialect.name) + 1 :].upper(): v + for k, v in table.kwargs.items() + if k.startswith("%s_" % self.dialect.name) + } + + if table.comment is not None: + opts["COMMENT"] = table.comment + + partition_options = [ + "PARTITION_BY", + "PARTITIONS", + "SUBPARTITIONS", + "SUBPARTITION_BY", + ] + + nonpart_options = set(opts).difference(partition_options) + part_options = set(opts).intersection(partition_options) + + for opt in topological.sort( + [ + ("DEFAULT_CHARSET", "COLLATE"), + ("DEFAULT_CHARACTER_SET", "COLLATE"), + ("CHARSET", "COLLATE"), + ("CHARACTER_SET", "COLLATE"), + ], + nonpart_options, + ): + arg = opts[opt] + if opt in _reflection._options_of_type_string: + arg = self.sql_compiler.render_literal_value( + arg, sqltypes.String() + ) + + if opt in ( + "DATA_DIRECTORY", + "INDEX_DIRECTORY", + "DEFAULT_CHARACTER_SET", + "CHARACTER_SET", + "DEFAULT_CHARSET", + "DEFAULT_COLLATE", + ): + opt = opt.replace("_", " ") + + joiner = "=" + if opt in ( + "TABLESPACE", + "DEFAULT CHARACTER SET", + "CHARACTER SET", + "COLLATE", + ): + joiner = " " + + table_opts.append(joiner.join((opt, arg))) + + for opt in topological.sort( + [ + ("PARTITION_BY", "PARTITIONS"), + ("PARTITION_BY", "SUBPARTITION_BY"), + ("PARTITION_BY", "SUBPARTITIONS"), + ("PARTITIONS", "SUBPARTITIONS"), + ("PARTITIONS", "SUBPARTITION_BY"), + ("SUBPARTITION_BY", "SUBPARTITIONS"), + ], + part_options, + ): + arg = opts[opt] + if opt in _reflection._options_of_type_string: + arg = self.sql_compiler.render_literal_value( + arg, sqltypes.String() + ) + + opt = opt.replace("_", " ") + joiner = " " + + table_opts.append(joiner.join((opt, arg))) + + return " ".join(table_opts) + + def visit_create_index(self, create, **kw): + index = create.element + self._verify_index_table(index) + preparer = self.preparer + table = preparer.format_table(index.table) + + columns = [ + self.sql_compiler.process( + ( + elements.Grouping(expr) + if ( + isinstance(expr, elements.BinaryExpression) + or ( + isinstance(expr, elements.UnaryExpression) + and expr.modifier + not in (operators.desc_op, operators.asc_op) + ) + or isinstance(expr, functions.FunctionElement) + ) + else expr + ), + include_table=False, + literal_binds=True, + ) + for expr in index.expressions + ] + + name = self._prepared_index_name(index) + + text = "CREATE " + if index.unique: + text += "UNIQUE " + + index_prefix = index.kwargs.get("%s_prefix" % self.dialect.name, None) + if index_prefix: + text += index_prefix + " " + + text += "INDEX " + if create.if_not_exists: + text += "IF NOT EXISTS " + text += "%s ON %s " % (name, table) + + length = index.dialect_options[self.dialect.name]["length"] + if length is not None: + if isinstance(length, dict): + # length value can be a (column_name --> integer value) + # mapping specifying the prefix length for each column of 
the + # index + columns = ", ".join( + ( + "%s(%d)" % (expr, length[col.name]) + if col.name in length + else ( + "%s(%d)" % (expr, length[expr]) + if expr in length + else "%s" % expr + ) + ) + for col, expr in zip(index.expressions, columns) + ) + else: + # or can be an integer value specifying the same + # prefix length for all columns of the index + columns = ", ".join( + "%s(%d)" % (col, length) for col in columns + ) + else: + columns = ", ".join(columns) + text += "(%s)" % columns + + parser = index.dialect_options["mysql"]["with_parser"] + if parser is not None: + text += " WITH PARSER %s" % (parser,) + + using = index.dialect_options["mysql"]["using"] + if using is not None: + text += " USING %s" % (preparer.quote(using)) + + return text + + def visit_primary_key_constraint(self, constraint, **kw): + text = super().visit_primary_key_constraint(constraint) + using = constraint.dialect_options["mysql"]["using"] + if using: + text += " USING %s" % (self.preparer.quote(using)) + return text + + def visit_drop_index(self, drop, **kw): + index = drop.element + text = "\nDROP INDEX " + if drop.if_exists: + text += "IF EXISTS " + + return text + "%s ON %s" % ( + self._prepared_index_name(index, include_schema=False), + self.preparer.format_table(index.table), + ) + + def visit_drop_constraint(self, drop, **kw): + constraint = drop.element + if isinstance(constraint, sa_schema.ForeignKeyConstraint): + qual = "FOREIGN KEY " + const = self.preparer.format_constraint(constraint) + elif isinstance(constraint, sa_schema.PrimaryKeyConstraint): + qual = "PRIMARY KEY " + const = "" + elif isinstance(constraint, sa_schema.UniqueConstraint): + qual = "INDEX " + const = self.preparer.format_constraint(constraint) + elif isinstance(constraint, sa_schema.CheckConstraint): + if self.dialect.is_mariadb: + qual = "CONSTRAINT " + else: + qual = "CHECK " + const = self.preparer.format_constraint(constraint) + else: + qual = "" + const = self.preparer.format_constraint(constraint) + return "ALTER TABLE %s DROP %s%s" % ( + self.preparer.format_table(constraint.table), + qual, + const, + ) + + def define_constraint_match(self, constraint): + if constraint.match is not None: + raise exc.CompileError( + "MySQL ignores the 'MATCH' keyword while at the same time " + "causes ON UPDATE/ON DELETE clauses to be ignored." + ) + return "" + + def visit_set_table_comment(self, create, **kw): + return "ALTER TABLE %s COMMENT %s" % ( + self.preparer.format_table(create.element), + self.sql_compiler.render_literal_value( + create.element.comment, sqltypes.String() + ), + ) + + def visit_drop_table_comment(self, create, **kw): + return "ALTER TABLE %s COMMENT ''" % ( + self.preparer.format_table(create.element) + ) + + def visit_set_column_comment(self, create, **kw): + return "ALTER TABLE %s CHANGE %s %s" % ( + self.preparer.format_table(create.element.table), + self.preparer.format_column(create.element), + self.get_column_specification(create.element), + ) + + +class MySQLTypeCompiler(compiler.GenericTypeCompiler): + def _extend_numeric(self, type_, spec): + "Extend a numeric-type declaration with MySQL specific extensions." + + if not self._mysql_type(type_): + return spec + + if type_.unsigned: + spec += " UNSIGNED" + if type_.zerofill: + spec += " ZEROFILL" + return spec + + def _extend_string(self, type_, defaults, spec): + """Extend a string-type declaration with standard SQL CHARACTER SET / + COLLATE annotations and MySQL specific extensions. 
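+
+        E.g. ``VARCHAR(30)`` may be rendered as ``VARCHAR(30) CHARACTER SET
+        utf8mb4 COLLATE utf8mb4_bin`` (the charset and collation shown here
+        are illustrative, not defaults).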
+ + """ + + def attr(name): + return getattr(type_, name, defaults.get(name)) + + if attr("charset"): + charset = "CHARACTER SET %s" % attr("charset") + elif attr("ascii"): + charset = "ASCII" + elif attr("unicode"): + charset = "UNICODE" + else: + charset = None + + if attr("collation"): + collation = "COLLATE %s" % type_.collation + elif attr("binary"): + collation = "BINARY" + else: + collation = None + + if attr("national"): + # NATIONAL (aka NCHAR/NVARCHAR) trumps charsets. + return " ".join( + [c for c in ("NATIONAL", spec, collation) if c is not None] + ) + return " ".join( + [c for c in (spec, charset, collation) if c is not None] + ) + + def _mysql_type(self, type_): + return isinstance(type_, (_StringType, _NumericType)) + + def visit_NUMERIC(self, type_, **kw): + if type_.precision is None: + return self._extend_numeric(type_, "NUMERIC") + elif type_.scale is None: + return self._extend_numeric( + type_, + "NUMERIC(%(precision)s)" % {"precision": type_.precision}, + ) + else: + return self._extend_numeric( + type_, + "NUMERIC(%(precision)s, %(scale)s)" + % {"precision": type_.precision, "scale": type_.scale}, + ) + + def visit_DECIMAL(self, type_, **kw): + if type_.precision is None: + return self._extend_numeric(type_, "DECIMAL") + elif type_.scale is None: + return self._extend_numeric( + type_, + "DECIMAL(%(precision)s)" % {"precision": type_.precision}, + ) + else: + return self._extend_numeric( + type_, + "DECIMAL(%(precision)s, %(scale)s)" + % {"precision": type_.precision, "scale": type_.scale}, + ) + + def visit_DOUBLE(self, type_, **kw): + if type_.precision is not None and type_.scale is not None: + return self._extend_numeric( + type_, + "DOUBLE(%(precision)s, %(scale)s)" + % {"precision": type_.precision, "scale": type_.scale}, + ) + else: + return self._extend_numeric(type_, "DOUBLE") + + def visit_REAL(self, type_, **kw): + if type_.precision is not None and type_.scale is not None: + return self._extend_numeric( + type_, + "REAL(%(precision)s, %(scale)s)" + % {"precision": type_.precision, "scale": type_.scale}, + ) + else: + return self._extend_numeric(type_, "REAL") + + def visit_FLOAT(self, type_, **kw): + if ( + self._mysql_type(type_) + and type_.scale is not None + and type_.precision is not None + ): + return self._extend_numeric( + type_, "FLOAT(%s, %s)" % (type_.precision, type_.scale) + ) + elif type_.precision is not None: + return self._extend_numeric( + type_, "FLOAT(%s)" % (type_.precision,) + ) + else: + return self._extend_numeric(type_, "FLOAT") + + def visit_INTEGER(self, type_, **kw): + if self._mysql_type(type_) and type_.display_width is not None: + return self._extend_numeric( + type_, + "INTEGER(%(display_width)s)" + % {"display_width": type_.display_width}, + ) + else: + return self._extend_numeric(type_, "INTEGER") + + def visit_BIGINT(self, type_, **kw): + if self._mysql_type(type_) and type_.display_width is not None: + return self._extend_numeric( + type_, + "BIGINT(%(display_width)s)" + % {"display_width": type_.display_width}, + ) + else: + return self._extend_numeric(type_, "BIGINT") + + def visit_MEDIUMINT(self, type_, **kw): + if self._mysql_type(type_) and type_.display_width is not None: + return self._extend_numeric( + type_, + "MEDIUMINT(%(display_width)s)" + % {"display_width": type_.display_width}, + ) + else: + return self._extend_numeric(type_, "MEDIUMINT") + + def visit_TINYINT(self, type_, **kw): + if self._mysql_type(type_) and type_.display_width is not None: + return self._extend_numeric( + type_, "TINYINT(%s)" % 
type_.display_width + ) + else: + return self._extend_numeric(type_, "TINYINT") + + def visit_SMALLINT(self, type_, **kw): + if self._mysql_type(type_) and type_.display_width is not None: + return self._extend_numeric( + type_, + "SMALLINT(%(display_width)s)" + % {"display_width": type_.display_width}, + ) + else: + return self._extend_numeric(type_, "SMALLINT") + + def visit_BIT(self, type_, **kw): + if type_.length is not None: + return "BIT(%s)" % type_.length + else: + return "BIT" + + def visit_DATETIME(self, type_, **kw): + if getattr(type_, "fsp", None): + return "DATETIME(%d)" % type_.fsp + else: + return "DATETIME" + + def visit_DATE(self, type_, **kw): + return "DATE" + + def visit_TIME(self, type_, **kw): + if getattr(type_, "fsp", None): + return "TIME(%d)" % type_.fsp + else: + return "TIME" + + def visit_TIMESTAMP(self, type_, **kw): + if getattr(type_, "fsp", None): + return "TIMESTAMP(%d)" % type_.fsp + else: + return "TIMESTAMP" + + def visit_YEAR(self, type_, **kw): + if type_.display_width is None: + return "YEAR" + else: + return "YEAR(%s)" % type_.display_width + + def visit_TEXT(self, type_, **kw): + if type_.length is not None: + return self._extend_string(type_, {}, "TEXT(%d)" % type_.length) + else: + return self._extend_string(type_, {}, "TEXT") + + def visit_TINYTEXT(self, type_, **kw): + return self._extend_string(type_, {}, "TINYTEXT") + + def visit_MEDIUMTEXT(self, type_, **kw): + return self._extend_string(type_, {}, "MEDIUMTEXT") + + def visit_LONGTEXT(self, type_, **kw): + return self._extend_string(type_, {}, "LONGTEXT") + + def visit_VARCHAR(self, type_, **kw): + if type_.length is not None: + return self._extend_string(type_, {}, "VARCHAR(%d)" % type_.length) + else: + raise exc.CompileError( + "VARCHAR requires a length on dialect %s" % self.dialect.name + ) + + def visit_CHAR(self, type_, **kw): + if type_.length is not None: + return self._extend_string( + type_, {}, "CHAR(%(length)s)" % {"length": type_.length} + ) + else: + return self._extend_string(type_, {}, "CHAR") + + def visit_NVARCHAR(self, type_, **kw): + # We'll actually generate the equiv. "NATIONAL VARCHAR" instead + # of "NVARCHAR". + if type_.length is not None: + return self._extend_string( + type_, + {"national": True}, + "VARCHAR(%(length)s)" % {"length": type_.length}, + ) + else: + raise exc.CompileError( + "NVARCHAR requires a length on dialect %s" % self.dialect.name + ) + + def visit_NCHAR(self, type_, **kw): + # We'll actually generate the equiv. + # "NATIONAL CHAR" instead of "NCHAR". 
+ if type_.length is not None: + return self._extend_string( + type_, + {"national": True}, + "CHAR(%(length)s)" % {"length": type_.length}, + ) + else: + return self._extend_string(type_, {"national": True}, "CHAR") + + def visit_UUID(self, type_, **kw): + return "UUID" + + def visit_VARBINARY(self, type_, **kw): + return "VARBINARY(%d)" % type_.length + + def visit_JSON(self, type_, **kw): + return "JSON" + + def visit_large_binary(self, type_, **kw): + return self.visit_BLOB(type_) + + def visit_enum(self, type_, **kw): + if not type_.native_enum: + return super().visit_enum(type_) + else: + return self._visit_enumerated_values("ENUM", type_, type_.enums) + + def visit_BLOB(self, type_, **kw): + if type_.length is not None: + return "BLOB(%d)" % type_.length + else: + return "BLOB" + + def visit_TINYBLOB(self, type_, **kw): + return "TINYBLOB" + + def visit_MEDIUMBLOB(self, type_, **kw): + return "MEDIUMBLOB" + + def visit_LONGBLOB(self, type_, **kw): + return "LONGBLOB" + + def _visit_enumerated_values(self, name, type_, enumerated_values): + quoted_enums = [] + for e in enumerated_values: + if self.dialect.identifier_preparer._double_percents: + e = e.replace("%", "%%") + quoted_enums.append("'%s'" % e.replace("'", "''")) + return self._extend_string( + type_, {}, "%s(%s)" % (name, ",".join(quoted_enums)) + ) + + def visit_ENUM(self, type_, **kw): + return self._visit_enumerated_values("ENUM", type_, type_.enums) + + def visit_SET(self, type_, **kw): + return self._visit_enumerated_values("SET", type_, type_.values) + + def visit_BOOLEAN(self, type_, **kw): + return "BOOL" + + +class MySQLIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words = RESERVED_WORDS_MYSQL + + def __init__(self, dialect, server_ansiquotes=False, **kw): + if not server_ansiquotes: + quote = "`" + else: + quote = '"' + + super().__init__(dialect, initial_quote=quote, escape_quote=quote) + + def _quote_free_identifiers(self, *ids): + """Unilaterally identifier-quote any number of strings.""" + + return tuple([self.quote_identifier(i) for i in ids if i is not None]) + + +class MariaDBIdentifierPreparer(MySQLIdentifierPreparer): + reserved_words = RESERVED_WORDS_MARIADB + + +@log.class_logger +class MySQLDialect(default.DefaultDialect): + """Details of the MySQL dialect. + Not used directly in application code. + """ + + name = "mysql" + supports_statement_cache = True + + supports_alter = True + + # MySQL has no true "boolean" type; we + # allow for the "true" and "false" keywords, however + supports_native_boolean = False + + # identifiers are 64, however aliases can be 255... + max_identifier_length = 255 + max_index_name_length = 64 + max_constraint_name_length = 64 + + div_is_floordiv = False + + supports_native_enum = True + + returns_native_bytes = True + + supports_sequences = False # default for MySQL ... + # ... may be updated to True for MariaDB 10.3+ in initialize() + + sequences_optional = False + + supports_for_update_of = False # default for MySQL ... + # ... may be updated to True for MySQL 8+ in initialize() + + _requires_alias_for_on_duplicate_key = False # Only available ... + # ... 
in MySQL 8+ + + # MySQL doesn't support "DEFAULT VALUES" but *does* support + # "VALUES (DEFAULT)" + supports_default_values = False + supports_default_metavalue = True + + use_insertmanyvalues: bool = True + insertmanyvalues_implicit_sentinel = ( + InsertmanyvaluesSentinelOpts.ANY_AUTOINCREMENT + ) + + supports_sane_rowcount = True + supports_sane_multi_rowcount = False + supports_multivalues_insert = True + insert_null_pk_still_autoincrements = True + + supports_comments = True + inline_comments = True + default_paramstyle = "format" + colspecs = colspecs + + cte_follows_insert = True + + statement_compiler = MySQLCompiler + ddl_compiler = MySQLDDLCompiler + type_compiler_cls = MySQLTypeCompiler + ischema_names = ischema_names + preparer = MySQLIdentifierPreparer + + is_mariadb = False + _mariadb_normalized_version_info = None + + # default SQL compilation settings - + # these are modified upon initialize(), + # i.e. first connect + _backslash_escapes = True + _server_ansiquotes = False + + construct_arguments = [ + (sa_schema.Table, {"*": None}), + (sql.Update, {"limit": None}), + (sa_schema.PrimaryKeyConstraint, {"using": None}), + ( + sa_schema.Index, + { + "using": None, + "length": None, + "prefix": None, + "with_parser": None, + }, + ), + ] + + def __init__( + self, + json_serializer=None, + json_deserializer=None, + is_mariadb=None, + **kwargs, + ): + kwargs.pop("use_ansiquotes", None) # legacy + default.DefaultDialect.__init__(self, **kwargs) + self._json_serializer = json_serializer + self._json_deserializer = json_deserializer + self._set_mariadb(is_mariadb, None) + + def get_isolation_level_values(self, dbapi_conn): + return ( + "SERIALIZABLE", + "READ UNCOMMITTED", + "READ COMMITTED", + "REPEATABLE READ", + ) + + def set_isolation_level(self, dbapi_connection, level): + cursor = dbapi_connection.cursor() + cursor.execute(f"SET SESSION TRANSACTION ISOLATION LEVEL {level}") + cursor.execute("COMMIT") + cursor.close() + + def get_isolation_level(self, dbapi_connection): + cursor = dbapi_connection.cursor() + if self._is_mysql and self.server_version_info >= (5, 7, 20): + cursor.execute("SELECT @@transaction_isolation") + else: + cursor.execute("SELECT @@tx_isolation") + row = cursor.fetchone() + if row is None: + util.warn( + "Could not retrieve transaction isolation level for MySQL " + "connection." 
+ ) + raise NotImplementedError() + val = row[0] + cursor.close() + if isinstance(val, bytes): + val = val.decode() + return val.upper().replace("-", " ") + + @classmethod + def _is_mariadb_from_url(cls, url): + dbapi = cls.import_dbapi() + dialect = cls(dbapi=dbapi) + + cargs, cparams = dialect.create_connect_args(url) + conn = dialect.connect(*cargs, **cparams) + try: + cursor = conn.cursor() + cursor.execute("SELECT VERSION() LIKE '%MariaDB%'") + val = cursor.fetchone()[0] + except: + raise + else: + return bool(val) + finally: + conn.close() + + def _get_server_version_info(self, connection): + # get database server version info explicitly over the wire + # to avoid proxy servers like MaxScale getting in the + # way with their own values, see #4205 + dbapi_con = connection.connection + cursor = dbapi_con.cursor() + cursor.execute("SELECT VERSION()") + val = cursor.fetchone()[0] + cursor.close() + if isinstance(val, bytes): + val = val.decode() + + return self._parse_server_version(val) + + def _parse_server_version(self, val): + version = [] + is_mariadb = False + + r = re.compile(r"[.\-+]") + tokens = r.split(val) + for token in tokens: + parsed_token = re.match( + r"^(?:(\d+)(?:a|b|c)?|(MariaDB\w*))$", token + ) + if not parsed_token: + continue + elif parsed_token.group(2): + self._mariadb_normalized_version_info = tuple(version[-3:]) + is_mariadb = True + else: + digit = int(parsed_token.group(1)) + version.append(digit) + + server_version_info = tuple(version) + + self._set_mariadb( + server_version_info and is_mariadb, server_version_info + ) + + if not is_mariadb: + self._mariadb_normalized_version_info = server_version_info + + if server_version_info < (5, 0, 2): + raise NotImplementedError( + "the MySQL/MariaDB dialect supports server " + "version info 5.0.2 and above." + ) + + # setting it here to help w the test suite + self.server_version_info = server_version_info + return server_version_info + + def _set_mariadb(self, is_mariadb, server_version_info): + if is_mariadb is None: + return + + if not is_mariadb and self.is_mariadb: + raise exc.InvalidRequestError( + "MySQL version %s is not a MariaDB variant." 
+ % (".".join(map(str, server_version_info)),) + ) + if is_mariadb: + self.preparer = MariaDBIdentifierPreparer + # this would have been set by the default dialect already, + # so set it again + self.identifier_preparer = self.preparer(self) + + # this will be updated on first connect in initialize() + # if using older mariadb version + self.delete_returning = True + self.insert_returning = True + + self.is_mariadb = is_mariadb + + def do_begin_twophase(self, connection, xid): + connection.execute(sql.text("XA BEGIN :xid"), dict(xid=xid)) + + def do_prepare_twophase(self, connection, xid): + connection.execute(sql.text("XA END :xid"), dict(xid=xid)) + connection.execute(sql.text("XA PREPARE :xid"), dict(xid=xid)) + + def do_rollback_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + if not is_prepared: + connection.execute(sql.text("XA END :xid"), dict(xid=xid)) + connection.execute(sql.text("XA ROLLBACK :xid"), dict(xid=xid)) + + def do_commit_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + if not is_prepared: + self.do_prepare_twophase(connection, xid) + connection.execute(sql.text("XA COMMIT :xid"), dict(xid=xid)) + + def do_recover_twophase(self, connection): + resultset = connection.exec_driver_sql("XA RECOVER") + return [row["data"][0 : row["gtrid_length"]] for row in resultset] + + def is_disconnect(self, e, connection, cursor): + if isinstance( + e, + ( + self.dbapi.OperationalError, + self.dbapi.ProgrammingError, + self.dbapi.InterfaceError, + ), + ) and self._extract_error_code(e) in ( + 1927, + 2006, + 2013, + 2014, + 2045, + 2055, + 4031, + ): + return True + elif isinstance( + e, (self.dbapi.InterfaceError, self.dbapi.InternalError) + ): + # if underlying connection is closed, + # this is the error you get + return "(0, '')" in str(e) + else: + return False + + def _compat_fetchall(self, rp, charset=None): + """Proxy result rows to smooth over MySQL-Python driver + inconsistencies.""" + + return [_DecodingRow(row, charset) for row in rp.fetchall()] + + def _compat_fetchone(self, rp, charset=None): + """Proxy a result row to smooth over MySQL-Python driver + inconsistencies.""" + + row = rp.fetchone() + if row: + return _DecodingRow(row, charset) + else: + return None + + def _compat_first(self, rp, charset=None): + """Proxy a result row to smooth over MySQL-Python driver + inconsistencies.""" + + row = rp.first() + if row: + return _DecodingRow(row, charset) + else: + return None + + def _extract_error_code(self, exception): + raise NotImplementedError() + + def _get_default_schema_name(self, connection): + return connection.exec_driver_sql("SELECT DATABASE()").scalar() + + @reflection.cache + def has_table(self, connection, table_name, schema=None, **kw): + self._ensure_has_table_connection(connection) + + if schema is None: + schema = self.default_schema_name + + assert schema is not None + + full_name = ".".join( + self.identifier_preparer._quote_free_identifiers( + schema, table_name + ) + ) + + # DESCRIBE *must* be used because there is no information schema + # table that returns information on temp tables that is consistently + # available on MariaDB / MySQL / engine-agnostic etc. + # therefore we have no choice but to use DESCRIBE and an error catch + # to detect "False". 
See issue #9058 + + try: + with connection.exec_driver_sql( + f"DESCRIBE {full_name}", + execution_options={"skip_user_error_events": True}, + ) as rs: + return rs.fetchone() is not None + except exc.DBAPIError as e: + # https://dev.mysql.com/doc/mysql-errors/8.0/en/server-error-reference.html # noqa: E501 + # there are a lot of codes that *may* pop up here at some point + # but we continue to be fairly conservative. We include: + # 1146: Table '%s.%s' doesn't exist - what every MySQL has emitted + # for decades + # + # mysql 8 suddenly started emitting: + # 1049: Unknown database '%s' - for nonexistent schema + # + # also added: + # 1051: Unknown table '%s' - not known to emit + # + # there's more "doesn't exist" kinds of messages but they are + # less clear if mysql 8 would suddenly start using one of those + if self._extract_error_code(e.orig) in (1146, 1049, 1051): + return False + raise + + @reflection.cache + def has_sequence(self, connection, sequence_name, schema=None, **kw): + if not self.supports_sequences: + self._sequences_not_supported() + if not schema: + schema = self.default_schema_name + # MariaDB implements sequences as a special type of table + # + cursor = connection.execute( + sql.text( + "SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES " + "WHERE TABLE_TYPE='SEQUENCE' and TABLE_NAME=:name AND " + "TABLE_SCHEMA=:schema_name" + ), + dict( + name=str(sequence_name), + schema_name=str(schema), + ), + ) + return cursor.first() is not None + + def _sequences_not_supported(self): + raise NotImplementedError( + "Sequences are supported only by the " + "MariaDB series 10.3 or greater" + ) + + @reflection.cache + def get_sequence_names(self, connection, schema=None, **kw): + if not self.supports_sequences: + self._sequences_not_supported() + if not schema: + schema = self.default_schema_name + # MariaDB implements sequences as a special type of table + cursor = connection.execute( + sql.text( + "SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES " + "WHERE TABLE_TYPE='SEQUENCE' and TABLE_SCHEMA=:schema_name" + ), + dict(schema_name=schema), + ) + return [ + row[0] + for row in self._compat_fetchall( + cursor, charset=self._connection_charset + ) + ] + + def initialize(self, connection): + # this is driver-based, does not need server version info + # and is fairly critical for even basic SQL operations + self._connection_charset = self._detect_charset(connection) + + # call super().initialize() because we need to have + # server_version_info set up. 
in 1.4 under python 2 only this does the + # "check unicode returns" thing, which is the one area that some + # SQL gets compiled within initialize() currently + default.DefaultDialect.initialize(self, connection) + + self._detect_sql_mode(connection) + self._detect_ansiquotes(connection) # depends on sql mode + self._detect_casing(connection) + if self._server_ansiquotes: + # if ansiquotes == True, build a new IdentifierPreparer + # with the new setting + self.identifier_preparer = self.preparer( + self, server_ansiquotes=self._server_ansiquotes + ) + + self.supports_sequences = ( + self.is_mariadb and self.server_version_info >= (10, 3) + ) + + self.supports_for_update_of = ( + self._is_mysql and self.server_version_info >= (8,) + ) + + self._needs_correct_for_88718_96365 = ( + not self.is_mariadb and self.server_version_info >= (8,) + ) + + self.delete_returning = ( + self.is_mariadb and self.server_version_info >= (10, 0, 5) + ) + + self.insert_returning = ( + self.is_mariadb and self.server_version_info >= (10, 5) + ) + + self._requires_alias_for_on_duplicate_key = ( + self._is_mysql and self.server_version_info >= (8, 0, 20) + ) + + self._warn_for_known_db_issues() + + def _warn_for_known_db_issues(self): + if self.is_mariadb: + mdb_version = self._mariadb_normalized_version_info + if mdb_version > (10, 2) and mdb_version < (10, 2, 9): + util.warn( + "MariaDB %r before 10.2.9 has known issues regarding " + "CHECK constraints, which impact handling of NULL values " + "with SQLAlchemy's boolean datatype (MDEV-13596). An " + "additional issue prevents proper migrations of columns " + "with CHECK constraints (MDEV-11114). Please upgrade to " + "MariaDB 10.2.9 or greater, or use the MariaDB 10.1 " + "series, to avoid these issues." % (mdb_version,) + ) + + @property + def _support_float_cast(self): + if not self.server_version_info: + return False + elif self.is_mariadb: + # ref https://mariadb.com/kb/en/mariadb-1045-release-notes/ + return self.server_version_info >= (10, 4, 5) + else: + # ref https://dev.mysql.com/doc/relnotes/mysql/8.0/en/news-8-0-17.html#mysqld-8-0-17-feature # noqa + return self.server_version_info >= (8, 0, 17) + + @property + def _is_mariadb(self): + return self.is_mariadb + + @property + def _is_mysql(self): + return not self.is_mariadb + + @property + def _is_mariadb_102(self): + return self.is_mariadb and self._mariadb_normalized_version_info > ( + 10, + 2, + ) + + @reflection.cache + def get_schema_names(self, connection, **kw): + rp = connection.exec_driver_sql("SHOW schemas") + return [r[0] for r in rp] + + @reflection.cache + def get_table_names(self, connection, schema=None, **kw): + """Return a Unicode SHOW TABLES from a given schema.""" + if schema is not None: + current_schema = schema + else: + current_schema = self.default_schema_name + + charset = self._connection_charset + + rp = connection.exec_driver_sql( + "SHOW FULL TABLES FROM %s" + % self.identifier_preparer.quote_identifier(current_schema) + ) + + return [ + row[0] + for row in self._compat_fetchall(rp, charset=charset) + if row[1] == "BASE TABLE" + ] + + @reflection.cache + def get_view_names(self, connection, schema=None, **kw): + if schema is None: + schema = self.default_schema_name + charset = self._connection_charset + rp = connection.exec_driver_sql( + "SHOW FULL TABLES FROM %s" + % self.identifier_preparer.quote_identifier(schema) + ) + return [ + row[0] + for row in self._compat_fetchall(rp, charset=charset) + if row[1] in ("VIEW", "SYSTEM VIEW") + ] + + @reflection.cache + def 
get_table_options(self, connection, table_name, schema=None, **kw): + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw + ) + if parsed_state.table_options: + return parsed_state.table_options + else: + return ReflectionDefaults.table_options() + + @reflection.cache + def get_columns(self, connection, table_name, schema=None, **kw): + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw + ) + if parsed_state.columns: + return parsed_state.columns + else: + return ReflectionDefaults.columns() + + @reflection.cache + def get_pk_constraint(self, connection, table_name, schema=None, **kw): + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw + ) + for key in parsed_state.keys: + if key["type"] == "PRIMARY": + # There can be only one. + cols = [s[0] for s in key["columns"]] + return {"constrained_columns": cols, "name": None} + return ReflectionDefaults.pk_constraint() + + @reflection.cache + def get_foreign_keys(self, connection, table_name, schema=None, **kw): + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw + ) + default_schema = None + + fkeys = [] + + for spec in parsed_state.fk_constraints: + ref_name = spec["table"][-1] + ref_schema = len(spec["table"]) > 1 and spec["table"][-2] or schema + + if not ref_schema: + if default_schema is None: + default_schema = connection.dialect.default_schema_name + if schema == default_schema: + ref_schema = schema + + loc_names = spec["local"] + ref_names = spec["foreign"] + + con_kw = {} + for opt in ("onupdate", "ondelete"): + if spec.get(opt, False) not in ("NO ACTION", None): + con_kw[opt] = spec[opt] + + fkey_d = { + "name": spec["name"], + "constrained_columns": loc_names, + "referred_schema": ref_schema, + "referred_table": ref_name, + "referred_columns": ref_names, + "options": con_kw, + } + fkeys.append(fkey_d) + + if self._needs_correct_for_88718_96365: + self._correct_for_mysql_bugs_88718_96365(fkeys, connection) + + return fkeys if fkeys else ReflectionDefaults.foreign_keys() + + def _correct_for_mysql_bugs_88718_96365(self, fkeys, connection): + # Foreign key is always in lower case (MySQL 8.0) + # https://bugs.mysql.com/bug.php?id=88718 + # issue #4344 for SQLAlchemy + + # table name also for MySQL 8.0 + # https://bugs.mysql.com/bug.php?id=96365 + # issue #4751 for SQLAlchemy + + # for lower_case_table_names=2, information_schema.columns + # preserves the original table/schema casing, but SHOW CREATE + # TABLE does not. this problem is not in lower_case_table_names=1, + # but use case-insensitive matching for these two modes in any case. + + if self._casing in (1, 2): + + def lower(s): + return s.lower() + + else: + # if on case sensitive, there can be two tables referenced + # with the same name different casing, so we need to use + # case-sensitive matching. 
+ def lower(s): + return s + + default_schema_name = connection.dialect.default_schema_name + col_tuples = [ + ( + lower(rec["referred_schema"] or default_schema_name), + lower(rec["referred_table"]), + col_name, + ) + for rec in fkeys + for col_name in rec["referred_columns"] + ] + + if col_tuples: + correct_for_wrong_fk_case = connection.execute( + sql.text( + """ + select table_schema, table_name, column_name + from information_schema.columns + where (table_schema, table_name, lower(column_name)) in + :table_data; + """ + ).bindparams(sql.bindparam("table_data", expanding=True)), + dict(table_data=col_tuples), + ) + + # in casing=0, table name and schema name come back in their + # exact case. + # in casing=1, table name and schema name come back in lower + # case. + # in casing=2, table name and schema name come back from the + # information_schema.columns view in the case + # that was used in CREATE DATABASE and CREATE TABLE, but + # SHOW CREATE TABLE converts them to *lower case*, therefore + # not matching. So for this case, case-insensitive lookup + # is necessary + d = defaultdict(dict) + for schema, tname, cname in correct_for_wrong_fk_case: + d[(lower(schema), lower(tname))]["SCHEMANAME"] = schema + d[(lower(schema), lower(tname))]["TABLENAME"] = tname + d[(lower(schema), lower(tname))][cname.lower()] = cname + + for fkey in fkeys: + rec = d[ + ( + lower(fkey["referred_schema"] or default_schema_name), + lower(fkey["referred_table"]), + ) + ] + + fkey["referred_table"] = rec["TABLENAME"] + if fkey["referred_schema"] is not None: + fkey["referred_schema"] = rec["SCHEMANAME"] + + fkey["referred_columns"] = [ + rec[col.lower()] for col in fkey["referred_columns"] + ] + + @reflection.cache + def get_check_constraints(self, connection, table_name, schema=None, **kw): + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw + ) + + cks = [ + {"name": spec["name"], "sqltext": spec["sqltext"]} + for spec in parsed_state.ck_constraints + ] + cks.sort(key=lambda d: d["name"] or "~") # sort None as last + return cks if cks else ReflectionDefaults.check_constraints() + + @reflection.cache + def get_table_comment(self, connection, table_name, schema=None, **kw): + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw + ) + comment = parsed_state.table_options.get(f"{self.name}_comment", None) + if comment is not None: + return {"text": comment} + else: + return ReflectionDefaults.table_comment() + + @reflection.cache + def get_indexes(self, connection, table_name, schema=None, **kw): + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw + ) + + indexes = [] + + for spec in parsed_state.keys: + dialect_options = {} + unique = False + flavor = spec["type"] + if flavor == "PRIMARY": + continue + if flavor == "UNIQUE": + unique = True + elif flavor in ("FULLTEXT", "SPATIAL"): + dialect_options["%s_prefix" % self.name] = flavor + elif flavor is None: + pass + else: + self.logger.info( + "Converting unknown KEY type %s to a plain KEY", flavor + ) + pass + + if spec["parser"]: + dialect_options["%s_with_parser" % (self.name)] = spec[ + "parser" + ] + + index_d = {} + + index_d["name"] = spec["name"] + index_d["column_names"] = [s[0] for s in spec["columns"]] + mysql_length = { + s[0]: s[1] for s in spec["columns"] if s[1] is not None + } + if mysql_length: + dialect_options["%s_length" % self.name] = mysql_length + + index_d["unique"] = unique + if flavor: + index_d["type"] = flavor + + if dialect_options: + 
index_d["dialect_options"] = dialect_options + + indexes.append(index_d) + indexes.sort(key=lambda d: d["name"] or "~") # sort None as last + return indexes if indexes else ReflectionDefaults.indexes() + + @reflection.cache + def get_unique_constraints( + self, connection, table_name, schema=None, **kw + ): + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw + ) + + ucs = [ + { + "name": key["name"], + "column_names": [col[0] for col in key["columns"]], + "duplicates_index": key["name"], + } + for key in parsed_state.keys + if key["type"] == "UNIQUE" + ] + ucs.sort(key=lambda d: d["name"] or "~") # sort None as last + if ucs: + return ucs + else: + return ReflectionDefaults.unique_constraints() + + @reflection.cache + def get_view_definition(self, connection, view_name, schema=None, **kw): + charset = self._connection_charset + full_name = ".".join( + self.identifier_preparer._quote_free_identifiers(schema, view_name) + ) + sql = self._show_create_table( + connection, None, charset, full_name=full_name + ) + if sql.upper().startswith("CREATE TABLE"): + # it's a table, not a view + raise exc.NoSuchTableError(full_name) + return sql + + def _parsed_state_or_create( + self, connection, table_name, schema=None, **kw + ): + return self._setup_parser( + connection, + table_name, + schema, + info_cache=kw.get("info_cache", None), + ) + + @util.memoized_property + def _tabledef_parser(self): + """return the MySQLTableDefinitionParser, generate if needed. + + The deferred creation ensures that the dialect has + retrieved server version information first. + + """ + preparer = self.identifier_preparer + return _reflection.MySQLTableDefinitionParser(self, preparer) + + @reflection.cache + def _setup_parser(self, connection, table_name, schema=None, **kw): + charset = self._connection_charset + parser = self._tabledef_parser + full_name = ".".join( + self.identifier_preparer._quote_free_identifiers( + schema, table_name + ) + ) + sql = self._show_create_table( + connection, None, charset, full_name=full_name + ) + if parser._check_view(sql): + # Adapt views to something table-like. + columns = self._describe_table( + connection, None, charset, full_name=full_name + ) + sql = parser._describe_to_create(table_name, columns) + return parser.parse(sql, charset) + + def _fetch_setting(self, connection, setting_name): + charset = self._connection_charset + + if self.server_version_info and self.server_version_info < (5, 6): + sql = "SHOW VARIABLES LIKE '%s'" % setting_name + fetch_col = 1 + else: + sql = "SELECT @@%s" % setting_name + fetch_col = 0 + + show_var = connection.exec_driver_sql(sql) + row = self._compat_first(show_var, charset=charset) + if not row: + return None + else: + return row[fetch_col] + + def _detect_charset(self, connection): + raise NotImplementedError() + + def _detect_casing(self, connection): + """Sniff out identifier case sensitivity. + + Cached per-connection. This value can not change without a server + restart. + + """ + # https://dev.mysql.com/doc/refman/en/identifier-case-sensitivity.html + + setting = self._fetch_setting(connection, "lower_case_table_names") + if setting is None: + cs = 0 + else: + # 4.0.15 returns OFF or ON according to [ticket:489] + # 3.23 doesn't, 4.0.27 doesn't.. + if setting == "OFF": + cs = 0 + elif setting == "ON": + cs = 1 + else: + cs = int(setting) + self._casing = cs + return cs + + def _detect_collations(self, connection): + """Pull the active COLLATIONS list from the server. + + Cached per-connection. 
+ """ + + collations = {} + charset = self._connection_charset + rs = connection.exec_driver_sql("SHOW COLLATION") + for row in self._compat_fetchall(rs, charset): + collations[row[0]] = row[1] + return collations + + def _detect_sql_mode(self, connection): + setting = self._fetch_setting(connection, "sql_mode") + + if setting is None: + util.warn( + "Could not retrieve SQL_MODE; please ensure the " + "MySQL user has permissions to SHOW VARIABLES" + ) + self._sql_mode = "" + else: + self._sql_mode = setting or "" + + def _detect_ansiquotes(self, connection): + """Detect and adjust for the ANSI_QUOTES sql mode.""" + + mode = self._sql_mode + if not mode: + mode = "" + elif mode.isdigit(): + mode_no = int(mode) + mode = (mode_no | 4 == mode_no) and "ANSI_QUOTES" or "" + + self._server_ansiquotes = "ANSI_QUOTES" in mode + + # as of MySQL 5.0.1 + self._backslash_escapes = "NO_BACKSLASH_ESCAPES" not in mode + + def _show_create_table( + self, connection, table, charset=None, full_name=None + ): + """Run SHOW CREATE TABLE for a ``Table``.""" + + if full_name is None: + full_name = self.identifier_preparer.format_table(table) + st = "SHOW CREATE TABLE %s" % full_name + + rp = None + try: + rp = connection.execution_options( + skip_user_error_events=True + ).exec_driver_sql(st) + except exc.DBAPIError as e: + if self._extract_error_code(e.orig) == 1146: + raise exc.NoSuchTableError(full_name) from e + else: + raise + row = self._compat_first(rp, charset=charset) + if not row: + raise exc.NoSuchTableError(full_name) + return row[1].strip() + + def _describe_table(self, connection, table, charset=None, full_name=None): + """Run DESCRIBE for a ``Table`` and return processed rows.""" + + if full_name is None: + full_name = self.identifier_preparer.format_table(table) + st = "DESCRIBE %s" % full_name + + rp, rows = None, None + try: + try: + rp = connection.execution_options( + skip_user_error_events=True + ).exec_driver_sql(st) + except exc.DBAPIError as e: + code = self._extract_error_code(e.orig) + if code == 1146: + raise exc.NoSuchTableError(full_name) from e + + elif code == 1356: + raise exc.UnreflectableTableError( + "Table or view named %s could not be " + "reflected: %s" % (full_name, e) + ) from e + + else: + raise + rows = self._compat_fetchall(rp, charset=charset) + finally: + if rp: + rp.close() + return rows + + +class _DecodingRow: + """Return unicode-decoded values based on type inspection. + + Smooth over data type issues (esp. with alpha driver versions) and + normalize strings as Unicode regardless of user-configured driver + encoding settings. + + """ + + # Some MySQL-python versions can return some columns as + # sets.Set(['value']) (seriously) but thankfully that doesn't + # seem to come up in DDL queries. 
+
+    _encoding_compat = {
+        "koi8r": "koi8_r",
+        "koi8u": "koi8_u",
+        "utf16": "utf-16-be",  # MySQL's utf16 is always big-endian
+        "utf8mb4": "utf8",  # real utf8
+        "utf8mb3": "utf8",  # real utf8; saw this happen on CI but I cannot
+        # reproduce, possibly mariadb10.6 related
+        "eucjpms": "ujis",
+    }
+
+    def __init__(self, rowproxy, charset):
+        self.rowproxy = rowproxy
+        self.charset = self._encoding_compat.get(charset, charset)
+
+    def __getitem__(self, index):
+        item = self.rowproxy[index]
+        if isinstance(item, _array):
+            item = item.tostring()
+
+        if self.charset and isinstance(item, bytes):
+            return item.decode(self.charset)
+        else:
+            return item
+
+    def __getattr__(self, attr):
+        item = getattr(self.rowproxy, attr)
+        if isinstance(item, _array):
+            item = item.tostring()
+        if self.charset and isinstance(item, bytes):
+            return item.decode(self.charset)
+        else:
+            return item
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/cymysql.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/cymysql.py
new file mode 100644
index 00000000..f199aa4e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/cymysql.py
@@ -0,0 +1,84 @@
+# dialects/mysql/cymysql.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+r"""
+
+.. dialect:: mysql+cymysql
+    :name: CyMySQL
+    :dbapi: cymysql
+    :connectstring: mysql+cymysql://<username>:<password>@<host>/<dbname>[?<options>]
+    :url: https://github.com/nakagami/CyMySQL
+
+.. note::
+
+    The CyMySQL dialect is **not tested as part of SQLAlchemy's continuous
+    integration** and may have unresolved issues. The recommended MySQL
+    dialects are mysqlclient and PyMySQL.
+
+"""  # noqa
+
+from .base import BIT
+from .base import MySQLDialect
+from .mysqldb import MySQLDialect_mysqldb
+from ...
import util + + +class _cymysqlBIT(BIT): + def result_processor(self, dialect, coltype): + """Convert MySQL's 64 bit, variable length binary string to a long.""" + + def process(value): + if value is not None: + v = 0 + for i in iter(value): + v = v << 8 | i + return v + return value + + return process + + +class MySQLDialect_cymysql(MySQLDialect_mysqldb): + driver = "cymysql" + supports_statement_cache = True + + description_encoding = None + supports_sane_rowcount = True + supports_sane_multi_rowcount = False + supports_unicode_statements = True + + colspecs = util.update_copy(MySQLDialect.colspecs, {BIT: _cymysqlBIT}) + + @classmethod + def import_dbapi(cls): + return __import__("cymysql") + + def _detect_charset(self, connection): + return connection.connection.charset + + def _extract_error_code(self, exception): + return exception.errno + + def is_disconnect(self, e, connection, cursor): + if isinstance(e, self.dbapi.OperationalError): + return self._extract_error_code(e) in ( + 2006, + 2013, + 2014, + 2045, + 2055, + ) + elif isinstance(e, self.dbapi.InterfaceError): + # if underlying connection is closed, + # this is the error you get + return True + else: + return False + + +dialect = MySQLDialect_cymysql diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/dml.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/dml.py new file mode 100644 index 00000000..e4005c26 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/dml.py @@ -0,0 +1,219 @@ +# dialects/mysql/dml.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from __future__ import annotations + +from typing import Any +from typing import List +from typing import Mapping +from typing import Optional +from typing import Tuple +from typing import Union + +from ... import exc +from ... import util +from ...sql._typing import _DMLTableArgument +from ...sql.base import _exclusive_against +from ...sql.base import _generative +from ...sql.base import ColumnCollection +from ...sql.base import ReadOnlyColumnCollection +from ...sql.dml import Insert as StandardInsert +from ...sql.elements import ClauseElement +from ...sql.elements import KeyedColumnElement +from ...sql.expression import alias +from ...sql.selectable import NamedFromClause +from ...util.typing import Self + + +__all__ = ("Insert", "insert") + + +def insert(table: _DMLTableArgument) -> Insert: + """Construct a MySQL/MariaDB-specific variant :class:`_mysql.Insert` + construct. + + .. container:: inherited_member + + The :func:`sqlalchemy.dialects.mysql.insert` function creates + a :class:`sqlalchemy.dialects.mysql.Insert`. This class is based + on the dialect-agnostic :class:`_sql.Insert` construct which may + be constructed using the :func:`_sql.insert` function in + SQLAlchemy Core. + + The :class:`_mysql.Insert` construct includes additional methods + :meth:`_mysql.Insert.on_duplicate_key_update`. + + """ + return Insert(table) + + +class Insert(StandardInsert): + """MySQL-specific implementation of INSERT. + + Adds methods for MySQL-specific syntaxes such as ON DUPLICATE KEY UPDATE. + + The :class:`~.mysql.Insert` object is created using the + :func:`sqlalchemy.dialects.mysql.insert` function. + + .. 
versionadded:: 1.2 + + """ + + stringify_dialect = "mysql" + inherit_cache = False + + @property + def inserted( + self, + ) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]: + """Provide the "inserted" namespace for an ON DUPLICATE KEY UPDATE + statement + + MySQL's ON DUPLICATE KEY UPDATE clause allows reference to the row + that would be inserted, via a special function called ``VALUES()``. + This attribute provides all columns in this row to be referenceable + such that they will render within a ``VALUES()`` function inside the + ON DUPLICATE KEY UPDATE clause. The attribute is named ``.inserted`` + so as not to conflict with the existing + :meth:`_expression.Insert.values` method. + + .. tip:: The :attr:`_mysql.Insert.inserted` attribute is an instance + of :class:`_expression.ColumnCollection`, which provides an + interface the same as that of the :attr:`_schema.Table.c` + collection described at :ref:`metadata_tables_and_columns`. + With this collection, ordinary names are accessible like attributes + (e.g. ``stmt.inserted.some_column``), but special names and + dictionary method names should be accessed using indexed access, + such as ``stmt.inserted["column name"]`` or + ``stmt.inserted["values"]``. See the docstring for + :class:`_expression.ColumnCollection` for further examples. + + .. seealso:: + + :ref:`mysql_insert_on_duplicate_key_update` - example of how + to use :attr:`_expression.Insert.inserted` + + """ + return self.inserted_alias.columns + + @util.memoized_property + def inserted_alias(self) -> NamedFromClause: + return alias(self.table, name="inserted") + + @_generative + @_exclusive_against( + "_post_values_clause", + msgs={ + "_post_values_clause": "This Insert construct already " + "has an ON DUPLICATE KEY clause present" + }, + ) + def on_duplicate_key_update(self, *args: _UpdateArg, **kw: Any) -> Self: + r""" + Specifies the ON DUPLICATE KEY UPDATE clause. + + :param \**kw: Column keys linked to UPDATE values. The + values may be any SQL expression or supported literal Python + values. + + .. warning:: This dictionary does **not** take into account + Python-specified default UPDATE values or generation functions, + e.g. those specified using :paramref:`_schema.Column.onupdate`. + These values will not be exercised for an ON DUPLICATE KEY UPDATE + style of UPDATE, unless values are manually specified here. + + :param \*args: As an alternative to passing key/value parameters, + a dictionary or list of 2-tuples can be passed as a single positional + argument. + + Passing a single dictionary is equivalent to the keyword argument + form:: + + insert().on_duplicate_key_update({"name": "some name"}) + + Passing a list of 2-tuples indicates that the parameter assignments + in the UPDATE clause should be ordered as sent, in a manner similar + to that described for the :class:`_expression.Update` + construct overall + in :ref:`tutorial_parameter_ordered_updates`:: + + insert().on_duplicate_key_update( + [("name", "some name"), ("value", "some value")]) + + .. versionchanged:: 1.3 parameters can be specified as a dictionary + or list of 2-tuples; the latter form provides for parameter + ordering. + + + .. versionadded:: 1.2 + + .. seealso:: + + :ref:`mysql_insert_on_duplicate_key_update` + + """ + if args and kw: + raise exc.ArgumentError( + "Can't pass kwargs and positional arguments simultaneously" + ) + + if args: + if len(args) > 1: + raise exc.ArgumentError( + "Only a single dictionary or list of tuples " + "is accepted positionally." 
+                )
+            values = args[0]
+        else:
+            values = kw
+
+        self._post_values_clause = OnDuplicateClause(
+            self.inserted_alias, values
+        )
+        return self
+
+
+class OnDuplicateClause(ClauseElement):
+    __visit_name__ = "on_duplicate_key_update"
+
+    _parameter_ordering: Optional[List[str]] = None
+
+    stringify_dialect = "mysql"
+
+    def __init__(
+        self, inserted_alias: NamedFromClause, update: _UpdateArg
+    ) -> None:
+        self.inserted_alias = inserted_alias
+
+        # auto-detect that parameters should be ordered. This is copied from
+        # Update._process_colparams(), however we don't look for a special
+        # flag in this case since we are not disambiguating from other use
+        # cases as we are in Update.values().
+        if isinstance(update, list) and (
+            update and isinstance(update[0], tuple)
+        ):
+            self._parameter_ordering = [key for key, value in update]
+            update = dict(update)
+
+        if isinstance(update, dict):
+            if not update:
+                raise ValueError(
+                    "update parameter dictionary must not be empty"
+                )
+        elif isinstance(update, ColumnCollection):
+            update = dict(update)
+        else:
+            raise ValueError(
+                "update parameter must be a non-empty dictionary "
+                "or a ColumnCollection such as the `.c.` collection "
+                "of a Table object"
+            )
+        self.update = update
+
+
+_UpdateArg = Union[
+    Mapping[Any, Any], List[Tuple[str, Any]], ColumnCollection[Any, Any]
+]
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/enumerated.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/enumerated.py
new file mode 100644
index 00000000..96499d7b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/enumerated.py
@@ -0,0 +1,244 @@
+# dialects/mysql/enumerated.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+import re
+
+from .types import _StringType
+from ... import exc
+from ... import sql
+from ... import util
+from ...sql import sqltypes
+
+
+class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum, _StringType):
+    """MySQL ENUM type."""
+
+    __visit_name__ = "ENUM"
+
+    native_enum = True
+
+    def __init__(self, *enums, **kw):
+        """Construct an ENUM.
+
+        E.g.::
+
+          Column('myenum', ENUM("foo", "bar", "baz"))
+
+        :param enums: The range of valid values for this ENUM. Values in
+          enums are not quoted; they will be escaped and surrounded by single
+          quotes when generating the schema. This object may also be a
+          PEP-435-compliant enumerated type.
+
+          .. versionadded:: 1.1 added support for PEP-435-compliant
+             enumerated types.
+
+        :param strict: This flag has no effect.
+
+         .. versionchanged:: The MySQL ENUM type as well as the base Enum
+            type now validates all Python data values.
+
+        :param charset: Optional, a column-level character set for this string
+          value. Takes precedence over 'ascii' or 'unicode' short-hand.
+
+        :param collation: Optional, a column-level collation for this string
+          value. Takes precedence over 'binary' short-hand.
+
+        :param ascii: Defaults to False: short-hand for the ``latin1``
+          character set, generates ASCII in schema.
+
+        :param unicode: Defaults to False: short-hand for the ``ucs2``
+          character set, generates UNICODE in schema.
+
+        :param binary: Defaults to False: short-hand, pick the binary
+          collation type that matches the column's character set. Generates
+          BINARY in schema. This does not affect the type of data stored,
+          only the collation of character data.
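+
+        For example, the enums may also be given as a PEP-435 enumerated
+        class (``MyEnum`` below is an illustrative name, not part of the
+        library)::
+
+          import enum
+
+          class MyEnum(enum.Enum):
+              foo = "foo"
+              bar = "bar"
+
+          Column('myenum', ENUM(MyEnum))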
+ + """ + kw.pop("strict", None) + self._enum_init(enums, kw) + _StringType.__init__(self, length=self.length, **kw) + + @classmethod + def adapt_emulated_to_native(cls, impl, **kw): + """Produce a MySQL native :class:`.mysql.ENUM` from plain + :class:`.Enum`. + + """ + kw.setdefault("validate_strings", impl.validate_strings) + kw.setdefault("values_callable", impl.values_callable) + kw.setdefault("omit_aliases", impl._omit_aliases) + return cls(**kw) + + def _object_value_for_elem(self, elem): + # mysql sends back a blank string for any value that + # was persisted that was not in the enums; that is, it does no + # validation on the incoming data, it "truncates" it to be + # the blank string. Return it straight. + if elem == "": + return elem + else: + return super()._object_value_for_elem(elem) + + def __repr__(self): + return util.generic_repr( + self, to_inspect=[ENUM, _StringType, sqltypes.Enum] + ) + + +class SET(_StringType): + """MySQL SET type.""" + + __visit_name__ = "SET" + + def __init__(self, *values, **kw): + """Construct a SET. + + E.g.:: + + Column('myset', SET("foo", "bar", "baz")) + + + The list of potential values is required in the case that this + set will be used to generate DDL for a table, or if the + :paramref:`.SET.retrieve_as_bitwise` flag is set to True. + + :param values: The range of valid values for this SET. The values + are not quoted, they will be escaped and surrounded by single + quotes when generating the schema. + + :param convert_unicode: Same flag as that of + :paramref:`.String.convert_unicode`. + + :param collation: same as that of :paramref:`.String.collation` + + :param charset: same as that of :paramref:`.VARCHAR.charset`. + + :param ascii: same as that of :paramref:`.VARCHAR.ascii`. + + :param unicode: same as that of :paramref:`.VARCHAR.unicode`. + + :param binary: same as that of :paramref:`.VARCHAR.binary`. + + :param retrieve_as_bitwise: if True, the data for the set type will be + persisted and selected using an integer value, where a set is coerced + into a bitwise mask for persistence. MySQL allows this mode which + has the advantage of being able to store values unambiguously, + such as the blank string ``''``. The datatype will appear + as the expression ``col + 0`` in a SELECT statement, so that the + value is coerced into an integer value in result sets. + This flag is required if one wishes + to persist a set that can store the blank string ``''`` as a value. + + .. warning:: + + When using :paramref:`.mysql.SET.retrieve_as_bitwise`, it is + essential that the list of set values is expressed in the + **exact same order** as exists on the MySQL database. 
+ + """ + self.retrieve_as_bitwise = kw.pop("retrieve_as_bitwise", False) + self.values = tuple(values) + if not self.retrieve_as_bitwise and "" in values: + raise exc.ArgumentError( + "Can't use the blank value '' in a SET without " + "setting retrieve_as_bitwise=True" + ) + if self.retrieve_as_bitwise: + self._bitmap = { + value: 2**idx for idx, value in enumerate(self.values) + } + self._bitmap.update( + (2**idx, value) for idx, value in enumerate(self.values) + ) + length = max([len(v) for v in values] + [0]) + kw.setdefault("length", length) + super().__init__(**kw) + + def column_expression(self, colexpr): + if self.retrieve_as_bitwise: + return sql.type_coerce( + sql.type_coerce(colexpr, sqltypes.Integer) + 0, self + ) + else: + return colexpr + + def result_processor(self, dialect, coltype): + if self.retrieve_as_bitwise: + + def process(value): + if value is not None: + value = int(value) + + return set(util.map_bits(self._bitmap.__getitem__, value)) + else: + return None + + else: + super_convert = super().result_processor(dialect, coltype) + + def process(value): + if isinstance(value, str): + # MySQLdb returns a string, let's parse + if super_convert: + value = super_convert(value) + return set(re.findall(r"[^,]+", value)) + else: + # mysql-connector-python does a naive + # split(",") which throws in an empty string + if value is not None: + value.discard("") + return value + + return process + + def bind_processor(self, dialect): + super_convert = super().bind_processor(dialect) + if self.retrieve_as_bitwise: + + def process(value): + if value is None: + return None + elif isinstance(value, (int, str)): + if super_convert: + return super_convert(value) + else: + return value + else: + int_value = 0 + for v in value: + int_value |= self._bitmap[v] + return int_value + + else: + + def process(value): + # accept strings and int (actually bitflag) values directly + if value is not None and not isinstance(value, (int, str)): + value = ",".join(value) + + if super_convert: + return super_convert(value) + else: + return value + + return process + + def adapt(self, impltype, **kw): + kw["retrieve_as_bitwise"] = self.retrieve_as_bitwise + return util.constructor_copy(self, impltype, *self.values, **kw) + + def __repr__(self): + return util.generic_repr( + self, + to_inspect=[SET, _StringType], + additional_kw=[ + ("retrieve_as_bitwise", False), + ], + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/expression.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/expression.py new file mode 100644 index 00000000..b81b58af --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/expression.py @@ -0,0 +1,141 @@ +# dialects/mysql/expression.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +from ... import exc +from ... import util +from ...sql import coercions +from ...sql import elements +from ...sql import operators +from ...sql import roles +from ...sql.base import _generative +from ...sql.base import Generative +from ...util.typing import Self + + +class match(Generative, elements.BinaryExpression): + """Produce a ``MATCH (X, Y) AGAINST ('TEXT')`` clause. 
+
+    E.g.::
+
+        from sqlalchemy import desc
+        from sqlalchemy.dialects.mysql import match
+
+        match_expr = match(
+            users_table.c.firstname,
+            users_table.c.lastname,
+            against="Firstname Lastname",
+        )
+
+        stmt = (
+            select(users_table)
+            .where(match_expr.in_boolean_mode())
+            .order_by(desc(match_expr))
+        )
+
+    Would produce SQL resembling::
+
+        SELECT id, firstname, lastname
+        FROM user
+        WHERE MATCH(firstname, lastname) AGAINST (:param_1 IN BOOLEAN MODE)
+        ORDER BY MATCH(firstname, lastname) AGAINST (:param_2) DESC
+
+    The :func:`_mysql.match` function is a standalone version of the
+    :meth:`_sql.ColumnElement.match` method available on all
+    SQL expressions; unlike :meth:`_expression.ColumnElement.match`,
+    it allows multiple columns to be passed.
+
+    :param cols: column expressions to match against
+
+    :param against: the expression to compare against
+
+    :param in_boolean_mode: boolean, set "boolean mode" to true
+
+    :param in_natural_language_mode: boolean, set "natural language" to true
+
+    :param with_query_expansion: boolean, set "query expansion" to true
+
+    .. versionadded:: 1.4.19
+
+    .. seealso::
+
+        :meth:`_expression.ColumnElement.match`
+
+    """
+
+    __visit_name__ = "mysql_match"
+
+    inherit_cache = True
+
+    def __init__(self, *cols, **kw):
+        if not cols:
+            raise exc.ArgumentError("columns are required")
+
+        against = kw.pop("against", None)
+
+        if against is None:
+            raise exc.ArgumentError("against is required")
+        against = coercions.expect(
+            roles.ExpressionElementRole,
+            against,
+        )
+
+        left = elements.BooleanClauseList._construct_raw(
+            operators.comma_op,
+            clauses=cols,
+        )
+        left.group = False
+
+        flags = util.immutabledict(
+            {
+                "mysql_boolean_mode": kw.pop("in_boolean_mode", False),
+                "mysql_natural_language": kw.pop(
+                    "in_natural_language_mode", False
+                ),
+                "mysql_query_expansion": kw.pop("with_query_expansion", False),
+            }
+        )
+
+        if kw:
+            raise exc.ArgumentError("unknown arguments: %s" % (", ".join(kw)))
+
+        super().__init__(left, against, operators.match_op, modifiers=flags)
+
+    @_generative
+    def in_boolean_mode(self) -> Self:
+        """Apply the "IN BOOLEAN MODE" modifier to the MATCH expression.
+
+        :return: a new :class:`_mysql.match` instance with modifications
+         applied.
+        """
+
+        self.modifiers = self.modifiers.union({"mysql_boolean_mode": True})
+        return self
+
+    @_generative
+    def in_natural_language_mode(self) -> Self:
+        """Apply the "IN NATURAL LANGUAGE MODE" modifier to the MATCH
+        expression.
+
+        :return: a new :class:`_mysql.match` instance with modifications
+         applied.
+        """
+
+        self.modifiers = self.modifiers.union({"mysql_natural_language": True})
+        return self
+
+    @_generative
+    def with_query_expansion(self) -> Self:
+        """Apply the "WITH QUERY EXPANSION" modifier to the MATCH expression.
+
+        :return: a new :class:`_mysql.match` instance with modifications
+         applied.
+        """
+
+        self.modifiers = self.modifiers.union({"mysql_query_expansion": True})
+        return self
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/json.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/json.py
new file mode 100644
index 00000000..ebe4a34d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/json.py
@@ -0,0 +1,81 @@
+# dialects/mysql/json.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+from ... 
import types as sqltypes + + +class JSON(sqltypes.JSON): + """MySQL JSON type. + + MySQL supports JSON as of version 5.7. + MariaDB supports JSON (as an alias for LONGTEXT) as of version 10.2. + + :class:`_mysql.JSON` is used automatically whenever the base + :class:`_types.JSON` datatype is used against a MySQL or MariaDB backend. + + .. seealso:: + + :class:`_types.JSON` - main documentation for the generic + cross-platform JSON datatype. + + The :class:`.mysql.JSON` type supports persistence of JSON values + as well as the core index operations provided by :class:`_types.JSON` + datatype, by adapting the operations to render the ``JSON_EXTRACT`` + function at the database level. + + """ + + pass + + +class _FormatTypeMixin: + def _format_value(self, value): + raise NotImplementedError() + + def bind_processor(self, dialect): + super_proc = self.string_bind_processor(dialect) + + def process(value): + value = self._format_value(value) + if super_proc: + value = super_proc(value) + return value + + return process + + def literal_processor(self, dialect): + super_proc = self.string_literal_processor(dialect) + + def process(value): + value = self._format_value(value) + if super_proc: + value = super_proc(value) + return value + + return process + + +class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType): + def _format_value(self, value): + if isinstance(value, int): + value = "$[%s]" % value + else: + value = '$."%s"' % value + return value + + +class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType): + def _format_value(self, value): + return "$%s" % ( + "".join( + [ + "[%s]" % elem if isinstance(elem, int) else '."%s"' % elem + for elem in value + ] + ) + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadb.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadb.py new file mode 100644 index 00000000..10a05f9c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadb.py @@ -0,0 +1,32 @@ +# dialects/mysql/mariadb.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors +from .base import MariaDBIdentifierPreparer +from .base import MySQLDialect + + +class MariaDBDialect(MySQLDialect): + is_mariadb = True + supports_statement_cache = True + name = "mariadb" + preparer = MariaDBIdentifierPreparer + + +def loader(driver): + driver_mod = __import__( + "sqlalchemy.dialects.mysql.%s" % driver + ).dialects.mysql + driver_cls = getattr(driver_mod, driver).dialect + + return type( + "MariaDBDialect_%s" % driver, + ( + MariaDBDialect, + driver_cls, + ), + {"supports_statement_cache": True}, + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadbconnector.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadbconnector.py new file mode 100644 index 00000000..1730c1a6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadbconnector.py @@ -0,0 +1,277 @@ +# dialects/mysql/mariadbconnector.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +""" + +.. 
dialect:: mysql+mariadbconnector
+    :name: MariaDB Connector/Python
+    :dbapi: mariadb
+    :connectstring: mariadb+mariadbconnector://<user>:<password>@<host>[:<port>]/<dbname>
+    :url: https://pypi.org/project/mariadb/
+
+Driver Status
+-------------
+
+MariaDB Connector/Python enables Python programs to access MariaDB and MySQL
+databases using an API which is compliant with the Python DB API 2.0 (PEP-249).
+It is written in C and uses the MariaDB Connector/C client library for
+client-server communication.
+
+Note that the default driver for a ``mariadb://`` connection URI continues to
+be ``mysqldb``. ``mariadb+mariadbconnector://`` is required to use this driver.
+
+.. mariadb: https://github.com/mariadb-corporation/mariadb-connector-python
+
+"""  # noqa
+import re
+from uuid import UUID as _python_UUID
+
+from .base import MySQLCompiler
+from .base import MySQLDialect
+from .base import MySQLExecutionContext
+from ... import sql
+from ... import util
+from ...sql import sqltypes
+
+
+mariadb_cpy_minimum_version = (1, 0, 1)
+
+
+class _MariaDBUUID(sqltypes.UUID[sqltypes._UUID_RETURN]):
+    # work around JIRA issue
+    # https://jira.mariadb.org/browse/CONPY-270. When that issue is fixed,
+    # this type can be removed.
+    def result_processor(self, dialect, coltype):
+        if self.as_uuid:
+
+            def process(value):
+                if value is not None:
+                    if hasattr(value, "decode"):
+                        value = value.decode("ascii")
+                    value = _python_UUID(value)
+                return value
+
+            return process
+        else:
+
+            def process(value):
+                if value is not None:
+                    if hasattr(value, "decode"):
+                        value = value.decode("ascii")
+                    value = str(_python_UUID(value))
+                return value
+
+            return process
+
+
+class MySQLExecutionContext_mariadbconnector(MySQLExecutionContext):
+    _lastrowid = None
+
+    def create_server_side_cursor(self):
+        return self._dbapi_connection.cursor(buffered=False)
+
+    def create_default_cursor(self):
+        return self._dbapi_connection.cursor(buffered=True)
+
+    def post_exec(self):
+        super().post_exec()
+
+        self._rowcount = self.cursor.rowcount
+
+        if self.isinsert and self.compiled.postfetch_lastrowid:
+            self._lastrowid = self.cursor.lastrowid
+
+    def get_lastrowid(self):
+        return self._lastrowid
+
+
+class MySQLCompiler_mariadbconnector(MySQLCompiler):
+    pass
+
+
+class MySQLDialect_mariadbconnector(MySQLDialect):
+    driver = "mariadbconnector"
+    supports_statement_cache = True
+
+    # set this to True at the module level to prevent the driver from running
+    # against a backend that the server detects as MySQL. Currently this
+    # appears to be unnecessary, as MariaDB client libraries have always
+    # worked against MySQL databases. However, if this changes at some point,
+    # this can be adjusted, but PLEASE ADD A TEST in
+    # test/dialect/mysql/test_dialect.py if this change is made at some point
+    # to ensure the correct exception is raised at the correct point when
+    # running the driver against a MySQL backend.
+ # is_mariadb = True + + supports_unicode_statements = True + encoding = "utf8mb4" + convert_unicode = True + supports_sane_rowcount = True + supports_sane_multi_rowcount = True + supports_native_decimal = True + default_paramstyle = "qmark" + execution_ctx_cls = MySQLExecutionContext_mariadbconnector + statement_compiler = MySQLCompiler_mariadbconnector + + supports_server_side_cursors = True + + colspecs = util.update_copy( + MySQLDialect.colspecs, {sqltypes.Uuid: _MariaDBUUID} + ) + + @util.memoized_property + def _dbapi_version(self): + if self.dbapi and hasattr(self.dbapi, "__version__"): + return tuple( + [ + int(x) + for x in re.findall( + r"(\d+)(?:[-\.]?|$)", self.dbapi.__version__ + ) + ] + ) + else: + return (99, 99, 99) + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.paramstyle = "qmark" + if self.dbapi is not None: + if self._dbapi_version < mariadb_cpy_minimum_version: + raise NotImplementedError( + "The minimum required version for MariaDB " + "Connector/Python is %s" + % ".".join(str(x) for x in mariadb_cpy_minimum_version) + ) + + @classmethod + def import_dbapi(cls): + return __import__("mariadb") + + def is_disconnect(self, e, connection, cursor): + if super().is_disconnect(e, connection, cursor): + return True + elif isinstance(e, self.dbapi.Error): + str_e = str(e).lower() + return "not connected" in str_e or "isn't valid" in str_e + else: + return False + + def create_connect_args(self, url): + opts = url.translate_connect_args() + opts.update(url.query) + + int_params = [ + "connect_timeout", + "read_timeout", + "write_timeout", + "client_flag", + "port", + "pool_size", + ] + bool_params = [ + "local_infile", + "ssl_verify_cert", + "ssl", + "pool_reset_connection", + "compress", + ] + + for key in int_params: + util.coerce_kw_type(opts, key, int) + for key in bool_params: + util.coerce_kw_type(opts, key, bool) + + # FOUND_ROWS must be set in CLIENT_FLAGS to enable + # supports_sane_rowcount. 
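+        # (FOUND_ROWS changes cursor.rowcount for UPDATE statements to count
+        # rows *matched* by the WHERE clause rather than rows actually
+        # changed, which is the behavior supports_sane_rowcount promises.)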
+        client_flag = opts.get("client_flag", 0)
+        if self.dbapi is not None:
+            try:
+                CLIENT_FLAGS = __import__(
+                    self.dbapi.__name__ + ".constants.CLIENT"
+                ).constants.CLIENT
+                client_flag |= CLIENT_FLAGS.FOUND_ROWS
+            except (AttributeError, ImportError):
+                self.supports_sane_rowcount = False
+        opts["client_flag"] = client_flag
+        return [[], opts]
+
+    def _extract_error_code(self, exception):
+        try:
+            rc = exception.errno
+        except AttributeError:
+            # not every mariadb exception type exposes ``errno``; fall back
+            # to a sentinel rather than swallowing everything with a bare
+            # ``except:``
+            rc = -1
+        return rc
+
+    def _detect_charset(self, connection):
+        return "utf8mb4"
+
+    def get_isolation_level_values(self, dbapi_connection):
+        return (
+            "SERIALIZABLE",
+            "READ UNCOMMITTED",
+            "READ COMMITTED",
+            "REPEATABLE READ",
+            "AUTOCOMMIT",
+        )
+
+    def set_isolation_level(self, connection, level):
+        if level == "AUTOCOMMIT":
+            connection.autocommit = True
+        else:
+            connection.autocommit = False
+            super().set_isolation_level(connection, level)
+
+    def do_begin_twophase(self, connection, xid):
+        connection.execute(
+            sql.text("XA BEGIN :xid").bindparams(
+                sql.bindparam("xid", xid, literal_execute=True)
+            )
+        )
+
+    def do_prepare_twophase(self, connection, xid):
+        connection.execute(
+            sql.text("XA END :xid").bindparams(
+                sql.bindparam("xid", xid, literal_execute=True)
+            )
+        )
+        connection.execute(
+            sql.text("XA PREPARE :xid").bindparams(
+                sql.bindparam("xid", xid, literal_execute=True)
+            )
+        )
+
+    def do_rollback_twophase(
+        self, connection, xid, is_prepared=True, recover=False
+    ):
+        if not is_prepared:
+            connection.execute(
+                sql.text("XA END :xid").bindparams(
+                    sql.bindparam("xid", xid, literal_execute=True)
+                )
+            )
+        connection.execute(
+            sql.text("XA ROLLBACK :xid").bindparams(
+                sql.bindparam("xid", xid, literal_execute=True)
+            )
+        )
+
+    def do_commit_twophase(
+        self, connection, xid, is_prepared=True, recover=False
+    ):
+        if not is_prepared:
+            self.do_prepare_twophase(connection, xid)
+        connection.execute(
+            sql.text("XA COMMIT :xid").bindparams(
+                sql.bindparam("xid", xid, literal_execute=True)
+            )
+        )
+
+
+dialect = MySQLDialect_mariadbconnector
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py
new file mode 100644
index 00000000..8f4b4174
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py
@@ -0,0 +1,180 @@
+# dialects/mysql/mysqlconnector.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+r"""
+.. dialect:: mysql+mysqlconnector
+    :name: MySQL Connector/Python
+    :dbapi: myconnpy
+    :connectstring: mysql+mysqlconnector://<user>:<password>@<host>[:<port>]/<dbname>
+    :url: https://pypi.org/project/mysql-connector-python/
+
+.. note::
+
+    The MySQL Connector/Python DBAPI has had many issues since its release,
+    some of which may remain unresolved, and the mysqlconnector dialect is
+    **not tested as part of SQLAlchemy's continuous integration**.
+    The recommended MySQL dialects are mysqlclient and PyMySQL.
+
+"""  # noqa
+
+import re
+
+from .base import BIT
+from .base import MySQLCompiler
+from .base import MySQLDialect
+from .base import MySQLIdentifierPreparer
+from ... 
import util + + +class MySQLCompiler_mysqlconnector(MySQLCompiler): + def visit_mod_binary(self, binary, operator, **kw): + return ( + self.process(binary.left, **kw) + + " % " + + self.process(binary.right, **kw) + ) + + +class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer): + @property + def _double_percents(self): + return False + + @_double_percents.setter + def _double_percents(self, value): + pass + + def _escape_identifier(self, value): + value = value.replace(self.escape_quote, self.escape_to_quote) + return value + + +class _myconnpyBIT(BIT): + def result_processor(self, dialect, coltype): + """MySQL-connector already converts mysql bits, so.""" + + return None + + +class MySQLDialect_mysqlconnector(MySQLDialect): + driver = "mysqlconnector" + supports_statement_cache = True + + supports_sane_rowcount = True + supports_sane_multi_rowcount = True + + supports_native_decimal = True + + default_paramstyle = "format" + statement_compiler = MySQLCompiler_mysqlconnector + + preparer = MySQLIdentifierPreparer_mysqlconnector + + colspecs = util.update_copy(MySQLDialect.colspecs, {BIT: _myconnpyBIT}) + + @classmethod + def import_dbapi(cls): + from mysql import connector + + return connector + + def do_ping(self, dbapi_connection): + dbapi_connection.ping(False) + return True + + def create_connect_args(self, url): + opts = url.translate_connect_args(username="user") + + opts.update(url.query) + + util.coerce_kw_type(opts, "allow_local_infile", bool) + util.coerce_kw_type(opts, "autocommit", bool) + util.coerce_kw_type(opts, "buffered", bool) + util.coerce_kw_type(opts, "client_flag", int) + util.coerce_kw_type(opts, "compress", bool) + util.coerce_kw_type(opts, "connection_timeout", int) + util.coerce_kw_type(opts, "connect_timeout", int) + util.coerce_kw_type(opts, "consume_results", bool) + util.coerce_kw_type(opts, "force_ipv6", bool) + util.coerce_kw_type(opts, "get_warnings", bool) + util.coerce_kw_type(opts, "pool_reset_session", bool) + util.coerce_kw_type(opts, "pool_size", int) + util.coerce_kw_type(opts, "raise_on_warnings", bool) + util.coerce_kw_type(opts, "raw", bool) + util.coerce_kw_type(opts, "ssl_verify_cert", bool) + util.coerce_kw_type(opts, "use_pure", bool) + util.coerce_kw_type(opts, "use_unicode", bool) + + # unfortunately, MySQL/connector python refuses to release a + # cursor without reading fully, so non-buffered isn't an option + opts.setdefault("buffered", True) + + # FOUND_ROWS must be set in ClientFlag to enable + # supports_sane_rowcount. + if self.dbapi is not None: + try: + from mysql.connector.constants import ClientFlag + + client_flags = opts.get( + "client_flags", ClientFlag.get_default() + ) + client_flags |= ClientFlag.FOUND_ROWS + opts["client_flags"] = client_flags + except Exception: + pass + return [[], opts] + + @util.memoized_property + def _mysqlconnector_version_info(self): + if self.dbapi and hasattr(self.dbapi, "__version__"): + m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", self.dbapi.__version__) + if m: + return tuple(int(x) for x in m.group(1, 2, 3) if x is not None) + + def _detect_charset(self, connection): + return connection.connection.charset + + def _extract_error_code(self, exception): + return exception.errno + + def is_disconnect(self, e, connection, cursor): + errnos = (2006, 2013, 2014, 2045, 2055, 2048) + exceptions = (self.dbapi.OperationalError, self.dbapi.InterfaceError) + if isinstance(e, exceptions): + return ( + e.errno in errnos + or "MySQL Connection not available." 
in str(e)
+                or "Connection to MySQL is not available" in str(e)
+            )
+        else:
+            return False
+
+    def _compat_fetchall(self, rp, charset=None):
+        return rp.fetchall()
+
+    def _compat_fetchone(self, rp, charset=None):
+        return rp.fetchone()
+
+    _isolation_lookup = {
+        "SERIALIZABLE",
+        "READ UNCOMMITTED",
+        "READ COMMITTED",
+        "REPEATABLE READ",
+        "AUTOCOMMIT",
+    }
+
+    def _set_isolation_level(self, connection, level):
+        if level == "AUTOCOMMIT":
+            connection.autocommit = True
+        else:
+            connection.autocommit = False
+            super()._set_isolation_level(connection, level)
+
+
+dialect = MySQLDialect_mysqlconnector
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqldb.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqldb.py
new file mode 100644
index 00000000..0baf10f7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqldb.py
@@ -0,0 +1,303 @@
+# dialects/mysql/mysqldb.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+"""
+
+.. dialect:: mysql+mysqldb
+    :name: mysqlclient (maintained fork of MySQL-Python)
+    :dbapi: mysqldb
+    :connectstring: mysql+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
+    :url: https://pypi.org/project/mysqlclient/
+
+Driver Status
+-------------
+
+The mysqlclient DBAPI is a maintained fork of the
+`MySQL-Python <https://sourceforge.net/projects/mysql-python>`_ DBAPI,
+which itself is no longer maintained. `mysqlclient`_ supports Python 2
+and Python 3 and is very stable.
+
+.. _mysqlclient: https://github.com/PyMySQL/mysqlclient-python
+
+.. _mysqldb_unicode:
+
+Unicode
+-------
+
+Please see :ref:`mysql_unicode` for current recommendations on unicode
+handling.
+
+.. _mysqldb_ssl:
+
+SSL Connections
+----------------
+
+The mysqlclient and PyMySQL DBAPIs accept an additional dictionary under the
+key "ssl", which may be specified using the
+:paramref:`_sa.create_engine.connect_args` dictionary::
+
+    engine = create_engine(
+        "mysql+mysqldb://scott:tiger@192.168.0.134/test",
+        connect_args={
+            "ssl": {
+                "ca": "/home/gord/client-ssl/ca.pem",
+                "cert": "/home/gord/client-ssl/client-cert.pem",
+                "key": "/home/gord/client-ssl/client-key.pem"
+            }
+        }
+    )
+
+For convenience, the following keys may also be specified inline within the URL
+where they will be interpreted into the "ssl" dictionary automatically:
+"ssl_ca", "ssl_cert", "ssl_key", "ssl_capath", "ssl_cipher",
+"ssl_check_hostname". An example is as follows::
+
+    connection_uri = (
+        "mysql+mysqldb://scott:tiger@192.168.0.134/test"
+        "?ssl_ca=/home/gord/client-ssl/ca.pem"
+        "&ssl_cert=/home/gord/client-ssl/client-cert.pem"
+        "&ssl_key=/home/gord/client-ssl/client-key.pem"
+    )
+
+.. seealso::
+
+    :ref:`pymysql_ssl` in the PyMySQL dialect
+
+
+Using MySQLdb with Google Cloud SQL
+-----------------------------------
+
+Google Cloud SQL now recommends use of the MySQLdb dialect. Connect
+using a URL like the following::
+
+    mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/<projectid>:<instancename>
+
+Server Side Cursors
+-------------------
+
+The mysqldb dialect supports server-side cursors. See :ref:`mysql_ss_cursors`.
+
+"""
+
+import re
+
+from .base import MySQLCompiler
+from .base import MySQLDialect
+from .base import MySQLExecutionContext
+from .base import MySQLIdentifierPreparer
+from .base import TEXT
+from ... import sql
+from ... 
import util + + +class MySQLExecutionContext_mysqldb(MySQLExecutionContext): + pass + + +class MySQLCompiler_mysqldb(MySQLCompiler): + pass + + +class MySQLDialect_mysqldb(MySQLDialect): + driver = "mysqldb" + supports_statement_cache = True + supports_unicode_statements = True + supports_sane_rowcount = True + supports_sane_multi_rowcount = True + + supports_native_decimal = True + + default_paramstyle = "format" + execution_ctx_cls = MySQLExecutionContext_mysqldb + statement_compiler = MySQLCompiler_mysqldb + preparer = MySQLIdentifierPreparer + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self._mysql_dbapi_version = ( + self._parse_dbapi_version(self.dbapi.__version__) + if self.dbapi is not None and hasattr(self.dbapi, "__version__") + else (0, 0, 0) + ) + + def _parse_dbapi_version(self, version): + m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", version) + if m: + return tuple(int(x) for x in m.group(1, 2, 3) if x is not None) + else: + return (0, 0, 0) + + @util.langhelpers.memoized_property + def supports_server_side_cursors(self): + try: + cursors = __import__("MySQLdb.cursors").cursors + self._sscursor = cursors.SSCursor + return True + except (ImportError, AttributeError): + return False + + @classmethod + def import_dbapi(cls): + return __import__("MySQLdb") + + def on_connect(self): + super_ = super().on_connect() + + def on_connect(conn): + if super_ is not None: + super_(conn) + + charset_name = conn.character_set_name() + + if charset_name is not None: + cursor = conn.cursor() + cursor.execute("SET NAMES %s" % charset_name) + cursor.close() + + return on_connect + + def do_ping(self, dbapi_connection): + dbapi_connection.ping() + return True + + def do_executemany(self, cursor, statement, parameters, context=None): + rowcount = cursor.executemany(statement, parameters) + if context is not None: + context._rowcount = rowcount + + def _check_unicode_returns(self, connection): + # work around issue fixed in + # https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8 + # specific issue w/ the utf8mb4_bin collation and unicode returns + + collation = connection.exec_driver_sql( + "show collation where %s = 'utf8mb4' and %s = 'utf8mb4_bin'" + % ( + self.identifier_preparer.quote("Charset"), + self.identifier_preparer.quote("Collation"), + ) + ).scalar() + has_utf8mb4_bin = self.server_version_info > (5,) and collation + if has_utf8mb4_bin: + additional_tests = [ + sql.collate( + sql.cast( + sql.literal_column("'test collated returns'"), + TEXT(charset="utf8mb4"), + ), + "utf8mb4_bin", + ) + ] + else: + additional_tests = [] + return super()._check_unicode_returns(connection, additional_tests) + + def create_connect_args(self, url, _translate_args=None): + if _translate_args is None: + _translate_args = dict( + database="db", username="user", password="passwd" + ) + + opts = url.translate_connect_args(**_translate_args) + opts.update(url.query) + + util.coerce_kw_type(opts, "compress", bool) + util.coerce_kw_type(opts, "connect_timeout", int) + util.coerce_kw_type(opts, "read_timeout", int) + util.coerce_kw_type(opts, "write_timeout", int) + util.coerce_kw_type(opts, "client_flag", int) + util.coerce_kw_type(opts, "local_infile", bool) + # Note: using either of the below will cause all strings to be + # returned as Unicode, both in raw SQL operations and with column + # types like String and MSString. 
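+        # For illustration (a hedged sketch; the URL is hypothetical): query
+        # string values always arrive as strings, so a URL such as
+        #   mysql+mysqldb://scott:tiger@localhost/test?charset=utf8mb4&use_unicode=1
+        # yields opts={"charset": "utf8mb4", "use_unicode": "1", ...}; the
+        # coercions below convert "1" to True before the values are passed
+        # to MySQLdb.connect().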
+ util.coerce_kw_type(opts, "use_unicode", bool) + util.coerce_kw_type(opts, "charset", str) + + # Rich values 'cursorclass' and 'conv' are not supported via + # query string. + + ssl = {} + keys = [ + ("ssl_ca", str), + ("ssl_key", str), + ("ssl_cert", str), + ("ssl_capath", str), + ("ssl_cipher", str), + ("ssl_check_hostname", bool), + ] + for key, kw_type in keys: + if key in opts: + ssl[key[4:]] = opts[key] + util.coerce_kw_type(ssl, key[4:], kw_type) + del opts[key] + if ssl: + opts["ssl"] = ssl + + # FOUND_ROWS must be set in CLIENT_FLAGS to enable + # supports_sane_rowcount. + client_flag = opts.get("client_flag", 0) + + client_flag_found_rows = self._found_rows_client_flag() + if client_flag_found_rows is not None: + client_flag |= client_flag_found_rows + opts["client_flag"] = client_flag + return [[], opts] + + def _found_rows_client_flag(self): + if self.dbapi is not None: + try: + CLIENT_FLAGS = __import__( + self.dbapi.__name__ + ".constants.CLIENT" + ).constants.CLIENT + except (AttributeError, ImportError): + return None + else: + return CLIENT_FLAGS.FOUND_ROWS + else: + return None + + def _extract_error_code(self, exception): + return exception.args[0] + + def _detect_charset(self, connection): + """Sniff out the character set in use for connection results.""" + + try: + # note: the SQL here would be + # "SHOW VARIABLES LIKE 'character_set%%'" + cset_name = connection.connection.character_set_name + except AttributeError: + util.warn( + "No 'character_set_name' can be detected with " + "this MySQL-Python version; " + "please upgrade to a recent version of MySQL-Python. " + "Assuming latin1." + ) + return "latin1" + else: + return cset_name() + + def get_isolation_level_values(self, dbapi_connection): + return ( + "SERIALIZABLE", + "READ UNCOMMITTED", + "READ COMMITTED", + "REPEATABLE READ", + "AUTOCOMMIT", + ) + + def set_isolation_level(self, dbapi_connection, level): + if level == "AUTOCOMMIT": + dbapi_connection.autocommit(True) + else: + dbapi_connection.autocommit(False) + super().set_isolation_level(dbapi_connection, level) + + +dialect = MySQLDialect_mysqldb diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/provision.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/provision.py new file mode 100644 index 00000000..836ffa1d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/provision.py @@ -0,0 +1,110 @@ +# dialects/mysql/provision.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +from ... import exc +from ...testing.provision import configure_follower +from ...testing.provision import create_db +from ...testing.provision import drop_db +from ...testing.provision import generate_driver_url +from ...testing.provision import temp_table_keyword_args +from ...testing.provision import upsert + + +@generate_driver_url.for_db("mysql", "mariadb") +def generate_driver_url(url, driver, query_str): + backend = url.get_backend_name() + + # NOTE: at the moment, tests are running mariadbconnector + # against both mariadb and mysql backends. if we want this to be + # limited, do the decision making here to reject a "mysql+mariadbconnector" + # URL. Optionally also re-enable the module level + # MySQLDialect_mariadbconnector.is_mysql flag as well, which must include + # a unit and/or functional test. 
+
+    # all the Jenkins tests have been running mysqlclient Python library
+    # built against mariadb client drivers for years against all MySQL /
+    # MariaDB versions going back to MySQL 5.6, currently they can talk
+    # to MySQL databases without problems.
+
+    if backend == "mysql":
+        dialect_cls = url.get_dialect()
+        if dialect_cls._is_mariadb_from_url(url):
+            backend = "mariadb"
+
+    new_url = url.set(
+        drivername="%s+%s" % (backend, driver)
+    ).update_query_string(query_str)
+
+    if driver == "mariadbconnector":
+        new_url = new_url.difference_update_query(["charset"])
+
+    try:
+        new_url.get_dialect()
+    except exc.NoSuchModuleError:
+        return None
+    else:
+        return new_url
+
+
+@create_db.for_db("mysql", "mariadb")
+def _mysql_create_db(cfg, eng, ident):
+    with eng.begin() as conn:
+        try:
+            _mysql_drop_db(cfg, conn, ident)
+        except Exception:
+            pass
+
+    with eng.begin() as conn:
+        conn.exec_driver_sql(
+            "CREATE DATABASE %s CHARACTER SET utf8mb4" % ident
+        )
+        conn.exec_driver_sql(
+            "CREATE DATABASE %s_test_schema CHARACTER SET utf8mb4" % ident
+        )
+        conn.exec_driver_sql(
+            "CREATE DATABASE %s_test_schema_2 CHARACTER SET utf8mb4" % ident
+        )
+
+
+@configure_follower.for_db("mysql", "mariadb")
+def _mysql_configure_follower(config, ident):
+    config.test_schema = "%s_test_schema" % ident
+    config.test_schema_2 = "%s_test_schema_2" % ident
+
+
+@drop_db.for_db("mysql", "mariadb")
+def _mysql_drop_db(cfg, eng, ident):
+    with eng.begin() as conn:
+        conn.exec_driver_sql("DROP DATABASE %s_test_schema" % ident)
+        conn.exec_driver_sql("DROP DATABASE %s_test_schema_2" % ident)
+        conn.exec_driver_sql("DROP DATABASE %s" % ident)
+
+
+@temp_table_keyword_args.for_db("mysql", "mariadb")
+def _mysql_temp_table_keyword_args(cfg, eng):
+    return {"prefixes": ["TEMPORARY"]}
+
+
+@upsert.for_db("mariadb")
+def _upsert(
+    cfg, table, returning, *, set_lambda=None, sort_by_parameter_order=False
+):
+    from sqlalchemy.dialects.mysql import insert
+
+    stmt = insert(table)
+
+    if set_lambda:
+        stmt = stmt.on_duplicate_key_update(**set_lambda(stmt.inserted))
+    else:
+        pk1 = table.primary_key.c[0]
+        stmt = stmt.on_duplicate_key_update({pk1.key: pk1})
+
+    stmt = stmt.returning(
+        *returning, sort_by_parameter_order=sort_by_parameter_order
+    )
+    return stmt
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pymysql.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pymysql.py
new file mode 100644
index 00000000..830e4416
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pymysql.py
@@ -0,0 +1,137 @@
+# dialects/mysql/pymysql.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+r"""
+
+.. dialect:: mysql+pymysql
+    :name: PyMySQL
+    :dbapi: pymysql
+    :connectstring: mysql+pymysql://<username>:<password>@<host>/<dbname>[?<options>]
+    :url: https://pymysql.readthedocs.io/
+
+Unicode
+-------
+
+Please see :ref:`mysql_unicode` for current recommendations on unicode
+handling.
+
+.. _pymysql_ssl:
+
+SSL Connections
+------------------
+
+The PyMySQL DBAPI accepts the same SSL arguments as that of MySQLdb,
+described at :ref:`mysqldb_ssl`. See that section for additional examples.
+ +If the server uses an automatically-generated certificate that is self-signed +or does not match the host name (as seen from the client), it may also be +necessary to indicate ``ssl_check_hostname=false`` in PyMySQL:: + + connection_uri = ( + "mysql+pymysql://scott:tiger@192.168.0.134/test" + "?ssl_ca=/home/gord/client-ssl/ca.pem" + "&ssl_cert=/home/gord/client-ssl/client-cert.pem" + "&ssl_key=/home/gord/client-ssl/client-key.pem" + "&ssl_check_hostname=false" + ) + + +MySQL-Python Compatibility +-------------------------- + +The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver, +and targets 100% compatibility. Most behavioral notes for MySQL-python apply +to the pymysql driver as well. + +""" # noqa + +from .mysqldb import MySQLDialect_mysqldb +from ...util import langhelpers + + +class MySQLDialect_pymysql(MySQLDialect_mysqldb): + driver = "pymysql" + supports_statement_cache = True + + description_encoding = None + + @langhelpers.memoized_property + def supports_server_side_cursors(self): + try: + cursors = __import__("pymysql.cursors").cursors + self._sscursor = cursors.SSCursor + return True + except (ImportError, AttributeError): + return False + + @classmethod + def import_dbapi(cls): + return __import__("pymysql") + + @langhelpers.memoized_property + def _send_false_to_ping(self): + """determine if pymysql has deprecated, changed the default of, + or removed the 'reconnect' argument of connection.ping(). + + See #10492 and + https://github.com/PyMySQL/mysqlclient/discussions/651#discussioncomment-7308971 + for background. + + """ # noqa: E501 + + try: + Connection = __import__( + "pymysql.connections" + ).connections.Connection + except (ImportError, AttributeError): + return True + else: + insp = langhelpers.get_callable_argspec(Connection.ping) + try: + reconnect_arg = insp.args[1] + except IndexError: + return False + else: + return reconnect_arg == "reconnect" and ( + not insp.defaults or insp.defaults[0] is not False + ) + + def do_ping(self, dbapi_connection): + if self._send_false_to_ping: + dbapi_connection.ping(False) + else: + dbapi_connection.ping() + + return True + + def create_connect_args(self, url, _translate_args=None): + if _translate_args is None: + _translate_args = dict(username="user") + return super().create_connect_args( + url, _translate_args=_translate_args + ) + + def is_disconnect(self, e, connection, cursor): + if super().is_disconnect(e, connection, cursor): + return True + elif isinstance(e, self.dbapi.Error): + str_e = str(e).lower() + return ( + "already closed" in str_e or "connection was killed" in str_e + ) + else: + return False + + def _extract_error_code(self, exception): + if isinstance(exception.args[0], Exception): + exception = exception.args[0] + return exception.args[0] + + +dialect = MySQLDialect_pymysql diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pyodbc.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pyodbc.py new file mode 100644 index 00000000..428c8dfd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pyodbc.py @@ -0,0 +1,138 @@ +# dialects/mysql/pyodbc.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +r""" + + +.. 
dialect:: mysql+pyodbc
+    :name: PyODBC
+    :dbapi: pyodbc
+    :connectstring: mysql+pyodbc://<username>:<password>@<dsnname>
+    :url: https://pypi.org/project/pyodbc/
+
+.. note::
+
+    The PyODBC for MySQL dialect is **not tested as part of
+    SQLAlchemy's continuous integration**.
+    The recommended MySQL dialects are mysqlclient and PyMySQL.
+    However, if you want to use the mysql+pyodbc dialect and require
+    full support for ``utf8mb4`` characters (including supplementary
+    characters like emoji) be sure to use a current release of
+    MySQL Connector/ODBC and specify the "ANSI" (**not** "Unicode")
+    version of the driver in your DSN or connection string.
+
+Pass through exact pyodbc connection string::
+
+    import urllib
+    connection_string = (
+        'DRIVER=MySQL ODBC 8.0 ANSI Driver;'
+        'SERVER=localhost;'
+        'PORT=3307;'
+        'DATABASE=mydb;'
+        'UID=root;'
+        'PWD=(whatever);'
+        'charset=utf8mb4;'
+    )
+    params = urllib.parse.quote_plus(connection_string)
+    connection_uri = "mysql+pyodbc:///?odbc_connect=%s" % params
+
+"""  # noqa
+
+import re
+
+from .base import MySQLDialect
+from .base import MySQLExecutionContext
+from .types import TIME
+from ... import exc
+from ... import util
+from ...connectors.pyodbc import PyODBCConnector
+from ...sql.sqltypes import Time
+
+
+class _pyodbcTIME(TIME):
+    def result_processor(self, dialect, coltype):
+        def process(value):
+            # pyodbc returns a datetime.time object; no need to convert
+            return value
+
+        return process
+
+
+class MySQLExecutionContext_pyodbc(MySQLExecutionContext):
+    def get_lastrowid(self):
+        cursor = self.create_cursor()
+        cursor.execute("SELECT LAST_INSERT_ID()")
+        lastrowid = cursor.fetchone()[0]
+        cursor.close()
+        return lastrowid
+
+
+class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
+    supports_statement_cache = True
+    colspecs = util.update_copy(MySQLDialect.colspecs, {Time: _pyodbcTIME})
+    supports_unicode_statements = True
+    execution_ctx_cls = MySQLExecutionContext_pyodbc
+
+    pyodbc_driver_name = "MySQL"
+
+    def _detect_charset(self, connection):
+        """Sniff out the character set in use for connection results."""
+
+        # Prefer 'character_set_results' for the current connection over the
+        # value in the driver. SET NAMES or individual variable SETs will
+        # change the charset without updating the driver's view of the world.
+        #
+        # If it's decided that issuing that sort of SQL leaves you SOL, then
+        # this can prefer the driver value.
+
+        # set this to None as _fetch_setting attempts to use it (None is OK)
+        self._connection_charset = None
+        try:
+            value = self._fetch_setting(connection, "character_set_client")
+            if value:
+                return value
+        except exc.DBAPIError:
+            pass
+
+        util.warn(
+            "Could not detect the connection character set. "
+            "Assuming latin1."
+ ) + return "latin1" + + def _get_server_version_info(self, connection): + return MySQLDialect._get_server_version_info(self, connection) + + def _extract_error_code(self, exception): + m = re.compile(r"\((\d+)\)").search(str(exception.args)) + c = m.group(1) + if c: + return int(c) + else: + return None + + def on_connect(self): + super_ = super().on_connect() + + def on_connect(conn): + if super_ is not None: + super_(conn) + + # declare Unicode encoding for pyodbc as per + # https://github.com/mkleehammer/pyodbc/wiki/Unicode + pyodbc_SQL_CHAR = 1 # pyodbc.SQL_CHAR + pyodbc_SQL_WCHAR = -8 # pyodbc.SQL_WCHAR + conn.setdecoding(pyodbc_SQL_CHAR, encoding="utf-8") + conn.setdecoding(pyodbc_SQL_WCHAR, encoding="utf-8") + conn.setencoding(encoding="utf-8") + + return on_connect + + +dialect = MySQLDialect_pyodbc diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reflection.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reflection.py new file mode 100644 index 00000000..d7622c54 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reflection.py @@ -0,0 +1,677 @@ +# dialects/mysql/reflection.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +import re + +from .enumerated import ENUM +from .enumerated import SET +from .types import DATETIME +from .types import TIME +from .types import TIMESTAMP +from ... import log +from ... import types as sqltypes +from ... import util + + +class ReflectedState: + """Stores raw information about a SHOW CREATE TABLE statement.""" + + def __init__(self): + self.columns = [] + self.table_options = {} + self.table_name = None + self.keys = [] + self.fk_constraints = [] + self.ck_constraints = [] + + +@log.class_logger +class MySQLTableDefinitionParser: + """Parses the results of a SHOW CREATE TABLE statement.""" + + def __init__(self, dialect, preparer): + self.dialect = dialect + self.preparer = preparer + self._prep_regexes() + + def parse(self, show_create, charset): + state = ReflectedState() + state.charset = charset + for line in re.split(r"\r?\n", show_create): + if line.startswith(" " + self.preparer.initial_quote): + self._parse_column(line, state) + # a regular table options line + elif line.startswith(") "): + self._parse_table_options(line, state) + # an ANSI-mode table options line + elif line == ")": + pass + elif line.startswith("CREATE "): + self._parse_table_name(line, state) + elif "PARTITION" in line: + self._parse_partition_options(line, state) + # Not present in real reflection, but may be if + # loading from a file. + elif not line: + pass + else: + type_, spec = self._parse_constraints(line) + if type_ is None: + util.warn("Unknown schema content: %r" % line) + elif type_ == "key": + state.keys.append(spec) + elif type_ == "fk_constraint": + state.fk_constraints.append(spec) + elif type_ == "ck_constraint": + state.ck_constraints.append(spec) + else: + pass + return state + + def _check_view(self, sql: str) -> bool: + return bool(self._re_is_view.match(sql)) + + def _parse_constraints(self, line): + """Parse a KEY or CONSTRAINT line. 
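+
+        Returns a 2-tuple ``(type_, spec)``: ``type_`` is one of
+        ``"key"``, ``"fk_constraint"``, ``"ck_constraint"``,
+        ``"partition"``, or ``None`` when the line is not recognized;
+        for the last two, ``spec`` is the raw line itself.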
+ + :param line: A line of SHOW CREATE TABLE output + """ + + # KEY + m = self._re_key.match(line) + if m: + spec = m.groupdict() + # convert columns into name, length pairs + # NOTE: we may want to consider SHOW INDEX as the + # format of indexes in MySQL becomes more complex + spec["columns"] = self._parse_keyexprs(spec["columns"]) + if spec["version_sql"]: + m2 = self._re_key_version_sql.match(spec["version_sql"]) + if m2 and m2.groupdict()["parser"]: + spec["parser"] = m2.groupdict()["parser"] + if spec["parser"]: + spec["parser"] = self.preparer.unformat_identifiers( + spec["parser"] + )[0] + return "key", spec + + # FOREIGN KEY CONSTRAINT + m = self._re_fk_constraint.match(line) + if m: + spec = m.groupdict() + spec["table"] = self.preparer.unformat_identifiers(spec["table"]) + spec["local"] = [c[0] for c in self._parse_keyexprs(spec["local"])] + spec["foreign"] = [ + c[0] for c in self._parse_keyexprs(spec["foreign"]) + ] + return "fk_constraint", spec + + # CHECK constraint + m = self._re_ck_constraint.match(line) + if m: + spec = m.groupdict() + return "ck_constraint", spec + + # PARTITION and SUBPARTITION + m = self._re_partition.match(line) + if m: + # Punt! + return "partition", line + + # No match. + return (None, line) + + def _parse_table_name(self, line, state): + """Extract the table name. + + :param line: The first line of SHOW CREATE TABLE + """ + + regex, cleanup = self._pr_name + m = regex.match(line) + if m: + state.table_name = cleanup(m.group("name")) + + def _parse_table_options(self, line, state): + """Build a dictionary of all reflected table-level options. + + :param line: The final line of SHOW CREATE TABLE output. + """ + + options = {} + + if line and line != ")": + rest_of_line = line + for regex, cleanup in self._pr_options: + m = regex.search(rest_of_line) + if not m: + continue + directive, value = m.group("directive"), m.group("val") + if cleanup: + value = cleanup(value) + options[directive.lower()] = value + rest_of_line = regex.sub("", rest_of_line) + + for nope in ("auto_increment", "data directory", "index directory"): + options.pop(nope, None) + + for opt, val in options.items(): + state.table_options["%s_%s" % (self.dialect.name, opt)] = val + + def _parse_partition_options(self, line, state): + options = {} + new_line = line[:] + + while new_line.startswith("(") or new_line.startswith(" "): + new_line = new_line[1:] + + for regex, cleanup in self._pr_options: + m = regex.search(new_line) + if not m or "PARTITION" not in regex.pattern: + continue + + directive = m.group("directive") + directive = directive.lower() + is_subpartition = directive == "subpartition" + + if directive == "partition" or is_subpartition: + new_line = new_line.replace(") */", "") + new_line = new_line.replace(",", "") + if is_subpartition and new_line.endswith(")"): + new_line = new_line[:-1] + if self.dialect.name == "mariadb" and new_line.endswith(")"): + if ( + "MAXVALUE" in new_line + or "MINVALUE" in new_line + or "ENGINE" in new_line + ): + # final line of MariaDB partition endswith ")" + new_line = new_line[:-1] + + defs = "%s_%s_definitions" % (self.dialect.name, directive) + options[defs] = new_line + + else: + directive = directive.replace(" ", "_") + value = m.group("val") + if cleanup: + value = cleanup(value) + options[directive] = value + break + + for opt, val in options.items(): + part_def = "%s_partition_definitions" % (self.dialect.name) + subpart_def = "%s_subpartition_definitions" % (self.dialect.name) + if opt == part_def or opt == subpart_def: + # 
builds a string of definitions + if opt not in state.table_options: + state.table_options[opt] = val + else: + state.table_options[opt] = "%s, %s" % ( + state.table_options[opt], + val, + ) + else: + state.table_options["%s_%s" % (self.dialect.name, opt)] = val + + def _parse_column(self, line, state): + """Extract column details. + + Falls back to a 'minimal support' variant if full parse fails. + + :param line: Any column-bearing line from SHOW CREATE TABLE + """ + + spec = None + m = self._re_column.match(line) + if m: + spec = m.groupdict() + spec["full"] = True + else: + m = self._re_column_loose.match(line) + if m: + spec = m.groupdict() + spec["full"] = False + if not spec: + util.warn("Unknown column definition %r" % line) + return + if not spec["full"]: + util.warn("Incomplete reflection of column definition %r" % line) + + name, type_, args = spec["name"], spec["coltype"], spec["arg"] + + try: + col_type = self.dialect.ischema_names[type_] + except KeyError: + util.warn( + "Did not recognize type '%s' of column '%s'" % (type_, name) + ) + col_type = sqltypes.NullType + + # Column type positional arguments eg. varchar(32) + if args is None or args == "": + type_args = [] + elif args[0] == "'" and args[-1] == "'": + type_args = self._re_csv_str.findall(args) + else: + type_args = [int(v) for v in self._re_csv_int.findall(args)] + + # Column type keyword options + type_kw = {} + + if issubclass(col_type, (DATETIME, TIME, TIMESTAMP)): + if type_args: + type_kw["fsp"] = type_args.pop(0) + + for kw in ("unsigned", "zerofill"): + if spec.get(kw, False): + type_kw[kw] = True + for kw in ("charset", "collate"): + if spec.get(kw, False): + type_kw[kw] = spec[kw] + if issubclass(col_type, (ENUM, SET)): + type_args = _strip_values(type_args) + + if issubclass(col_type, SET) and "" in type_args: + type_kw["retrieve_as_bitwise"] = True + + type_instance = col_type(*type_args, **type_kw) + + col_kw = {} + + # NOT NULL + col_kw["nullable"] = True + # this can be "NULL" in the case of TIMESTAMP + if spec.get("notnull", False) == "NOT NULL": + col_kw["nullable"] = False + # For generated columns, the nullability is marked in a different place + if spec.get("notnull_generated", False) == "NOT NULL": + col_kw["nullable"] = False + + # AUTO_INCREMENT + if spec.get("autoincr", False): + col_kw["autoincrement"] = True + elif issubclass(col_type, sqltypes.Integer): + col_kw["autoincrement"] = False + + # DEFAULT + default = spec.get("default", None) + + if default == "NULL": + # eliminates the need to deal with this later. + default = None + + comment = spec.get("comment", None) + + if comment is not None: + comment = cleanup_text(comment) + + sqltext = spec.get("generated") + if sqltext is not None: + computed = dict(sqltext=sqltext) + persisted = spec.get("persistence") + if persisted is not None: + computed["persisted"] = persisted == "STORED" + col_kw["computed"] = computed + + col_d = dict( + name=name, type=type_instance, default=default, comment=comment + ) + col_d.update(col_kw) + state.columns.append(col_d) + + def _describe_to_create(self, table_name, columns): + """Re-format DESCRIBE output as a SHOW CREATE TABLE string. + + DESCRIBE is a much simpler reflection and is sufficient for + reflecting views for runtime use. This method formats DDL + for columns only- keys are omitted. + + :param columns: A sequence of DESCRIBE or SHOW COLUMNS 6-tuples. + SHOW FULL COLUMNS FROM rows must be rearranged for use with + this function. 
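+
+        As an illustrative sketch (the row is hypothetical): a DESCRIBE
+        tuple such as ('id', 'int(11)', 'NO', 'PRI', None,
+        'auto_increment') contributes a line of the form
+        "`id` int(11) auto_increment" to the emitted CREATE TABLE.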
+ """ + + buffer = [] + for row in columns: + (name, col_type, nullable, default, extra) = ( + row[i] for i in (0, 1, 2, 4, 5) + ) + + line = [" "] + line.append(self.preparer.quote_identifier(name)) + line.append(col_type) + if not nullable: + line.append("NOT NULL") + if default: + if "auto_increment" in default: + pass + elif col_type.startswith("timestamp") and default.startswith( + "C" + ): + line.append("DEFAULT") + line.append(default) + elif default == "NULL": + line.append("DEFAULT") + line.append(default) + else: + line.append("DEFAULT") + line.append("'%s'" % default.replace("'", "''")) + if extra: + line.append(extra) + + buffer.append(" ".join(line)) + + return "".join( + [ + ( + "CREATE TABLE %s (\n" + % self.preparer.quote_identifier(table_name) + ), + ",\n".join(buffer), + "\n) ", + ] + ) + + def _parse_keyexprs(self, identifiers): + """Unpack '"col"(2),"col" ASC'-ish strings into components.""" + + return [ + (colname, int(length) if length else None, modifiers) + for colname, length, modifiers in self._re_keyexprs.findall( + identifiers + ) + ] + + def _prep_regexes(self): + """Pre-compile regular expressions.""" + + self._re_columns = [] + self._pr_options = [] + + _final = self.preparer.final_quote + + quotes = dict( + zip( + ("iq", "fq", "esc_fq"), + [ + re.escape(s) + for s in ( + self.preparer.initial_quote, + _final, + self.preparer._escape_identifier(_final), + ) + ], + ) + ) + + self._pr_name = _pr_compile( + r"^CREATE (?:\w+ +)?TABLE +" + r"%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +\($" % quotes, + self.preparer._unescape_identifier, + ) + + self._re_is_view = _re_compile(r"^CREATE(?! TABLE)(\s.*)?\sVIEW") + + # `col`,`col2`(32),`col3`(15) DESC + # + self._re_keyexprs = _re_compile( + r"(?:" + r"(?:%(iq)s((?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)" + r"(?:\((\d+)\))?(?: +(ASC|DESC))?(?=\,|$))+" % quotes + ) + + # 'foo' or 'foo','bar' or 'fo,o','ba''a''r' + self._re_csv_str = _re_compile(r"\x27(?:\x27\x27|[^\x27])*\x27") + + # 123 or 123,456 + self._re_csv_int = _re_compile(r"\d+") + + # `colname` [type opts] + # (NOT NULL | NULL) + # DEFAULT ('value' | CURRENT_TIMESTAMP...) + # COMMENT 'comment' + # COLUMN_FORMAT (FIXED|DYNAMIC|DEFAULT) + # STORAGE (DISK|MEMORY) + self._re_column = _re_compile( + r" " + r"%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +" + r"(?P\w+)" + r"(?:\((?P(?:\d+|\d+,\d+|" + r"(?:'(?:''|[^'])*',?)+))\))?" + r"(?: +(?PUNSIGNED))?" + r"(?: +(?PZEROFILL))?" + r"(?: +CHARACTER SET +(?P[\w_]+))?" + r"(?: +COLLATE +(?P[\w_]+))?" + r"(?: +(?P(?:NOT )?NULL))?" + r"(?: +DEFAULT +(?P" + r"(?:NULL|'(?:''|[^'])*'|[\-\w\.\(\)]+" + r"(?: +ON UPDATE [\-\w\.\(\)]+)?)" + r"))?" + r"(?: +(?:GENERATED ALWAYS)? ?AS +(?P\(" + r".*\))? ?(?PVIRTUAL|STORED)?" + r"(?: +(?P(?:NOT )?NULL))?" + r")?" + r"(?: +(?PAUTO_INCREMENT))?" + r"(?: +COMMENT +'(?P(?:''|[^'])*)')?" + r"(?: +COLUMN_FORMAT +(?P\w+))?" + r"(?: +STORAGE +(?P\w+))?" + r"(?: +(?P.*))?" + r",?$" % quotes + ) + + # Fallback, try to parse as little as possible + self._re_column_loose = _re_compile( + r" " + r"%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +" + r"(?P\w+)" + r"(?:\((?P(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?" + r".*?(?P(?:NOT )NULL)?" % quotes + ) + + # (PRIMARY|UNIQUE|FULLTEXT|SPATIAL) INDEX `name` (USING (BTREE|HASH))? + # (`col` (ASC|DESC)?, `col` (ASC|DESC)?) + # KEY_BLOCK_SIZE size | WITH PARSER name /*!50100 WITH PARSER name */ + self._re_key = _re_compile( + r" " + r"(?:(?P\S+) )?KEY" + r"(?: +%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)?" + r"(?: +USING +(?P\S+))?" 
+ r" +\((?P.+?)\)" + r"(?: +USING +(?P\S+))?" + r"(?: +KEY_BLOCK_SIZE *[ =]? *(?P\S+))?" + r"(?: +WITH PARSER +(?P\S+))?" + r"(?: +COMMENT +(?P(\x27\x27|\x27([^\x27])*?\x27)+))?" + r"(?: +/\*(?P.+)\*/ *)?" + r",?$" % quotes + ) + + # https://forums.mysql.com/read.php?20,567102,567111#msg-567111 + # It means if the MySQL version >= \d+, execute what's in the comment + self._re_key_version_sql = _re_compile( + r"\!\d+ " r"(?: *WITH PARSER +(?P\S+) *)?" + ) + + # CONSTRAINT `name` FOREIGN KEY (`local_col`) + # REFERENCES `remote` (`remote_col`) + # MATCH FULL | MATCH PARTIAL | MATCH SIMPLE + # ON DELETE CASCADE ON UPDATE RESTRICT + # + # unique constraints come back as KEYs + kw = quotes.copy() + kw["on"] = "RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT" + self._re_fk_constraint = _re_compile( + r" " + r"CONSTRAINT +" + r"%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +" + r"FOREIGN KEY +" + r"\((?P[^\)]+?)\) REFERENCES +" + r"(?P%(iq)s[^%(fq)s]+%(fq)s" + r"(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +" + r"\((?P(?:%(iq)s[^%(fq)s]+%(fq)s(?: *, *)?)+)\)" + r"(?: +(?PMATCH \w+))?" + r"(?: +ON DELETE (?P%(on)s))?" + r"(?: +ON UPDATE (?P%(on)s))?" % kw + ) + + # CONSTRAINT `CONSTRAINT_1` CHECK (`x` > 5)' + # testing on MariaDB 10.2 shows that the CHECK constraint + # is returned on a line by itself, so to match without worrying + # about parenthesis in the expression we go to the end of the line + self._re_ck_constraint = _re_compile( + r" " + r"CONSTRAINT +" + r"%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +" + r"CHECK +" + r"\((?P.+)\),?" % kw + ) + + # PARTITION + # + # punt! + self._re_partition = _re_compile(r"(?:.*)(?:SUB)?PARTITION(?:.*)") + + # Table-level options (COLLATE, ENGINE, etc.) + # Do the string options first, since they have quoted + # strings we need to get rid of. + for option in _options_of_type_string: + self._add_option_string(option) + + for option in ( + "ENGINE", + "TYPE", + "AUTO_INCREMENT", + "AVG_ROW_LENGTH", + "CHARACTER SET", + "DEFAULT CHARSET", + "CHECKSUM", + "COLLATE", + "DELAY_KEY_WRITE", + "INSERT_METHOD", + "MAX_ROWS", + "MIN_ROWS", + "PACK_KEYS", + "ROW_FORMAT", + "KEY_BLOCK_SIZE", + "STATS_SAMPLE_PAGES", + ): + self._add_option_word(option) + + for option in ( + "PARTITION BY", + "SUBPARTITION BY", + "PARTITIONS", + "SUBPARTITIONS", + "PARTITION", + "SUBPARTITION", + ): + self._add_partition_option_word(option) + + self._add_option_regex("UNION", r"\([^\)]+\)") + self._add_option_regex("TABLESPACE", r".*? 
STORAGE DISK") + self._add_option_regex( + "RAID_TYPE", + r"\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+", + ) + + _optional_equals = r"(?:\s*(?:=\s*)|\s+)" + + def _add_option_string(self, directive): + regex = r"(?P%s)%s" r"'(?P(?:[^']|'')*?)'(?!')" % ( + re.escape(directive), + self._optional_equals, + ) + self._pr_options.append(_pr_compile(regex, cleanup_text)) + + def _add_option_word(self, directive): + regex = r"(?P%s)%s" r"(?P\w+)" % ( + re.escape(directive), + self._optional_equals, + ) + self._pr_options.append(_pr_compile(regex)) + + def _add_partition_option_word(self, directive): + if directive == "PARTITION BY" or directive == "SUBPARTITION BY": + regex = r"(?%s)%s" r"(?P\w+.*)" % ( + re.escape(directive), + self._optional_equals, + ) + elif directive == "SUBPARTITIONS" or directive == "PARTITIONS": + regex = r"(?%s)%s" r"(?P\d+)" % ( + re.escape(directive), + self._optional_equals, + ) + else: + regex = r"(?%s)(?!\S)" % (re.escape(directive),) + self._pr_options.append(_pr_compile(regex)) + + def _add_option_regex(self, directive, regex): + regex = r"(?P%s)%s" r"(?P%s)" % ( + re.escape(directive), + self._optional_equals, + regex, + ) + self._pr_options.append(_pr_compile(regex)) + + +_options_of_type_string = ( + "COMMENT", + "DATA DIRECTORY", + "INDEX DIRECTORY", + "PASSWORD", + "CONNECTION", +) + + +def _pr_compile(regex, cleanup=None): + """Prepare a 2-tuple of compiled regex and callable.""" + + return (_re_compile(regex), cleanup) + + +def _re_compile(regex): + """Compile a string to regex, I and UNICODE.""" + + return re.compile(regex, re.I | re.UNICODE) + + +def _strip_values(values): + "Strip reflected values quotes" + strip_values = [] + for a in values: + if a[0:1] == '"' or a[0:1] == "'": + # strip enclosing quotes and unquote interior + a = a[1:-1].replace(a[0] * 2, a[0]) + strip_values.append(a) + return strip_values + + +def cleanup_text(raw_text: str) -> str: + if "\\" in raw_text: + raw_text = re.sub( + _control_char_regexp, lambda s: _control_char_map[s[0]], raw_text + ) + return raw_text.replace("''", "'") + + +_control_char_map = { + "\\\\": "\\", + "\\0": "\0", + "\\a": "\a", + "\\b": "\b", + "\\t": "\t", + "\\n": "\n", + "\\v": "\v", + "\\f": "\f", + "\\r": "\r", + # '\\e':'\e', +} +_control_char_regexp = re.compile( + "|".join(re.escape(k) for k in _control_char_map) +) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reserved_words.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reserved_words.py new file mode 100644 index 00000000..04764c17 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reserved_words.py @@ -0,0 +1,571 @@ +# dialects/mysql/reserved_words.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +# generated using: +# https://gist.github.com/kkirsche/4f31f2153ed7a3248be1ec44ca6ddbc9 +# +# https://mariadb.com/kb/en/reserved-words/ +# includes: Reserved Words, Oracle Mode (separate set unioned) +# excludes: Exceptions, Function Names +# mypy: ignore-errors + +RESERVED_WORDS_MARIADB = { + "accessible", + "add", + "all", + "alter", + "analyze", + "and", + "as", + "asc", + "asensitive", + "before", + "between", + "bigint", + "binary", + "blob", + "both", + "by", + "call", + "cascade", + "case", + "change", + "char", + "character", + "check", + "collate", + "column", + "condition", + "constraint", + 
"continue", + "convert", + "create", + "cross", + "current_date", + "current_role", + "current_time", + "current_timestamp", + "current_user", + "cursor", + "database", + "databases", + "day_hour", + "day_microsecond", + "day_minute", + "day_second", + "dec", + "decimal", + "declare", + "default", + "delayed", + "delete", + "desc", + "describe", + "deterministic", + "distinct", + "distinctrow", + "div", + "do_domain_ids", + "double", + "drop", + "dual", + "each", + "else", + "elseif", + "enclosed", + "escaped", + "except", + "exists", + "exit", + "explain", + "false", + "fetch", + "float", + "float4", + "float8", + "for", + "force", + "foreign", + "from", + "fulltext", + "general", + "grant", + "group", + "having", + "high_priority", + "hour_microsecond", + "hour_minute", + "hour_second", + "if", + "ignore", + "ignore_domain_ids", + "ignore_server_ids", + "in", + "index", + "infile", + "inner", + "inout", + "insensitive", + "insert", + "int", + "int1", + "int2", + "int3", + "int4", + "int8", + "integer", + "intersect", + "interval", + "into", + "is", + "iterate", + "join", + "key", + "keys", + "kill", + "leading", + "leave", + "left", + "like", + "limit", + "linear", + "lines", + "load", + "localtime", + "localtimestamp", + "lock", + "long", + "longblob", + "longtext", + "loop", + "low_priority", + "master_heartbeat_period", + "master_ssl_verify_server_cert", + "match", + "maxvalue", + "mediumblob", + "mediumint", + "mediumtext", + "middleint", + "minute_microsecond", + "minute_second", + "mod", + "modifies", + "natural", + "no_write_to_binlog", + "not", + "null", + "numeric", + "offset", + "on", + "optimize", + "option", + "optionally", + "or", + "order", + "out", + "outer", + "outfile", + "over", + "page_checksum", + "parse_vcol_expr", + "partition", + "position", + "precision", + "primary", + "procedure", + "purge", + "range", + "read", + "read_write", + "reads", + "real", + "recursive", + "ref_system_id", + "references", + "regexp", + "release", + "rename", + "repeat", + "replace", + "require", + "resignal", + "restrict", + "return", + "returning", + "revoke", + "right", + "rlike", + "rows", + "row_number", + "schema", + "schemas", + "second_microsecond", + "select", + "sensitive", + "separator", + "set", + "show", + "signal", + "slow", + "smallint", + "spatial", + "specific", + "sql", + "sql_big_result", + "sql_calc_found_rows", + "sql_small_result", + "sqlexception", + "sqlstate", + "sqlwarning", + "ssl", + "starting", + "stats_auto_recalc", + "stats_persistent", + "stats_sample_pages", + "straight_join", + "table", + "terminated", + "then", + "tinyblob", + "tinyint", + "tinytext", + "to", + "trailing", + "trigger", + "true", + "undo", + "union", + "unique", + "unlock", + "unsigned", + "update", + "usage", + "use", + "using", + "utc_date", + "utc_time", + "utc_timestamp", + "values", + "varbinary", + "varchar", + "varcharacter", + "varying", + "when", + "where", + "while", + "window", + "with", + "write", + "xor", + "year_month", + "zerofill", +}.union( + { + "body", + "elsif", + "goto", + "history", + "others", + "package", + "period", + "raise", + "rowtype", + "system", + "system_time", + "versioning", + "without", + } +) + +# https://dev.mysql.com/doc/refman/8.3/en/keywords.html +# https://dev.mysql.com/doc/refman/8.0/en/keywords.html +# https://dev.mysql.com/doc/refman/5.7/en/keywords.html +# https://dev.mysql.com/doc/refman/5.6/en/keywords.html +# includes: MySQL x.0 Keywords and Reserved Words +# excludes: MySQL x.0 New Keywords and Reserved Words, +# MySQL x.0 Removed Keywords 
and Reserved Words +RESERVED_WORDS_MYSQL = { + "accessible", + "add", + "admin", + "all", + "alter", + "analyze", + "and", + "array", + "as", + "asc", + "asensitive", + "before", + "between", + "bigint", + "binary", + "blob", + "both", + "by", + "call", + "cascade", + "case", + "change", + "char", + "character", + "check", + "collate", + "column", + "condition", + "constraint", + "continue", + "convert", + "create", + "cross", + "cube", + "cume_dist", + "current_date", + "current_time", + "current_timestamp", + "current_user", + "cursor", + "database", + "databases", + "day_hour", + "day_microsecond", + "day_minute", + "day_second", + "dec", + "decimal", + "declare", + "default", + "delayed", + "delete", + "dense_rank", + "desc", + "describe", + "deterministic", + "distinct", + "distinctrow", + "div", + "double", + "drop", + "dual", + "each", + "else", + "elseif", + "empty", + "enclosed", + "escaped", + "except", + "exists", + "exit", + "explain", + "false", + "fetch", + "first_value", + "float", + "float4", + "float8", + "for", + "force", + "foreign", + "from", + "fulltext", + "function", + "general", + "generated", + "get", + "get_master_public_key", + "grant", + "group", + "grouping", + "groups", + "having", + "high_priority", + "hour_microsecond", + "hour_minute", + "hour_second", + "if", + "ignore", + "ignore_server_ids", + "in", + "index", + "infile", + "inner", + "inout", + "insensitive", + "insert", + "int", + "int1", + "int2", + "int3", + "int4", + "int8", + "integer", + "intersect", + "interval", + "into", + "io_after_gtids", + "io_before_gtids", + "is", + "iterate", + "join", + "json_table", + "key", + "keys", + "kill", + "lag", + "last_value", + "lateral", + "lead", + "leading", + "leave", + "left", + "like", + "limit", + "linear", + "lines", + "load", + "localtime", + "localtimestamp", + "lock", + "long", + "longblob", + "longtext", + "loop", + "low_priority", + "master_bind", + "master_heartbeat_period", + "master_ssl_verify_server_cert", + "match", + "maxvalue", + "mediumblob", + "mediumint", + "mediumtext", + "member", + "middleint", + "minute_microsecond", + "minute_second", + "mod", + "modifies", + "natural", + "no_write_to_binlog", + "not", + "nth_value", + "ntile", + "null", + "numeric", + "of", + "on", + "optimize", + "optimizer_costs", + "option", + "optionally", + "or", + "order", + "out", + "outer", + "outfile", + "over", + "parse_gcol_expr", + "parallel", + "partition", + "percent_rank", + "persist", + "persist_only", + "precision", + "primary", + "procedure", + "purge", + "qualify", + "range", + "rank", + "read", + "read_write", + "reads", + "real", + "recursive", + "references", + "regexp", + "release", + "rename", + "repeat", + "replace", + "require", + "resignal", + "restrict", + "return", + "revoke", + "right", + "rlike", + "role", + "row", + "row_number", + "rows", + "schema", + "schemas", + "second_microsecond", + "select", + "sensitive", + "separator", + "set", + "show", + "signal", + "slow", + "smallint", + "spatial", + "specific", + "sql", + "sql_after_gtids", + "sql_before_gtids", + "sql_big_result", + "sql_calc_found_rows", + "sql_small_result", + "sqlexception", + "sqlstate", + "sqlwarning", + "ssl", + "starting", + "stored", + "straight_join", + "system", + "table", + "terminated", + "then", + "tinyblob", + "tinyint", + "tinytext", + "to", + "trailing", + "trigger", + "true", + "undo", + "union", + "unique", + "unlock", + "unsigned", + "update", + "usage", + "use", + "using", + "utc_date", + "utc_time", + "utc_timestamp", + "values", + "varbinary", + 
"varchar", + "varcharacter", + "varying", + "virtual", + "when", + "where", + "while", + "window", + "with", + "write", + "xor", + "year_month", + "zerofill", +} diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/types.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/types.py new file mode 100644 index 00000000..734f6ae3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/types.py @@ -0,0 +1,774 @@ +# dialects/mysql/types.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +import datetime + +from ... import exc +from ... import util +from ...sql import sqltypes + + +class _NumericType: + """Base for MySQL numeric types. + + This is the base both for NUMERIC as well as INTEGER, hence + it's a mixin. + + """ + + def __init__(self, unsigned=False, zerofill=False, **kw): + self.unsigned = unsigned + self.zerofill = zerofill + super().__init__(**kw) + + def __repr__(self): + return util.generic_repr( + self, to_inspect=[_NumericType, sqltypes.Numeric] + ) + + +class _FloatType(_NumericType, sqltypes.Float): + def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + if isinstance(self, (REAL, DOUBLE)) and ( + (precision is None and scale is not None) + or (precision is not None and scale is None) + ): + raise exc.ArgumentError( + "You must specify both precision and scale or omit " + "both altogether." + ) + super().__init__(precision=precision, asdecimal=asdecimal, **kw) + self.scale = scale + + def __repr__(self): + return util.generic_repr( + self, to_inspect=[_FloatType, _NumericType, sqltypes.Float] + ) + + +class _IntegerType(_NumericType, sqltypes.Integer): + def __init__(self, display_width=None, **kw): + self.display_width = display_width + super().__init__(**kw) + + def __repr__(self): + return util.generic_repr( + self, to_inspect=[_IntegerType, _NumericType, sqltypes.Integer] + ) + + +class _StringType(sqltypes.String): + """Base for MySQL string types.""" + + def __init__( + self, + charset=None, + collation=None, + ascii=False, # noqa + binary=False, + unicode=False, + national=False, + **kw, + ): + self.charset = charset + + # allow collate= or collation= + kw.setdefault("collation", kw.pop("collate", collation)) + + self.ascii = ascii + self.unicode = unicode + self.binary = binary + self.national = national + super().__init__(**kw) + + def __repr__(self): + return util.generic_repr( + self, to_inspect=[_StringType, sqltypes.String] + ) + + +class _MatchType(sqltypes.Float, sqltypes.MatchType): + def __init__(self, **kw): + # TODO: float arguments? + sqltypes.Float.__init__(self) + sqltypes.MatchType.__init__(self) + + +class NUMERIC(_NumericType, sqltypes.NUMERIC): + """MySQL NUMERIC type.""" + + __visit_name__ = "NUMERIC" + + def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + """Construct a NUMERIC. + + :param precision: Total digits in this number. If scale and precision + are both None, values are stored to limits allowed by the server. + + :param scale: The number of digits after the decimal point. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. 
+ + """ + super().__init__( + precision=precision, scale=scale, asdecimal=asdecimal, **kw + ) + + +class DECIMAL(_NumericType, sqltypes.DECIMAL): + """MySQL DECIMAL type.""" + + __visit_name__ = "DECIMAL" + + def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + """Construct a DECIMAL. + + :param precision: Total digits in this number. If scale and precision + are both None, values are stored to limits allowed by the server. + + :param scale: The number of digits after the decimal point. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super().__init__( + precision=precision, scale=scale, asdecimal=asdecimal, **kw + ) + + +class DOUBLE(_FloatType, sqltypes.DOUBLE): + """MySQL DOUBLE type.""" + + __visit_name__ = "DOUBLE" + + def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + """Construct a DOUBLE. + + .. note:: + + The :class:`.DOUBLE` type by default converts from float + to Decimal, using a truncation that defaults to 10 digits. + Specify either ``scale=n`` or ``decimal_return_scale=n`` in order + to change this scale, or ``asdecimal=False`` to return values + directly as Python floating points. + + :param precision: Total digits in this number. If scale and precision + are both None, values are stored to limits allowed by the server. + + :param scale: The number of digits after the decimal point. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super().__init__( + precision=precision, scale=scale, asdecimal=asdecimal, **kw + ) + + +class REAL(_FloatType, sqltypes.REAL): + """MySQL REAL type.""" + + __visit_name__ = "REAL" + + def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + """Construct a REAL. + + .. note:: + + The :class:`.REAL` type by default converts from float + to Decimal, using a truncation that defaults to 10 digits. + Specify either ``scale=n`` or ``decimal_return_scale=n`` in order + to change this scale, or ``asdecimal=False`` to return values + directly as Python floating points. + + :param precision: Total digits in this number. If scale and precision + are both None, values are stored to limits allowed by the server. + + :param scale: The number of digits after the decimal point. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super().__init__( + precision=precision, scale=scale, asdecimal=asdecimal, **kw + ) + + +class FLOAT(_FloatType, sqltypes.FLOAT): + """MySQL FLOAT type.""" + + __visit_name__ = "FLOAT" + + def __init__(self, precision=None, scale=None, asdecimal=False, **kw): + """Construct a FLOAT. + + :param precision: Total digits in this number. If scale and precision + are both None, values are stored to limits allowed by the server. + + :param scale: The number of digits after the decimal point. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. 
Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super().__init__( + precision=precision, scale=scale, asdecimal=asdecimal, **kw + ) + + def bind_processor(self, dialect): + return None + + +class INTEGER(_IntegerType, sqltypes.INTEGER): + """MySQL INTEGER type.""" + + __visit_name__ = "INTEGER" + + def __init__(self, display_width=None, **kw): + """Construct an INTEGER. + + :param display_width: Optional, maximum display width for this number. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super().__init__(display_width=display_width, **kw) + + +class BIGINT(_IntegerType, sqltypes.BIGINT): + """MySQL BIGINTEGER type.""" + + __visit_name__ = "BIGINT" + + def __init__(self, display_width=None, **kw): + """Construct a BIGINTEGER. + + :param display_width: Optional, maximum display width for this number. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super().__init__(display_width=display_width, **kw) + + +class MEDIUMINT(_IntegerType): + """MySQL MEDIUMINTEGER type.""" + + __visit_name__ = "MEDIUMINT" + + def __init__(self, display_width=None, **kw): + """Construct a MEDIUMINTEGER + + :param display_width: Optional, maximum display width for this number. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super().__init__(display_width=display_width, **kw) + + +class TINYINT(_IntegerType): + """MySQL TINYINT type.""" + + __visit_name__ = "TINYINT" + + def __init__(self, display_width=None, **kw): + """Construct a TINYINT. + + :param display_width: Optional, maximum display width for this number. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super().__init__(display_width=display_width, **kw) + + +class SMALLINT(_IntegerType, sqltypes.SMALLINT): + """MySQL SMALLINTEGER type.""" + + __visit_name__ = "SMALLINT" + + def __init__(self, display_width=None, **kw): + """Construct a SMALLINTEGER. + + :param display_width: Optional, maximum display width for this number. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super().__init__(display_width=display_width, **kw) + + +class BIT(sqltypes.TypeEngine): + """MySQL BIT type. + + This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater + for MyISAM, MEMORY, InnoDB and BDB. For older versions, use a + MSTinyInteger() type. + + """ + + __visit_name__ = "BIT" + + def __init__(self, length=None): + """Construct a BIT. + + :param length: Optional, number of bits. 
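# --- editorial sketch: not part of the vendored file ----------------------
# Shows how the display_width / unsigned / zerofill keywords documented in
# the integer types above end up in the rendered DDL.
from sqlalchemy.dialects import mysql

int_type = mysql.INTEGER(display_width=11, unsigned=True, zerofill=True)
print(int_type.compile(dialect=mysql.dialect()))
# INTEGER(11) UNSIGNED ZEROFILL
# ---------------------------------------------------------------------------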
+ + """ + self.length = length + + def result_processor(self, dialect, coltype): + """Convert a MySQL's 64 bit, variable length binary string to a long. + + TODO: this is MySQL-db, pyodbc specific. OurSQL and mysqlconnector + already do this, so this logic should be moved to those dialects. + + """ + + def process(value): + if value is not None: + v = 0 + for i in value: + if not isinstance(i, int): + i = ord(i) # convert byte to int on Python 2 + v = v << 8 | i + return v + return value + + return process + + +class TIME(sqltypes.TIME): + """MySQL TIME type.""" + + __visit_name__ = "TIME" + + def __init__(self, timezone=False, fsp=None): + """Construct a MySQL TIME type. + + :param timezone: not used by the MySQL dialect. + :param fsp: fractional seconds precision value. + MySQL 5.6 supports storage of fractional seconds; + this parameter will be used when emitting DDL + for the TIME type. + + .. note:: + + DBAPI driver support for fractional seconds may + be limited; current support includes + MySQL Connector/Python. + + """ + super().__init__(timezone=timezone) + self.fsp = fsp + + def result_processor(self, dialect, coltype): + time = datetime.time + + def process(value): + # convert from a timedelta value + if value is not None: + microseconds = value.microseconds + seconds = value.seconds + minutes = seconds // 60 + return time( + minutes // 60, + minutes % 60, + seconds - minutes * 60, + microsecond=microseconds, + ) + else: + return None + + return process + + +class TIMESTAMP(sqltypes.TIMESTAMP): + """MySQL TIMESTAMP type.""" + + __visit_name__ = "TIMESTAMP" + + def __init__(self, timezone=False, fsp=None): + """Construct a MySQL TIMESTAMP type. + + :param timezone: not used by the MySQL dialect. + :param fsp: fractional seconds precision value. + MySQL 5.6.4 supports storage of fractional seconds; + this parameter will be used when emitting DDL + for the TIMESTAMP type. + + .. note:: + + DBAPI driver support for fractional seconds may + be limited; current support includes + MySQL Connector/Python. + + """ + super().__init__(timezone=timezone) + self.fsp = fsp + + +class DATETIME(sqltypes.DATETIME): + """MySQL DATETIME type.""" + + __visit_name__ = "DATETIME" + + def __init__(self, timezone=False, fsp=None): + """Construct a MySQL DATETIME type. + + :param timezone: not used by the MySQL dialect. + :param fsp: fractional seconds precision value. + MySQL 5.6.4 supports storage of fractional seconds; + this parameter will be used when emitting DDL + for the DATETIME type. + + .. note:: + + DBAPI driver support for fractional seconds may + be limited; current support includes + MySQL Connector/Python. + + """ + super().__init__(timezone=timezone) + self.fsp = fsp + + +class YEAR(sqltypes.TypeEngine): + """MySQL YEAR type, for single byte storage of years 1901-2155.""" + + __visit_name__ = "YEAR" + + def __init__(self, display_width=None): + self.display_width = display_width + + +class TEXT(_StringType, sqltypes.TEXT): + """MySQL TEXT type, for character storage encoded up to 2^16 bytes.""" + + __visit_name__ = "TEXT" + + def __init__(self, length=None, **kw): + """Construct a TEXT. + + :param length: Optional, if provided the server may optimize storage + by substituting the smallest TEXT type sufficient to store + ``length`` bytes of characters. + + :param charset: Optional, a column-level character set for this string + value. Takes precedence to 'ascii' or 'unicode' short-hand. + + :param collation: Optional, a column-level collation for this string + value. 
Takes precedence to 'binary' short-hand. + + :param ascii: Defaults to False: short-hand for the ``latin1`` + character set, generates ASCII in schema. + + :param unicode: Defaults to False: short-hand for the ``ucs2`` + character set, generates UNICODE in schema. + + :param national: Optional. If true, use the server's configured + national character set. + + :param binary: Defaults to False: short-hand, pick the binary + collation type that matches the column's character set. Generates + BINARY in schema. This does not affect the type of data stored, + only the collation of character data. + + """ + super().__init__(length=length, **kw) + + +class TINYTEXT(_StringType): + """MySQL TINYTEXT type, for character storage encoded up to 2^8 bytes.""" + + __visit_name__ = "TINYTEXT" + + def __init__(self, **kwargs): + """Construct a TINYTEXT. + + :param charset: Optional, a column-level character set for this string + value. Takes precedence to 'ascii' or 'unicode' short-hand. + + :param collation: Optional, a column-level collation for this string + value. Takes precedence to 'binary' short-hand. + + :param ascii: Defaults to False: short-hand for the ``latin1`` + character set, generates ASCII in schema. + + :param unicode: Defaults to False: short-hand for the ``ucs2`` + character set, generates UNICODE in schema. + + :param national: Optional. If true, use the server's configured + national character set. + + :param binary: Defaults to False: short-hand, pick the binary + collation type that matches the column's character set. Generates + BINARY in schema. This does not affect the type of data stored, + only the collation of character data. + + """ + super().__init__(**kwargs) + + +class MEDIUMTEXT(_StringType): + """MySQL MEDIUMTEXT type, for character storage encoded up + to 2^24 bytes.""" + + __visit_name__ = "MEDIUMTEXT" + + def __init__(self, **kwargs): + """Construct a MEDIUMTEXT. + + :param charset: Optional, a column-level character set for this string + value. Takes precedence to 'ascii' or 'unicode' short-hand. + + :param collation: Optional, a column-level collation for this string + value. Takes precedence to 'binary' short-hand. + + :param ascii: Defaults to False: short-hand for the ``latin1`` + character set, generates ASCII in schema. + + :param unicode: Defaults to False: short-hand for the ``ucs2`` + character set, generates UNICODE in schema. + + :param national: Optional. If true, use the server's configured + national character set. + + :param binary: Defaults to False: short-hand, pick the binary + collation type that matches the column's character set. Generates + BINARY in schema. This does not affect the type of data stored, + only the collation of character data. + + """ + super().__init__(**kwargs) + + +class LONGTEXT(_StringType): + """MySQL LONGTEXT type, for character storage encoded up to 2^32 bytes.""" + + __visit_name__ = "LONGTEXT" + + def __init__(self, **kwargs): + """Construct a LONGTEXT. + + :param charset: Optional, a column-level character set for this string + value. Takes precedence to 'ascii' or 'unicode' short-hand. + + :param collation: Optional, a column-level collation for this string + value. Takes precedence to 'binary' short-hand. + + :param ascii: Defaults to False: short-hand for the ``latin1`` + character set, generates ASCII in schema. + + :param unicode: Defaults to False: short-hand for the ``ucs2`` + character set, generates UNICODE in schema. + + :param national: Optional. 
If true, use the server's configured + national character set. + + :param binary: Defaults to False: short-hand, pick the binary + collation type that matches the column's character set. Generates + BINARY in schema. This does not affect the type of data stored, + only the collation of character data. + + """ + super().__init__(**kwargs) + + +class VARCHAR(_StringType, sqltypes.VARCHAR): + """MySQL VARCHAR type, for variable-length character data.""" + + __visit_name__ = "VARCHAR" + + def __init__(self, length=None, **kwargs): + """Construct a VARCHAR. + + :param charset: Optional, a column-level character set for this string + value. Takes precedence to 'ascii' or 'unicode' short-hand. + + :param collation: Optional, a column-level collation for this string + value. Takes precedence to 'binary' short-hand. + + :param ascii: Defaults to False: short-hand for the ``latin1`` + character set, generates ASCII in schema. + + :param unicode: Defaults to False: short-hand for the ``ucs2`` + character set, generates UNICODE in schema. + + :param national: Optional. If true, use the server's configured + national character set. + + :param binary: Defaults to False: short-hand, pick the binary + collation type that matches the column's character set. Generates + BINARY in schema. This does not affect the type of data stored, + only the collation of character data. + + """ + super().__init__(length=length, **kwargs) + + +class CHAR(_StringType, sqltypes.CHAR): + """MySQL CHAR type, for fixed-length character data.""" + + __visit_name__ = "CHAR" + + def __init__(self, length=None, **kwargs): + """Construct a CHAR. + + :param length: Maximum data length, in characters. + + :param binary: Optional, use the default binary collation for the + national character set. This does not affect the type of data + stored, use a BINARY type for binary data. + + :param collation: Optional, request a particular collation. Must be + compatible with the national character set. + + """ + super().__init__(length=length, **kwargs) + + @classmethod + def _adapt_string_for_cast(cls, type_): + # copy the given string type into a CHAR + # for the purposes of rendering a CAST expression + type_ = sqltypes.to_instance(type_) + if isinstance(type_, sqltypes.CHAR): + return type_ + elif isinstance(type_, _StringType): + return CHAR( + length=type_.length, + charset=type_.charset, + collation=type_.collation, + ascii=type_.ascii, + binary=type_.binary, + unicode=type_.unicode, + national=False, # not supported in CAST + ) + else: + return CHAR(length=type_.length) + + +class NVARCHAR(_StringType, sqltypes.NVARCHAR): + """MySQL NVARCHAR type. + + For variable-length character data in the server's configured national + character set. + """ + + __visit_name__ = "NVARCHAR" + + def __init__(self, length=None, **kwargs): + """Construct an NVARCHAR. + + :param length: Maximum data length, in characters. + + :param binary: Optional, use the default binary collation for the + national character set. This does not affect the type of data + stored, use a BINARY type for binary data. + + :param collation: Optional, request a particular collation. Must be + compatible with the national character set. + + """ + kwargs["national"] = True + super().__init__(length=length, **kwargs) + + +class NCHAR(_StringType, sqltypes.NCHAR): + """MySQL NCHAR type. + + For fixed-length character data in the server's configured national + character set. + """ + + __visit_name__ = "NCHAR" + + def __init__(self, length=None, **kwargs): + """Construct an NCHAR. 
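# --- editorial sketch: not part of the vendored file ----------------------
# The charset / collation keywords documented in the string types above
# become CHARACTER SET / COLLATE clauses in the rendered DDL.
from sqlalchemy.dialects import mysql

str_type = mysql.VARCHAR(100, charset="utf8mb4", collation="utf8mb4_bin")
print(str_type.compile(dialect=mysql.dialect()))
# VARCHAR(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin
# ---------------------------------------------------------------------------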
+ + :param length: Maximum data length, in characters. + + :param binary: Optional, use the default binary collation for the + national character set. This does not affect the type of data + stored, use a BINARY type for binary data. + + :param collation: Optional, request a particular collation. Must be + compatible with the national character set. + + """ + kwargs["national"] = True + super().__init__(length=length, **kwargs) + + +class TINYBLOB(sqltypes._Binary): + """MySQL TINYBLOB type, for binary data up to 2^8 bytes.""" + + __visit_name__ = "TINYBLOB" + + +class MEDIUMBLOB(sqltypes._Binary): + """MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes.""" + + __visit_name__ = "MEDIUMBLOB" + + +class LONGBLOB(sqltypes._Binary): + """MySQL LONGBLOB type, for binary data up to 2^32 bytes.""" + + __visit_name__ = "LONGBLOB" diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__init__.py new file mode 100644 index 00000000..d855122e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__init__.py @@ -0,0 +1,67 @@ +# dialects/oracle/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors +from types import ModuleType + +from . import base # noqa +from . import cx_oracle # noqa +from . import oracledb # noqa +from .base import BFILE +from .base import BINARY_DOUBLE +from .base import BINARY_FLOAT +from .base import BLOB +from .base import CHAR +from .base import CLOB +from .base import DATE +from .base import DOUBLE_PRECISION +from .base import FLOAT +from .base import INTERVAL +from .base import LONG +from .base import NCHAR +from .base import NCLOB +from .base import NUMBER +from .base import NVARCHAR +from .base import NVARCHAR2 +from .base import RAW +from .base import REAL +from .base import ROWID +from .base import TIMESTAMP +from .base import VARCHAR +from .base import VARCHAR2 + +# Alias oracledb also as oracledb_async +oracledb_async = type( + "oracledb_async", (ModuleType,), {"dialect": oracledb.dialect_async} +) + +base.dialect = dialect = cx_oracle.dialect + +__all__ = ( + "VARCHAR", + "NVARCHAR", + "CHAR", + "NCHAR", + "DATE", + "NUMBER", + "BLOB", + "BFILE", + "CLOB", + "NCLOB", + "TIMESTAMP", + "RAW", + "FLOAT", + "DOUBLE_PRECISION", + "BINARY_DOUBLE", + "BINARY_FLOAT", + "LONG", + "dialect", + "INTERVAL", + "VARCHAR2", + "NVARCHAR2", + "ROWID", + "REAL", +) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..21db7a11 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/base.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/base.cpython-312.pyc new file mode 100644 index 00000000..9d502955 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/base.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-312.pyc new file mode 100644 index 00000000..25246622 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/dictionary.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/dictionary.cpython-312.pyc new file mode 100644 index 00000000..52d7171d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/dictionary.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/oracledb.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/oracledb.cpython-312.pyc new file mode 100644 index 00000000..cae66516 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/oracledb.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/provision.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/provision.cpython-312.pyc new file mode 100644 index 00000000..659de4b9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/provision.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/types.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/types.cpython-312.pyc new file mode 100644 index 00000000..651cb31e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/__pycache__/types.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/base.py new file mode 100644 index 00000000..9dd3acf7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/base.py @@ -0,0 +1,3272 @@ +# dialects/oracle/base.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +r""" +.. dialect:: oracle + :name: Oracle + :full_support: 18c + :normal_support: 11+ + :best_effort: 9+ + + +Auto Increment Behavior +----------------------- + +SQLAlchemy Table objects which include integer primary keys are usually +assumed to have "autoincrementing" behavior, meaning they can generate their +own primary key values upon INSERT. For use within Oracle, two options are +available, which are the use of IDENTITY columns (Oracle 12 and above only) +or the association of a SEQUENCE with the column. + +Specifying GENERATED AS IDENTITY (Oracle 12 and above) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Starting from version 12 Oracle can make use of identity columns using +the :class:`_sql.Identity` to specify the autoincrementing behavior:: + + t = Table('mytable', metadata, + Column('id', Integer, Identity(start=3), primary_key=True), + Column(...), ... + ) + +The CREATE TABLE for the above :class:`_schema.Table` object would be: + +.. 
sourcecode:: sql + + CREATE TABLE mytable ( + id INTEGER GENERATED BY DEFAULT AS IDENTITY (START WITH 3), + ..., + PRIMARY KEY (id) + ) + +The :class:`_schema.Identity` object support many options to control the +"autoincrementing" behavior of the column, like the starting value, the +incrementing value, etc. +In addition to the standard options, Oracle supports setting +:paramref:`_schema.Identity.always` to ``None`` to use the default +generated mode, rendering GENERATED AS IDENTITY in the DDL. It also supports +setting :paramref:`_schema.Identity.on_null` to ``True`` to specify ON NULL +in conjunction with a 'BY DEFAULT' identity column. + +Using a SEQUENCE (all Oracle versions) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Older version of Oracle had no "autoincrement" +feature, SQLAlchemy relies upon sequences to produce these values. With the +older Oracle versions, *a sequence must always be explicitly specified to +enable autoincrement*. This is divergent with the majority of documentation +examples which assume the usage of an autoincrement-capable database. To +specify sequences, use the sqlalchemy.schema.Sequence object which is passed +to a Column construct:: + + t = Table('mytable', metadata, + Column('id', Integer, Sequence('id_seq', start=1), primary_key=True), + Column(...), ... + ) + +This step is also required when using table reflection, i.e. autoload_with=engine:: + + t = Table('mytable', metadata, + Column('id', Integer, Sequence('id_seq', start=1), primary_key=True), + autoload_with=engine + ) + +.. versionchanged:: 1.4 Added :class:`_schema.Identity` construct + in a :class:`_schema.Column` to specify the option of an autoincrementing + column. + +.. _oracle_isolation_level: + +Transaction Isolation Level / Autocommit +---------------------------------------- + +The Oracle database supports "READ COMMITTED" and "SERIALIZABLE" modes of +isolation. The AUTOCOMMIT isolation level is also supported by the cx_Oracle +dialect. + +To set using per-connection execution options:: + + connection = engine.connect() + connection = connection.execution_options( + isolation_level="AUTOCOMMIT" + ) + +For ``READ COMMITTED`` and ``SERIALIZABLE``, the Oracle dialect sets the +level at the session level using ``ALTER SESSION``, which is reverted back +to its default setting when the connection is returned to the connection +pool. + +Valid values for ``isolation_level`` include: + +* ``READ COMMITTED`` +* ``AUTOCOMMIT`` +* ``SERIALIZABLE`` + +.. note:: The implementation for the + :meth:`_engine.Connection.get_isolation_level` method as implemented by the + Oracle dialect necessarily forces the start of a transaction using the + Oracle LOCAL_TRANSACTION_ID function; otherwise no level is normally + readable. + + Additionally, the :meth:`_engine.Connection.get_isolation_level` method will + raise an exception if the ``v$transaction`` view is not available due to + permissions or other reasons, which is a common occurrence in Oracle + installations. + + The cx_Oracle dialect attempts to call the + :meth:`_engine.Connection.get_isolation_level` method when the dialect makes + its first connection to the database in order to acquire the + "default"isolation level. This default level is necessary so that the level + can be reset on a connection after it has been temporarily modified using + :meth:`_engine.Connection.execution_options` method. 
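# --- editorial sketch: not part of the vendored file ----------------------
# Besides the per-connection execution_options shown above, the isolation
# level can be fixed for every connection at engine-creation time.  The DSN
# is illustrative and the cx_Oracle driver must be installed.
from sqlalchemy import create_engine

engine = create_engine(
    "oracle+cx_oracle://scott:tiger@dsn",
    isolation_level="SERIALIZABLE",
)
# ---------------------------------------------------------------------------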
In the common event + that the :meth:`_engine.Connection.get_isolation_level` method raises an + exception due to ``v$transaction`` not being readable as well as any other + database-related failure, the level is assumed to be "READ COMMITTED". No + warning is emitted for this initial first-connect condition as it is + expected to be a common restriction on Oracle databases. + +.. versionadded:: 1.3.16 added support for AUTOCOMMIT to the cx_oracle dialect + as well as the notion of a default isolation level + +.. versionadded:: 1.3.21 Added support for SERIALIZABLE as well as live + reading of the isolation level. + +.. versionchanged:: 1.3.22 In the event that the default isolation + level cannot be read due to permissions on the v$transaction view as + is common in Oracle installations, the default isolation level is hardcoded + to "READ COMMITTED" which was the behavior prior to 1.3.21. + +.. seealso:: + + :ref:`dbapi_autocommit` + +Identifier Casing +----------------- + +In Oracle, the data dictionary represents all case insensitive identifier +names using UPPERCASE text. SQLAlchemy on the other hand considers an +all-lower case identifier name to be case insensitive. The Oracle dialect +converts all case insensitive identifiers to and from those two formats during +schema level communication, such as reflection of tables and indexes. Using +an UPPERCASE name on the SQLAlchemy side indicates a case sensitive +identifier, and SQLAlchemy will quote the name - this will cause mismatches +against data dictionary data received from Oracle, so unless identifier names +have been truly created as case sensitive (i.e. using quoted names), all +lowercase names should be used on the SQLAlchemy side. + +.. _oracle_max_identifier_lengths: + +Max Identifier Lengths +---------------------- + +Oracle has changed the default max identifier length as of Oracle Server +version 12.2. Prior to this version, the length was 30, and for 12.2 and +greater it is now 128. This change impacts SQLAlchemy in the area of +generated SQL label names as well as the generation of constraint names, +particularly in the case where the constraint naming convention feature +described at :ref:`constraint_naming_conventions` is being used. + +To assist with this change and others, Oracle includes the concept of a +"compatibility" version, which is a version number that is independent of the +actual server version in order to assist with migration of Oracle databases, +and may be configured within the Oracle server itself. This compatibility +version is retrieved using the query ``SELECT value FROM v$parameter WHERE +name = 'compatible';``. The SQLAlchemy Oracle dialect, when tasked with +determining the default max identifier length, will attempt to use this query +upon first connect in order to determine the effective compatibility version of +the server, which determines what the maximum allowed identifier length is for +the server. If the table is not available, the server version information is +used instead. + +As of SQLAlchemy 1.4, the default max identifier length for the Oracle dialect +is 128 characters. Upon first connect, the compatibility version is detected +and if it is less than Oracle version 12.2, the max identifier length is +changed to be 30 characters. 
In all cases, setting the +:paramref:`_sa.create_engine.max_identifier_length` parameter will bypass this +change and the value given will be used as is:: + + engine = create_engine( + "oracle+cx_oracle://scott:tiger@oracle122", + max_identifier_length=30) + +The maximum identifier length comes into play both when generating anonymized +SQL labels in SELECT statements, but more crucially when generating constraint +names from a naming convention. It is this area that has created the need for +SQLAlchemy to change this default conservatively. For example, the following +naming convention produces two very different constraint names based on the +identifier length:: + + from sqlalchemy import Column + from sqlalchemy import Index + from sqlalchemy import Integer + from sqlalchemy import MetaData + from sqlalchemy import Table + from sqlalchemy.dialects import oracle + from sqlalchemy.schema import CreateIndex + + m = MetaData(naming_convention={"ix": "ix_%(column_0N_name)s"}) + + t = Table( + "t", + m, + Column("some_column_name_1", Integer), + Column("some_column_name_2", Integer), + Column("some_column_name_3", Integer), + ) + + ix = Index( + None, + t.c.some_column_name_1, + t.c.some_column_name_2, + t.c.some_column_name_3, + ) + + oracle_dialect = oracle.dialect(max_identifier_length=30) + print(CreateIndex(ix).compile(dialect=oracle_dialect)) + +With an identifier length of 30, the above CREATE INDEX looks like:: + + CREATE INDEX ix_some_column_name_1s_70cd ON t + (some_column_name_1, some_column_name_2, some_column_name_3) + +However with length=128, it becomes:: + + CREATE INDEX ix_some_column_name_1some_column_name_2some_column_name_3 ON t + (some_column_name_1, some_column_name_2, some_column_name_3) + +Applications which have run versions of SQLAlchemy prior to 1.4 on an Oracle +server version 12.2 or greater are therefore subject to the scenario of a +database migration that wishes to "DROP CONSTRAINT" on a name that was +previously generated with the shorter length. This migration will fail when +the identifier length is changed without the name of the index or constraint +first being adjusted. Such applications are strongly advised to make use of +:paramref:`_sa.create_engine.max_identifier_length` +in order to maintain control +of the generation of truncated names, and to fully review and test all database +migrations in a staging environment when changing this value to ensure that the +impact of this change has been mitigated. + +.. versionchanged:: 1.4 the default max_identifier_length for Oracle is 128 + characters, which is adjusted down to 30 upon first connect if an older + version of Oracle server (compatibility version < 12.2) is detected. + + +LIMIT/OFFSET/FETCH Support +-------------------------- + +Methods like :meth:`_sql.Select.limit` and :meth:`_sql.Select.offset` make +use of ``FETCH FIRST N ROW / OFFSET N ROWS`` syntax assuming +Oracle 12c or above, and assuming the SELECT statement is not embedded within +a compound statement like UNION. This syntax is also available directly by using +the :meth:`_sql.Select.fetch` method. + +.. versionchanged:: 2.0 the Oracle dialect now uses + ``FETCH FIRST N ROW / OFFSET N ROWS`` for all + :meth:`_sql.Select.limit` and :meth:`_sql.Select.offset` usage including + within the ORM and legacy :class:`_orm.Query`. To force the legacy + behavior using window functions, specify the ``enable_offset_fetch=False`` + dialect parameter to :func:`_sa.create_engine`. 
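# --- editorial sketch: not part of the vendored file ----------------------
# The fetch()/offset() combination renders the Oracle 12c+ syntax described
# above; the table and column names are invented, and the integer values
# appear as post-compile bind parameters in the printed SQL.
from sqlalchemy import Column, Integer, MetaData, Table, select
from sqlalchemy.dialects import oracle

t = Table("t", MetaData(), Column("x", Integer))
stmt = select(t).offset(5).fetch(10)
print(stmt.compile(dialect=oracle.dialect()))
# SELECT t.x FROM t OFFSET ... ROWS FETCH FIRST ... ROWS ONLY
# ---------------------------------------------------------------------------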
+ +The use of ``FETCH FIRST / OFFSET`` may be disabled on any Oracle version +by passing ``enable_offset_fetch=False`` to :func:`_sa.create_engine`, which +will force the use of "legacy" mode that makes use of window functions. +This mode is also selected automatically when using a version of Oracle +prior to 12c. + +When using legacy mode, or when a :class:`.Select` statement +with limit/offset is embedded in a compound statement, an emulated approach for +LIMIT / OFFSET based on window functions is used, which involves creation of a +subquery using ``ROW_NUMBER`` that is prone to performance issues as well as +SQL construction issues for complex statements. However, this approach is +supported by all Oracle versions. See notes below. + +Notes on LIMIT / OFFSET emulation (when fetch() method cannot be used) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If using :meth:`_sql.Select.limit` and :meth:`_sql.Select.offset`, or with the +ORM the :meth:`_orm.Query.limit` and :meth:`_orm.Query.offset` methods on an +Oracle version prior to 12c, the following notes apply: + +* SQLAlchemy currently makes use of ROWNUM to achieve + LIMIT/OFFSET; the exact methodology is taken from + https://blogs.oracle.com/oraclemagazine/on-rownum-and-limiting-results . + +* the "FIRST_ROWS()" optimization keyword is not used by default. To enable + the usage of this optimization directive, specify ``optimize_limits=True`` + to :func:`_sa.create_engine`. + + .. versionchanged:: 1.4 + The Oracle dialect renders limit/offset integer values using a "post + compile" scheme which renders the integer directly before passing the + statement to the cursor for execution. The ``use_binds_for_limits`` flag + no longer has an effect. + + .. seealso:: + + :ref:`change_4808`. + +.. _oracle_returning: + +RETURNING Support +----------------- + +The Oracle database supports RETURNING fully for INSERT, UPDATE and DELETE +statements that are invoked with a single collection of bound parameters +(that is, a ``cursor.execute()`` style statement; SQLAlchemy does not generally +support RETURNING with :term:`executemany` statements). Multiple rows may be +returned as well. + +.. versionchanged:: 2.0 the Oracle backend has full support for RETURNING + on parity with other backends. + + +ON UPDATE CASCADE +----------------- + +Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based +solution is available at +https://web.archive.org/web/20090317041251/https://asktom.oracle.com/tkyte/update_cascade/index.html + +When using the SQLAlchemy ORM, the ORM has limited ability to manually issue +cascading updates - specify ForeignKey objects using the +"deferrable=True, initially='deferred'" keyword arguments, +and specify "passive_updates=False" on each relationship(). + +Oracle 8 Compatibility +---------------------- + +.. warning:: The status of Oracle 8 compatibility is not known for SQLAlchemy + 2.0. + +When Oracle 8 is detected, the dialect internally configures itself to the +following behaviors: + +* the use_ansi flag is set to False. This has the effect of converting all + JOIN phrases into the WHERE clause, and in the case of LEFT OUTER JOIN + makes use of Oracle's (+) operator. + +* the NVARCHAR2 and NCLOB datatypes are no longer generated as DDL when + the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are issued + instead. This because these types don't seem to work correctly on Oracle 8 + even though they are available. 
The :class:`~sqlalchemy.types.NVARCHAR` and + :class:`~sqlalchemy.dialects.oracle.NCLOB` types will always generate + NVARCHAR2 and NCLOB. + + +Synonym/DBLINK Reflection +------------------------- + +When using reflection with Table objects, the dialect can optionally search +for tables indicated by synonyms, either in local or remote schemas or +accessed over DBLINK, by passing the flag ``oracle_resolve_synonyms=True`` as +a keyword argument to the :class:`_schema.Table` construct:: + + some_table = Table('some_table', autoload_with=some_engine, + oracle_resolve_synonyms=True) + +When this flag is set, the given name (such as ``some_table`` above) will +be searched not just in the ``ALL_TABLES`` view, but also within the +``ALL_SYNONYMS`` view to see if this name is actually a synonym to another +name. If the synonym is located and refers to a DBLINK, the oracle dialect +knows how to locate the table's information using DBLINK syntax(e.g. +``@dblink``). + +``oracle_resolve_synonyms`` is accepted wherever reflection arguments are +accepted, including methods such as :meth:`_schema.MetaData.reflect` and +:meth:`_reflection.Inspector.get_columns`. + +If synonyms are not in use, this flag should be left disabled. + +.. _oracle_constraint_reflection: + +Constraint Reflection +--------------------- + +The Oracle dialect can return information about foreign key, unique, and +CHECK constraints, as well as indexes on tables. + +Raw information regarding these constraints can be acquired using +:meth:`_reflection.Inspector.get_foreign_keys`, +:meth:`_reflection.Inspector.get_unique_constraints`, +:meth:`_reflection.Inspector.get_check_constraints`, and +:meth:`_reflection.Inspector.get_indexes`. + +.. versionchanged:: 1.2 The Oracle dialect can now reflect UNIQUE and + CHECK constraints. + +When using reflection at the :class:`_schema.Table` level, the +:class:`_schema.Table` +will also include these constraints. + +Note the following caveats: + +* When using the :meth:`_reflection.Inspector.get_check_constraints` method, + Oracle + builds a special "IS NOT NULL" constraint for columns that specify + "NOT NULL". This constraint is **not** returned by default; to include + the "IS NOT NULL" constraints, pass the flag ``include_all=True``:: + + from sqlalchemy import create_engine, inspect + + engine = create_engine("oracle+cx_oracle://s:t@dsn") + inspector = inspect(engine) + all_check_constraints = inspector.get_check_constraints( + "some_table", include_all=True) + +* in most cases, when reflecting a :class:`_schema.Table`, + a UNIQUE constraint will + **not** be available as a :class:`.UniqueConstraint` object, as Oracle + mirrors unique constraints with a UNIQUE index in most cases (the exception + seems to be when two or more unique constraints represent the same columns); + the :class:`_schema.Table` will instead represent these using + :class:`.Index` + with the ``unique=True`` flag set. + +* Oracle creates an implicit index for the primary key of a table; this index + is **excluded** from all index results. + +* the list of columns reflected for an index will not include column names + that start with SYS_NC. + +Table names with SYSTEM/SYSAUX tablespaces +------------------------------------------- + +The :meth:`_reflection.Inspector.get_table_names` and +:meth:`_reflection.Inspector.get_temp_table_names` +methods each return a list of table names for the current engine. These methods +are also part of the reflection which occurs within an operation such as +:meth:`_schema.MetaData.reflect`. 
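# --- editorial sketch: not part of the vendored file ----------------------
# Because Oracle mirrors most unique constraints with unique indexes (see
# the caveats above), reflected uniqueness usually surfaces through
# get_indexes() entries with unique=True rather than as UniqueConstraint
# objects.  The DSN and table name are illustrative and the cx_Oracle
# driver must be installed.
from sqlalchemy import create_engine, inspect

engine = create_engine("oracle+cx_oracle://scott:tiger@dsn")
inspector = inspect(engine)
for idx in inspector.get_indexes("some_table"):
    print(idx["name"], idx["column_names"], idx["unique"])
# ---------------------------------------------------------------------------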
By default, +these operations exclude the ``SYSTEM`` +and ``SYSAUX`` tablespaces from the operation. In order to change this, the +default list of tablespaces excluded can be changed at the engine level using +the ``exclude_tablespaces`` parameter:: + + # exclude SYSAUX and SOME_TABLESPACE, but not SYSTEM + e = create_engine( + "oracle+cx_oracle://scott:tiger@xe", + exclude_tablespaces=["SYSAUX", "SOME_TABLESPACE"]) + +DateTime Compatibility +---------------------- + +Oracle has no datatype known as ``DATETIME``, it instead has only ``DATE``, +which can actually store a date and time value. For this reason, the Oracle +dialect provides a type :class:`_oracle.DATE` which is a subclass of +:class:`.DateTime`. This type has no special behavior, and is only +present as a "marker" for this type; additionally, when a database column +is reflected and the type is reported as ``DATE``, the time-supporting +:class:`_oracle.DATE` type is used. + +.. _oracle_table_options: + +Oracle Table Options +-------------------- + +The CREATE TABLE phrase supports the following options with Oracle +in conjunction with the :class:`_schema.Table` construct: + + +* ``ON COMMIT``:: + + Table( + "some_table", metadata, ..., + prefixes=['GLOBAL TEMPORARY'], oracle_on_commit='PRESERVE ROWS') + +* ``COMPRESS``:: + + Table('mytable', metadata, Column('data', String(32)), + oracle_compress=True) + + Table('mytable', metadata, Column('data', String(32)), + oracle_compress=6) + + The ``oracle_compress`` parameter accepts either an integer compression + level, or ``True`` to use the default compression level. + +.. _oracle_index_options: + +Oracle Specific Index Options +----------------------------- + +Bitmap Indexes +~~~~~~~~~~~~~~ + +You can specify the ``oracle_bitmap`` parameter to create a bitmap index +instead of a B-tree index:: + + Index('my_index', my_table.c.data, oracle_bitmap=True) + +Bitmap indexes cannot be unique and cannot be compressed. SQLAlchemy will not +check for such limitations, only the database will. + +Index compression +~~~~~~~~~~~~~~~~~ + +Oracle has a more efficient storage mode for indexes containing lots of +repeated values. Use the ``oracle_compress`` parameter to turn on key +compression:: + + Index('my_index', my_table.c.data, oracle_compress=True) + + Index('my_index', my_table.c.data1, my_table.c.data2, unique=True, + oracle_compress=1) + +The ``oracle_compress`` parameter accepts either an integer specifying the +number of prefix columns to compress, or ``True`` to use the default (all +columns for non-unique indexes, all but the last column for unique indexes). + +""" # noqa + +from __future__ import annotations + +from collections import defaultdict +from functools import lru_cache +from functools import wraps +import re + +from . import dictionary +from .types import _OracleBoolean +from .types import _OracleDate +from .types import BFILE +from .types import BINARY_DOUBLE +from .types import BINARY_FLOAT +from .types import DATE +from .types import FLOAT +from .types import INTERVAL +from .types import LONG +from .types import NCLOB +from .types import NUMBER +from .types import NVARCHAR2 # noqa +from .types import OracleRaw # noqa +from .types import RAW +from .types import ROWID # noqa +from .types import TIMESTAMP +from .types import VARCHAR2 # noqa +from ... import Computed +from ... import exc +from ... import schema as sa_schema +from ... import sql +from ... 
import util +from ...engine import default +from ...engine import ObjectKind +from ...engine import ObjectScope +from ...engine import reflection +from ...engine.reflection import ReflectionDefaults +from ...sql import and_ +from ...sql import bindparam +from ...sql import compiler +from ...sql import expression +from ...sql import func +from ...sql import null +from ...sql import or_ +from ...sql import select +from ...sql import sqltypes +from ...sql import util as sql_util +from ...sql import visitors +from ...sql.visitors import InternalTraversal +from ...types import BLOB +from ...types import CHAR +from ...types import CLOB +from ...types import DOUBLE_PRECISION +from ...types import INTEGER +from ...types import NCHAR +from ...types import NVARCHAR +from ...types import REAL +from ...types import VARCHAR + +RESERVED_WORDS = set( + "SHARE RAW DROP BETWEEN FROM DESC OPTION PRIOR LONG THEN " + "DEFAULT ALTER IS INTO MINUS INTEGER NUMBER GRANT IDENTIFIED " + "ALL TO ORDER ON FLOAT DATE HAVING CLUSTER NOWAIT RESOURCE " + "ANY TABLE INDEX FOR UPDATE WHERE CHECK SMALLINT WITH DELETE " + "BY ASC REVOKE LIKE SIZE RENAME NOCOMPRESS NULL GROUP VALUES " + "AS IN VIEW EXCLUSIVE COMPRESS SYNONYM SELECT INSERT EXISTS " + "NOT TRIGGER ELSE CREATE INTERSECT PCTFREE DISTINCT USER " + "CONNECT SET MODE OF UNIQUE VARCHAR2 VARCHAR LOCK OR CHAR " + "DECIMAL UNION PUBLIC AND START UID COMMENT CURRENT LEVEL".split() +) + +NO_ARG_FNS = set( + "UID CURRENT_DATE SYSDATE USER CURRENT_TIME CURRENT_TIMESTAMP".split() +) + + +colspecs = { + sqltypes.Boolean: _OracleBoolean, + sqltypes.Interval: INTERVAL, + sqltypes.DateTime: DATE, + sqltypes.Date: _OracleDate, +} + +ischema_names = { + "VARCHAR2": VARCHAR, + "NVARCHAR2": NVARCHAR, + "CHAR": CHAR, + "NCHAR": NCHAR, + "DATE": DATE, + "NUMBER": NUMBER, + "BLOB": BLOB, + "BFILE": BFILE, + "CLOB": CLOB, + "NCLOB": NCLOB, + "TIMESTAMP": TIMESTAMP, + "TIMESTAMP WITH TIME ZONE": TIMESTAMP, + "TIMESTAMP WITH LOCAL TIME ZONE": TIMESTAMP, + "INTERVAL DAY TO SECOND": INTERVAL, + "RAW": RAW, + "FLOAT": FLOAT, + "DOUBLE PRECISION": DOUBLE_PRECISION, + "REAL": REAL, + "LONG": LONG, + "BINARY_DOUBLE": BINARY_DOUBLE, + "BINARY_FLOAT": BINARY_FLOAT, + "ROWID": ROWID, +} + + +class OracleTypeCompiler(compiler.GenericTypeCompiler): + # Note: + # Oracle DATE == DATETIME + # Oracle does not allow milliseconds in DATE + # Oracle does not support TIME columns + + def visit_datetime(self, type_, **kw): + return self.visit_DATE(type_, **kw) + + def visit_float(self, type_, **kw): + return self.visit_FLOAT(type_, **kw) + + def visit_double(self, type_, **kw): + return self.visit_DOUBLE_PRECISION(type_, **kw) + + def visit_unicode(self, type_, **kw): + if self.dialect._use_nchar_for_unicode: + return self.visit_NVARCHAR2(type_, **kw) + else: + return self.visit_VARCHAR2(type_, **kw) + + def visit_INTERVAL(self, type_, **kw): + return "INTERVAL DAY%s TO SECOND%s" % ( + type_.day_precision is not None + and "(%d)" % type_.day_precision + or "", + type_.second_precision is not None + and "(%d)" % type_.second_precision + or "", + ) + + def visit_LONG(self, type_, **kw): + return "LONG" + + def visit_TIMESTAMP(self, type_, **kw): + if getattr(type_, "local_timezone", False): + return "TIMESTAMP WITH LOCAL TIME ZONE" + elif type_.timezone: + return "TIMESTAMP WITH TIME ZONE" + else: + return "TIMESTAMP" + + def visit_DOUBLE_PRECISION(self, type_, **kw): + return self._generate_numeric(type_, "DOUBLE PRECISION", **kw) + + def visit_BINARY_DOUBLE(self, type_, **kw): + return 
self._generate_numeric(type_, "BINARY_DOUBLE", **kw) + + def visit_BINARY_FLOAT(self, type_, **kw): + return self._generate_numeric(type_, "BINARY_FLOAT", **kw) + + def visit_FLOAT(self, type_, **kw): + kw["_requires_binary_precision"] = True + return self._generate_numeric(type_, "FLOAT", **kw) + + def visit_NUMBER(self, type_, **kw): + return self._generate_numeric(type_, "NUMBER", **kw) + + def _generate_numeric( + self, + type_, + name, + precision=None, + scale=None, + _requires_binary_precision=False, + **kw, + ): + if precision is None: + precision = getattr(type_, "precision", None) + + if _requires_binary_precision: + binary_precision = getattr(type_, "binary_precision", None) + + if precision and binary_precision is None: + # https://www.oracletutorial.com/oracle-basics/oracle-float/ + estimated_binary_precision = int(precision / 0.30103) + raise exc.ArgumentError( + "Oracle FLOAT types use 'binary precision', which does " + "not convert cleanly from decimal 'precision'. Please " + "specify " + f"this type with a separate Oracle variant, such as " + f"{type_.__class__.__name__}(precision={precision})." + f"with_variant(oracle.FLOAT" + f"(binary_precision=" + f"{estimated_binary_precision}), 'oracle'), so that the " + "Oracle specific 'binary_precision' may be specified " + "accurately." + ) + else: + precision = binary_precision + + if scale is None: + scale = getattr(type_, "scale", None) + + if precision is None: + return name + elif scale is None: + n = "%(name)s(%(precision)s)" + return n % {"name": name, "precision": precision} + else: + n = "%(name)s(%(precision)s, %(scale)s)" + return n % {"name": name, "precision": precision, "scale": scale} + + def visit_string(self, type_, **kw): + return self.visit_VARCHAR2(type_, **kw) + + def visit_VARCHAR2(self, type_, **kw): + return self._visit_varchar(type_, "", "2") + + def visit_NVARCHAR2(self, type_, **kw): + return self._visit_varchar(type_, "N", "2") + + visit_NVARCHAR = visit_NVARCHAR2 + + def visit_VARCHAR(self, type_, **kw): + return self._visit_varchar(type_, "", "") + + def _visit_varchar(self, type_, n, num): + if not type_.length: + return "%(n)sVARCHAR%(two)s" % {"two": num, "n": n} + elif not n and self.dialect._supports_char_length: + varchar = "VARCHAR%(two)s(%(length)s CHAR)" + return varchar % {"length": type_.length, "two": num} + else: + varchar = "%(n)sVARCHAR%(two)s(%(length)s)" + return varchar % {"length": type_.length, "two": num, "n": n} + + def visit_text(self, type_, **kw): + return self.visit_CLOB(type_, **kw) + + def visit_unicode_text(self, type_, **kw): + if self.dialect._use_nchar_for_unicode: + return self.visit_NCLOB(type_, **kw) + else: + return self.visit_CLOB(type_, **kw) + + def visit_large_binary(self, type_, **kw): + return self.visit_BLOB(type_, **kw) + + def visit_big_integer(self, type_, **kw): + return self.visit_NUMBER(type_, precision=19, **kw) + + def visit_boolean(self, type_, **kw): + return self.visit_SMALLINT(type_, **kw) + + def visit_RAW(self, type_, **kw): + if type_.length: + return "RAW(%(length)s)" % {"length": type_.length} + else: + return "RAW" + + def visit_ROWID(self, type_, **kw): + return "ROWID" + + +class OracleCompiler(compiler.SQLCompiler): + """Oracle compiler modifies the lexical structure of Select + statements to work under non-ANSI configured Oracle databases, if + the use_ansi flag is False. 
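+
+    In that mode, JOIN phrases are rendered as comma-separated FROM
+    elements, with the join criteria moved into the WHERE clause using
+    Oracle's ``(+)`` operator for outer joins.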
+ """ + + compound_keywords = util.update_copy( + compiler.SQLCompiler.compound_keywords, + {expression.CompoundSelect.EXCEPT: "MINUS"}, + ) + + def __init__(self, *args, **kwargs): + self.__wheres = {} + super().__init__(*args, **kwargs) + + def visit_mod_binary(self, binary, operator, **kw): + return "mod(%s, %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + def visit_now_func(self, fn, **kw): + return "CURRENT_TIMESTAMP" + + def visit_char_length_func(self, fn, **kw): + return "LENGTH" + self.function_argspec(fn, **kw) + + def visit_match_op_binary(self, binary, operator, **kw): + return "CONTAINS (%s, %s)" % ( + self.process(binary.left), + self.process(binary.right), + ) + + def visit_true(self, expr, **kw): + return "1" + + def visit_false(self, expr, **kw): + return "0" + + def get_cte_preamble(self, recursive): + return "WITH" + + def get_select_hint_text(self, byfroms): + return " ".join("/*+ %s */" % text for table, text in byfroms.items()) + + def function_argspec(self, fn, **kw): + if len(fn.clauses) > 0 or fn.name.upper() not in NO_ARG_FNS: + return compiler.SQLCompiler.function_argspec(self, fn, **kw) + else: + return "" + + def visit_function(self, func, **kw): + text = super().visit_function(func, **kw) + if kw.get("asfrom", False): + text = "TABLE (%s)" % text + return text + + def visit_table_valued_column(self, element, **kw): + text = super().visit_table_valued_column(element, **kw) + text = text + ".COLUMN_VALUE" + return text + + def default_from(self): + """Called when a ``SELECT`` statement has no froms, + and no ``FROM`` clause is to be appended. + + The Oracle compiler tacks a "FROM DUAL" to the statement. + """ + + return " FROM DUAL" + + def visit_join(self, join, from_linter=None, **kwargs): + if self.dialect.use_ansi: + return compiler.SQLCompiler.visit_join( + self, join, from_linter=from_linter, **kwargs + ) + else: + if from_linter: + from_linter.edges.add((join.left, join.right)) + + kwargs["asfrom"] = True + if isinstance(join.right, expression.FromGrouping): + right = join.right.element + else: + right = join.right + return ( + self.process(join.left, from_linter=from_linter, **kwargs) + + ", " + + self.process(right, from_linter=from_linter, **kwargs) + ) + + def _get_nonansi_join_whereclause(self, froms): + clauses = [] + + def visit_join(join): + if join.isouter: + # https://docs.oracle.com/database/121/SQLRF/queries006.htm#SQLRF52354 + # "apply the outer join operator (+) to all columns of B in + # the join condition in the WHERE clause" - that is, + # unconditionally regardless of operator or the other side + def visit_binary(binary): + if isinstance( + binary.left, expression.ColumnClause + ) and join.right.is_derived_from(binary.left.table): + binary.left = _OuterJoinColumn(binary.left) + elif isinstance( + binary.right, expression.ColumnClause + ) and join.right.is_derived_from(binary.right.table): + binary.right = _OuterJoinColumn(binary.right) + + clauses.append( + visitors.cloned_traverse( + join.onclause, {}, {"binary": visit_binary} + ) + ) + else: + clauses.append(join.onclause) + + for j in join.left, join.right: + if isinstance(j, expression.Join): + visit_join(j) + elif isinstance(j, expression.FromGrouping): + visit_join(j.element) + + for f in froms: + if isinstance(f, expression.Join): + visit_join(f) + + if not clauses: + return None + else: + return sql.and_(*clauses) + + def visit_outer_join_column(self, vc, **kw): + return self.process(vc.column, **kw) + "(+)" + + def visit_sequence(self, seq, 
**kw): + return self.preparer.format_sequence(seq) + ".nextval" + + def get_render_as_alias_suffix(self, alias_name_text): + """Oracle doesn't like ``FROM table AS alias``""" + + return " " + alias_name_text + + def returning_clause( + self, stmt, returning_cols, *, populate_result_map, **kw + ): + columns = [] + binds = [] + + for i, column in enumerate( + expression._select_iterables(returning_cols) + ): + if ( + self.isupdate + and isinstance(column, sa_schema.Column) + and isinstance(column.server_default, Computed) + and not self.dialect._supports_update_returning_computed_cols + ): + util.warn( + "Computed columns don't work with Oracle UPDATE " + "statements that use RETURNING; the value of the column " + "*before* the UPDATE takes place is returned. It is " + "advised to not use RETURNING with an Oracle computed " + "column. Consider setting implicit_returning to False on " + "the Table object in order to avoid implicit RETURNING " + "clauses from being generated for this Table." + ) + if column.type._has_column_expression: + col_expr = column.type.column_expression(column) + else: + col_expr = column + + outparam = sql.outparam("ret_%d" % i, type_=column.type) + self.binds[outparam.key] = outparam + binds.append( + self.bindparam_string(self._truncate_bindparam(outparam)) + ) + + # has_out_parameters would in a normal case be set to True + # as a result of the compiler visiting an outparam() object. + # in this case, the above outparam() objects are not being + # visited. Ensure the statement itself didn't have other + # outparam() objects independently. + # technically, this could be supported, but as it would be + # a very strange use case without a clear rationale, disallow it + if self.has_out_parameters: + raise exc.InvalidRequestError( + "Using explicit outparam() objects with " + "UpdateBase.returning() in the same Core DML statement " + "is not supported in the Oracle dialect." 
+                )
+
+            self._oracle_returning = True
+
+            columns.append(self.process(col_expr, within_columns_clause=False))
+            if populate_result_map:
+                self._add_to_result_map(
+                    getattr(col_expr, "name", col_expr._anon_name_label),
+                    getattr(col_expr, "name", col_expr._anon_name_label),
+                    (
+                        column,
+                        getattr(column, "name", None),
+                        getattr(column, "key", None),
+                    ),
+                    column.type,
+                )
+
+        return "RETURNING " + ", ".join(columns) + " INTO " + ", ".join(binds)
+
+    def _row_limit_clause(self, select, **kw):
+        """Oracle 12c supports OFFSET/FETCH operators.
+        Use them instead of a subquery with row_number.
+
+        """
+
+        if (
+            select._fetch_clause is not None
+            or not self.dialect._supports_offset_fetch
+        ):
+            return super()._row_limit_clause(
+                select, use_literal_execute_for_simple_int=True, **kw
+            )
+        else:
+            return self.fetch_clause(
+                select,
+                fetch_clause=self._get_limit_or_fetch(select),
+                use_literal_execute_for_simple_int=True,
+                **kw,
+            )
+
+    def _get_limit_or_fetch(self, select):
+        if select._fetch_clause is None:
+            return select._limit_clause
+        else:
+            return select._fetch_clause
+
+    def translate_select_structure(self, select_stmt, **kwargs):
+        select = select_stmt
+
+        if not getattr(select, "_oracle_visit", None):
+            if not self.dialect.use_ansi:
+                froms = self._display_froms_for_select(
+                    select, kwargs.get("asfrom", False)
+                )
+                whereclause = self._get_nonansi_join_whereclause(froms)
+                if whereclause is not None:
+                    select = select.where(whereclause)
+                    select._oracle_visit = True
+
+        # if fetch is used this is not needed
+        if (
+            select._has_row_limiting_clause
+            and not self.dialect._supports_offset_fetch
+            and select._fetch_clause is None
+        ):
+            limit_clause = select._limit_clause
+            offset_clause = select._offset_clause
+
+            if select._simple_int_clause(limit_clause):
+                limit_clause = limit_clause.render_literal_execute()
+
+            if select._simple_int_clause(offset_clause):
+                offset_clause = offset_clause.render_literal_execute()
+
+            # currently using form at:
+            # https://blogs.oracle.com/oraclemagazine/\
+            # on-rownum-and-limiting-results
+
+            orig_select = select
+            select = select._generate()
+            select._oracle_visit = True
+
+            # add expressions to accommodate FOR UPDATE OF
+            for_update = select._for_update_arg
+            if for_update is not None and for_update.of:
+                for_update = for_update._clone()
+                for_update._copy_internals()
+
+                for elem in for_update.of:
+                    if not select.selected_columns.contains_column(elem):
+                        select = select.add_columns(elem)
+
+            # Wrap the middle select and add the hint
+            inner_subquery = select.alias()
+            limitselect = sql.select(
+                *[
+                    c
+                    for c in inner_subquery.c
+                    if orig_select.selected_columns.corresponding_column(c)
+                    is not None
+                ]
+            )
+
+            if (
+                limit_clause is not None
+                and self.dialect.optimize_limits
+                and select._simple_int_clause(limit_clause)
+            ):
+                limitselect = limitselect.prefix_with(
+                    expression.text(
+                        "/*+ FIRST_ROWS(%s) */"
+                        % self.process(limit_clause, **kwargs)
+                    )
+                )
+
+            limitselect._oracle_visit = True
+            limitselect._is_wrapper = True
+
+            # add expressions to accommodate FOR UPDATE OF
+            if for_update is not None and for_update.of:
+                adapter = sql_util.ClauseAdapter(inner_subquery)
+                for_update.of = [
+                    adapter.traverse(elem) for elem in for_update.of
+                ]
+
+            # If needed, add the limiting clause
+            if limit_clause is not None:
+                if select._simple_int_clause(limit_clause) and (
+                    offset_clause is None
+                    or select._simple_int_clause(offset_clause)
+                ):
+                    max_row = limit_clause
+
+                    if offset_clause is not None:
+                        max_row = max_row + offset_clause
+
+                else:
+
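+                    # non-literal limit/offset: the ROWNUM upper bound is
+                    # still limit plus offset, built from the raw clauses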
max_row = limit_clause + + if offset_clause is not None: + max_row = max_row + offset_clause + limitselect = limitselect.where( + sql.literal_column("ROWNUM") <= max_row + ) + + # If needed, add the ora_rn, and wrap again with offset. + if offset_clause is None: + limitselect._for_update_arg = for_update + select = limitselect + else: + limitselect = limitselect.add_columns( + sql.literal_column("ROWNUM").label("ora_rn") + ) + limitselect._oracle_visit = True + limitselect._is_wrapper = True + + if for_update is not None and for_update.of: + limitselect_cols = limitselect.selected_columns + for elem in for_update.of: + if ( + limitselect_cols.corresponding_column(elem) + is None + ): + limitselect = limitselect.add_columns(elem) + + limit_subquery = limitselect.alias() + origselect_cols = orig_select.selected_columns + offsetselect = sql.select( + *[ + c + for c in limit_subquery.c + if origselect_cols.corresponding_column(c) + is not None + ] + ) + + offsetselect._oracle_visit = True + offsetselect._is_wrapper = True + + if for_update is not None and for_update.of: + adapter = sql_util.ClauseAdapter(limit_subquery) + for_update.of = [ + adapter.traverse(elem) for elem in for_update.of + ] + + offsetselect = offsetselect.where( + sql.literal_column("ora_rn") > offset_clause + ) + + offsetselect._for_update_arg = for_update + select = offsetselect + + return select + + def limit_clause(self, select, **kw): + return "" + + def visit_empty_set_expr(self, type_, **kw): + return "SELECT 1 FROM DUAL WHERE 1!=1" + + def for_update_clause(self, select, **kw): + if self.is_subquery(): + return "" + + tmp = " FOR UPDATE" + + if select._for_update_arg.of: + tmp += " OF " + ", ".join( + self.process(elem, **kw) for elem in select._for_update_arg.of + ) + + if select._for_update_arg.nowait: + tmp += " NOWAIT" + if select._for_update_arg.skip_locked: + tmp += " SKIP LOCKED" + + return tmp + + def visit_is_distinct_from_binary(self, binary, operator, **kw): + return "DECODE(%s, %s, 0, 1) = 1" % ( + self.process(binary.left), + self.process(binary.right), + ) + + def visit_is_not_distinct_from_binary(self, binary, operator, **kw): + return "DECODE(%s, %s, 0, 1) = 0" % ( + self.process(binary.left), + self.process(binary.right), + ) + + def visit_regexp_match_op_binary(self, binary, operator, **kw): + string = self.process(binary.left, **kw) + pattern = self.process(binary.right, **kw) + flags = binary.modifiers["flags"] + if flags is None: + return "REGEXP_LIKE(%s, %s)" % (string, pattern) + else: + return "REGEXP_LIKE(%s, %s, %s)" % ( + string, + pattern, + self.render_literal_value(flags, sqltypes.STRINGTYPE), + ) + + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): + return "NOT %s" % self.visit_regexp_match_op_binary( + binary, operator, **kw + ) + + def visit_regexp_replace_op_binary(self, binary, operator, **kw): + string = self.process(binary.left, **kw) + pattern_replace = self.process(binary.right, **kw) + flags = binary.modifiers["flags"] + if flags is None: + return "REGEXP_REPLACE(%s, %s)" % ( + string, + pattern_replace, + ) + else: + return "REGEXP_REPLACE(%s, %s, %s)" % ( + string, + pattern_replace, + self.render_literal_value(flags, sqltypes.STRINGTYPE), + ) + + def visit_aggregate_strings_func(self, fn, **kw): + return "LISTAGG%s" % self.function_argspec(fn, **kw) + + def _visit_bitwise(self, binary, fn_name, custom_right=None, **kw): + left = self.process(binary.left, **kw) + right = self.process( + custom_right if custom_right is not None else binary.right, **kw + ) 
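+        # render using Oracle's function-call form, e.g. BITAND(a, b)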
+        return f"{fn_name}({left}, {right})"
+
+    def visit_bitwise_xor_op_binary(self, binary, operator, **kw):
+        return self._visit_bitwise(binary, "BITXOR", **kw)
+
+    def visit_bitwise_or_op_binary(self, binary, operator, **kw):
+        return self._visit_bitwise(binary, "BITOR", **kw)
+
+    def visit_bitwise_and_op_binary(self, binary, operator, **kw):
+        return self._visit_bitwise(binary, "BITAND", **kw)
+
+    def visit_bitwise_rshift_op_binary(self, binary, operator, **kw):
+        raise exc.CompileError("Cannot compile bitwise_rshift in oracle")
+
+    def visit_bitwise_lshift_op_binary(self, binary, operator, **kw):
+        raise exc.CompileError("Cannot compile bitwise_lshift in oracle")
+
+    def visit_bitwise_not_op_unary_operator(self, element, operator, **kw):
+        raise exc.CompileError("Cannot compile bitwise_not in oracle")
+
+
+class OracleDDLCompiler(compiler.DDLCompiler):
+    def define_constraint_cascades(self, constraint):
+        text = ""
+        if constraint.ondelete is not None:
+            text += " ON DELETE %s" % constraint.ondelete
+
+        # Oracle has no ON UPDATE CASCADE -
+        # it's only available via triggers
+        # https://web.archive.org/web/20090317041251/https://asktom.oracle.com/tkyte/update_cascade/index.html
+        if constraint.onupdate is not None:
+            util.warn(
+                "Oracle does not contain native UPDATE CASCADE "
+                "functionality - onupdates will not be rendered for foreign "
+                "keys. Consider using deferrable=True, initially='deferred' "
+                "or triggers."
+            )
+
+        return text
+
+    def visit_drop_table_comment(self, drop, **kw):
+        return "COMMENT ON TABLE %s IS ''" % self.preparer.format_table(
+            drop.element
+        )
+
+    def visit_create_index(self, create, **kw):
+        index = create.element
+        self._verify_index_table(index)
+        preparer = self.preparer
+        text = "CREATE "
+        if index.unique:
+            text += "UNIQUE "
+        if index.dialect_options["oracle"]["bitmap"]:
+            text += "BITMAP "
+        text += "INDEX %s ON %s (%s)" % (
+            self._prepared_index_name(index, include_schema=True),
+            preparer.format_table(index.table, use_schema=True),
+            ", ".join(
+                self.sql_compiler.process(
+                    expr, include_table=False, literal_binds=True
+                )
+                for expr in index.expressions
+            ),
+        )
+        if index.dialect_options["oracle"]["compress"] is not False:
+            if index.dialect_options["oracle"]["compress"] is True:
+                text += " COMPRESS"
+            else:
+                text += " COMPRESS %d" % (
+                    index.dialect_options["oracle"]["compress"]
+                )
+        return text
+
+    def post_create_table(self, table):
+        table_opts = []
+        opts = table.dialect_options["oracle"]
+
+        if opts["on_commit"]:
+            on_commit_options = opts["on_commit"].replace("_", " ").upper()
+            table_opts.append("\n ON COMMIT %s" % on_commit_options)
+
+        if opts["compress"]:
+            if opts["compress"] is True:
+                table_opts.append("\n COMPRESS")
+            else:
+                table_opts.append("\n COMPRESS FOR %s" % (opts["compress"]))
+
+        return "".join(table_opts)
+
+    def get_identity_options(self, identity_options):
+        text = super().get_identity_options(identity_options)
+        text = text.replace("NO MINVALUE", "NOMINVALUE")
+        text = text.replace("NO MAXVALUE", "NOMAXVALUE")
+        text = text.replace("NO CYCLE", "NOCYCLE")
+        if identity_options.order is not None:
+            text += " ORDER" if identity_options.order else " NOORDER"
+        return text.strip()
+
+    def visit_computed_column(self, generated, **kw):
+        text = "GENERATED ALWAYS AS (%s)" % self.sql_compiler.process(
+            generated.sqltext, include_table=False, literal_binds=True
+        )
+        if generated.persisted is True:
+            raise exc.CompileError(
+                "Oracle computed columns do not support 'stored' persistence; "
+                "set the 'persisted' flag to 
None or False for Oracle support." + ) + elif generated.persisted is False: + text += " VIRTUAL" + return text + + def visit_identity_column(self, identity, **kw): + if identity.always is None: + kind = "" + else: + kind = "ALWAYS" if identity.always else "BY DEFAULT" + text = "GENERATED %s" % kind + if identity.on_null: + text += " ON NULL" + text += " AS IDENTITY" + options = self.get_identity_options(identity) + if options: + text += " (%s)" % options + return text + + +class OracleIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words = {x.lower() for x in RESERVED_WORDS} + illegal_initial_characters = {str(dig) for dig in range(0, 10)}.union( + ["_", "$"] + ) + + def _bindparam_requires_quotes(self, value): + """Return True if the given identifier requires quoting.""" + lc_value = value.lower() + return ( + lc_value in self.reserved_words + or value[0] in self.illegal_initial_characters + or not self.legal_characters.match(str(value)) + ) + + def format_savepoint(self, savepoint): + name = savepoint.ident.lstrip("_") + return super().format_savepoint(savepoint, name) + + +class OracleExecutionContext(default.DefaultExecutionContext): + def fire_sequence(self, seq, type_): + return self._execute_scalar( + "SELECT " + + self.identifier_preparer.format_sequence(seq) + + ".nextval FROM DUAL", + type_, + ) + + def pre_exec(self): + if self.statement and "_oracle_dblink" in self.execution_options: + self.statement = self.statement.replace( + dictionary.DB_LINK_PLACEHOLDER, + self.execution_options["_oracle_dblink"], + ) + + +class OracleDialect(default.DefaultDialect): + name = "oracle" + supports_statement_cache = True + supports_alter = True + max_identifier_length = 128 + + _supports_offset_fetch = True + + insert_returning = True + update_returning = True + delete_returning = True + + div_is_floordiv = False + + supports_simple_order_by_label = False + cte_follows_insert = True + returns_native_bytes = True + + supports_sequences = True + sequences_optional = False + postfetch_lastrowid = False + + default_paramstyle = "named" + colspecs = colspecs + ischema_names = ischema_names + requires_name_normalize = True + + supports_comments = True + + supports_default_values = False + supports_default_metavalue = True + supports_empty_insert = False + supports_identity_columns = True + + statement_compiler = OracleCompiler + ddl_compiler = OracleDDLCompiler + type_compiler_cls = OracleTypeCompiler + preparer = OracleIdentifierPreparer + execution_ctx_cls = OracleExecutionContext + + reflection_options = ("oracle_resolve_synonyms",) + + _use_nchar_for_unicode = False + + construct_arguments = [ + ( + sa_schema.Table, + {"resolve_synonyms": False, "on_commit": None, "compress": False}, + ), + (sa_schema.Index, {"bitmap": False, "compress": False}), + ] + + @util.deprecated_params( + use_binds_for_limits=( + "1.4", + "The ``use_binds_for_limits`` Oracle dialect parameter is " + "deprecated. 
The dialect now renders LIMIT/OFFSET integers "
+            "inline in all cases using a post-compilation hook, so that the "
+            "value is still represented by a 'bound parameter' on the Core "
+            "Expression side.",
+        )
+    )
+    def __init__(
+        self,
+        use_ansi=True,
+        optimize_limits=False,
+        use_binds_for_limits=None,
+        use_nchar_for_unicode=False,
+        exclude_tablespaces=("SYSTEM", "SYSAUX"),
+        enable_offset_fetch=True,
+        **kwargs,
+    ):
+        default.DefaultDialect.__init__(self, **kwargs)
+        self._use_nchar_for_unicode = use_nchar_for_unicode
+        self.use_ansi = use_ansi
+        self.optimize_limits = optimize_limits
+        self.exclude_tablespaces = exclude_tablespaces
+        self.enable_offset_fetch = self._supports_offset_fetch = (
+            enable_offset_fetch
+        )
+
+    def initialize(self, connection):
+        super().initialize(connection)
+
+        # Oracle 8i has RETURNING:
+        # https://docs.oracle.com/cd/A87860_01/doc/index.htm
+
+        # so does Oracle8:
+        # https://docs.oracle.com/cd/A64702_01/doc/index.htm
+
+        if self._is_oracle_8:
+            self.colspecs = self.colspecs.copy()
+            self.colspecs.pop(sqltypes.Interval)
+            self.use_ansi = False
+
+        self.supports_identity_columns = self.server_version_info >= (12,)
+        self._supports_offset_fetch = (
+            self.enable_offset_fetch and self.server_version_info >= (12,)
+        )
+
+    def _get_effective_compat_server_version_info(self, connection):
+        # dialect does not need compat levels below 12.2, so don't query
+        # in those cases
+
+        if self.server_version_info < (12, 2):
+            return self.server_version_info
+        try:
+            compat = connection.exec_driver_sql(
+                "SELECT value FROM v$parameter WHERE name = 'compatible'"
+            ).scalar()
+        except exc.DBAPIError:
+            compat = None
+
+        if compat:
+            try:
+                return tuple(int(x) for x in compat.split("."))
+            except:
+                return self.server_version_info
+        else:
+            return self.server_version_info
+
+    @property
+    def _is_oracle_8(self):
+        return self.server_version_info and self.server_version_info < (9,)
+
+    @property
+    def _supports_table_compression(self):
+        return self.server_version_info and self.server_version_info >= (10, 1)
+
+    @property
+    def _supports_table_compress_for(self):
+        return self.server_version_info and self.server_version_info >= (11,)
+
+    @property
+    def _supports_char_length(self):
+        return not self._is_oracle_8
+
+    @property
+    def _supports_update_returning_computed_cols(self):
+        # on version 18 this error is no longer present, while it happens
+        # on 11; it may also work on versions before 18
+        return self.server_version_info and self.server_version_info >= (18,)
+
+    @property
+    def _supports_except_all(self):
+        return self.server_version_info and self.server_version_info >= (21,)
+
+    def do_release_savepoint(self, connection, name):
+        # Oracle does not support RELEASE SAVEPOINT
+        pass
+
+    def _check_max_identifier_length(self, connection):
+        if self._get_effective_compat_server_version_info(connection) < (
+            12,
+            2,
+        ):
+            return 30
+        else:
+            # use the default
+            return None
+
+    def get_isolation_level_values(self, dbapi_connection):
+        return ["READ COMMITTED", "SERIALIZABLE"]
+
+    def get_default_isolation_level(self, dbapi_conn):
+        try:
+            return self.get_isolation_level(dbapi_conn)
+        except NotImplementedError:
+            raise
+        except:
+            return "READ COMMITTED"
+
+    def _execute_reflection(
+        self, connection, query, dblink, returns_long, params=None
+    ):
+        if dblink and not dblink.startswith("@"):
+            dblink = f"@{dblink}"
+        execution_options = {
+            # handle db links
+            "_oracle_dblink": dblink or "",
+            # override any schema translate map
+            "schema_translate_map": None,
+        }
+
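+        # these execution options are consumed by pre_exec(): the dblink
+        # suffix is substituted into the dictionary queries, and schema
+        # translation is disabled so dictionary names pass through as-is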
+ if dblink and returns_long: + # Oracle seems to error with + # "ORA-00997: illegal use of LONG datatype" when returning + # LONG columns via a dblink in a query with bind params + # This type seems to be very hard to cast into something else + # so it seems easier to just use bind param in this case + def visit_bindparam(bindparam): + bindparam.literal_execute = True + + query = visitors.cloned_traverse( + query, {}, {"bindparam": visit_bindparam} + ) + return connection.execute( + query, params, execution_options=execution_options + ) + + @util.memoized_property + def _has_table_query(self): + # materialized views are returned by all_tables + tables = ( + select( + dictionary.all_tables.c.table_name, + dictionary.all_tables.c.owner, + ) + .union_all( + select( + dictionary.all_views.c.view_name.label("table_name"), + dictionary.all_views.c.owner, + ) + ) + .subquery("tables_and_views") + ) + + query = select(tables.c.table_name).where( + tables.c.table_name == bindparam("table_name"), + tables.c.owner == bindparam("owner"), + ) + return query + + @reflection.cache + def has_table( + self, connection, table_name, schema=None, dblink=None, **kw + ): + """Supported kw arguments are: ``dblink`` to reflect via a db link.""" + self._ensure_has_table_connection(connection) + + if not schema: + schema = self.default_schema_name + + params = { + "table_name": self.denormalize_name(table_name), + "owner": self.denormalize_schema_name(schema), + } + cursor = self._execute_reflection( + connection, + self._has_table_query, + dblink, + returns_long=False, + params=params, + ) + return bool(cursor.scalar()) + + @reflection.cache + def has_sequence( + self, connection, sequence_name, schema=None, dblink=None, **kw + ): + """Supported kw arguments are: ``dblink`` to reflect via a db link.""" + if not schema: + schema = self.default_schema_name + + query = select(dictionary.all_sequences.c.sequence_name).where( + dictionary.all_sequences.c.sequence_name + == self.denormalize_schema_name(sequence_name), + dictionary.all_sequences.c.sequence_owner + == self.denormalize_schema_name(schema), + ) + + cursor = self._execute_reflection( + connection, query, dblink, returns_long=False + ) + return bool(cursor.scalar()) + + def _get_default_schema_name(self, connection): + return self.normalize_name( + connection.exec_driver_sql( + "select sys_context( 'userenv', 'current_schema' ) from dual" + ).scalar() + ) + + def denormalize_schema_name(self, name): + # look for quoted_name + force = getattr(name, "quote", None) + if force is None and name == "public": + # look for case insensitive, no quoting specified, "public" + return "PUBLIC" + return super().denormalize_name(name) + + @reflection.flexi_cache( + ("schema", InternalTraversal.dp_string), + ("filter_names", InternalTraversal.dp_string_list), + ("dblink", InternalTraversal.dp_string), + ) + def _get_synonyms(self, connection, schema, filter_names, dblink, **kw): + owner = self.denormalize_schema_name( + schema or self.default_schema_name + ) + + has_filter_names, params = self._prepare_filter_names(filter_names) + query = select( + dictionary.all_synonyms.c.synonym_name, + dictionary.all_synonyms.c.table_name, + dictionary.all_synonyms.c.table_owner, + dictionary.all_synonyms.c.db_link, + ).where(dictionary.all_synonyms.c.owner == owner) + if has_filter_names: + query = query.where( + dictionary.all_synonyms.c.synonym_name.in_( + params["filter_names"] + ) + ) + result = self._execute_reflection( + connection, query, dblink, returns_long=False + 
).mappings()
+        return result.all()
+
+    @lru_cache()
+    def _all_objects_query(
+        self, owner, scope, kind, has_filter_names, has_mat_views
+    ):
+        query = (
+            select(dictionary.all_objects.c.object_name)
+            .select_from(dictionary.all_objects)
+            .where(dictionary.all_objects.c.owner == owner)
+        )
+
+        # NOTE: materialized views are listed in all_objects twice;
+        # once as MATERIALIZED VIEW and once as TABLE
+        if kind is ObjectKind.ANY:
+            # materialized views are also listed as tables, so there is no
+            # need to add them to the in_.
+            query = query.where(
+                dictionary.all_objects.c.object_type.in_(("TABLE", "VIEW"))
+            )
+        else:
+            object_type = []
+            if ObjectKind.VIEW in kind:
+                object_type.append("VIEW")
+            if (
+                ObjectKind.MATERIALIZED_VIEW in kind
+                and ObjectKind.TABLE not in kind
+            ):
+                # materialized views are also listed as tables, so there is
+                # no need to add them to the in_ if also selecting tables.
+                object_type.append("MATERIALIZED VIEW")
+            if ObjectKind.TABLE in kind:
+                object_type.append("TABLE")
+                if has_mat_views and ObjectKind.MATERIALIZED_VIEW not in kind:
+                    # materialized views are also listed as tables,
+                    # so they need to be filtered out
+                    # EXCEPT ALL / MINUS profiles as faster than using
+                    # NOT EXISTS or NOT IN with a subquery, but it's in
+                    # general faster to get the mat view names and exclude
+                    # them only when needed
+                    query = query.where(
+                        dictionary.all_objects.c.object_name.not_in(
+                            bindparam("mat_views")
+                        )
+                    )
+            query = query.where(
+                dictionary.all_objects.c.object_type.in_(object_type)
+            )
+
+        # handles scope
+        if scope is ObjectScope.DEFAULT:
+            query = query.where(dictionary.all_objects.c.temporary == "N")
+        elif scope is ObjectScope.TEMPORARY:
+            query = query.where(dictionary.all_objects.c.temporary == "Y")
+
+        if has_filter_names:
+            query = query.where(
+                dictionary.all_objects.c.object_name.in_(
+                    bindparam("filter_names")
+                )
+            )
+        return query
+
+    @reflection.flexi_cache(
+        ("schema", InternalTraversal.dp_string),
+        ("scope", InternalTraversal.dp_plain_obj),
+        ("kind", InternalTraversal.dp_plain_obj),
+        ("filter_names", InternalTraversal.dp_string_list),
+        ("dblink", InternalTraversal.dp_string),
+    )
+    def _get_all_objects(
+        self, connection, schema, scope, kind, filter_names, dblink, **kw
+    ):
+        owner = self.denormalize_schema_name(
+            schema or self.default_schema_name
+        )
+
+        has_filter_names, params = self._prepare_filter_names(filter_names)
+        has_mat_views = False
+        if (
+            ObjectKind.TABLE in kind
+            and ObjectKind.MATERIALIZED_VIEW not in kind
+        ):
+            # see note in _all_objects_query
+            mat_views = self.get_materialized_view_names(
+                connection, schema, dblink, _normalize=False, **kw
+            )
+            if mat_views:
+                params["mat_views"] = mat_views
+                has_mat_views = True
+
+        query = self._all_objects_query(
+            owner, scope, kind, has_filter_names, has_mat_views
+        )
+
+        result = self._execute_reflection(
+            connection, query, dblink, returns_long=False, params=params
+        ).scalars()
+
+        return result.all()
+
+    def _handle_synonyms_decorator(fn):
+        @wraps(fn)
+        def wrapper(self, *args, **kwargs):
+            return self._handle_synonyms(fn, *args, **kwargs)
+
+        return wrapper
+
+    def _handle_synonyms(self, fn, connection, *args, **kwargs):
+        if not kwargs.get("oracle_resolve_synonyms", False):
+            return fn(self, connection, *args, **kwargs)
+
+        original_kw = kwargs.copy()
+        schema = kwargs.pop("schema", None)
+        result = self._get_synonyms(
+            connection,
+            schema=schema,
+            filter_names=kwargs.pop("filter_names", None),
+            dblink=kwargs.pop("dblink", None),
+
info_cache=kwargs.get("info_cache", None), + ) + + dblinks_owners = defaultdict(dict) + for row in result: + key = row["db_link"], row["table_owner"] + tn = self.normalize_name(row["table_name"]) + dblinks_owners[key][tn] = row["synonym_name"] + + if not dblinks_owners: + # No synonym, do the plain thing + return fn(self, connection, *args, **original_kw) + + data = {} + for (dblink, table_owner), mapping in dblinks_owners.items(): + call_kw = { + **original_kw, + "schema": table_owner, + "dblink": self.normalize_name(dblink), + "filter_names": mapping.keys(), + } + call_result = fn(self, connection, *args, **call_kw) + for (_, tn), value in call_result: + synonym_name = self.normalize_name(mapping[tn]) + data[(schema, synonym_name)] = value + return data.items() + + @reflection.cache + def get_schema_names(self, connection, dblink=None, **kw): + """Supported kw arguments are: ``dblink`` to reflect via a db link.""" + query = select(dictionary.all_users.c.username).order_by( + dictionary.all_users.c.username + ) + result = self._execute_reflection( + connection, query, dblink, returns_long=False + ).scalars() + return [self.normalize_name(row) for row in result] + + @reflection.cache + def get_table_names(self, connection, schema=None, dblink=None, **kw): + """Supported kw arguments are: ``dblink`` to reflect via a db link.""" + # note that table_names() isn't loading DBLINKed or synonym'ed tables + if schema is None: + schema = self.default_schema_name + + den_schema = self.denormalize_schema_name(schema) + if kw.get("oracle_resolve_synonyms", False): + tables = ( + select( + dictionary.all_tables.c.table_name, + dictionary.all_tables.c.owner, + dictionary.all_tables.c.iot_name, + dictionary.all_tables.c.duration, + dictionary.all_tables.c.tablespace_name, + ) + .union_all( + select( + dictionary.all_synonyms.c.synonym_name.label( + "table_name" + ), + dictionary.all_synonyms.c.owner, + dictionary.all_tables.c.iot_name, + dictionary.all_tables.c.duration, + dictionary.all_tables.c.tablespace_name, + ) + .select_from(dictionary.all_tables) + .join( + dictionary.all_synonyms, + and_( + dictionary.all_tables.c.table_name + == dictionary.all_synonyms.c.table_name, + dictionary.all_tables.c.owner + == func.coalesce( + dictionary.all_synonyms.c.table_owner, + dictionary.all_synonyms.c.owner, + ), + ), + ) + ) + .subquery("available_tables") + ) + else: + tables = dictionary.all_tables + + query = select(tables.c.table_name) + if self.exclude_tablespaces: + query = query.where( + func.coalesce( + tables.c.tablespace_name, "no tablespace" + ).not_in(self.exclude_tablespaces) + ) + query = query.where( + tables.c.owner == den_schema, + tables.c.iot_name.is_(null()), + tables.c.duration.is_(null()), + ) + + # remove materialized views + mat_query = select( + dictionary.all_mviews.c.mview_name.label("table_name") + ).where(dictionary.all_mviews.c.owner == den_schema) + + query = ( + query.except_all(mat_query) + if self._supports_except_all + else query.except_(mat_query) + ) + + result = self._execute_reflection( + connection, query, dblink, returns_long=False + ).scalars() + return [self.normalize_name(row) for row in result] + + @reflection.cache + def get_temp_table_names(self, connection, dblink=None, **kw): + """Supported kw arguments are: ``dblink`` to reflect via a db link.""" + schema = self.denormalize_schema_name(self.default_schema_name) + + query = select(dictionary.all_tables.c.table_name) + if self.exclude_tablespaces: + query = query.where( + func.coalesce( + 
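+                    # a NULL tablespace_name would make NOT IN evaluate to
+                    # UNKNOWN and drop the row; coalesce to a sentinel so
+                    # tables without a tablespace are kept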
dictionary.all_tables.c.tablespace_name, "no tablespace" + ).not_in(self.exclude_tablespaces) + ) + query = query.where( + dictionary.all_tables.c.owner == schema, + dictionary.all_tables.c.iot_name.is_(null()), + dictionary.all_tables.c.duration.is_not(null()), + ) + + result = self._execute_reflection( + connection, query, dblink, returns_long=False + ).scalars() + return [self.normalize_name(row) for row in result] + + @reflection.cache + def get_materialized_view_names( + self, connection, schema=None, dblink=None, _normalize=True, **kw + ): + """Supported kw arguments are: ``dblink`` to reflect via a db link.""" + if not schema: + schema = self.default_schema_name + + query = select(dictionary.all_mviews.c.mview_name).where( + dictionary.all_mviews.c.owner + == self.denormalize_schema_name(schema) + ) + result = self._execute_reflection( + connection, query, dblink, returns_long=False + ).scalars() + if _normalize: + return [self.normalize_name(row) for row in result] + else: + return result.all() + + @reflection.cache + def get_view_names(self, connection, schema=None, dblink=None, **kw): + """Supported kw arguments are: ``dblink`` to reflect via a db link.""" + if not schema: + schema = self.default_schema_name + + query = select(dictionary.all_views.c.view_name).where( + dictionary.all_views.c.owner + == self.denormalize_schema_name(schema) + ) + result = self._execute_reflection( + connection, query, dblink, returns_long=False + ).scalars() + return [self.normalize_name(row) for row in result] + + @reflection.cache + def get_sequence_names(self, connection, schema=None, dblink=None, **kw): + """Supported kw arguments are: ``dblink`` to reflect via a db link.""" + if not schema: + schema = self.default_schema_name + query = select(dictionary.all_sequences.c.sequence_name).where( + dictionary.all_sequences.c.sequence_owner + == self.denormalize_schema_name(schema) + ) + + result = self._execute_reflection( + connection, query, dblink, returns_long=False + ).scalars() + return [self.normalize_name(row) for row in result] + + def _value_or_raise(self, data, table, schema): + table = self.normalize_name(str(table)) + try: + return dict(data)[(schema, table)] + except KeyError: + raise exc.NoSuchTableError( + f"{schema}.{table}" if schema else table + ) from None + + def _prepare_filter_names(self, filter_names): + if filter_names: + fn = [self.denormalize_name(name) for name in filter_names] + return True, {"filter_names": fn} + else: + return False, {} + + @reflection.cache + def get_table_options(self, connection, table_name, schema=None, **kw): + """Supported kw arguments are: ``dblink`` to reflect via a db link; + ``oracle_resolve_synonyms`` to resolve names to synonyms + """ + data = self.get_multi_table_options( + connection, + schema=schema, + filter_names=[table_name], + scope=ObjectScope.ANY, + kind=ObjectKind.ANY, + **kw, + ) + return self._value_or_raise(data, table_name, schema) + + @lru_cache() + def _table_options_query( + self, owner, scope, kind, has_filter_names, has_mat_views + ): + query = select( + dictionary.all_tables.c.table_name, + ( + dictionary.all_tables.c.compression + if self._supports_table_compression + else sql.null().label("compression") + ), + ( + dictionary.all_tables.c.compress_for + if self._supports_table_compress_for + else sql.null().label("compress_for") + ), + ).where(dictionary.all_tables.c.owner == owner) + if has_filter_names: + query = query.where( + dictionary.all_tables.c.table_name.in_( + bindparam("filter_names") + ) + ) + if scope is 
ObjectScope.DEFAULT:
+            query = query.where(dictionary.all_tables.c.duration.is_(null()))
+        elif scope is ObjectScope.TEMPORARY:
+            query = query.where(
+                dictionary.all_tables.c.duration.is_not(null())
+            )
+
+        if (
+            has_mat_views
+            and ObjectKind.TABLE in kind
+            and ObjectKind.MATERIALIZED_VIEW not in kind
+        ):
+            # can't use EXCEPT ALL / MINUS here because we don't have an
+            # excludable row vs. the query above.
+            # outerjoin + where null works better on Oracle 21, but 11 does
+            # not like it at all; this is the next best thing
+
+            query = query.where(
+                dictionary.all_tables.c.table_name.not_in(
+                    bindparam("mat_views")
+                )
+            )
+        elif (
+            ObjectKind.TABLE not in kind
+            and ObjectKind.MATERIALIZED_VIEW in kind
+        ):
+            query = query.where(
+                dictionary.all_tables.c.table_name.in_(bindparam("mat_views"))
+            )
+        return query
+
+    @_handle_synonyms_decorator
+    def get_multi_table_options(
+        self,
+        connection,
+        *,
+        schema,
+        filter_names,
+        scope,
+        kind,
+        dblink=None,
+        **kw,
+    ):
+        """Supported kw arguments are: ``dblink`` to reflect via a db link;
+        ``oracle_resolve_synonyms`` to resolve names to synonyms
+        """
+        owner = self.denormalize_schema_name(
+            schema or self.default_schema_name
+        )
+
+        has_filter_names, params = self._prepare_filter_names(filter_names)
+        has_mat_views = False
+
+        if (
+            ObjectKind.TABLE in kind
+            and ObjectKind.MATERIALIZED_VIEW not in kind
+        ):
+            # see note in _table_options_query
+            mat_views = self.get_materialized_view_names(
+                connection, schema, dblink, _normalize=False, **kw
+            )
+            if mat_views:
+                params["mat_views"] = mat_views
+                has_mat_views = True
+        elif (
+            ObjectKind.TABLE not in kind
+            and ObjectKind.MATERIALIZED_VIEW in kind
+        ):
+            mat_views = self.get_materialized_view_names(
+                connection, schema, dblink, _normalize=False, **kw
+            )
+            params["mat_views"] = mat_views
+
+        options = {}
+        default = ReflectionDefaults.table_options
+
+        if ObjectKind.TABLE in kind or ObjectKind.MATERIALIZED_VIEW in kind:
+            query = self._table_options_query(
+                owner, scope, kind, has_filter_names, has_mat_views
+            )
+            result = self._execute_reflection(
+                connection, query, dblink, returns_long=False, params=params
+            )
+
+            for table, compression, compress_for in result:
+                if compression == "ENABLED":
+                    data = {"oracle_compress": compress_for}
+                else:
+                    data = default()
+                options[(schema, self.normalize_name(table))] = data
+        if ObjectKind.VIEW in kind and ObjectScope.DEFAULT in scope:
+            # add the views (no temporary views)
+            for view in self.get_view_names(connection, schema, dblink, **kw):
+                if not filter_names or view in filter_names:
+                    options[(schema, view)] = default()
+
+        return options.items()
+
+    @reflection.cache
+    def get_columns(self, connection, table_name, schema=None, **kw):
+        """Supported kw arguments are: ``dblink`` to reflect via a db link;
+        ``oracle_resolve_synonyms`` to resolve names to synonyms
+        """
+
+        data = self.get_multi_columns(
+            connection,
+            schema=schema,
+            filter_names=[table_name],
+            scope=ObjectScope.ANY,
+            kind=ObjectKind.ANY,
+            **kw,
+        )
+        return self._value_or_raise(data, table_name, schema)
+
+    def _run_batches(
+        self, connection, query, dblink, returns_long, mappings, all_objects
+    ):
+        each_batch = 500
+        batches = list(all_objects)
+        while batches:
+            batch = batches[0:each_batch]
+            batches[0:each_batch] = []
+
+            result = self._execute_reflection(
+                connection,
+                query,
+                dblink,
+                returns_long=returns_long,
+                params={"all_objects": batch},
+            )
+            if mappings:
+                yield from result.mappings()
+            else:
+                yield from result
+
+    @lru_cache()
+    def 
_column_query(self, owner):
+        all_cols = dictionary.all_tab_cols
+        all_comments = dictionary.all_col_comments
+        all_ids = dictionary.all_tab_identity_cols
+
+        if self.server_version_info >= (12,):
+            add_cols = (
+                all_cols.c.default_on_null,
+                sql.case(
+                    (all_ids.c.table_name.is_(None), sql.null()),
+                    else_=all_ids.c.generation_type
+                    + ","
+                    + all_ids.c.identity_options,
+                ).label("identity_options"),
+            )
+            join_identity_cols = True
+        else:
+            add_cols = (
+                sql.null().label("default_on_null"),
+                sql.null().label("identity_options"),
+            )
+            join_identity_cols = False
+
+        # NOTE: on Oracle one cannot create tables/views without columns,
+        # and a table cannot have all columns hidden:
+        # ORA-54039: table must have at least one column that is not invisible
+        # all_tab_cols returns data for tables/views/mat-views.
+        # all_tab_cols does not return recycled tables
+
+        query = (
+            select(
+                all_cols.c.table_name,
+                all_cols.c.column_name,
+                all_cols.c.data_type,
+                all_cols.c.char_length,
+                all_cols.c.data_precision,
+                all_cols.c.data_scale,
+                all_cols.c.nullable,
+                all_cols.c.data_default,
+                all_comments.c.comments,
+                all_cols.c.virtual_column,
+                *add_cols,
+            ).select_from(all_cols)
+            # NOTE: all_col_comments has a row for each column even if no
+            # comment is present, so a join could be performed, but there
+            # seems to be no difference compared to an outer join
+            .outerjoin(
+                all_comments,
+                and_(
+                    all_cols.c.table_name == all_comments.c.table_name,
+                    all_cols.c.column_name == all_comments.c.column_name,
+                    all_cols.c.owner == all_comments.c.owner,
+                ),
+            )
+        )
+        if join_identity_cols:
+            query = query.outerjoin(
+                all_ids,
+                and_(
+                    all_cols.c.table_name == all_ids.c.table_name,
+                    all_cols.c.column_name == all_ids.c.column_name,
+                    all_cols.c.owner == all_ids.c.owner,
+                ),
+            )
+
+        query = query.where(
+            all_cols.c.table_name.in_(bindparam("all_objects")),
+            all_cols.c.hidden_column == "NO",
+            all_cols.c.owner == owner,
+        ).order_by(all_cols.c.table_name, all_cols.c.column_id)
+        return query
+
+    @_handle_synonyms_decorator
+    def get_multi_columns(
+        self,
+        connection,
+        *,
+        schema,
+        filter_names,
+        scope,
+        kind,
+        dblink=None,
+        **kw,
+    ):
+        """Supported kw arguments are: ``dblink`` to reflect via a db link;
+        ``oracle_resolve_synonyms`` to resolve names to synonyms
+        """
+        owner = self.denormalize_schema_name(
+            schema or self.default_schema_name
+        )
+        query = self._column_query(owner)
+
+        if (
+            filter_names
+            and kind is ObjectKind.ANY
+            and scope is ObjectScope.ANY
+        ):
+            all_objects = [self.denormalize_name(n) for n in filter_names]
+        else:
+            all_objects = self._get_all_objects(
+                connection, schema, scope, kind, filter_names, dblink, **kw
+            )
+
+        columns = defaultdict(list)
+
+        # all_tab_cols.data_default is LONG
+        result = self._run_batches(
+            connection,
+            query,
+            dblink,
+            returns_long=True,
+            mappings=True,
+            all_objects=all_objects,
+        )
+
+        def maybe_int(value):
+            if isinstance(value, float) and value.is_integer():
+                return int(value)
+            else:
+                return value
+
+        remove_size = re.compile(r"\(\d+\)")
+
+        for row_dict in result:
+            table_name = self.normalize_name(row_dict["table_name"])
+            orig_colname = row_dict["column_name"]
+            colname = self.normalize_name(orig_colname)
+            coltype = row_dict["data_type"]
+            precision = maybe_int(row_dict["data_precision"])
+
+            if coltype == "NUMBER":
+                scale = maybe_int(row_dict["data_scale"])
+                if precision is None and scale == 0:
+                    coltype = INTEGER()
+                else:
+                    coltype = NUMBER(precision, scale)
+            elif coltype == "FLOAT":
+                # 
https://docs.oracle.com/cd/B14117_01/server.101/b10758/sqlqr06.htm + if precision == 126: + # The DOUBLE PRECISION datatype is a floating-point + # number with binary precision 126. + coltype = DOUBLE_PRECISION() + elif precision == 63: + # The REAL datatype is a floating-point number with a + # binary precision of 63, or 18 decimal. + coltype = REAL() + else: + # non standard precision + coltype = FLOAT(binary_precision=precision) + + elif coltype in ("VARCHAR2", "NVARCHAR2", "CHAR", "NCHAR"): + char_length = maybe_int(row_dict["char_length"]) + coltype = self.ischema_names.get(coltype)(char_length) + elif "WITH TIME ZONE" in coltype: + coltype = TIMESTAMP(timezone=True) + elif "WITH LOCAL TIME ZONE" in coltype: + coltype = TIMESTAMP(local_timezone=True) + else: + coltype = re.sub(remove_size, "", coltype) + try: + coltype = self.ischema_names[coltype] + except KeyError: + util.warn( + "Did not recognize type '%s' of column '%s'" + % (coltype, colname) + ) + coltype = sqltypes.NULLTYPE + + default = row_dict["data_default"] + if row_dict["virtual_column"] == "YES": + computed = dict(sqltext=default) + default = None + else: + computed = None + + identity_options = row_dict["identity_options"] + if identity_options is not None: + identity = self._parse_identity_options( + identity_options, row_dict["default_on_null"] + ) + default = None + else: + identity = None + + cdict = { + "name": colname, + "type": coltype, + "nullable": row_dict["nullable"] == "Y", + "default": default, + "comment": row_dict["comments"], + } + if orig_colname.lower() == orig_colname: + cdict["quote"] = True + if computed is not None: + cdict["computed"] = computed + if identity is not None: + cdict["identity"] = identity + + columns[(schema, table_name)].append(cdict) + + # NOTE: default not needed since all tables have columns + # default = ReflectionDefaults.columns + # return ( + # (key, value if value else default()) + # for key, value in columns.items() + # ) + return columns.items() + + def _parse_identity_options(self, identity_options, default_on_null): + # identity_options is a string that starts with 'ALWAYS,' or + # 'BY DEFAULT,' and continues with + # START WITH: 1, INCREMENT BY: 1, MAX_VALUE: 123, MIN_VALUE: 1, + # CYCLE_FLAG: N, CACHE_SIZE: 1, ORDER_FLAG: N, SCALE_FLAG: N, + # EXTEND_FLAG: N, SESSION_FLAG: N, KEEP_VALUE: N + parts = [p.strip() for p in identity_options.split(",")] + identity = { + "always": parts[0] == "ALWAYS", + "on_null": default_on_null == "YES", + } + + for part in parts[1:]: + option, value = part.split(":") + value = value.strip() + + if "START WITH" in option: + identity["start"] = int(value) + elif "INCREMENT BY" in option: + identity["increment"] = int(value) + elif "MAX_VALUE" in option: + identity["maxvalue"] = int(value) + elif "MIN_VALUE" in option: + identity["minvalue"] = int(value) + elif "CYCLE_FLAG" in option: + identity["cycle"] = value == "Y" + elif "CACHE_SIZE" in option: + identity["cache"] = int(value) + elif "ORDER_FLAG" in option: + identity["order"] = value == "Y" + return identity + + @reflection.cache + def get_table_comment(self, connection, table_name, schema=None, **kw): + """Supported kw arguments are: ``dblink`` to reflect via a db link; + ``oracle_resolve_synonyms`` to resolve names to synonyms + """ + data = self.get_multi_table_comment( + connection, + schema=schema, + filter_names=[table_name], + scope=ObjectScope.ANY, + kind=ObjectKind.ANY, + **kw, + ) + return self._value_or_raise(data, table_name, schema) + + @lru_cache() + def 
_comment_query(self, owner, scope, kind, has_filter_names):
+        # NOTE: all_tab_comments / all_mview_comments have a row for all
+        # objects even if they don't have comments
+        queries = []
+        if ObjectKind.TABLE in kind or ObjectKind.VIEW in kind:
+            # all_tab_comments returns also plain views
+            tbl_view = select(
+                dictionary.all_tab_comments.c.table_name,
+                dictionary.all_tab_comments.c.comments,
+            ).where(
+                dictionary.all_tab_comments.c.owner == owner,
+                dictionary.all_tab_comments.c.table_name.not_like("BIN$%"),
+            )
+            if ObjectKind.VIEW not in kind:
+                tbl_view = tbl_view.where(
+                    dictionary.all_tab_comments.c.table_type == "TABLE"
+                )
+            elif ObjectKind.TABLE not in kind:
+                tbl_view = tbl_view.where(
+                    dictionary.all_tab_comments.c.table_type == "VIEW"
+                )
+            queries.append(tbl_view)
+        if ObjectKind.MATERIALIZED_VIEW in kind:
+            mat_view = select(
+                dictionary.all_mview_comments.c.mview_name.label("table_name"),
+                dictionary.all_mview_comments.c.comments,
+            ).where(
+                dictionary.all_mview_comments.c.owner == owner,
+                dictionary.all_mview_comments.c.mview_name.not_like("BIN$%"),
+            )
+            queries.append(mat_view)
+        if len(queries) == 1:
+            query = queries[0]
+        else:
+            union = sql.union_all(*queries).subquery("tables_and_views")
+            query = select(union.c.table_name, union.c.comments)
+
+        name_col = query.selected_columns.table_name
+
+        if scope in (ObjectScope.DEFAULT, ObjectScope.TEMPORARY):
+            temp = "Y" if scope is ObjectScope.TEMPORARY else "N"
+            # need distinct since materialized views are also listed
+            # as tables in all_objects
+            query = query.distinct().join(
+                dictionary.all_objects,
+                and_(
+                    dictionary.all_objects.c.owner == owner,
+                    dictionary.all_objects.c.object_name == name_col,
+                    dictionary.all_objects.c.temporary == temp,
+                ),
+            )
+        if has_filter_names:
+            query = query.where(name_col.in_(bindparam("filter_names")))
+        return query
+
+    @_handle_synonyms_decorator
+    def get_multi_table_comment(
+        self,
+        connection,
+        *,
+        schema,
+        filter_names,
+        scope,
+        kind,
+        dblink=None,
+        **kw,
+    ):
+        """Supported kw arguments are: ``dblink`` to reflect via a db link;
+        ``oracle_resolve_synonyms`` to resolve names to synonyms
+        """
+        owner = self.denormalize_schema_name(
+            schema or self.default_schema_name
+        )
+        has_filter_names, params = self._prepare_filter_names(filter_names)
+        query = self._comment_query(owner, scope, kind, has_filter_names)
+
+        result = self._execute_reflection(
+            connection, query, dblink, returns_long=False, params=params
+        )
+        default = ReflectionDefaults.table_comment
+        # materialized views by default seem to have a comment like
+        # "snapshot table for snapshot owner.mat_view_name"
+        ignore_mat_view = "snapshot table for snapshot "
+        return (
+            (
+                (schema, self.normalize_name(table)),
+                (
+                    {"text": comment}
+                    if comment is not None
+                    and not comment.startswith(ignore_mat_view)
+                    else default()
+                ),
+            )
+            for table, comment in result
+        )
+
+    @reflection.cache
+    def get_indexes(self, connection, table_name, schema=None, **kw):
+        """Supported kw arguments are: ``dblink`` to reflect via a db link;
+        ``oracle_resolve_synonyms`` to resolve names to synonyms
+        """
+        data = self.get_multi_indexes(
+            connection,
+            schema=schema,
+            filter_names=[table_name],
+            scope=ObjectScope.ANY,
+            kind=ObjectKind.ANY,
+            **kw,
+        )
+        return self._value_or_raise(data, table_name, schema)
+
+    @lru_cache()
+    def _index_query(self, owner):
+        return (
+            select(
+                dictionary.all_ind_columns.c.table_name,
+                dictionary.all_ind_columns.c.index_name,
+                dictionary.all_ind_columns.c.column_name,
+
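+                # per-index attributes come from the all_indexes join below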
dictionary.all_indexes.c.index_type, + dictionary.all_indexes.c.uniqueness, + dictionary.all_indexes.c.compression, + dictionary.all_indexes.c.prefix_length, + dictionary.all_ind_columns.c.descend, + dictionary.all_ind_expressions.c.column_expression, + ) + .select_from(dictionary.all_ind_columns) + .join( + dictionary.all_indexes, + sql.and_( + dictionary.all_ind_columns.c.index_name + == dictionary.all_indexes.c.index_name, + dictionary.all_ind_columns.c.index_owner + == dictionary.all_indexes.c.owner, + ), + ) + .outerjoin( + # NOTE: this adds about 20% to the query time. Using a + # case expression with a scalar subquery only when needed + # with the assumption that most indexes are not expression + # would be faster but oracle does not like that with + # LONG datatype. It errors with: + # ORA-00997: illegal use of LONG datatype + dictionary.all_ind_expressions, + sql.and_( + dictionary.all_ind_expressions.c.index_name + == dictionary.all_ind_columns.c.index_name, + dictionary.all_ind_expressions.c.index_owner + == dictionary.all_ind_columns.c.index_owner, + dictionary.all_ind_expressions.c.column_position + == dictionary.all_ind_columns.c.column_position, + ), + ) + .where( + dictionary.all_indexes.c.table_owner == owner, + dictionary.all_indexes.c.table_name.in_( + bindparam("all_objects") + ), + ) + .order_by( + dictionary.all_ind_columns.c.index_name, + dictionary.all_ind_columns.c.column_position, + ) + ) + + @reflection.flexi_cache( + ("schema", InternalTraversal.dp_string), + ("dblink", InternalTraversal.dp_string), + ("all_objects", InternalTraversal.dp_string_list), + ) + def _get_indexes_rows(self, connection, schema, dblink, all_objects, **kw): + owner = self.denormalize_schema_name( + schema or self.default_schema_name + ) + + query = self._index_query(owner) + + pks = { + row_dict["constraint_name"] + for row_dict in self._get_all_constraint_rows( + connection, schema, dblink, all_objects, **kw + ) + if row_dict["constraint_type"] == "P" + } + + # all_ind_expressions.column_expression is LONG + result = self._run_batches( + connection, + query, + dblink, + returns_long=True, + mappings=True, + all_objects=all_objects, + ) + + return [ + row_dict + for row_dict in result + if row_dict["index_name"] not in pks + ] + + @_handle_synonyms_decorator + def get_multi_indexes( + self, + connection, + *, + schema, + filter_names, + scope, + kind, + dblink=None, + **kw, + ): + """Supported kw arguments are: ``dblink`` to reflect via a db link; + ``oracle_resolve_synonyms`` to resolve names to synonyms + """ + all_objects = self._get_all_objects( + connection, schema, scope, kind, filter_names, dblink, **kw + ) + + uniqueness = {"NONUNIQUE": False, "UNIQUE": True} + enabled = {"DISABLED": False, "ENABLED": True} + is_bitmap = {"BITMAP", "FUNCTION-BASED BITMAP"} + + indexes = defaultdict(dict) + + for row_dict in self._get_indexes_rows( + connection, schema, dblink, all_objects, **kw + ): + index_name = self.normalize_name(row_dict["index_name"]) + table_name = self.normalize_name(row_dict["table_name"]) + table_indexes = indexes[(schema, table_name)] + + if index_name not in table_indexes: + table_indexes[index_name] = index_dict = { + "name": index_name, + "column_names": [], + "dialect_options": {}, + "unique": uniqueness.get(row_dict["uniqueness"], False), + } + do = index_dict["dialect_options"] + if row_dict["index_type"] in is_bitmap: + do["oracle_bitmap"] = True + if enabled.get(row_dict["compression"], False): + do["oracle_compress"] = row_dict["prefix_length"] + + else: + 
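+                # a subsequent row for an index already seen; it adds
+                # another column to the entry built above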
index_dict = table_indexes[index_name] + + expr = row_dict["column_expression"] + if expr is not None: + index_dict["column_names"].append(None) + if "expressions" in index_dict: + index_dict["expressions"].append(expr) + else: + index_dict["expressions"] = index_dict["column_names"][:-1] + index_dict["expressions"].append(expr) + + if row_dict["descend"].lower() != "asc": + assert row_dict["descend"].lower() == "desc" + cs = index_dict.setdefault("column_sorting", {}) + cs[expr] = ("desc",) + else: + assert row_dict["descend"].lower() == "asc" + cn = self.normalize_name(row_dict["column_name"]) + index_dict["column_names"].append(cn) + if "expressions" in index_dict: + index_dict["expressions"].append(cn) + + default = ReflectionDefaults.indexes + + return ( + (key, list(indexes[key].values()) if key in indexes else default()) + for key in ( + (schema, self.normalize_name(obj_name)) + for obj_name in all_objects + ) + ) + + @reflection.cache + def get_pk_constraint(self, connection, table_name, schema=None, **kw): + """Supported kw arguments are: ``dblink`` to reflect via a db link; + ``oracle_resolve_synonyms`` to resolve names to synonyms + """ + data = self.get_multi_pk_constraint( + connection, + schema=schema, + filter_names=[table_name], + scope=ObjectScope.ANY, + kind=ObjectKind.ANY, + **kw, + ) + return self._value_or_raise(data, table_name, schema) + + @lru_cache() + def _constraint_query(self, owner): + local = dictionary.all_cons_columns.alias("local") + remote = dictionary.all_cons_columns.alias("remote") + return ( + select( + dictionary.all_constraints.c.table_name, + dictionary.all_constraints.c.constraint_type, + dictionary.all_constraints.c.constraint_name, + local.c.column_name.label("local_column"), + remote.c.table_name.label("remote_table"), + remote.c.column_name.label("remote_column"), + remote.c.owner.label("remote_owner"), + dictionary.all_constraints.c.search_condition, + dictionary.all_constraints.c.delete_rule, + ) + .select_from(dictionary.all_constraints) + .join( + local, + and_( + local.c.owner == dictionary.all_constraints.c.owner, + dictionary.all_constraints.c.constraint_name + == local.c.constraint_name, + ), + ) + .outerjoin( + remote, + and_( + dictionary.all_constraints.c.r_owner == remote.c.owner, + dictionary.all_constraints.c.r_constraint_name + == remote.c.constraint_name, + or_( + remote.c.position.is_(sql.null()), + local.c.position == remote.c.position, + ), + ), + ) + .where( + dictionary.all_constraints.c.owner == owner, + dictionary.all_constraints.c.table_name.in_( + bindparam("all_objects") + ), + dictionary.all_constraints.c.constraint_type.in_( + ("R", "P", "U", "C") + ), + ) + .order_by( + dictionary.all_constraints.c.constraint_name, local.c.position + ) + ) + + @reflection.flexi_cache( + ("schema", InternalTraversal.dp_string), + ("dblink", InternalTraversal.dp_string), + ("all_objects", InternalTraversal.dp_string_list), + ) + def _get_all_constraint_rows( + self, connection, schema, dblink, all_objects, **kw + ): + owner = self.denormalize_schema_name( + schema or self.default_schema_name + ) + query = self._constraint_query(owner) + + # since the result is cached a list must be created + values = list( + self._run_batches( + connection, + query, + dblink, + returns_long=False, + mappings=True, + all_objects=all_objects, + ) + ) + return values + + @_handle_synonyms_decorator + def get_multi_pk_constraint( + self, + connection, + *, + scope, + schema, + filter_names, + kind, + dblink=None, + **kw, + ): + """Supported kw arguments 
are: ``dblink`` to reflect via a db link; + ``oracle_resolve_synonyms`` to resolve names to synonyms + """ + all_objects = self._get_all_objects( + connection, schema, scope, kind, filter_names, dblink, **kw + ) + + primary_keys = defaultdict(dict) + default = ReflectionDefaults.pk_constraint + + for row_dict in self._get_all_constraint_rows( + connection, schema, dblink, all_objects, **kw + ): + if row_dict["constraint_type"] != "P": + continue + table_name = self.normalize_name(row_dict["table_name"]) + constraint_name = self.normalize_name(row_dict["constraint_name"]) + column_name = self.normalize_name(row_dict["local_column"]) + + table_pk = primary_keys[(schema, table_name)] + if not table_pk: + table_pk["name"] = constraint_name + table_pk["constrained_columns"] = [column_name] + else: + table_pk["constrained_columns"].append(column_name) + + return ( + (key, primary_keys[key] if key in primary_keys else default()) + for key in ( + (schema, self.normalize_name(obj_name)) + for obj_name in all_objects + ) + ) + + @reflection.cache + def get_foreign_keys( + self, + connection, + table_name, + schema=None, + **kw, + ): + """Supported kw arguments are: ``dblink`` to reflect via a db link; + ``oracle_resolve_synonyms`` to resolve names to synonyms + """ + data = self.get_multi_foreign_keys( + connection, + schema=schema, + filter_names=[table_name], + scope=ObjectScope.ANY, + kind=ObjectKind.ANY, + **kw, + ) + return self._value_or_raise(data, table_name, schema) + + @_handle_synonyms_decorator + def get_multi_foreign_keys( + self, + connection, + *, + scope, + schema, + filter_names, + kind, + dblink=None, + **kw, + ): + """Supported kw arguments are: ``dblink`` to reflect via a db link; + ``oracle_resolve_synonyms`` to resolve names to synonyms + """ + all_objects = self._get_all_objects( + connection, schema, scope, kind, filter_names, dblink, **kw + ) + + resolve_synonyms = kw.get("oracle_resolve_synonyms", False) + + owner = self.denormalize_schema_name( + schema or self.default_schema_name + ) + + all_remote_owners = set() + fkeys = defaultdict(dict) + + for row_dict in self._get_all_constraint_rows( + connection, schema, dblink, all_objects, **kw + ): + if row_dict["constraint_type"] != "R": + continue + + table_name = self.normalize_name(row_dict["table_name"]) + constraint_name = self.normalize_name(row_dict["constraint_name"]) + table_fkey = fkeys[(schema, table_name)] + + assert constraint_name is not None + + local_column = self.normalize_name(row_dict["local_column"]) + remote_table = self.normalize_name(row_dict["remote_table"]) + remote_column = self.normalize_name(row_dict["remote_column"]) + remote_owner_orig = row_dict["remote_owner"] + remote_owner = self.normalize_name(remote_owner_orig) + if remote_owner_orig is not None: + all_remote_owners.add(remote_owner_orig) + + if remote_table is None: + # ticket 363 + if dblink and not dblink.startswith("@"): + dblink = f"@{dblink}" + util.warn( + "Got 'None' querying 'table_name' from " + f"all_cons_columns{dblink or ''} - does the user have " + "proper rights to the table?" 
+ ) + continue + + if constraint_name not in table_fkey: + table_fkey[constraint_name] = fkey = { + "name": constraint_name, + "constrained_columns": [], + "referred_schema": None, + "referred_table": remote_table, + "referred_columns": [], + "options": {}, + } + + if resolve_synonyms: + # will be removed below + fkey["_ref_schema"] = remote_owner + + if schema is not None or remote_owner_orig != owner: + fkey["referred_schema"] = remote_owner + + delete_rule = row_dict["delete_rule"] + if delete_rule != "NO ACTION": + fkey["options"]["ondelete"] = delete_rule + + else: + fkey = table_fkey[constraint_name] + + fkey["constrained_columns"].append(local_column) + fkey["referred_columns"].append(remote_column) + + if resolve_synonyms and all_remote_owners: + query = select( + dictionary.all_synonyms.c.owner, + dictionary.all_synonyms.c.table_name, + dictionary.all_synonyms.c.table_owner, + dictionary.all_synonyms.c.synonym_name, + ).where(dictionary.all_synonyms.c.owner.in_(all_remote_owners)) + + result = self._execute_reflection( + connection, query, dblink, returns_long=False + ).mappings() + + remote_owners_lut = {} + for row in result: + synonym_owner = self.normalize_name(row["owner"]) + table_name = self.normalize_name(row["table_name"]) + + remote_owners_lut[(synonym_owner, table_name)] = ( + row["table_owner"], + row["synonym_name"], + ) + + empty = (None, None) + for table_fkeys in fkeys.values(): + for table_fkey in table_fkeys.values(): + key = ( + table_fkey.pop("_ref_schema"), + table_fkey["referred_table"], + ) + remote_owner, syn_name = remote_owners_lut.get(key, empty) + if syn_name: + sn = self.normalize_name(syn_name) + table_fkey["referred_table"] = sn + if schema is not None or remote_owner != owner: + ro = self.normalize_name(remote_owner) + table_fkey["referred_schema"] = ro + else: + table_fkey["referred_schema"] = None + default = ReflectionDefaults.foreign_keys + + return ( + (key, list(fkeys[key].values()) if key in fkeys else default()) + for key in ( + (schema, self.normalize_name(obj_name)) + for obj_name in all_objects + ) + ) + + @reflection.cache + def get_unique_constraints( + self, connection, table_name, schema=None, **kw + ): + """Supported kw arguments are: ``dblink`` to reflect via a db link; + ``oracle_resolve_synonyms`` to resolve names to synonyms + """ + data = self.get_multi_unique_constraints( + connection, + schema=schema, + filter_names=[table_name], + scope=ObjectScope.ANY, + kind=ObjectKind.ANY, + **kw, + ) + return self._value_or_raise(data, table_name, schema) + + @_handle_synonyms_decorator + def get_multi_unique_constraints( + self, + connection, + *, + scope, + schema, + filter_names, + kind, + dblink=None, + **kw, + ): + """Supported kw arguments are: ``dblink`` to reflect via a db link; + ``oracle_resolve_synonyms`` to resolve names to synonyms + """ + all_objects = self._get_all_objects( + connection, schema, scope, kind, filter_names, dblink, **kw + ) + + unique_cons = defaultdict(dict) + + index_names = { + row_dict["index_name"] + for row_dict in self._get_indexes_rows( + connection, schema, dblink, all_objects, **kw + ) + } + + for row_dict in self._get_all_constraint_rows( + connection, schema, dblink, all_objects, **kw + ): + if row_dict["constraint_type"] != "U": + continue + table_name = self.normalize_name(row_dict["table_name"]) + constraint_name_orig = row_dict["constraint_name"] + constraint_name = self.normalize_name(constraint_name_orig) + column_name = self.normalize_name(row_dict["local_column"]) + table_uc = 
unique_cons[(schema, table_name)] + + assert constraint_name is not None + + if constraint_name not in table_uc: + table_uc[constraint_name] = uc = { + "name": constraint_name, + "column_names": [], + "duplicates_index": ( + constraint_name + if constraint_name_orig in index_names + else None + ), + } + else: + uc = table_uc[constraint_name] + + uc["column_names"].append(column_name) + + default = ReflectionDefaults.unique_constraints + + return ( + ( + key, + ( + list(unique_cons[key].values()) + if key in unique_cons + else default() + ), + ) + for key in ( + (schema, self.normalize_name(obj_name)) + for obj_name in all_objects + ) + ) + + @reflection.cache + def get_view_definition( + self, + connection, + view_name, + schema=None, + dblink=None, + **kw, + ): + """Supported kw arguments are: ``dblink`` to reflect via a db link; + ``oracle_resolve_synonyms`` to resolve names to synonyms + """ + if kw.get("oracle_resolve_synonyms", False): + synonyms = self._get_synonyms( + connection, schema, filter_names=[view_name], dblink=dblink + ) + if synonyms: + assert len(synonyms) == 1 + row_dict = synonyms[0] + dblink = self.normalize_name(row_dict["db_link"]) + schema = row_dict["table_owner"] + view_name = row_dict["table_name"] + + name = self.denormalize_name(view_name) + owner = self.denormalize_schema_name( + schema or self.default_schema_name + ) + query = ( + select(dictionary.all_views.c.text) + .where( + dictionary.all_views.c.view_name == name, + dictionary.all_views.c.owner == owner, + ) + .union_all( + select(dictionary.all_mviews.c.query).where( + dictionary.all_mviews.c.mview_name == name, + dictionary.all_mviews.c.owner == owner, + ) + ) + ) + + rp = self._execute_reflection( + connection, query, dblink, returns_long=False + ).scalar() + if rp is None: + raise exc.NoSuchTableError( + f"{schema}.{view_name}" if schema else view_name + ) + else: + return rp + + @reflection.cache + def get_check_constraints( + self, connection, table_name, schema=None, include_all=False, **kw + ): + """Supported kw arguments are: ``dblink`` to reflect via a db link; + ``oracle_resolve_synonyms`` to resolve names to synonyms + """ + data = self.get_multi_check_constraints( + connection, + schema=schema, + filter_names=[table_name], + scope=ObjectScope.ANY, + include_all=include_all, + kind=ObjectKind.ANY, + **kw, + ) + return self._value_or_raise(data, table_name, schema) + + @_handle_synonyms_decorator + def get_multi_check_constraints( + self, + connection, + *, + schema, + filter_names, + dblink=None, + scope, + kind, + include_all=False, + **kw, + ): + """Supported kw arguments are: ``dblink`` to reflect via a db link; + ``oracle_resolve_synonyms`` to resolve names to synonyms + """ + all_objects = self._get_all_objects( + connection, schema, scope, kind, filter_names, dblink, **kw + ) + + not_null = re.compile(r"..+?. 
IS NOT NULL$") + + check_constraints = defaultdict(list) + + for row_dict in self._get_all_constraint_rows( + connection, schema, dblink, all_objects, **kw + ): + if row_dict["constraint_type"] != "C": + continue + table_name = self.normalize_name(row_dict["table_name"]) + constraint_name = self.normalize_name(row_dict["constraint_name"]) + search_condition = row_dict["search_condition"] + + table_checks = check_constraints[(schema, table_name)] + if constraint_name is not None and ( + include_all or not not_null.match(search_condition) + ): + table_checks.append( + {"name": constraint_name, "sqltext": search_condition} + ) + + default = ReflectionDefaults.check_constraints + + return ( + ( + key, + ( + check_constraints[key] + if key in check_constraints + else default() + ), + ) + for key in ( + (schema, self.normalize_name(obj_name)) + for obj_name in all_objects + ) + ) + + def _list_dblinks(self, connection, dblink=None): + query = select(dictionary.all_db_links.c.db_link) + links = self._execute_reflection( + connection, query, dblink, returns_long=False + ).scalars() + return [self.normalize_name(link) for link in links] + + +class _OuterJoinColumn(sql.ClauseElement): + __visit_name__ = "outer_join_column" + + def __init__(self, column): + self.column = column diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/cx_oracle.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/cx_oracle.py new file mode 100644 index 00000000..ed9b02d3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/cx_oracle.py @@ -0,0 +1,1483 @@ +# dialects/oracle/cx_oracle.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +r""" +.. dialect:: oracle+cx_oracle + :name: cx-Oracle + :dbapi: cx_oracle + :connectstring: oracle+cx_oracle://user:pass@hostname:port[/dbname][?service_name=[&key=value&key=value...]] + :url: https://oracle.github.io/python-cx_Oracle/ + +DSN vs. Hostname connections +----------------------------- + +cx_Oracle provides several methods of indicating the target database. The +dialect translates from a series of different URL forms. + +Hostname Connections with Easy Connect Syntax +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Given a hostname, port and service name of the target Oracle Database, for +example from Oracle's `Easy Connect syntax +`_, +then connect in SQLAlchemy using the ``service_name`` query string parameter:: + + engine = create_engine("oracle+cx_oracle://scott:tiger@hostname:port/?service_name=myservice&encoding=UTF-8&nencoding=UTF-8") + +The `full Easy Connect syntax +`_ +is not supported. Instead, use a ``tnsnames.ora`` file and connect using a +DSN. + +Connections with tnsnames.ora or Oracle Cloud +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Alternatively, if no port, database name, or ``service_name`` is provided, the +dialect will use an Oracle DSN "connection string". This takes the "hostname" +portion of the URL as the data source name. 
For example, if the
+``tnsnames.ora`` file contains a `Net Service Name
+`_
+of ``myalias`` as below::
+
+    myalias =
+      (DESCRIPTION =
+        (ADDRESS = (PROTOCOL = TCP)(HOST = mymachine.example.com)(PORT = 1521))
+        (CONNECT_DATA =
+          (SERVER = DEDICATED)
+          (SERVICE_NAME = orclpdb1)
+        )
+      )
+
+The cx_Oracle dialect connects to this database service when ``myalias`` is the
+hostname portion of the URL, without specifying a port, database name or
+``service_name``::
+
+    engine = create_engine("oracle+cx_oracle://scott:tiger@myalias/?encoding=UTF-8&nencoding=UTF-8")
+
+Users of Oracle Cloud should use this syntax and also configure the cloud
+wallet as shown in cx_Oracle documentation `Connecting to Autonomous Databases
+`_.
+
+SID Connections
+^^^^^^^^^^^^^^^
+
+To use Oracle's obsolete SID connection syntax, the SID can be passed in the
+"database name" portion of the URL as below::
+
+    engine = create_engine("oracle+cx_oracle://scott:tiger@hostname:1521/dbname?encoding=UTF-8&nencoding=UTF-8")
+
+Above, the DSN passed to cx_Oracle is created by ``cx_Oracle.makedsn()`` as
+follows::
+
+    >>> import cx_Oracle
+    >>> cx_Oracle.makedsn("hostname", 1521, sid="dbname")
+    '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=hostname)(PORT=1521))(CONNECT_DATA=(SID=dbname)))'
+
+Passing cx_Oracle connect arguments
+-----------------------------------
+
+Additional connection arguments can usually be passed via the URL
+query string; particular symbols like ``cx_Oracle.SYSDBA`` are intercepted
+and converted to the correct symbol::
+
+    e = create_engine(
+        "oracle+cx_oracle://user:pass@dsn?encoding=UTF-8&nencoding=UTF-8&mode=SYSDBA&events=true")
+
+.. versionchanged:: 1.3 the cx_oracle dialect now accepts all argument names
+   within the URL string itself, to be passed to the cx_Oracle DBAPI.  As
+   was the case earlier but not correctly documented, the
+   :paramref:`_sa.create_engine.connect_args` parameter also accepts all
+   cx_Oracle DBAPI connect arguments.
+
+To pass arguments directly to ``.connect()`` without using the query
+string, use the :paramref:`_sa.create_engine.connect_args` dictionary.
+Any cx_Oracle parameter value and/or constant may be passed, such as::
+
+    import cx_Oracle
+
+    e = create_engine(
+        "oracle+cx_oracle://user:pass@dsn",
+        connect_args={
+            "encoding": "UTF-8",
+            "nencoding": "UTF-8",
+            "mode": cx_Oracle.SYSDBA,
+            "events": True
+        }
+    )
+
+Note that the default value for ``encoding`` and ``nencoding`` was changed to
+"UTF-8" in cx_Oracle 8.0, so these parameters can be omitted when using that
+version or later.
+
+Options consumed by the SQLAlchemy cx_Oracle dialect outside of the driver
+--------------------------------------------------------------------------
+
+There are also options that are consumed by the SQLAlchemy cx_oracle dialect
+itself.  These options are always passed directly to :func:`_sa.create_engine`,
+such as::
+
+    e = create_engine(
+        "oracle+cx_oracle://user:pass@dsn", coerce_to_decimal=False)
+
+The parameters accepted by the cx_oracle dialect are as follows:
+
+* ``arraysize`` - set the cx_oracle.arraysize value on cursors; defaults
+  to ``None``, indicating that the driver default should be used (typically
+  the value is 100).  This setting controls how many rows are buffered when
+  fetching rows, and can have a significant effect on performance when
+  modified.  The setting is used for both ``cx_Oracle`` as well as
+  ``oracledb``.
+
+  .. versionchanged:: 2.0.26 - changed the default value from 50 to None,
+     to use the default value of the driver itself.
+ +* ``auto_convert_lobs`` - defaults to True; See :ref:`cx_oracle_lob`. + +* ``coerce_to_decimal`` - see :ref:`cx_oracle_numeric` for detail. + +* ``encoding_errors`` - see :ref:`cx_oracle_unicode_encoding_errors` for detail. + +.. _cx_oracle_sessionpool: + +Using cx_Oracle SessionPool +--------------------------- + +The cx_Oracle library provides its own connection pool implementation that may +be used in place of SQLAlchemy's pooling functionality. This can be achieved +by using the :paramref:`_sa.create_engine.creator` parameter to provide a +function that returns a new connection, along with setting +:paramref:`_sa.create_engine.pool_class` to ``NullPool`` to disable +SQLAlchemy's pooling:: + + import cx_Oracle + from sqlalchemy import create_engine + from sqlalchemy.pool import NullPool + + pool = cx_Oracle.SessionPool( + user="scott", password="tiger", dsn="orclpdb", + min=2, max=5, increment=1, threaded=True, + encoding="UTF-8", nencoding="UTF-8" + ) + + engine = create_engine("oracle+cx_oracle://", creator=pool.acquire, poolclass=NullPool) + +The above engine may then be used normally where cx_Oracle's pool handles +connection pooling:: + + with engine.connect() as conn: + print(conn.scalar("select 1 FROM dual")) + + +As well as providing a scalable solution for multi-user applications, the +cx_Oracle session pool supports some Oracle features such as DRCP and +`Application Continuity +`_. + +Using Oracle Database Resident Connection Pooling (DRCP) +-------------------------------------------------------- + +When using Oracle's `DRCP +`_, +the best practice is to pass a connection class and "purity" when acquiring a +connection from the SessionPool. Refer to the `cx_Oracle DRCP documentation +`_. + +This can be achieved by wrapping ``pool.acquire()``:: + + import cx_Oracle + from sqlalchemy import create_engine + from sqlalchemy.pool import NullPool + + pool = cx_Oracle.SessionPool( + user="scott", password="tiger", dsn="orclpdb", + min=2, max=5, increment=1, threaded=True, + encoding="UTF-8", nencoding="UTF-8" + ) + + def creator(): + return pool.acquire(cclass="MYCLASS", purity=cx_Oracle.ATTR_PURITY_SELF) + + engine = create_engine("oracle+cx_oracle://", creator=creator, poolclass=NullPool) + +The above engine may then be used normally where cx_Oracle handles session +pooling and Oracle Database additionally uses DRCP:: + + with engine.connect() as conn: + print(conn.scalar("select 1 FROM dual")) + +.. _cx_oracle_unicode: + +Unicode +------- + +As is the case for all DBAPIs under Python 3, all strings are inherently +Unicode strings. In all cases however, the driver requires an explicit +encoding configuration. + +Ensuring the Correct Client Encoding +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The long accepted standard for establishing client encoding for nearly all +Oracle related software is via the `NLS_LANG `_ +environment variable. cx_Oracle like most other Oracle drivers will use +this environment variable as the source of its encoding configuration. The +format of this variable is idiosyncratic; a typical value would be +``AMERICAN_AMERICA.AL32UTF8``. + +The cx_Oracle driver also supports a programmatic alternative which is to +pass the ``encoding`` and ``nencoding`` parameters directly to its +``.connect()`` function. 
These can be present in the URL as follows:: + + engine = create_engine("oracle+cx_oracle://scott:tiger@orclpdb/?encoding=UTF-8&nencoding=UTF-8") + +For the meaning of the ``encoding`` and ``nencoding`` parameters, please +consult +`Characters Sets and National Language Support (NLS) `_. + +.. seealso:: + + `Characters Sets and National Language Support (NLS) `_ + - in the cx_Oracle documentation. + + +Unicode-specific Column datatypes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The Core expression language handles unicode data by use of the :class:`.Unicode` +and :class:`.UnicodeText` +datatypes. These types correspond to the VARCHAR2 and CLOB Oracle datatypes by +default. When using these datatypes with Unicode data, it is expected that +the Oracle database is configured with a Unicode-aware character set, as well +as that the ``NLS_LANG`` environment variable is set appropriately, so that +the VARCHAR2 and CLOB datatypes can accommodate the data. + +In the case that the Oracle database is not configured with a Unicode character +set, the two options are to use the :class:`_types.NCHAR` and +:class:`_oracle.NCLOB` datatypes explicitly, or to pass the flag +``use_nchar_for_unicode=True`` to :func:`_sa.create_engine`, +which will cause the +SQLAlchemy dialect to use NCHAR/NCLOB for the :class:`.Unicode` / +:class:`.UnicodeText` datatypes instead of VARCHAR/CLOB. + +.. versionchanged:: 1.3 The :class:`.Unicode` and :class:`.UnicodeText` + datatypes now correspond to the ``VARCHAR2`` and ``CLOB`` Oracle datatypes + unless the ``use_nchar_for_unicode=True`` is passed to the dialect + when :func:`_sa.create_engine` is called. + + +.. _cx_oracle_unicode_encoding_errors: + +Encoding Errors +^^^^^^^^^^^^^^^ + +For the unusual case that data in the Oracle database is present with a broken +encoding, the dialect accepts a parameter ``encoding_errors`` which will be +passed to Unicode decoding functions in order to affect how decoding errors are +handled. The value is ultimately consumed by the Python `decode +`_ function, and +is passed both via cx_Oracle's ``encodingErrors`` parameter consumed by +``Cursor.var()``, as well as SQLAlchemy's own decoding function, as the +cx_Oracle dialect makes use of both under different circumstances. + +.. versionadded:: 1.3.11 + + +.. _cx_oracle_setinputsizes: + +Fine grained control over cx_Oracle data binding performance with setinputsizes +------------------------------------------------------------------------------- + +The cx_Oracle DBAPI has a deep and fundamental reliance upon the usage of the +DBAPI ``setinputsizes()`` call. The purpose of this call is to establish the +datatypes that are bound to a SQL statement for Python values being passed as +parameters. While virtually no other DBAPI assigns any use to the +``setinputsizes()`` call, the cx_Oracle DBAPI relies upon it heavily in its +interactions with the Oracle client interface, and in some scenarios it is not +possible for SQLAlchemy to know exactly how data should be bound, as some +settings can cause profoundly different performance characteristics, while +altering the type coercion behavior at the same time. + +Users of the cx_Oracle dialect are **strongly encouraged** to read through +cx_Oracle's list of built-in datatype symbols at +https://cx-oracle.readthedocs.io/en/latest/api_manual/module.html#database-types. +Note that in some cases, significant performance degradation can occur when +using these types vs. not, in particular when specifying ``cx_Oracle.CLOB``. 
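+
+For illustration, here is a minimal sketch of what the equivalent call looks
+like at the raw DBAPI level, outside of SQLAlchemy; the connection ``conn``,
+table ``t`` and bind name ``data`` are hypothetical, not part of the original
+documentation::
+
+    import cx_Oracle
+
+    # assuming ``conn`` is an existing cx_Oracle connection and table ``t``
+    # has a CLOB column named ``data``
+    cursor = conn.cursor()
+
+    # declare the bind named "data" as a CLOB before executing, so that
+    # the driver allocates a LOB buffer for it
+    cursor.setinputsizes(data=cx_Oracle.CLOB)
+    cursor.execute(
+        "INSERT INTO t (data) VALUES (:data)",
+        {"data": "some large text value"},
+    )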
+
+On the SQLAlchemy side, the :meth:`.DialectEvents.do_setinputsizes` event can
+be used both for runtime visibility (e.g. logging) of the setinputsizes step as
+well as to fully control how ``setinputsizes()`` is used on a per-statement
+basis.
+
+.. versionadded:: 1.2.9 Added :meth:`.DialectEvents.do_setinputsizes`
+
+
+Example 1 - logging all setinputsizes calls
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The following example illustrates how to log the intermediary values from a
+SQLAlchemy perspective before they are converted to the raw ``setinputsizes()``
+parameter dictionary.  The keys of the dictionary are :class:`.BindParameter`
+objects which have a ``.key`` and a ``.type`` attribute::
+
+    import logging
+
+    from sqlalchemy import create_engine, event
+
+    log = logging.getLogger(__name__)
+
+    engine = create_engine("oracle+cx_oracle://scott:tiger@host/xe")
+
+    @event.listens_for(engine, "do_setinputsizes")
+    def _log_setinputsizes(inputsizes, cursor, statement, parameters, context):
+        for bindparam, dbapitype in inputsizes.items():
+            log.info(
+                "Bound parameter name: %s  SQLAlchemy type: %r  "
+                "DBAPI object: %s",
+                bindparam.key, bindparam.type, dbapitype)
+
+Example 2 - remove all bindings to CLOB
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ``CLOB`` datatype in cx_Oracle incurs a significant performance overhead,
+however it is set by default for the ``Text`` type within the SQLAlchemy 1.2
+series.  This setting can be modified as follows::
+
+    from sqlalchemy import create_engine, event
+    from cx_Oracle import CLOB
+
+    engine = create_engine("oracle+cx_oracle://scott:tiger@host/xe")
+
+    @event.listens_for(engine, "do_setinputsizes")
+    def _remove_clob(inputsizes, cursor, statement, parameters, context):
+        for bindparam, dbapitype in list(inputsizes.items()):
+            if dbapitype is CLOB:
+                del inputsizes[bindparam]
+
+.. _cx_oracle_returning:
+
+RETURNING Support
+-----------------
+
+The cx_Oracle dialect implements RETURNING using OUT parameters.
+RETURNING is fully supported.
+
+.. _cx_oracle_lob:
+
+LOB Datatypes
+-------------
+
+LOB datatypes refer to the "large object" datatypes such as CLOB, NCLOB and
+BLOB.  Modern versions of cx_Oracle and oracledb are optimized for these
+datatypes to be delivered as a single buffer.  As such, SQLAlchemy makes use of
+these newer type handlers by default.
+
+To disable the use of newer type handlers and deliver LOB objects as classic
+buffered objects with a ``read()`` method, the parameter
+``auto_convert_lobs=False`` may be passed to :func:`_sa.create_engine`;
+this setting applies engine-wide only.
+
+Two Phase Transactions Not Supported (use oracledb)
+---------------------------------------------------
+
+Two phase transactions are **not supported** under cx_Oracle due to poor driver
+support.  The newer :ref:`oracledb` dialect however **does** support two phase
+transactions and should be preferred.
+
+.. _cx_oracle_numeric:
+
+Precision Numerics
+------------------
+
+SQLAlchemy's numeric types can handle receiving and returning values as Python
+``Decimal`` objects or float objects.  When a :class:`.Numeric` object, or a
+subclass such as :class:`.Float`, :class:`_oracle.DOUBLE_PRECISION` etc. is in
+use, the :paramref:`.Numeric.asdecimal` flag determines if values should be
+coerced to ``Decimal`` upon return, or returned as float objects.  To make
+matters more complicated under Oracle, Oracle's ``NUMBER`` type can also
+represent integer values if the "scale" is zero, so the Oracle-specific
+:class:`_oracle.NUMBER` type takes this into account as well.
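+
+As a brief illustration of the ``asdecimal`` behavior described above (a
+sketch only; the table and column names here are hypothetical)::
+
+    from sqlalchemy import Column, Integer, MetaData, Numeric, Table
+
+    metadata = MetaData()
+    t = Table(
+        "t",
+        metadata,
+        Column("id", Integer, primary_key=True),
+        # asdecimal defaults to True; values return as decimal.Decimal
+        Column("price", Numeric(10, 2)),
+        # asdecimal=False; values return as plain float
+        Column("ratio", Numeric(10, 2, asdecimal=False)),
+    )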
+
+The cx_Oracle dialect makes extensive use of connection- and cursor-level
+"outputtypehandler" callables in order to coerce numeric values as requested.
+These callables are specific to the particular flavor of :class:`.Numeric` in
+use, as well as to the case where no SQLAlchemy typing objects are present.
+There are observed scenarios where Oracle may send incomplete or ambiguous
+information about the numeric types being returned, such as a query where the
+numeric types are buried under multiple levels of subquery.  The type handlers
+do their best to make the right decision in all cases, deferring to the
+underlying cx_Oracle DBAPI for all those cases where the driver can make the
+best decision.
+
+When no typing objects are present, as when executing plain SQL strings, a
+default "outputtypehandler" is present which will generally return numeric
+values which specify precision and scale as Python ``Decimal`` objects.  To
+disable this coercion to decimal for performance reasons, pass the flag
+``coerce_to_decimal=False`` to :func:`_sa.create_engine`::
+
+    engine = create_engine("oracle+cx_oracle://dsn", coerce_to_decimal=False)
+
+The ``coerce_to_decimal`` flag only impacts the results of plain string
+SQL statements that are not otherwise associated with a :class:`.Numeric`
+SQLAlchemy type (or a subclass of such).
+
+.. versionchanged:: 1.2  The numeric handling system for cx_Oracle has been
+   reworked to take advantage of newer cx_Oracle features as well
+   as better integration of outputtypehandlers.
+
+"""  # noqa
+from __future__ import annotations
+
+import decimal
+import random
+import re
+
+from . import base as oracle
+from .base import OracleCompiler
+from .base import OracleDialect
+from .base import OracleExecutionContext
+from .types import _OracleDateLiteralRender
+from ... import exc
+from ...
import util +from ...engine import cursor as _cursor +from ...engine import interfaces +from ...engine import processors +from ...sql import sqltypes +from ...sql._typing import is_sql_compiler + +# source: +# https://github.com/oracle/python-cx_Oracle/issues/596#issuecomment-999243649 +_CX_ORACLE_MAGIC_LOB_SIZE = 131072 + + +class _OracleInteger(sqltypes.Integer): + def get_dbapi_type(self, dbapi): + # see https://github.com/oracle/python-cx_Oracle/issues/ + # 208#issuecomment-409715955 + return int + + def _cx_oracle_var(self, dialect, cursor, arraysize=None): + cx_Oracle = dialect.dbapi + return cursor.var( + cx_Oracle.STRING, + 255, + arraysize=arraysize if arraysize is not None else cursor.arraysize, + outconverter=int, + ) + + def _cx_oracle_outputtypehandler(self, dialect): + def handler(cursor, name, default_type, size, precision, scale): + return self._cx_oracle_var(dialect, cursor) + + return handler + + +class _OracleNumeric(sqltypes.Numeric): + is_number = False + + def bind_processor(self, dialect): + if self.scale == 0: + return None + elif self.asdecimal: + processor = processors.to_decimal_processor_factory( + decimal.Decimal, self._effective_decimal_return_scale + ) + + def process(value): + if isinstance(value, (int, float)): + return processor(value) + elif value is not None and value.is_infinite(): + return float(value) + else: + return value + + return process + else: + return processors.to_float + + def result_processor(self, dialect, coltype): + return None + + def _cx_oracle_outputtypehandler(self, dialect): + cx_Oracle = dialect.dbapi + + def handler(cursor, name, default_type, size, precision, scale): + outconverter = None + + if precision: + if self.asdecimal: + if default_type == cx_Oracle.NATIVE_FLOAT: + # receiving float and doing Decimal after the fact + # allows for float("inf") to be handled + type_ = default_type + outconverter = decimal.Decimal + else: + type_ = decimal.Decimal + else: + if self.is_number and scale == 0: + # integer. cx_Oracle is observed to handle the widest + # variety of ints when no directives are passed, + # from 5.2 to 7.0. See [ticket:4457] + return None + else: + type_ = cx_Oracle.NATIVE_FLOAT + + else: + if self.asdecimal: + if default_type == cx_Oracle.NATIVE_FLOAT: + type_ = default_type + outconverter = decimal.Decimal + else: + type_ = decimal.Decimal + else: + if self.is_number and scale == 0: + # integer. cx_Oracle is observed to handle the widest + # variety of ints when no directives are passed, + # from 5.2 to 7.0. 
See [ticket:4457] + return None + else: + type_ = cx_Oracle.NATIVE_FLOAT + + return cursor.var( + type_, + 255, + arraysize=cursor.arraysize, + outconverter=outconverter, + ) + + return handler + + +class _OracleUUID(sqltypes.Uuid): + def get_dbapi_type(self, dbapi): + return dbapi.STRING + + +class _OracleBinaryFloat(_OracleNumeric): + def get_dbapi_type(self, dbapi): + return dbapi.NATIVE_FLOAT + + +class _OracleBINARY_FLOAT(_OracleBinaryFloat, oracle.BINARY_FLOAT): + pass + + +class _OracleBINARY_DOUBLE(_OracleBinaryFloat, oracle.BINARY_DOUBLE): + pass + + +class _OracleNUMBER(_OracleNumeric): + is_number = True + + +class _CXOracleDate(oracle._OracleDate): + def bind_processor(self, dialect): + return None + + def result_processor(self, dialect, coltype): + def process(value): + if value is not None: + return value.date() + else: + return value + + return process + + +class _CXOracleTIMESTAMP(_OracleDateLiteralRender, sqltypes.TIMESTAMP): + def literal_processor(self, dialect): + return self._literal_processor_datetime(dialect) + + +class _LOBDataType: + pass + + +# TODO: the names used across CHAR / VARCHAR / NCHAR / NVARCHAR +# here are inconsistent and not very good +class _OracleChar(sqltypes.CHAR): + def get_dbapi_type(self, dbapi): + return dbapi.FIXED_CHAR + + +class _OracleNChar(sqltypes.NCHAR): + def get_dbapi_type(self, dbapi): + return dbapi.FIXED_NCHAR + + +class _OracleUnicodeStringNCHAR(oracle.NVARCHAR2): + def get_dbapi_type(self, dbapi): + return dbapi.NCHAR + + +class _OracleUnicodeStringCHAR(sqltypes.Unicode): + def get_dbapi_type(self, dbapi): + return dbapi.LONG_STRING + + +class _OracleUnicodeTextNCLOB(_LOBDataType, oracle.NCLOB): + def get_dbapi_type(self, dbapi): + # previously, this was dbapi.NCLOB. + # DB_TYPE_NVARCHAR will instead be passed to setinputsizes() + # when this datatype is used. + return dbapi.DB_TYPE_NVARCHAR + + +class _OracleUnicodeTextCLOB(_LOBDataType, sqltypes.UnicodeText): + def get_dbapi_type(self, dbapi): + # previously, this was dbapi.CLOB. + # DB_TYPE_NVARCHAR will instead be passed to setinputsizes() + # when this datatype is used. + return dbapi.DB_TYPE_NVARCHAR + + +class _OracleText(_LOBDataType, sqltypes.Text): + def get_dbapi_type(self, dbapi): + # previously, this was dbapi.CLOB. + # DB_TYPE_NVARCHAR will instead be passed to setinputsizes() + # when this datatype is used. + return dbapi.DB_TYPE_NVARCHAR + + +class _OracleLong(_LOBDataType, oracle.LONG): + def get_dbapi_type(self, dbapi): + return dbapi.LONG_STRING + + +class _OracleString(sqltypes.String): + pass + + +class _OracleEnum(sqltypes.Enum): + def bind_processor(self, dialect): + enum_proc = sqltypes.Enum.bind_processor(self, dialect) + + def process(value): + raw_str = enum_proc(value) + return raw_str + + return process + + +class _OracleBinary(_LOBDataType, sqltypes.LargeBinary): + def get_dbapi_type(self, dbapi): + # previously, this was dbapi.BLOB. + # DB_TYPE_RAW will instead be passed to setinputsizes() + # when this datatype is used. 
+ return dbapi.DB_TYPE_RAW + + def bind_processor(self, dialect): + return None + + def result_processor(self, dialect, coltype): + if not dialect.auto_convert_lobs: + return None + else: + return super().result_processor(dialect, coltype) + + +class _OracleInterval(oracle.INTERVAL): + def get_dbapi_type(self, dbapi): + return dbapi.INTERVAL + + +class _OracleRaw(oracle.RAW): + pass + + +class _OracleRowid(oracle.ROWID): + def get_dbapi_type(self, dbapi): + return dbapi.ROWID + + +class OracleCompiler_cx_oracle(OracleCompiler): + _oracle_cx_sql_compiler = True + + _oracle_returning = False + + # Oracle bind names can't start with digits or underscores. + # currently we rely upon Oracle-specific quoting of bind names in most + # cases. however for expanding params, the escape chars are used. + # see #8708 + bindname_escape_characters = util.immutabledict( + { + "%": "P", + "(": "A", + ")": "Z", + ":": "C", + ".": "C", + "[": "C", + "]": "C", + " ": "C", + "\\": "C", + "/": "C", + "?": "C", + } + ) + + def bindparam_string(self, name, **kw): + quote = getattr(name, "quote", None) + if ( + quote is True + or quote is not False + and self.preparer._bindparam_requires_quotes(name) + # bind param quoting for Oracle doesn't work with post_compile + # params. For those, the default bindparam_string will escape + # special chars, and the appending of a number "_1" etc. will + # take care of reserved words + and not kw.get("post_compile", False) + ): + # interesting to note about expanding parameters - since the + # new parameters take the form _, at least if + # they are originally formed from reserved words, they no longer + # need quoting :). names that include illegal characters + # won't work however. + quoted_name = '"%s"' % name + kw["escaped_from"] = name + name = quoted_name + return OracleCompiler.bindparam_string(self, name, **kw) + + # TODO: we could likely do away with quoting altogether for + # Oracle parameters and use the custom escaping here + escaped_from = kw.get("escaped_from", None) + if not escaped_from: + if self._bind_translate_re.search(name): + # not quite the translate use case as we want to + # also get a quick boolean if we even found + # unusual characters in the name + new_name = self._bind_translate_re.sub( + lambda m: self._bind_translate_chars[m.group(0)], + name, + ) + if new_name[0].isdigit() or new_name[0] == "_": + new_name = "D" + new_name + kw["escaped_from"] = name + name = new_name + elif name[0].isdigit() or name[0] == "_": + new_name = "D" + name + kw["escaped_from"] = name + name = new_name + + return OracleCompiler.bindparam_string(self, name, **kw) + + +class OracleExecutionContext_cx_oracle(OracleExecutionContext): + out_parameters = None + + def _generate_out_parameter_vars(self): + # check for has_out_parameters or RETURNING, create cx_Oracle.var + # objects if so + if self.compiled.has_out_parameters or self.compiled._oracle_returning: + out_parameters = self.out_parameters + assert out_parameters is not None + + len_params = len(self.parameters) + + quoted_bind_names = self.compiled.escaped_bind_names + for bindparam in self.compiled.binds.values(): + if bindparam.isoutparam: + name = self.compiled.bind_names[bindparam] + type_impl = bindparam.type.dialect_impl(self.dialect) + + if hasattr(type_impl, "_cx_oracle_var"): + out_parameters[name] = type_impl._cx_oracle_var( + self.dialect, self.cursor, arraysize=len_params + ) + else: + dbtype = type_impl.get_dbapi_type(self.dialect.dbapi) + + cx_Oracle = self.dialect.dbapi + + assert cx_Oracle is not 
None + + if dbtype is None: + raise exc.InvalidRequestError( + "Cannot create out parameter for " + "parameter " + "%r - its type %r is not supported by" + " cx_oracle" % (bindparam.key, bindparam.type) + ) + + # note this is an OUT parameter. Using + # non-LOB datavalues with large unicode-holding + # values causes the failure (both cx_Oracle and + # oracledb): + # ORA-22835: Buffer too small for CLOB to CHAR or + # BLOB to RAW conversion (actual: 16507, + # maximum: 4000) + # [SQL: INSERT INTO long_text (x, y, z) VALUES + # (:x, :y, :z) RETURNING long_text.x, long_text.y, + # long_text.z INTO :ret_0, :ret_1, :ret_2] + # so even for DB_TYPE_NVARCHAR we convert to a LOB + + if isinstance(type_impl, _LOBDataType): + if dbtype == cx_Oracle.DB_TYPE_NVARCHAR: + dbtype = cx_Oracle.NCLOB + elif dbtype == cx_Oracle.DB_TYPE_RAW: + dbtype = cx_Oracle.BLOB + # other LOB types go in directly + + out_parameters[name] = self.cursor.var( + dbtype, + # this is fine also in oracledb_async since + # the driver will await the read coroutine + outconverter=lambda value: value.read(), + arraysize=len_params, + ) + elif ( + isinstance(type_impl, _OracleNumeric) + and type_impl.asdecimal + ): + out_parameters[name] = self.cursor.var( + decimal.Decimal, + arraysize=len_params, + ) + + else: + out_parameters[name] = self.cursor.var( + dbtype, arraysize=len_params + ) + + for param in self.parameters: + param[quoted_bind_names.get(name, name)] = ( + out_parameters[name] + ) + + def _generate_cursor_outputtype_handler(self): + output_handlers = {} + + for keyname, name, objects, type_ in self.compiled._result_columns: + handler = type_._cached_custom_processor( + self.dialect, + "cx_oracle_outputtypehandler", + self._get_cx_oracle_type_handler, + ) + + if handler: + denormalized_name = self.dialect.denormalize_name(keyname) + output_handlers[denormalized_name] = handler + + if output_handlers: + default_handler = self._dbapi_connection.outputtypehandler + + def output_type_handler( + cursor, name, default_type, size, precision, scale + ): + if name in output_handlers: + return output_handlers[name]( + cursor, name, default_type, size, precision, scale + ) + else: + return default_handler( + cursor, name, default_type, size, precision, scale + ) + + self.cursor.outputtypehandler = output_type_handler + + def _get_cx_oracle_type_handler(self, impl): + if hasattr(impl, "_cx_oracle_outputtypehandler"): + return impl._cx_oracle_outputtypehandler(self.dialect) + else: + return None + + def pre_exec(self): + super().pre_exec() + if not getattr(self.compiled, "_oracle_cx_sql_compiler", False): + return + + self.out_parameters = {} + + self._generate_out_parameter_vars() + + self._generate_cursor_outputtype_handler() + + def post_exec(self): + if ( + self.compiled + and is_sql_compiler(self.compiled) + and self.compiled._oracle_returning + ): + initial_buffer = self.fetchall_for_returning( + self.cursor, _internal=True + ) + + fetch_strategy = _cursor.FullyBufferedCursorFetchStrategy( + self.cursor, + [ + (entry.keyname, None) + for entry in self.compiled._result_columns + ], + initial_buffer=initial_buffer, + ) + + self.cursor_fetch_strategy = fetch_strategy + + def create_cursor(self): + c = self._dbapi_connection.cursor() + if self.dialect.arraysize: + c.arraysize = self.dialect.arraysize + + return c + + def fetchall_for_returning(self, cursor, *, _internal=False): + compiled = self.compiled + if ( + not _internal + and compiled is None + or not is_sql_compiler(compiled) + or not compiled._oracle_returning + ): + raise 
NotImplementedError( + "execution context was not prepared for Oracle RETURNING" + ) + + # create a fake cursor result from the out parameters. unlike + # get_out_parameter_values(), the result-row handlers here will be + # applied at the Result level + + numcols = len(self.out_parameters) + + # [stmt_result for stmt_result in outparam.values] == each + # statement in executemany + # [val for val in stmt_result] == each row for a particular + # statement + return list( + zip( + *[ + [ + val + for stmt_result in self.out_parameters[ + f"ret_{j}" + ].values + for val in (stmt_result or ()) + ] + for j in range(numcols) + ] + ) + ) + + def get_out_parameter_values(self, out_param_names): + # this method should not be called when the compiler has + # RETURNING as we've turned the has_out_parameters flag set to + # False. + assert not self.compiled.returning + + return [ + self.dialect._paramval(self.out_parameters[name]) + for name in out_param_names + ] + + +class OracleDialect_cx_oracle(OracleDialect): + supports_statement_cache = True + execution_ctx_cls = OracleExecutionContext_cx_oracle + statement_compiler = OracleCompiler_cx_oracle + + supports_sane_rowcount = True + supports_sane_multi_rowcount = True + + insert_executemany_returning = True + insert_executemany_returning_sort_by_parameter_order = True + update_executemany_returning = True + delete_executemany_returning = True + + bind_typing = interfaces.BindTyping.SETINPUTSIZES + + driver = "cx_oracle" + + colspecs = util.update_copy( + OracleDialect.colspecs, + { + sqltypes.TIMESTAMP: _CXOracleTIMESTAMP, + sqltypes.Numeric: _OracleNumeric, + sqltypes.Float: _OracleNumeric, + oracle.BINARY_FLOAT: _OracleBINARY_FLOAT, + oracle.BINARY_DOUBLE: _OracleBINARY_DOUBLE, + sqltypes.Integer: _OracleInteger, + oracle.NUMBER: _OracleNUMBER, + sqltypes.Date: _CXOracleDate, + sqltypes.LargeBinary: _OracleBinary, + sqltypes.Boolean: oracle._OracleBoolean, + sqltypes.Interval: _OracleInterval, + oracle.INTERVAL: _OracleInterval, + sqltypes.Text: _OracleText, + sqltypes.String: _OracleString, + sqltypes.UnicodeText: _OracleUnicodeTextCLOB, + sqltypes.CHAR: _OracleChar, + sqltypes.NCHAR: _OracleNChar, + sqltypes.Enum: _OracleEnum, + oracle.LONG: _OracleLong, + oracle.RAW: _OracleRaw, + sqltypes.Unicode: _OracleUnicodeStringCHAR, + sqltypes.NVARCHAR: _OracleUnicodeStringNCHAR, + sqltypes.Uuid: _OracleUUID, + oracle.NCLOB: _OracleUnicodeTextNCLOB, + oracle.ROWID: _OracleRowid, + }, + ) + + execute_sequence_format = list + + _cx_oracle_threaded = None + + _cursor_var_unicode_kwargs = util.immutabledict() + + @util.deprecated_params( + threaded=( + "1.3", + "The 'threaded' parameter to the cx_oracle/oracledb dialect " + "is deprecated as a dialect-level argument, and will be removed " + "in a future release. As of version 1.3, it defaults to False " + "rather than True. 
The 'threaded' option can be passed to " + "cx_Oracle directly in the URL query string passed to " + ":func:`_sa.create_engine`.", + ) + ) + def __init__( + self, + auto_convert_lobs=True, + coerce_to_decimal=True, + arraysize=None, + encoding_errors=None, + threaded=None, + **kwargs, + ): + OracleDialect.__init__(self, **kwargs) + self.arraysize = arraysize + self.encoding_errors = encoding_errors + if encoding_errors: + self._cursor_var_unicode_kwargs = { + "encodingErrors": encoding_errors + } + if threaded is not None: + self._cx_oracle_threaded = threaded + self.auto_convert_lobs = auto_convert_lobs + self.coerce_to_decimal = coerce_to_decimal + if self._use_nchar_for_unicode: + self.colspecs = self.colspecs.copy() + self.colspecs[sqltypes.Unicode] = _OracleUnicodeStringNCHAR + self.colspecs[sqltypes.UnicodeText] = _OracleUnicodeTextNCLOB + + dbapi_module = self.dbapi + self._load_version(dbapi_module) + + if dbapi_module is not None: + # these constants will first be seen in SQLAlchemy datatypes + # coming from the get_dbapi_type() method. We then + # will place the following types into setinputsizes() calls + # on each statement. Oracle constants that are not in this + # list will not be put into setinputsizes(). + self.include_set_input_sizes = { + dbapi_module.DATETIME, + dbapi_module.DB_TYPE_NVARCHAR, # used for CLOB, NCLOB + dbapi_module.DB_TYPE_RAW, # used for BLOB + dbapi_module.NCLOB, # not currently used except for OUT param + dbapi_module.CLOB, # not currently used except for OUT param + dbapi_module.LOB, # not currently used + dbapi_module.BLOB, # not currently used except for OUT param + dbapi_module.NCHAR, + dbapi_module.FIXED_NCHAR, + dbapi_module.FIXED_CHAR, + dbapi_module.TIMESTAMP, + int, # _OracleInteger, + # _OracleBINARY_FLOAT, _OracleBINARY_DOUBLE, + dbapi_module.NATIVE_FLOAT, + } + + self._paramval = lambda value: value.getvalue() + + def _load_version(self, dbapi_module): + version = (0, 0, 0) + if dbapi_module is not None: + m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", dbapi_module.version) + if m: + version = tuple( + int(x) for x in m.group(1, 2, 3) if x is not None + ) + self.cx_oracle_ver = version + if self.cx_oracle_ver < (8,) and self.cx_oracle_ver > (0, 0, 0): + raise exc.InvalidRequestError( + "cx_Oracle version 8 and above are supported" + ) + + @classmethod + def import_dbapi(cls): + import cx_Oracle + + return cx_Oracle + + def initialize(self, connection): + super().initialize(connection) + self._detect_decimal_char(connection) + + def get_isolation_level(self, dbapi_connection): + # sources: + + # general idea of transaction id, have to start one, etc. + # https://stackoverflow.com/questions/10711204/how-to-check-isoloation-level + + # how to decode xid cols from v$transaction to match + # https://asktom.oracle.com/pls/apex/f?p=100:11:0::::P11_QUESTION_ID:9532779900346079444 + + # Oracle tuple comparison without using IN: + # https://www.sql-workbench.eu/comparison/tuple_comparison.html + + with dbapi_connection.cursor() as cursor: + # this is the only way to ensure a transaction is started without + # actually running DML. There's no way to see the configured + # isolation level without getting it from v$transaction which + # means transaction has to be started. 
+ outval = cursor.var(str) + cursor.execute( + """ + begin + :trans_id := dbms_transaction.local_transaction_id( TRUE ); + end; + """, + {"trans_id": outval}, + ) + trans_id = outval.getvalue() + xidusn, xidslot, xidsqn = trans_id.split(".", 2) + + cursor.execute( + "SELECT CASE BITAND(t.flag, POWER(2, 28)) " + "WHEN 0 THEN 'READ COMMITTED' " + "ELSE 'SERIALIZABLE' END AS isolation_level " + "FROM v$transaction t WHERE " + "(t.xidusn, t.xidslot, t.xidsqn) = " + "((:xidusn, :xidslot, :xidsqn))", + {"xidusn": xidusn, "xidslot": xidslot, "xidsqn": xidsqn}, + ) + row = cursor.fetchone() + if row is None: + raise exc.InvalidRequestError( + "could not retrieve isolation level" + ) + result = row[0] + + return result + + def get_isolation_level_values(self, dbapi_connection): + return super().get_isolation_level_values(dbapi_connection) + [ + "AUTOCOMMIT" + ] + + def set_isolation_level(self, dbapi_connection, level): + if level == "AUTOCOMMIT": + dbapi_connection.autocommit = True + else: + dbapi_connection.autocommit = False + dbapi_connection.rollback() + with dbapi_connection.cursor() as cursor: + cursor.execute(f"ALTER SESSION SET ISOLATION_LEVEL={level}") + + def _detect_decimal_char(self, connection): + # we have the option to change this setting upon connect, + # or just look at what it is upon connect and convert. + # to minimize the chance of interference with changes to + # NLS_TERRITORY or formatting behavior of the DB, we opt + # to just look at it + + dbapi_connection = connection.connection + + with dbapi_connection.cursor() as cursor: + # issue #8744 + # nls_session_parameters is not available in some Oracle + # modes like "mount mode". But then, v$nls_parameters is not + # available if the connection doesn't have SYSDBA priv. + # + # simplify the whole thing and just use the method that we were + # doing in the test suite already, selecting a number + + def output_type_handler( + cursor, name, defaultType, size, precision, scale + ): + return cursor.var( + self.dbapi.STRING, 255, arraysize=cursor.arraysize + ) + + cursor.outputtypehandler = output_type_handler + cursor.execute("SELECT 1.1 FROM DUAL") + value = cursor.fetchone()[0] + + decimal_char = value.lstrip("0")[1] + assert not decimal_char[0].isdigit() + + self._decimal_char = decimal_char + + if self._decimal_char != ".": + _detect_decimal = self._detect_decimal + _to_decimal = self._to_decimal + + self._detect_decimal = lambda value: _detect_decimal( + value.replace(self._decimal_char, ".") + ) + self._to_decimal = lambda value: _to_decimal( + value.replace(self._decimal_char, ".") + ) + + def _detect_decimal(self, value): + if "." in value: + return self._to_decimal(value) + else: + return int(value) + + _to_decimal = decimal.Decimal + + def _generate_connection_outputtype_handler(self): + """establish the default outputtypehandler established at the + connection level. 
+ + """ + + dialect = self + cx_Oracle = dialect.dbapi + + number_handler = _OracleNUMBER( + asdecimal=True + )._cx_oracle_outputtypehandler(dialect) + float_handler = _OracleNUMBER( + asdecimal=False + )._cx_oracle_outputtypehandler(dialect) + + def output_type_handler( + cursor, name, default_type, size, precision, scale + ): + if ( + default_type == cx_Oracle.NUMBER + and default_type is not cx_Oracle.NATIVE_FLOAT + ): + if not dialect.coerce_to_decimal: + return None + elif precision == 0 and scale in (0, -127): + # ambiguous type, this occurs when selecting + # numbers from deep subqueries + return cursor.var( + cx_Oracle.STRING, + 255, + outconverter=dialect._detect_decimal, + arraysize=cursor.arraysize, + ) + elif precision and scale > 0: + return number_handler( + cursor, name, default_type, size, precision, scale + ) + else: + return float_handler( + cursor, name, default_type, size, precision, scale + ) + + # if unicode options were specified, add a decoder, otherwise + # cx_Oracle should return Unicode + elif ( + dialect._cursor_var_unicode_kwargs + and default_type + in ( + cx_Oracle.STRING, + cx_Oracle.FIXED_CHAR, + ) + and default_type is not cx_Oracle.CLOB + and default_type is not cx_Oracle.NCLOB + ): + return cursor.var( + str, + size, + cursor.arraysize, + **dialect._cursor_var_unicode_kwargs, + ) + + elif dialect.auto_convert_lobs and default_type in ( + cx_Oracle.CLOB, + cx_Oracle.NCLOB, + ): + return cursor.var( + cx_Oracle.DB_TYPE_NVARCHAR, + _CX_ORACLE_MAGIC_LOB_SIZE, + cursor.arraysize, + **dialect._cursor_var_unicode_kwargs, + ) + + elif dialect.auto_convert_lobs and default_type in ( + cx_Oracle.BLOB, + ): + return cursor.var( + cx_Oracle.DB_TYPE_RAW, + _CX_ORACLE_MAGIC_LOB_SIZE, + cursor.arraysize, + ) + + return output_type_handler + + def on_connect(self): + output_type_handler = self._generate_connection_outputtype_handler() + + def on_connect(conn): + conn.outputtypehandler = output_type_handler + + return on_connect + + def create_connect_args(self, url): + opts = dict(url.query) + + for opt in ("use_ansi", "auto_convert_lobs"): + if opt in opts: + util.warn_deprecated( + f"{self.driver} dialect option {opt!r} should only be " + "passed to create_engine directly, not within the URL " + "string", + version="1.3", + ) + util.coerce_kw_type(opts, opt, bool) + setattr(self, opt, opts.pop(opt)) + + database = url.database + service_name = opts.pop("service_name", None) + if database or service_name: + # if we have a database, then we have a remote host + port = url.port + if port: + port = int(port) + else: + port = 1521 + + if database and service_name: + raise exc.InvalidRequestError( + '"service_name" option shouldn\'t ' + 'be used with a "database" part of the url' + ) + if database: + makedsn_kwargs = {"sid": database} + if service_name: + makedsn_kwargs = {"service_name": service_name} + + dsn = self.dbapi.makedsn(url.host, port, **makedsn_kwargs) + else: + # we have a local tnsname + dsn = url.host + + if dsn is not None: + opts["dsn"] = dsn + if url.password is not None: + opts["password"] = url.password + if url.username is not None: + opts["user"] = url.username + + if self._cx_oracle_threaded is not None: + opts.setdefault("threaded", self._cx_oracle_threaded) + + def convert_cx_oracle_constant(value): + if isinstance(value, str): + try: + int_val = int(value) + except ValueError: + value = value.upper() + return getattr(self.dbapi, value) + else: + return int_val + else: + return value + + util.coerce_kw_type(opts, "mode", convert_cx_oracle_constant) 
+ util.coerce_kw_type(opts, "threaded", bool) + util.coerce_kw_type(opts, "events", bool) + util.coerce_kw_type(opts, "purity", convert_cx_oracle_constant) + return ([], opts) + + def _get_server_version_info(self, connection): + return tuple(int(x) for x in connection.connection.version.split(".")) + + def is_disconnect(self, e, connection, cursor): + (error,) = e.args + if isinstance( + e, (self.dbapi.InterfaceError, self.dbapi.DatabaseError) + ) and "not connected" in str(e): + return True + + if hasattr(error, "code") and error.code in { + 28, + 3114, + 3113, + 3135, + 1033, + 2396, + }: + # ORA-00028: your session has been killed + # ORA-03114: not connected to ORACLE + # ORA-03113: end-of-file on communication channel + # ORA-03135: connection lost contact + # ORA-01033: ORACLE initialization or shutdown in progress + # ORA-02396: exceeded maximum idle time, please connect again + # TODO: Others ? + return True + + if re.match(r"^(?:DPI-1010|DPI-1080|DPY-1001|DPY-4011)", str(e)): + # DPI-1010: not connected + # DPI-1080: connection was closed by ORA-3113 + # python-oracledb's DPY-1001: not connected to database + # python-oracledb's DPY-4011: the database or network closed the + # connection + # TODO: others? + return True + + return False + + def create_xid(self): + id_ = random.randint(0, 2**128) + return (0x1234, "%032x" % id_, "%032x" % 9) + + def do_executemany(self, cursor, statement, parameters, context=None): + if isinstance(parameters, tuple): + parameters = list(parameters) + cursor.executemany(statement, parameters) + + def do_begin_twophase(self, connection, xid): + connection.connection.begin(*xid) + connection.connection.info["cx_oracle_xid"] = xid + + def do_prepare_twophase(self, connection, xid): + result = connection.connection.prepare() + connection.info["cx_oracle_prepared"] = result + + def do_rollback_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + self.do_rollback(connection.connection) + # TODO: need to end XA state here + + def do_commit_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + if not is_prepared: + self.do_commit(connection.connection) + else: + if recover: + raise NotImplementedError( + "2pc recovery not implemented for cx_Oracle" + ) + oci_prepared = connection.info["cx_oracle_prepared"] + if oci_prepared: + self.do_commit(connection.connection) + # TODO: need to end XA state here + + def do_set_input_sizes(self, cursor, list_of_tuples, context): + if self.positional: + # not usually used, here to support if someone is modifying + # the dialect to use positional style + cursor.setinputsizes( + *[dbtype for key, dbtype, sqltype in list_of_tuples] + ) + else: + collection = ( + (key, dbtype) + for key, dbtype, sqltype in list_of_tuples + if dbtype + ) + + cursor.setinputsizes(**{key: dbtype for key, dbtype in collection}) + + def do_recover_twophase(self, connection): + raise NotImplementedError( + "recover two phase query for cx_Oracle not implemented" + ) + + +dialect = OracleDialect_cx_oracle diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/dictionary.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/dictionary.py new file mode 100644 index 00000000..63479b9f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/dictionary.py @@ -0,0 +1,507 @@ +# dialects/oracle/dictionary.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: 
https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +from .types import DATE +from .types import LONG +from .types import NUMBER +from .types import RAW +from .types import VARCHAR2 +from ... import Column +from ... import MetaData +from ... import Table +from ... import table +from ...sql.sqltypes import CHAR + +# constants +DB_LINK_PLACEHOLDER = "__$sa_dblink$__" +# tables +dual = table("dual") +dictionary_meta = MetaData() + +# NOTE: all the dictionary_meta are aliases because oracle does not like +# using the full table@dblink for every column in query, and complains with +# ORA-00960: ambiguous column naming in select list +all_tables = Table( + "all_tables" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128), nullable=False), + Column("table_name", VARCHAR2(128), nullable=False), + Column("tablespace_name", VARCHAR2(30)), + Column("cluster_name", VARCHAR2(128)), + Column("iot_name", VARCHAR2(128)), + Column("status", VARCHAR2(8)), + Column("pct_free", NUMBER), + Column("pct_used", NUMBER), + Column("ini_trans", NUMBER), + Column("max_trans", NUMBER), + Column("initial_extent", NUMBER), + Column("next_extent", NUMBER), + Column("min_extents", NUMBER), + Column("max_extents", NUMBER), + Column("pct_increase", NUMBER), + Column("freelists", NUMBER), + Column("freelist_groups", NUMBER), + Column("logging", VARCHAR2(3)), + Column("backed_up", VARCHAR2(1)), + Column("num_rows", NUMBER), + Column("blocks", NUMBER), + Column("empty_blocks", NUMBER), + Column("avg_space", NUMBER), + Column("chain_cnt", NUMBER), + Column("avg_row_len", NUMBER), + Column("avg_space_freelist_blocks", NUMBER), + Column("num_freelist_blocks", NUMBER), + Column("degree", VARCHAR2(10)), + Column("instances", VARCHAR2(10)), + Column("cache", VARCHAR2(5)), + Column("table_lock", VARCHAR2(8)), + Column("sample_size", NUMBER), + Column("last_analyzed", DATE), + Column("partitioned", VARCHAR2(3)), + Column("iot_type", VARCHAR2(12)), + Column("temporary", VARCHAR2(1)), + Column("secondary", VARCHAR2(1)), + Column("nested", VARCHAR2(3)), + Column("buffer_pool", VARCHAR2(7)), + Column("flash_cache", VARCHAR2(7)), + Column("cell_flash_cache", VARCHAR2(7)), + Column("row_movement", VARCHAR2(8)), + Column("global_stats", VARCHAR2(3)), + Column("user_stats", VARCHAR2(3)), + Column("duration", VARCHAR2(15)), + Column("skip_corrupt", VARCHAR2(8)), + Column("monitoring", VARCHAR2(3)), + Column("cluster_owner", VARCHAR2(128)), + Column("dependencies", VARCHAR2(8)), + Column("compression", VARCHAR2(8)), + Column("compress_for", VARCHAR2(30)), + Column("dropped", VARCHAR2(3)), + Column("read_only", VARCHAR2(3)), + Column("segment_created", VARCHAR2(3)), + Column("result_cache", VARCHAR2(7)), + Column("clustering", VARCHAR2(3)), + Column("activity_tracking", VARCHAR2(23)), + Column("dml_timestamp", VARCHAR2(25)), + Column("has_identity", VARCHAR2(3)), + Column("container_data", VARCHAR2(3)), + Column("inmemory", VARCHAR2(8)), + Column("inmemory_priority", VARCHAR2(8)), + Column("inmemory_distribute", VARCHAR2(15)), + Column("inmemory_compression", VARCHAR2(17)), + Column("inmemory_duplicate", VARCHAR2(13)), + Column("default_collation", VARCHAR2(100)), + Column("duplicated", VARCHAR2(1)), + Column("sharded", VARCHAR2(1)), + Column("externally_sharded", VARCHAR2(1)), + Column("externally_duplicated", VARCHAR2(1)), + Column("external", VARCHAR2(3)), + Column("hybrid", VARCHAR2(3)), + Column("cellmemory", VARCHAR2(24)), + Column("containers_default", VARCHAR2(3)), + Column("container_map", 
VARCHAR2(3)), + Column("extended_data_link", VARCHAR2(3)), + Column("extended_data_link_map", VARCHAR2(3)), + Column("inmemory_service", VARCHAR2(12)), + Column("inmemory_service_name", VARCHAR2(1000)), + Column("container_map_object", VARCHAR2(3)), + Column("memoptimize_read", VARCHAR2(8)), + Column("memoptimize_write", VARCHAR2(8)), + Column("has_sensitive_column", VARCHAR2(3)), + Column("admit_null", VARCHAR2(3)), + Column("data_link_dml_enabled", VARCHAR2(3)), + Column("logical_replication", VARCHAR2(8)), +).alias("a_tables") + +all_views = Table( + "all_views" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128), nullable=False), + Column("view_name", VARCHAR2(128), nullable=False), + Column("text_length", NUMBER), + Column("text", LONG), + Column("text_vc", VARCHAR2(4000)), + Column("type_text_length", NUMBER), + Column("type_text", VARCHAR2(4000)), + Column("oid_text_length", NUMBER), + Column("oid_text", VARCHAR2(4000)), + Column("view_type_owner", VARCHAR2(128)), + Column("view_type", VARCHAR2(128)), + Column("superview_name", VARCHAR2(128)), + Column("editioning_view", VARCHAR2(1)), + Column("read_only", VARCHAR2(1)), + Column("container_data", VARCHAR2(1)), + Column("bequeath", VARCHAR2(12)), + Column("origin_con_id", VARCHAR2(256)), + Column("default_collation", VARCHAR2(100)), + Column("containers_default", VARCHAR2(3)), + Column("container_map", VARCHAR2(3)), + Column("extended_data_link", VARCHAR2(3)), + Column("extended_data_link_map", VARCHAR2(3)), + Column("has_sensitive_column", VARCHAR2(3)), + Column("admit_null", VARCHAR2(3)), + Column("pdb_local_only", VARCHAR2(3)), +).alias("a_views") + +all_sequences = Table( + "all_sequences" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("sequence_owner", VARCHAR2(128), nullable=False), + Column("sequence_name", VARCHAR2(128), nullable=False), + Column("min_value", NUMBER), + Column("max_value", NUMBER), + Column("increment_by", NUMBER, nullable=False), + Column("cycle_flag", VARCHAR2(1)), + Column("order_flag", VARCHAR2(1)), + Column("cache_size", NUMBER, nullable=False), + Column("last_number", NUMBER, nullable=False), + Column("scale_flag", VARCHAR2(1)), + Column("extend_flag", VARCHAR2(1)), + Column("sharded_flag", VARCHAR2(1)), + Column("session_flag", VARCHAR2(1)), + Column("keep_value", VARCHAR2(1)), +).alias("a_sequences") + +all_users = Table( + "all_users" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("username", VARCHAR2(128), nullable=False), + Column("user_id", NUMBER, nullable=False), + Column("created", DATE, nullable=False), + Column("common", VARCHAR2(3)), + Column("oracle_maintained", VARCHAR2(1)), + Column("inherited", VARCHAR2(3)), + Column("default_collation", VARCHAR2(100)), + Column("implicit", VARCHAR2(3)), + Column("all_shard", VARCHAR2(3)), + Column("external_shard", VARCHAR2(3)), +).alias("a_users") + +all_mviews = Table( + "all_mviews" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128), nullable=False), + Column("mview_name", VARCHAR2(128), nullable=False), + Column("container_name", VARCHAR2(128), nullable=False), + Column("query", LONG), + Column("query_len", NUMBER(38)), + Column("updatable", VARCHAR2(1)), + Column("update_log", VARCHAR2(128)), + Column("master_rollback_seg", VARCHAR2(128)), + Column("master_link", VARCHAR2(128)), + Column("rewrite_enabled", VARCHAR2(1)), + Column("rewrite_capability", VARCHAR2(9)), + Column("refresh_mode", VARCHAR2(6)), + Column("refresh_method", VARCHAR2(8)), + Column("build_mode", VARCHAR2(9)), + 
Column("fast_refreshable", VARCHAR2(18)), + Column("last_refresh_type", VARCHAR2(8)), + Column("last_refresh_date", DATE), + Column("last_refresh_end_time", DATE), + Column("staleness", VARCHAR2(19)), + Column("after_fast_refresh", VARCHAR2(19)), + Column("unknown_prebuilt", VARCHAR2(1)), + Column("unknown_plsql_func", VARCHAR2(1)), + Column("unknown_external_table", VARCHAR2(1)), + Column("unknown_consider_fresh", VARCHAR2(1)), + Column("unknown_import", VARCHAR2(1)), + Column("unknown_trusted_fd", VARCHAR2(1)), + Column("compile_state", VARCHAR2(19)), + Column("use_no_index", VARCHAR2(1)), + Column("stale_since", DATE), + Column("num_pct_tables", NUMBER), + Column("num_fresh_pct_regions", NUMBER), + Column("num_stale_pct_regions", NUMBER), + Column("segment_created", VARCHAR2(3)), + Column("evaluation_edition", VARCHAR2(128)), + Column("unusable_before", VARCHAR2(128)), + Column("unusable_beginning", VARCHAR2(128)), + Column("default_collation", VARCHAR2(100)), + Column("on_query_computation", VARCHAR2(1)), + Column("auto", VARCHAR2(3)), +).alias("a_mviews") + +all_tab_identity_cols = Table( + "all_tab_identity_cols" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128), nullable=False), + Column("table_name", VARCHAR2(128), nullable=False), + Column("column_name", VARCHAR2(128), nullable=False), + Column("generation_type", VARCHAR2(10)), + Column("sequence_name", VARCHAR2(128), nullable=False), + Column("identity_options", VARCHAR2(298)), +).alias("a_tab_identity_cols") + +all_tab_cols = Table( + "all_tab_cols" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128), nullable=False), + Column("table_name", VARCHAR2(128), nullable=False), + Column("column_name", VARCHAR2(128), nullable=False), + Column("data_type", VARCHAR2(128)), + Column("data_type_mod", VARCHAR2(3)), + Column("data_type_owner", VARCHAR2(128)), + Column("data_length", NUMBER, nullable=False), + Column("data_precision", NUMBER), + Column("data_scale", NUMBER), + Column("nullable", VARCHAR2(1)), + Column("column_id", NUMBER), + Column("default_length", NUMBER), + Column("data_default", LONG), + Column("num_distinct", NUMBER), + Column("low_value", RAW(1000)), + Column("high_value", RAW(1000)), + Column("density", NUMBER), + Column("num_nulls", NUMBER), + Column("num_buckets", NUMBER), + Column("last_analyzed", DATE), + Column("sample_size", NUMBER), + Column("character_set_name", VARCHAR2(44)), + Column("char_col_decl_length", NUMBER), + Column("global_stats", VARCHAR2(3)), + Column("user_stats", VARCHAR2(3)), + Column("avg_col_len", NUMBER), + Column("char_length", NUMBER), + Column("char_used", VARCHAR2(1)), + Column("v80_fmt_image", VARCHAR2(3)), + Column("data_upgraded", VARCHAR2(3)), + Column("hidden_column", VARCHAR2(3)), + Column("virtual_column", VARCHAR2(3)), + Column("segment_column_id", NUMBER), + Column("internal_column_id", NUMBER, nullable=False), + Column("histogram", VARCHAR2(15)), + Column("qualified_col_name", VARCHAR2(4000)), + Column("user_generated", VARCHAR2(3)), + Column("default_on_null", VARCHAR2(3)), + Column("identity_column", VARCHAR2(3)), + Column("evaluation_edition", VARCHAR2(128)), + Column("unusable_before", VARCHAR2(128)), + Column("unusable_beginning", VARCHAR2(128)), + Column("collation", VARCHAR2(100)), + Column("collated_column_id", NUMBER), +).alias("a_tab_cols") + +all_tab_comments = Table( + "all_tab_comments" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128), nullable=False), + Column("table_name", VARCHAR2(128), 
nullable=False), + Column("table_type", VARCHAR2(11)), + Column("comments", VARCHAR2(4000)), + Column("origin_con_id", NUMBER), +).alias("a_tab_comments") + +all_col_comments = Table( + "all_col_comments" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128), nullable=False), + Column("table_name", VARCHAR2(128), nullable=False), + Column("column_name", VARCHAR2(128), nullable=False), + Column("comments", VARCHAR2(4000)), + Column("origin_con_id", NUMBER), +).alias("a_col_comments") + +all_mview_comments = Table( + "all_mview_comments" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128), nullable=False), + Column("mview_name", VARCHAR2(128), nullable=False), + Column("comments", VARCHAR2(4000)), +).alias("a_mview_comments") + +all_ind_columns = Table( + "all_ind_columns" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("index_owner", VARCHAR2(128), nullable=False), + Column("index_name", VARCHAR2(128), nullable=False), + Column("table_owner", VARCHAR2(128), nullable=False), + Column("table_name", VARCHAR2(128), nullable=False), + Column("column_name", VARCHAR2(4000)), + Column("column_position", NUMBER, nullable=False), + Column("column_length", NUMBER, nullable=False), + Column("char_length", NUMBER), + Column("descend", VARCHAR2(4)), + Column("collated_column_id", NUMBER), +).alias("a_ind_columns") + +all_indexes = Table( + "all_indexes" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128), nullable=False), + Column("index_name", VARCHAR2(128), nullable=False), + Column("index_type", VARCHAR2(27)), + Column("table_owner", VARCHAR2(128), nullable=False), + Column("table_name", VARCHAR2(128), nullable=False), + Column("table_type", CHAR(11)), + Column("uniqueness", VARCHAR2(9)), + Column("compression", VARCHAR2(13)), + Column("prefix_length", NUMBER), + Column("tablespace_name", VARCHAR2(30)), + Column("ini_trans", NUMBER), + Column("max_trans", NUMBER), + Column("initial_extent", NUMBER), + Column("next_extent", NUMBER), + Column("min_extents", NUMBER), + Column("max_extents", NUMBER), + Column("pct_increase", NUMBER), + Column("pct_threshold", NUMBER), + Column("include_column", NUMBER), + Column("freelists", NUMBER), + Column("freelist_groups", NUMBER), + Column("pct_free", NUMBER), + Column("logging", VARCHAR2(3)), + Column("blevel", NUMBER), + Column("leaf_blocks", NUMBER), + Column("distinct_keys", NUMBER), + Column("avg_leaf_blocks_per_key", NUMBER), + Column("avg_data_blocks_per_key", NUMBER), + Column("clustering_factor", NUMBER), + Column("status", VARCHAR2(8)), + Column("num_rows", NUMBER), + Column("sample_size", NUMBER), + Column("last_analyzed", DATE), + Column("degree", VARCHAR2(40)), + Column("instances", VARCHAR2(40)), + Column("partitioned", VARCHAR2(3)), + Column("temporary", VARCHAR2(1)), + Column("generated", VARCHAR2(1)), + Column("secondary", VARCHAR2(1)), + Column("buffer_pool", VARCHAR2(7)), + Column("flash_cache", VARCHAR2(7)), + Column("cell_flash_cache", VARCHAR2(7)), + Column("user_stats", VARCHAR2(3)), + Column("duration", VARCHAR2(15)), + Column("pct_direct_access", NUMBER), + Column("ityp_owner", VARCHAR2(128)), + Column("ityp_name", VARCHAR2(128)), + Column("parameters", VARCHAR2(1000)), + Column("global_stats", VARCHAR2(3)), + Column("domidx_status", VARCHAR2(12)), + Column("domidx_opstatus", VARCHAR2(6)), + Column("funcidx_status", VARCHAR2(8)), + Column("join_index", VARCHAR2(3)), + Column("iot_redundant_pkey_elim", VARCHAR2(3)), + Column("dropped", VARCHAR2(3)), + Column("visibility", 
VARCHAR2(9)), + Column("domidx_management", VARCHAR2(14)), + Column("segment_created", VARCHAR2(3)), + Column("orphaned_entries", VARCHAR2(3)), + Column("indexing", VARCHAR2(7)), + Column("auto", VARCHAR2(3)), +).alias("a_indexes") + +all_ind_expressions = Table( + "all_ind_expressions" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("index_owner", VARCHAR2(128), nullable=False), + Column("index_name", VARCHAR2(128), nullable=False), + Column("table_owner", VARCHAR2(128), nullable=False), + Column("table_name", VARCHAR2(128), nullable=False), + Column("column_expression", LONG), + Column("column_position", NUMBER, nullable=False), +).alias("a_ind_expressions") + +all_constraints = Table( + "all_constraints" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128)), + Column("constraint_name", VARCHAR2(128)), + Column("constraint_type", VARCHAR2(1)), + Column("table_name", VARCHAR2(128)), + Column("search_condition", LONG), + Column("search_condition_vc", VARCHAR2(4000)), + Column("r_owner", VARCHAR2(128)), + Column("r_constraint_name", VARCHAR2(128)), + Column("delete_rule", VARCHAR2(9)), + Column("status", VARCHAR2(8)), + Column("deferrable", VARCHAR2(14)), + Column("deferred", VARCHAR2(9)), + Column("validated", VARCHAR2(13)), + Column("generated", VARCHAR2(14)), + Column("bad", VARCHAR2(3)), + Column("rely", VARCHAR2(4)), + Column("last_change", DATE), + Column("index_owner", VARCHAR2(128)), + Column("index_name", VARCHAR2(128)), + Column("invalid", VARCHAR2(7)), + Column("view_related", VARCHAR2(14)), + Column("origin_con_id", VARCHAR2(256)), +).alias("a_constraints") + +all_cons_columns = Table( + "all_cons_columns" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128), nullable=False), + Column("constraint_name", VARCHAR2(128), nullable=False), + Column("table_name", VARCHAR2(128), nullable=False), + Column("column_name", VARCHAR2(4000)), + Column("position", NUMBER), +).alias("a_cons_columns") + +# TODO figure out if it's still relevant, since there is no mention from here +# https://docs.oracle.com/en/database/oracle/oracle-database/21/refrn/ALL_DB_LINKS.html +# original note: +# using user_db_links here since all_db_links appears +# to have more restricted permissions. +# https://docs.oracle.com/cd/B28359_01/server.111/b28310/ds_admin005.htm +# will need to hear from more users if we are doing +# the right thing here. 
See [ticket:2619] +all_db_links = Table( + "all_db_links" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128), nullable=False), + Column("db_link", VARCHAR2(128), nullable=False), + Column("username", VARCHAR2(128)), + Column("host", VARCHAR2(2000)), + Column("created", DATE, nullable=False), + Column("hidden", VARCHAR2(3)), + Column("shard_internal", VARCHAR2(3)), + Column("valid", VARCHAR2(3)), + Column("intra_cdb", VARCHAR2(3)), +).alias("a_db_links") + +all_synonyms = Table( + "all_synonyms" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128)), + Column("synonym_name", VARCHAR2(128)), + Column("table_owner", VARCHAR2(128)), + Column("table_name", VARCHAR2(128)), + Column("db_link", VARCHAR2(128)), + Column("origin_con_id", VARCHAR2(256)), +).alias("a_synonyms") + +all_objects = Table( + "all_objects" + DB_LINK_PLACEHOLDER, + dictionary_meta, + Column("owner", VARCHAR2(128), nullable=False), + Column("object_name", VARCHAR2(128), nullable=False), + Column("subobject_name", VARCHAR2(128)), + Column("object_id", NUMBER, nullable=False), + Column("data_object_id", NUMBER), + Column("object_type", VARCHAR2(23)), + Column("created", DATE, nullable=False), + Column("last_ddl_time", DATE, nullable=False), + Column("timestamp", VARCHAR2(19)), + Column("status", VARCHAR2(7)), + Column("temporary", VARCHAR2(1)), + Column("generated", VARCHAR2(1)), + Column("secondary", VARCHAR2(1)), + Column("namespace", NUMBER, nullable=False), + Column("edition_name", VARCHAR2(128)), + Column("sharing", VARCHAR2(13)), + Column("editionable", VARCHAR2(1)), + Column("oracle_maintained", VARCHAR2(1)), + Column("application", VARCHAR2(1)), + Column("default_collation", VARCHAR2(100)), + Column("duplicated", VARCHAR2(1)), + Column("sharded", VARCHAR2(1)), + Column("created_appid", NUMBER), + Column("created_vsnid", NUMBER), + Column("modified_appid", NUMBER), + Column("modified_vsnid", NUMBER), +).alias("a_objects") diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/oracledb.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/oracledb.py new file mode 100644 index 00000000..0667ed76 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/oracledb.py @@ -0,0 +1,431 @@ +# dialects/oracle/oracledb.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +r""" +.. dialect:: oracle+oracledb + :name: python-oracledb + :dbapi: oracledb + :connectstring: oracle+oracledb://user:pass@hostname:port[/dbname][?service_name=[&key=value&key=value...]] + :url: https://oracle.github.io/python-oracledb/ + +Description +----------- + +python-oracledb is released by Oracle to supersede the cx_Oracle driver. +It is fully compatible with cx_Oracle and features both a "thin" client +mode that requires no dependencies, as well as a "thick" mode that uses +the Oracle Client Interface in the same way as cx_Oracle. + +.. seealso:: + + :ref:`cx_oracle` - all of cx_Oracle's notes apply to the oracledb driver + as well, with the exception that oracledb supports two phase transactions. + +The SQLAlchemy ``oracledb`` dialect provides both a sync and an async +implementation under the same dialect name. 
The proper version is
+selected depending on how the engine is created:
+
+* calling :func:`_sa.create_engine` with ``oracle+oracledb://...`` will
+  automatically select the sync version, e.g.::
+
+    from sqlalchemy import create_engine
+    sync_engine = create_engine("oracle+oracledb://scott:tiger@localhost/?service_name=XEPDB1")
+
+* calling :func:`_asyncio.create_async_engine` with
+  ``oracle+oracledb://...`` will automatically select the async version,
+  e.g.::
+
+    from sqlalchemy.ext.asyncio import create_async_engine
+    asyncio_engine = create_async_engine("oracle+oracledb://scott:tiger@localhost/?service_name=XEPDB1")
+
+The asyncio version of the dialect may also be specified explicitly using the
+``oracledb_async`` suffix, as::
+
+    from sqlalchemy.ext.asyncio import create_async_engine
+    asyncio_engine = create_async_engine("oracle+oracledb_async://scott:tiger@localhost/?service_name=XEPDB1")
+
+.. versionadded:: 2.0.25 added support for the async version of oracledb.
+
+Thick mode support
+------------------
+
+By default, ``python-oracledb`` starts in "thin" mode, which does not require
+Oracle Client libraries to be installed on the system. The ``python-oracledb``
+driver also supports a "thick" mode, which behaves similarly to ``cx_oracle``
+and requires that the Oracle Client Interface (OCI) be installed.
+
+To enable this mode, the user may call ``oracledb.init_oracle_client``
+manually, or pass the parameter ``thick_mode=True`` to
+:func:`_sa.create_engine`. To pass custom arguments to ``init_oracle_client``,
+such as the ``lib_dir`` path, a dict may be passed to this parameter, as in::
+
+    engine = sa.create_engine("oracle+oracledb://...", thick_mode={
+        "lib_dir": "/path/to/oracle/client/lib", "driver_name": "my-app"
+    })
+
+.. seealso::
+
+    https://python-oracledb.readthedocs.io/en/latest/api_manual/module.html#oracledb.init_oracle_client
+
+Two Phase Transactions Supported
+--------------------------------
+
+Two phase transactions are fully supported under oracledb. Starting with
+oracledb 2.3, two phase transactions are also supported in thin mode. APIs
+for two phase transactions are provided at the Core level via
+:meth:`_engine.Connection.begin_twophase` and :paramref:`_orm.Session.twophase`
+for transparent ORM use.
+
+.. versionchanged:: 2.0.32 added support for two phase transactions
+
+.. versionadded:: 2.0.0 added support for oracledb driver.
+
+""" # noqa
+from __future__ import annotations
+
+import collections
+import re
+from typing import Any
+from typing import TYPE_CHECKING
+
+from . import cx_oracle as _cx_oracle
+from ... import exc
+from ...
import pool +from ...connectors.asyncio import AsyncAdapt_dbapi_connection +from ...connectors.asyncio import AsyncAdapt_dbapi_cursor +from ...connectors.asyncio import AsyncAdapt_dbapi_ss_cursor +from ...connectors.asyncio import AsyncAdaptFallback_dbapi_connection +from ...engine import default +from ...util import asbool +from ...util import await_fallback +from ...util import await_only + +if TYPE_CHECKING: + from oracledb import AsyncConnection + from oracledb import AsyncCursor + + +class OracleExecutionContext_oracledb( + _cx_oracle.OracleExecutionContext_cx_oracle +): + pass + + +class OracleDialect_oracledb(_cx_oracle.OracleDialect_cx_oracle): + supports_statement_cache = True + execution_ctx_cls = OracleExecutionContext_oracledb + + driver = "oracledb" + _min_version = (1,) + + def __init__( + self, + auto_convert_lobs=True, + coerce_to_decimal=True, + arraysize=None, + encoding_errors=None, + thick_mode=None, + **kwargs, + ): + super().__init__( + auto_convert_lobs, + coerce_to_decimal, + arraysize, + encoding_errors, + **kwargs, + ) + + if self.dbapi is not None and ( + thick_mode or isinstance(thick_mode, dict) + ): + kw = thick_mode if isinstance(thick_mode, dict) else {} + self.dbapi.init_oracle_client(**kw) + + @classmethod + def import_dbapi(cls): + import oracledb + + return oracledb + + @classmethod + def is_thin_mode(cls, connection): + return connection.connection.dbapi_connection.thin + + @classmethod + def get_async_dialect_cls(cls, url): + return OracleDialectAsync_oracledb + + def _load_version(self, dbapi_module): + version = (0, 0, 0) + if dbapi_module is not None: + m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", dbapi_module.version) + if m: + version = tuple( + int(x) for x in m.group(1, 2, 3) if x is not None + ) + self.oracledb_ver = version + if ( + self.oracledb_ver > (0, 0, 0) + and self.oracledb_ver < self._min_version + ): + raise exc.InvalidRequestError( + f"oracledb version {self._min_version} and above are supported" + ) + + def do_begin_twophase(self, connection, xid): + conn_xis = connection.connection.xid(*xid) + connection.connection.tpc_begin(conn_xis) + connection.connection.info["oracledb_xid"] = conn_xis + + def do_prepare_twophase(self, connection, xid): + should_commit = connection.connection.tpc_prepare() + connection.info["oracledb_should_commit"] = should_commit + + def do_rollback_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + if recover: + conn_xid = connection.connection.xid(*xid) + else: + conn_xid = None + connection.connection.tpc_rollback(conn_xid) + + def do_commit_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + conn_xid = None + if not is_prepared: + should_commit = connection.connection.tpc_prepare() + elif recover: + conn_xid = connection.connection.xid(*xid) + should_commit = True + else: + should_commit = connection.info["oracledb_should_commit"] + if should_commit: + connection.connection.tpc_commit(conn_xid) + + def do_recover_twophase(self, connection): + return [ + # oracledb seems to return bytes + ( + fi, + gti.decode() if isinstance(gti, bytes) else gti, + bq.decode() if isinstance(bq, bytes) else bq, + ) + for fi, gti, bq in connection.connection.tpc_recover() + ] + + +class AsyncAdapt_oracledb_cursor(AsyncAdapt_dbapi_cursor): + _cursor: AsyncCursor + __slots__ = () + + @property + def outputtypehandler(self): + return self._cursor.outputtypehandler + + @outputtypehandler.setter + def outputtypehandler(self, value): + self._cursor.outputtypehandler = value + + def 
var(self, *args, **kwargs): + return self._cursor.var(*args, **kwargs) + + def close(self): + self._rows.clear() + self._cursor.close() + + def setinputsizes(self, *args: Any, **kwargs: Any) -> Any: + return self._cursor.setinputsizes(*args, **kwargs) + + def _aenter_cursor(self, cursor: AsyncCursor) -> AsyncCursor: + try: + return cursor.__enter__() + except Exception as error: + self._adapt_connection._handle_exception(error) + + async def _execute_async(self, operation, parameters): + # override to not use mutex, oracledb already has mutex + + if parameters is None: + result = await self._cursor.execute(operation) + else: + result = await self._cursor.execute(operation, parameters) + + if self._cursor.description and not self.server_side: + self._rows = collections.deque(await self._cursor.fetchall()) + return result + + async def _executemany_async( + self, + operation, + seq_of_parameters, + ): + # override to not use mutex, oracledb already has mutex + return await self._cursor.executemany(operation, seq_of_parameters) + + def __enter__(self): + return self + + def __exit__(self, type_: Any, value: Any, traceback: Any) -> None: + self.close() + + +class AsyncAdapt_oracledb_ss_cursor( + AsyncAdapt_dbapi_ss_cursor, AsyncAdapt_oracledb_cursor +): + __slots__ = () + + def close(self) -> None: + if self._cursor is not None: + self._cursor.close() + self._cursor = None # type: ignore + + +class AsyncAdapt_oracledb_connection(AsyncAdapt_dbapi_connection): + _connection: AsyncConnection + __slots__ = () + + thin = True + + _cursor_cls = AsyncAdapt_oracledb_cursor + _ss_cursor_cls = None + + @property + def autocommit(self): + return self._connection.autocommit + + @autocommit.setter + def autocommit(self, value): + self._connection.autocommit = value + + @property + def outputtypehandler(self): + return self._connection.outputtypehandler + + @outputtypehandler.setter + def outputtypehandler(self, value): + self._connection.outputtypehandler = value + + @property + def version(self): + return self._connection.version + + @property + def stmtcachesize(self): + return self._connection.stmtcachesize + + @stmtcachesize.setter + def stmtcachesize(self, value): + self._connection.stmtcachesize = value + + def cursor(self): + return AsyncAdapt_oracledb_cursor(self) + + def ss_cursor(self): + return AsyncAdapt_oracledb_ss_cursor(self) + + def xid(self, *args: Any, **kwargs: Any) -> Any: + return self._connection.xid(*args, **kwargs) + + def tpc_begin(self, *args: Any, **kwargs: Any) -> Any: + return self.await_(self._connection.tpc_begin(*args, **kwargs)) + + def tpc_commit(self, *args: Any, **kwargs: Any) -> Any: + return self.await_(self._connection.tpc_commit(*args, **kwargs)) + + def tpc_prepare(self, *args: Any, **kwargs: Any) -> Any: + return self.await_(self._connection.tpc_prepare(*args, **kwargs)) + + def tpc_recover(self, *args: Any, **kwargs: Any) -> Any: + return self.await_(self._connection.tpc_recover(*args, **kwargs)) + + def tpc_rollback(self, *args: Any, **kwargs: Any) -> Any: + return self.await_(self._connection.tpc_rollback(*args, **kwargs)) + + +class AsyncAdaptFallback_oracledb_connection( + AsyncAdaptFallback_dbapi_connection, AsyncAdapt_oracledb_connection +): + __slots__ = () + + +class OracledbAdaptDBAPI: + def __init__(self, oracledb) -> None: + self.oracledb = oracledb + + for k, v in self.oracledb.__dict__.items(): + if k != "connect": + self.__dict__[k] = v + + def connect(self, *arg, **kw): + async_fallback = kw.pop("async_fallback", False) + creator_fn = 
kw.pop("async_creator_fn", self.oracledb.connect_async) + + if asbool(async_fallback): + return AsyncAdaptFallback_oracledb_connection( + self, await_fallback(creator_fn(*arg, **kw)) + ) + + else: + return AsyncAdapt_oracledb_connection( + self, await_only(creator_fn(*arg, **kw)) + ) + + +class OracleExecutionContextAsync_oracledb(OracleExecutionContext_oracledb): + # restore default create cursor + create_cursor = default.DefaultExecutionContext.create_cursor + + def create_default_cursor(self): + # copy of OracleExecutionContext_cx_oracle.create_cursor + c = self._dbapi_connection.cursor() + if self.dialect.arraysize: + c.arraysize = self.dialect.arraysize + + return c + + def create_server_side_cursor(self): + c = self._dbapi_connection.ss_cursor() + if self.dialect.arraysize: + c.arraysize = self.dialect.arraysize + + return c + + +class OracleDialectAsync_oracledb(OracleDialect_oracledb): + is_async = True + supports_server_side_cursors = True + supports_statement_cache = True + execution_ctx_cls = OracleExecutionContextAsync_oracledb + + _min_version = (2,) + + # thick_mode mode is not supported by asyncio, oracledb will raise + @classmethod + def import_dbapi(cls): + import oracledb + + return OracledbAdaptDBAPI(oracledb) + + @classmethod + def get_pool_class(cls, url): + async_fallback = url.query.get("async_fallback", False) + + if asbool(async_fallback): + return pool.FallbackAsyncAdaptedQueuePool + else: + return pool.AsyncAdaptedQueuePool + + def get_driver_connection(self, connection): + return connection._connection + + +dialect = OracleDialect_oracledb +dialect_async = OracleDialectAsync_oracledb diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/provision.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/provision.py new file mode 100644 index 00000000..b33c1525 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/provision.py @@ -0,0 +1,220 @@ +# dialects/oracle/provision.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +from ... import create_engine +from ... import exc +from ... 
import inspect
+from ...engine import url as sa_url
+from ...testing.provision import configure_follower
+from ...testing.provision import create_db
+from ...testing.provision import drop_all_schema_objects_post_tables
+from ...testing.provision import drop_all_schema_objects_pre_tables
+from ...testing.provision import drop_db
+from ...testing.provision import follower_url_from_main
+from ...testing.provision import log
+from ...testing.provision import post_configure_engine
+from ...testing.provision import run_reap_dbs
+from ...testing.provision import set_default_schema_on_connection
+from ...testing.provision import stop_test_class_outside_fixtures
+from ...testing.provision import temp_table_keyword_args
+from ...testing.provision import update_db_opts
+
+
+@create_db.for_db("oracle")
+def _oracle_create_db(cfg, eng, ident):
+    # NOTE: make sure you've run "ALTER DATABASE default tablespace users" or
+    # similar, so that the default tablespace is not "system"; reflection will
+    # fail otherwise
+    with eng.begin() as conn:
+        conn.exec_driver_sql("create user %s identified by xe" % ident)
+        conn.exec_driver_sql("create user %s_ts1 identified by xe" % ident)
+        conn.exec_driver_sql("create user %s_ts2 identified by xe" % ident)
+        conn.exec_driver_sql("grant dba to %s" % (ident,))
+        conn.exec_driver_sql("grant unlimited tablespace to %s" % ident)
+        conn.exec_driver_sql("grant unlimited tablespace to %s_ts1" % ident)
+        conn.exec_driver_sql("grant unlimited tablespace to %s_ts2" % ident)
+        # these are needed to create materialized views
+        conn.exec_driver_sql("grant create table to %s" % ident)
+        conn.exec_driver_sql("grant create table to %s_ts1" % ident)
+        conn.exec_driver_sql("grant create table to %s_ts2" % ident)
+
+
+@configure_follower.for_db("oracle")
+def _oracle_configure_follower(config, ident):
+    config.test_schema = "%s_ts1" % ident
+    config.test_schema_2 = "%s_ts2" % ident
+
+
+def _ora_drop_ignore(conn, dbname):
+    try:
+        conn.exec_driver_sql("drop user %s cascade" % dbname)
+        log.info("Reaped db: %s", dbname)
+        return True
+    except exc.DatabaseError as err:
+        log.warning("couldn't drop db: %s", err)
+        return False
+
+
+@drop_all_schema_objects_pre_tables.for_db("oracle")
+def _ora_drop_all_schema_objects_pre_tables(cfg, eng):
+    _purge_recyclebin(eng)
+    _purge_recyclebin(eng, cfg.test_schema)
+
+
+@drop_all_schema_objects_post_tables.for_db("oracle")
+def _ora_drop_all_schema_objects_post_tables(cfg, eng):
+    with eng.begin() as conn:
+        for syn in conn.dialect._get_synonyms(conn, None, None, None):
+            conn.exec_driver_sql(f"drop synonym {syn['synonym_name']}")
+
+        for syn in conn.dialect._get_synonyms(
+            conn, cfg.test_schema, None, None
+        ):
+            conn.exec_driver_sql(
+                f"drop synonym {cfg.test_schema}.{syn['synonym_name']}"
+            )
+
+        for tmp_table in inspect(conn).get_temp_table_names():
+            conn.exec_driver_sql(f"drop table {tmp_table}")
+
+
+@drop_db.for_db("oracle")
+def _oracle_drop_db(cfg, eng, ident):
+    with eng.begin() as conn:
+        # cx_Oracle seems to occasionally leak open connections when a large
+        # suite is run, even if we confirm we have zero references to
+        # connection objects.
+        # while there is a "kill session" command in Oracle,
+        # it unfortunately does not release the connection sufficiently.
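+        # drop the three users created by _oracle_create_db above: the
+        # main test schema plus its "_ts1" / "_ts2" follower schemas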
+ _ora_drop_ignore(conn, ident) + _ora_drop_ignore(conn, "%s_ts1" % ident) + _ora_drop_ignore(conn, "%s_ts2" % ident) + + +@stop_test_class_outside_fixtures.for_db("oracle") +def _ora_stop_test_class_outside_fixtures(config, db, cls): + try: + _purge_recyclebin(db) + except exc.DatabaseError as err: + log.warning("purge recyclebin command failed: %s", err) + + # clear statement cache on all connections that were used + # https://github.com/oracle/python-cx_Oracle/issues/519 + + for cx_oracle_conn in _all_conns: + try: + sc = cx_oracle_conn.stmtcachesize + except db.dialect.dbapi.InterfaceError: + # connection closed + pass + else: + cx_oracle_conn.stmtcachesize = 0 + cx_oracle_conn.stmtcachesize = sc + _all_conns.clear() + + +def _purge_recyclebin(eng, schema=None): + with eng.begin() as conn: + if schema is None: + # run magic command to get rid of identity sequences + # https://floo.bar/2019/11/29/drop-the-underlying-sequence-of-an-identity-column/ # noqa: E501 + conn.exec_driver_sql("purge recyclebin") + else: + # per user: https://community.oracle.com/tech/developers/discussion/2255402/how-to-clear-dba-recyclebin-for-a-particular-user # noqa: E501 + for owner, object_name, type_ in conn.exec_driver_sql( + "select owner, object_name,type from " + "dba_recyclebin where owner=:schema and type='TABLE'", + {"schema": conn.dialect.denormalize_name(schema)}, + ).all(): + conn.exec_driver_sql(f'purge {type_} {owner}."{object_name}"') + + +_all_conns = set() + + +@post_configure_engine.for_db("oracle") +def _oracle_post_configure_engine(url, engine, follower_ident): + from sqlalchemy import event + + @event.listens_for(engine, "checkout") + def checkout(dbapi_con, con_record, con_proxy): + _all_conns.add(dbapi_con) + + @event.listens_for(engine, "checkin") + def checkin(dbapi_connection, connection_record): + # work around cx_Oracle issue: + # https://github.com/oracle/python-cx_Oracle/issues/530 + # invalidate oracle connections that had 2pc set up + if "cx_oracle_xid" in connection_record.info: + connection_record.invalidate() + + +@run_reap_dbs.for_db("oracle") +def _reap_oracle_dbs(url, idents): + log.info("db reaper connecting to %r", url) + eng = create_engine(url) + with eng.begin() as conn: + log.info("identifiers in file: %s", ", ".join(idents)) + + to_reap = conn.exec_driver_sql( + "select u.username from all_users u where username " + "like 'TEST_%' and not exists (select username " + "from v$session where username=u.username)" + ) + all_names = {username.lower() for (username,) in to_reap} + to_drop = set() + for name in all_names: + if name.endswith("_ts1") or name.endswith("_ts2"): + continue + elif name in idents: + to_drop.add(name) + if "%s_ts1" % name in all_names: + to_drop.add("%s_ts1" % name) + if "%s_ts2" % name in all_names: + to_drop.add("%s_ts2" % name) + + dropped = total = 0 + for total, username in enumerate(to_drop, 1): + if _ora_drop_ignore(conn, username): + dropped += 1 + log.info( + "Dropped %d out of %d stale databases detected", dropped, total + ) + + +@follower_url_from_main.for_db("oracle") +def _oracle_follower_url_from_main(url, ident): + url = sa_url.make_url(url) + return url.set(username=ident, password="xe") + + +@temp_table_keyword_args.for_db("oracle") +def _oracle_temp_table_keyword_args(cfg, eng): + return { + "prefixes": ["GLOBAL TEMPORARY"], + "oracle_on_commit": "PRESERVE ROWS", + } + + +@set_default_schema_on_connection.for_db("oracle") +def _oracle_set_default_schema_on_connection( + cfg, dbapi_connection, schema_name +): + cursor = 
dbapi_connection.cursor()
+    cursor.execute("ALTER SESSION SET CURRENT_SCHEMA=%s" % schema_name)
+    cursor.close()
+
+
+@update_db_opts.for_db("oracle")
+def _update_db_opts(db_url, db_opts, options):
+    """Set database options (db_opts) for a test database that we created."""
+    if (
+        options.oracledb_thick_mode
+        and sa_url.make_url(db_url).get_driver_name() == "oracledb"
+    ):
+        db_opts["thick_mode"] = True
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/types.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/types.py
new file mode 100644
index 00000000..36caaa05
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/oracle/types.py
@@ -0,0 +1,287 @@
+# dialects/oracle/types.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+from __future__ import annotations
+
+import datetime as dt
+from typing import Optional
+from typing import Type
+from typing import TYPE_CHECKING
+
+from ... import exc
+from ...sql import sqltypes
+from ...types import NVARCHAR
+from ...types import VARCHAR
+
+if TYPE_CHECKING:
+    from ...engine.interfaces import Dialect
+    from ...sql.type_api import _LiteralProcessorType
+
+
+class RAW(sqltypes._Binary):
+    __visit_name__ = "RAW"
+
+
+OracleRaw = RAW
+
+
+class NCLOB(sqltypes.Text):
+    __visit_name__ = "NCLOB"
+
+
+class VARCHAR2(VARCHAR):
+    __visit_name__ = "VARCHAR2"
+
+
+NVARCHAR2 = NVARCHAR
+
+
+class NUMBER(sqltypes.Numeric, sqltypes.Integer):
+    __visit_name__ = "NUMBER"
+
+    def __init__(self, precision=None, scale=None, asdecimal=None):
+        if asdecimal is None:
+            asdecimal = bool(scale and scale > 0)
+
+        super().__init__(
+            precision=precision, scale=scale, asdecimal=asdecimal
+        )
+
+    def adapt(self, impltype):
+        ret = super().adapt(impltype)
+        # leave a hint for the DBAPI handler
+        ret._is_oracle_number = True
+        return ret
+
+    @property
+    def _type_affinity(self):
+        if bool(self.scale and self.scale > 0):
+            return sqltypes.Numeric
+        else:
+            return sqltypes.Integer
+
+
+class FLOAT(sqltypes.FLOAT):
+    """Oracle FLOAT.
+
+    This is the same as :class:`_sqltypes.FLOAT` except that
+    an Oracle-specific :paramref:`_oracle.FLOAT.binary_precision`
+    parameter is accepted, and
+    the :paramref:`_sqltypes.Float.precision` parameter is not accepted.
+
+    Oracle FLOAT types indicate precision in terms of "binary precision",
+    which defaults to 126. For a REAL type, the value is 63. This parameter
+    does not cleanly map to a specific number of decimal places but is
+    roughly equivalent to the desired number of decimal places divided by
+    0.30103.
+
+    .. versionadded:: 2.0
+
+    """
+
+    __visit_name__ = "FLOAT"
+
+    def __init__(
+        self,
+        binary_precision=None,
+        asdecimal=False,
+        decimal_return_scale=None,
+    ):
+        r"""
+        Construct a FLOAT
+
+        :param binary_precision: Oracle binary precision value to be rendered
+         in DDL. This may be approximated to the number of decimal characters
+         using the formula "decimal precision = 0.30103 * binary precision".
+         The default value used by Oracle for FLOAT / DOUBLE PRECISION is 126.
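+         For example, the default binary precision of 126 corresponds to
+         approximately 126 * 0.30103, i.e. about 38 decimal digits.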
+ + :param asdecimal: See :paramref:`_sqltypes.Float.asdecimal` + + :param decimal_return_scale: See + :paramref:`_sqltypes.Float.decimal_return_scale` + + """ + super().__init__( + asdecimal=asdecimal, decimal_return_scale=decimal_return_scale + ) + self.binary_precision = binary_precision + + +class BINARY_DOUBLE(sqltypes.Double): + __visit_name__ = "BINARY_DOUBLE" + + +class BINARY_FLOAT(sqltypes.Float): + __visit_name__ = "BINARY_FLOAT" + + +class BFILE(sqltypes.LargeBinary): + __visit_name__ = "BFILE" + + +class LONG(sqltypes.Text): + __visit_name__ = "LONG" + + +class _OracleDateLiteralRender: + def _literal_processor_datetime(self, dialect): + def process(value): + if getattr(value, "microsecond", None): + value = ( + f"""TO_TIMESTAMP""" + f"""('{value.isoformat().replace("T", " ")}', """ + """'YYYY-MM-DD HH24:MI:SS.FF')""" + ) + else: + value = ( + f"""TO_DATE""" + f"""('{value.isoformat().replace("T", " ")}', """ + """'YYYY-MM-DD HH24:MI:SS')""" + ) + return value + + return process + + def _literal_processor_date(self, dialect): + def process(value): + if getattr(value, "microsecond", None): + value = ( + f"""TO_TIMESTAMP""" + f"""('{value.isoformat().split("T")[0]}', """ + """'YYYY-MM-DD')""" + ) + else: + value = ( + f"""TO_DATE""" + f"""('{value.isoformat().split("T")[0]}', """ + """'YYYY-MM-DD')""" + ) + return value + + return process + + +class DATE(_OracleDateLiteralRender, sqltypes.DateTime): + """Provide the oracle DATE type. + + This type has no special Python behavior, except that it subclasses + :class:`_types.DateTime`; this is to suit the fact that the Oracle + ``DATE`` type supports a time value. + + """ + + __visit_name__ = "DATE" + + def literal_processor(self, dialect): + return self._literal_processor_datetime(dialect) + + def _compare_type_affinity(self, other): + return other._type_affinity in (sqltypes.DateTime, sqltypes.Date) + + +class _OracleDate(_OracleDateLiteralRender, sqltypes.Date): + def literal_processor(self, dialect): + return self._literal_processor_date(dialect) + + +class INTERVAL(sqltypes.NativeForEmulated, sqltypes._AbstractInterval): + __visit_name__ = "INTERVAL" + + def __init__(self, day_precision=None, second_precision=None): + """Construct an INTERVAL. + + Note that only DAY TO SECOND intervals are currently supported. + This is due to a lack of support for YEAR TO MONTH intervals + within available DBAPIs. + + :param day_precision: the day precision value. this is the number of + digits to store for the day field. Defaults to "2" + :param second_precision: the second precision value. this is the + number of digits to store for the fractional seconds field. + Defaults to "6". 
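+
+        E.g., a DAY TO SECOND interval column (the table and column names
+        here are illustrative only)::
+
+            from sqlalchemy import Column, Integer, MetaData, Table
+            from sqlalchemy.dialects.oracle import INTERVAL
+
+            events = Table(
+                "events",
+                MetaData(),
+                Column("id", Integer, primary_key=True),
+                Column(
+                    "duration",
+                    INTERVAL(day_precision=2, second_precision=6),
+                ),
+            )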
+ + """ + self.day_precision = day_precision + self.second_precision = second_precision + + @classmethod + def _adapt_from_generic_interval(cls, interval): + return INTERVAL( + day_precision=interval.day_precision, + second_precision=interval.second_precision, + ) + + @classmethod + def adapt_emulated_to_native( + cls, interval: sqltypes.Interval, **kw # type: ignore[override] + ): + return INTERVAL( + day_precision=interval.day_precision, + second_precision=interval.second_precision, + ) + + @property + def _type_affinity(self): + return sqltypes.Interval + + def as_generic(self, allow_nulltype=False): + return sqltypes.Interval( + native=True, + second_precision=self.second_precision, + day_precision=self.day_precision, + ) + + @property + def python_type(self) -> Type[dt.timedelta]: + return dt.timedelta + + def literal_processor( + self, dialect: Dialect + ) -> Optional[_LiteralProcessorType[dt.timedelta]]: + def process(value: dt.timedelta) -> str: + return f"NUMTODSINTERVAL({value.total_seconds()}, 'SECOND')" + + return process + + +class TIMESTAMP(sqltypes.TIMESTAMP): + """Oracle implementation of ``TIMESTAMP``, which supports additional + Oracle-specific modes + + .. versionadded:: 2.0 + + """ + + def __init__(self, timezone: bool = False, local_timezone: bool = False): + """Construct a new :class:`_oracle.TIMESTAMP`. + + :param timezone: boolean. Indicates that the TIMESTAMP type should + use Oracle's ``TIMESTAMP WITH TIME ZONE`` datatype. + + :param local_timezone: boolean. Indicates that the TIMESTAMP type + should use Oracle's ``TIMESTAMP WITH LOCAL TIME ZONE`` datatype. + + + """ + if timezone and local_timezone: + raise exc.ArgumentError( + "timezone and local_timezone are mutually exclusive" + ) + super().__init__(timezone=timezone) + self.local_timezone = local_timezone + + +class ROWID(sqltypes.TypeEngine): + """Oracle ROWID type. + + When used in a cast() or similar, generates ROWID. + + """ + + __visit_name__ = "ROWID" + + +class _OracleBoolean(sqltypes.Boolean): + def get_dbapi_type(self, dbapi): + return dbapi.NUMBER diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__init__.py new file mode 100644 index 00000000..325ea886 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__init__.py @@ -0,0 +1,167 @@ +# dialects/postgresql/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +from types import ModuleType + +from . import array as arraylib # noqa # keep above base and other dialects +from . import asyncpg # noqa +from . import base +from . import pg8000 # noqa +from . import psycopg # noqa +from . import psycopg2 # noqa +from . 
import psycopg2cffi # noqa +from .array import All +from .array import Any +from .array import ARRAY +from .array import array +from .base import BIGINT +from .base import BOOLEAN +from .base import CHAR +from .base import DATE +from .base import DOMAIN +from .base import DOUBLE_PRECISION +from .base import FLOAT +from .base import INTEGER +from .base import NUMERIC +from .base import REAL +from .base import SMALLINT +from .base import TEXT +from .base import UUID +from .base import VARCHAR +from .dml import Insert +from .dml import insert +from .ext import aggregate_order_by +from .ext import array_agg +from .ext import ExcludeConstraint +from .ext import phraseto_tsquery +from .ext import plainto_tsquery +from .ext import to_tsquery +from .ext import to_tsvector +from .ext import ts_headline +from .ext import websearch_to_tsquery +from .hstore import HSTORE +from .hstore import hstore +from .json import JSON +from .json import JSONB +from .json import JSONPATH +from .named_types import CreateDomainType +from .named_types import CreateEnumType +from .named_types import DropDomainType +from .named_types import DropEnumType +from .named_types import ENUM +from .named_types import NamedType +from .ranges import AbstractMultiRange +from .ranges import AbstractRange +from .ranges import AbstractSingleRange +from .ranges import DATEMULTIRANGE +from .ranges import DATERANGE +from .ranges import INT4MULTIRANGE +from .ranges import INT4RANGE +from .ranges import INT8MULTIRANGE +from .ranges import INT8RANGE +from .ranges import MultiRange +from .ranges import NUMMULTIRANGE +from .ranges import NUMRANGE +from .ranges import Range +from .ranges import TSMULTIRANGE +from .ranges import TSRANGE +from .ranges import TSTZMULTIRANGE +from .ranges import TSTZRANGE +from .types import BIT +from .types import BYTEA +from .types import CIDR +from .types import CITEXT +from .types import INET +from .types import INTERVAL +from .types import MACADDR +from .types import MACADDR8 +from .types import MONEY +from .types import OID +from .types import REGCLASS +from .types import REGCONFIG +from .types import TIME +from .types import TIMESTAMP +from .types import TSQUERY +from .types import TSVECTOR + + +# Alias psycopg also as psycopg_async +psycopg_async = type( + "psycopg_async", (ModuleType,), {"dialect": psycopg.dialect_async} +) + +base.dialect = dialect = psycopg2.dialect + + +__all__ = ( + "INTEGER", + "BIGINT", + "SMALLINT", + "VARCHAR", + "CHAR", + "TEXT", + "NUMERIC", + "FLOAT", + "REAL", + "INET", + "CIDR", + "CITEXT", + "UUID", + "BIT", + "MACADDR", + "MACADDR8", + "MONEY", + "OID", + "REGCLASS", + "REGCONFIG", + "TSQUERY", + "TSVECTOR", + "DOUBLE_PRECISION", + "TIMESTAMP", + "TIME", + "DATE", + "BYTEA", + "BOOLEAN", + "INTERVAL", + "ARRAY", + "ENUM", + "DOMAIN", + "dialect", + "array", + "HSTORE", + "hstore", + "INT4RANGE", + "INT8RANGE", + "NUMRANGE", + "DATERANGE", + "INT4MULTIRANGE", + "INT8MULTIRANGE", + "NUMMULTIRANGE", + "DATEMULTIRANGE", + "TSVECTOR", + "TSRANGE", + "TSTZRANGE", + "TSMULTIRANGE", + "TSTZMULTIRANGE", + "JSON", + "JSONB", + "JSONPATH", + "Any", + "All", + "DropEnumType", + "DropDomainType", + "CreateDomainType", + "NamedType", + "CreateEnumType", + "ExcludeConstraint", + "Range", + "aggregate_order_by", + "array_agg", + "insert", + "Insert", +) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-312.pyc new file mode 
100644 index 00000000..19321a8b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/_psycopg_common.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/_psycopg_common.cpython-312.pyc new file mode 100644 index 00000000..eae2dd43 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/_psycopg_common.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/array.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/array.cpython-312.pyc new file mode 100644 index 00000000..868cf097 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/array.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-312.pyc new file mode 100644 index 00000000..dedab63a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/base.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/base.cpython-312.pyc new file mode 100644 index 00000000..e43ea1db Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/base.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-312.pyc new file mode 100644 index 00000000..f8f9c001 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-312.pyc new file mode 100644 index 00000000..ffd352f2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-312.pyc new file mode 100644 index 00000000..d4abe2d5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc new file mode 100644 index 00000000..6d9ccfcc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc new file mode 100644 index 00000000..7d80d6aa Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc new file mode 100644 index 00000000..e87b2573 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc new file mode 100644 index 00000000..8e456a33 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc new file mode 100644 index 00000000..5695df2a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-312.pyc new file mode 100644 index 00000000..96c226f1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc new file mode 100644 index 00000000..c3ebf098 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc new file mode 100644 index 00000000..d6acbb20 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc new file mode 100644 index 00000000..3f954c2a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-312.pyc new file mode 100644 index 00000000..49e56733 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-312.pyc differ 
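An aside on the ``psycopg_async = type(...)`` statement in the ``__init__.py`` hunk above: building a class with ``ModuleType`` as its base yields a module-like object whose ``dialect`` attribute the plugin lookup can apparently resolve just as it would for a real submodule, so no separate ``psycopg_async.py`` file is needed. A minimal, self-contained sketch of the same trick; the names ``demo_async`` and ``_DemoDialect`` are illustrative only, not part of SQLAlchemy::

    from types import ModuleType

    class _DemoDialect:
        """Stand-in for a dialect class such as psycopg.dialect_async."""
        name = "demo+async"

    # type() with a ModuleType base builds a module-like object whose
    # class attribute "dialect" can point at any entrypoint we choose
    demo_async = type("demo_async", (ModuleType,), {"dialect": _DemoDialect})

    print(demo_async.dialect.name)  # -> demo+async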
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/types.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/types.cpython-312.pyc new file mode 100644 index 00000000..ac039257 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/types.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/_psycopg_common.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/_psycopg_common.py new file mode 100644 index 00000000..46858c9f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/_psycopg_common.py @@ -0,0 +1,187 @@ +# dialects/postgresql/_psycopg_common.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors +from __future__ import annotations + +import decimal + +from .array import ARRAY as PGARRAY +from .base import _DECIMAL_TYPES +from .base import _FLOAT_TYPES +from .base import _INT_TYPES +from .base import PGDialect +from .base import PGExecutionContext +from .hstore import HSTORE +from .pg_catalog import _SpaceVector +from .pg_catalog import INT2VECTOR +from .pg_catalog import OIDVECTOR +from ... import exc +from ... import types as sqltypes +from ... import util +from ...engine import processors + +_server_side_id = util.counter() + + +class _PsycopgNumeric(sqltypes.Numeric): + def bind_processor(self, dialect): + return None + + def result_processor(self, dialect, coltype): + if self.asdecimal: + if coltype in _FLOAT_TYPES: + return processors.to_decimal_processor_factory( + decimal.Decimal, self._effective_decimal_return_scale + ) + elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES: + # psycopg returns Decimal natively for 1700 + return None + else: + raise exc.InvalidRequestError( + "Unknown PG numeric type: %d" % coltype + ) + else: + if coltype in _FLOAT_TYPES: + # psycopg returns float natively for 701 + return None + elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES: + return processors.to_float + else: + raise exc.InvalidRequestError( + "Unknown PG numeric type: %d" % coltype + ) + + +class _PsycopgFloat(_PsycopgNumeric): + __visit_name__ = "float" + + +class _PsycopgHStore(HSTORE): + def bind_processor(self, dialect): + if dialect._has_native_hstore: + return None + else: + return super().bind_processor(dialect) + + def result_processor(self, dialect, coltype): + if dialect._has_native_hstore: + return None + else: + return super().result_processor(dialect, coltype) + + +class _PsycopgARRAY(PGARRAY): + render_bind_cast = True + + +class _PsycopgINT2VECTOR(_SpaceVector, INT2VECTOR): + pass + + +class _PsycopgOIDVECTOR(_SpaceVector, OIDVECTOR): + pass + + +class _PGExecutionContext_common_psycopg(PGExecutionContext): + def create_server_side_cursor(self): + # use server-side cursors: + # psycopg + # https://www.psycopg.org/psycopg3/docs/advanced/cursors.html#server-side-cursors + # psycopg2 + # https://www.psycopg.org/docs/usage.html#server-side-cursors + ident = "c_%s_%s" % (hex(id(self))[2:], hex(_server_side_id())[2:]) + return self._dbapi_connection.cursor(ident) + + +class _PGDialect_common_psycopg(PGDialect): + supports_statement_cache = True + supports_server_side_cursors = True + + default_paramstyle = "pyformat" + + _has_native_hstore = True + + colspecs = 
util.update_copy( + PGDialect.colspecs, + { + sqltypes.Numeric: _PsycopgNumeric, + sqltypes.Float: _PsycopgFloat, + HSTORE: _PsycopgHStore, + sqltypes.ARRAY: _PsycopgARRAY, + INT2VECTOR: _PsycopgINT2VECTOR, + OIDVECTOR: _PsycopgOIDVECTOR, + }, + ) + + def __init__( + self, + client_encoding=None, + use_native_hstore=True, + **kwargs, + ): + PGDialect.__init__(self, **kwargs) + if not use_native_hstore: + self._has_native_hstore = False + self.use_native_hstore = use_native_hstore + self.client_encoding = client_encoding + + def create_connect_args(self, url): + opts = url.translate_connect_args(username="user", database="dbname") + + multihosts, multiports = self._split_multihost_from_url(url) + + if opts or url.query: + if not opts: + opts = {} + if "port" in opts: + opts["port"] = int(opts["port"]) + opts.update(url.query) + + if multihosts: + opts["host"] = ",".join(multihosts) + comma_ports = ",".join(str(p) if p else "" for p in multiports) + if comma_ports: + opts["port"] = comma_ports + return ([], opts) + else: + # no connection arguments whatsoever; psycopg2.connect() + # requires that "dsn" be present as a blank string. + return ([""], opts) + + def get_isolation_level_values(self, dbapi_connection): + return ( + "AUTOCOMMIT", + "READ COMMITTED", + "READ UNCOMMITTED", + "REPEATABLE READ", + "SERIALIZABLE", + ) + + def set_deferrable(self, connection, value): + connection.deferrable = value + + def get_deferrable(self, connection): + return connection.deferrable + + def _do_autocommit(self, connection, value): + connection.autocommit = value + + def do_ping(self, dbapi_connection): + cursor = None + before_autocommit = dbapi_connection.autocommit + + if not before_autocommit: + dbapi_connection.autocommit = True + cursor = dbapi_connection.cursor() + try: + cursor.execute(self._dialect_specific_select_one) + finally: + cursor.close() + if not before_autocommit and not dbapi_connection.closed: + dbapi_connection.autocommit = before_autocommit + + return True diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/array.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/array.py new file mode 100644 index 00000000..1d63655e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/array.py @@ -0,0 +1,425 @@ +# dialects/postgresql/array.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +from __future__ import annotations + +import re +from typing import Any +from typing import Optional +from typing import TypeVar + +from .operators import CONTAINED_BY +from .operators import CONTAINS +from .operators import OVERLAP +from ... import types as sqltypes +from ... import util +from ...sql import expression +from ...sql import operators +from ...sql._typing import _TypeEngineArgument + + +_T = TypeVar("_T", bound=Any) + + +def Any(other, arrexpr, operator=operators.eq): + """A synonym for the ARRAY-level :meth:`.ARRAY.Comparator.any` method. + See that method for details. + + """ + + return arrexpr.any(other, operator) + + +def All(other, arrexpr, operator=operators.eq): + """A synonym for the ARRAY-level :meth:`.ARRAY.Comparator.all` method. + See that method for details. + + """ + + return arrexpr.all(other, operator) + + +class array(expression.ExpressionClauseList[_T]): + """A PostgreSQL ARRAY literal. 
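As a concrete illustration of the ``Any`` / ``All`` synonyms defined in ``array.py`` above, which simply delegate to the comparator methods (the table and column names here are hypothetical; only a local SQLAlchemy install is assumed)::

    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy.dialects.postgresql import All, Any, ARRAY

    t = Table("mytable", MetaData(), Column("data", ARRAY(Integer)))

    # Any(x, arr) is shorthand for arr.any(x); likewise for All
    print(Any(5, t.c.data))  # :param_1 = ANY (mytable.data)
    print(All(5, t.c.data))  # :param_1 = ALL (mytable.data)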
+ + This is used to produce ARRAY literals in SQL expressions, e.g.:: + + from sqlalchemy.dialects.postgresql import array + from sqlalchemy.dialects import postgresql + from sqlalchemy import select, func + + stmt = select(array([1,2]) + array([3,4,5])) + + print(stmt.compile(dialect=postgresql.dialect())) + + Produces the SQL:: + + SELECT ARRAY[%(param_1)s, %(param_2)s] || + ARRAY[%(param_3)s, %(param_4)s, %(param_5)s] AS anon_1 + + An instance of :class:`.array` will always have the datatype + :class:`_types.ARRAY`. The "inner" type of the array is inferred from + the values present, unless the ``type_`` keyword argument is passed:: + + array(['foo', 'bar'], type_=CHAR) + + Multidimensional arrays are produced by nesting :class:`.array` constructs. + The dimensionality of the final :class:`_types.ARRAY` + type is calculated by + recursively adding the dimensions of the inner :class:`_types.ARRAY` + type:: + + stmt = select( + array([ + array([1, 2]), array([3, 4]), array([column('q'), column('x')]) + ]) + ) + print(stmt.compile(dialect=postgresql.dialect())) + + Produces:: + + SELECT ARRAY[ARRAY[%(param_1)s, %(param_2)s], + ARRAY[%(param_3)s, %(param_4)s], ARRAY[q, x]] AS anon_1 + + .. versionadded:: 1.3.6 added support for multidimensional array literals + + .. seealso:: + + :class:`_postgresql.ARRAY` + + """ + + __visit_name__ = "array" + + stringify_dialect = "postgresql" + inherit_cache = True + + def __init__(self, clauses, **kw): + type_arg = kw.pop("type_", None) + super().__init__(operators.comma_op, *clauses, **kw) + + self._type_tuple = [arg.type for arg in self.clauses] + + main_type = ( + type_arg + if type_arg is not None + else self._type_tuple[0] if self._type_tuple else sqltypes.NULLTYPE + ) + + if isinstance(main_type, ARRAY): + self.type = ARRAY( + main_type.item_type, + dimensions=( + main_type.dimensions + 1 + if main_type.dimensions is not None + else 2 + ), + ) + else: + self.type = ARRAY(main_type) + + @property + def _select_iterable(self): + return (self,) + + def _bind_param(self, operator, obj, _assume_scalar=False, type_=None): + if _assume_scalar or operator is operators.getitem: + return expression.BindParameter( + None, + obj, + _compared_to_operator=operator, + type_=type_, + _compared_to_type=self.type, + unique=True, + ) + + else: + return array( + [ + self._bind_param( + operator, o, _assume_scalar=True, type_=type_ + ) + for o in obj + ] + ) + + def self_group(self, against=None): + if against in (operators.any_op, operators.all_op, operators.getitem): + return expression.Grouping(self) + else: + return self + + +class ARRAY(sqltypes.ARRAY): + """PostgreSQL ARRAY type. + + The :class:`_postgresql.ARRAY` type is constructed in the same way + as the core :class:`_types.ARRAY` type; a member type is required, and a + number of dimensions is recommended if the type is to be used for more + than one dimension:: + + from sqlalchemy.dialects import postgresql + + mytable = Table("mytable", metadata, + Column("data", postgresql.ARRAY(Integer, dimensions=2)) + ) + + The :class:`_postgresql.ARRAY` type provides all operations defined on the + core :class:`_types.ARRAY` type, including support for "dimensions", + indexed access, and simple matching such as + :meth:`.types.ARRAY.Comparator.any` and + :meth:`.types.ARRAY.Comparator.all`.
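Returning briefly to the dimension-inference branch in ``array.__init__`` shown above, its effect can be observed directly; a short sketch assuming only a local SQLAlchemy install::

    from sqlalchemy import select
    from sqlalchemy.dialects import postgresql
    from sqlalchemy.dialects.postgresql import array

    expr = array([array([1, 2]), array([3, 4])])

    # nesting one level of array() produces ARRAY(Integer(), dimensions=2)
    print(expr.type.dimensions)  # -> 2
    print(select(expr).compile(dialect=postgresql.dialect()))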
The :class:`_postgresql.ARRAY` + class also + provides PostgreSQL-specific methods for containment operations, including + :meth:`.postgresql.ARRAY.Comparator.contains`, + :meth:`.postgresql.ARRAY.Comparator.contained_by`, and + :meth:`.postgresql.ARRAY.Comparator.overlap`, e.g.:: + + mytable.c.data.contains([1, 2]) + + Indexed access is one-based by default, to match that of PostgreSQL; + for zero-based indexed access, set + :paramref:`_postgresql.ARRAY.zero_indexes`. + + Additionally, the :class:`_postgresql.ARRAY` + type does not work directly in + conjunction with the :class:`.ENUM` type. For a workaround, see the + special type at :ref:`postgresql_array_of_enum`. + + .. container:: topic + + **Detecting Changes in ARRAY columns when using the ORM** + + The :class:`_postgresql.ARRAY` type, when used with the SQLAlchemy ORM, + does not detect in-place mutations to the array. In order to detect + these, the :mod:`sqlalchemy.ext.mutable` extension must be used, using + the :class:`.MutableList` class:: + + from sqlalchemy.dialects.postgresql import ARRAY + from sqlalchemy.ext.mutable import MutableList + + class SomeOrmClass(Base): + # ... + + data = Column(MutableList.as_mutable(ARRAY(Integer))) + + This extension will allow "in-place" changes to the array + such as ``.append()`` to produce events which will be detected by the + unit of work. Note that changes to elements **inside** the array, + including subarrays that are mutated in place, are **not** detected. + + Alternatively, assigning a new array value to an ORM element that + replaces the old one will always trigger a change event. + + .. seealso:: + + :class:`_types.ARRAY` - base array type + + :class:`_postgresql.array` - produces a literal array value. + + """ + + def __init__( + self, + item_type: _TypeEngineArgument[Any], + as_tuple: bool = False, + dimensions: Optional[int] = None, + zero_indexes: bool = False, + ): + """Construct an ARRAY. + + E.g.:: + + Column('myarray', ARRAY(Integer)) + + Arguments are: + + :param item_type: The data type of items of this array. Note that + dimensionality is irrelevant here, so multi-dimensional arrays like + ``INTEGER[][]`` are constructed as ``ARRAY(Integer)``, not as + ``ARRAY(ARRAY(Integer))`` or such. + + :param as_tuple=False: Specify whether return results + should be converted to tuples from lists. DBAPIs such + as psycopg2 return lists by default. When tuples are + returned, the results are hashable. + + :param dimensions: if non-None, the ARRAY will assume a fixed + number of dimensions. This will cause the DDL emitted for this + ARRAY to include the exact number of bracket clauses ``[]``, + and will also optimize the performance of the type overall. + Note that PG arrays are always implicitly "non-dimensioned", + meaning they can store any number of dimensions no matter how + they were declared. + + :param zero_indexes=False: when True, index values will be converted + between Python zero-based and PostgreSQL one-based indexes, e.g. + a value of one will be added to all index values before passing + to the database. + + """ + if isinstance(item_type, ARRAY): + raise ValueError( + "Do not nest ARRAY types; ARRAY(basetype) " + "handles multi-dimensional arrays of basetype" + ) + if isinstance(item_type, type): + item_type = item_type() + self.item_type = item_type + self.as_tuple = as_tuple + self.dimensions = dimensions + self.zero_indexes = zero_indexes + + class Comparator(sqltypes.ARRAY.Comparator): + """Define comparison operations for :class:`_types.ARRAY`.
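The containment methods described above correspond to the PostgreSQL ``@>``, ``<@`` and ``&&`` operators; a brief sketch using default string compilation (table and column names hypothetical)::

    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy.dialects.postgresql import ARRAY

    t = Table("mytable", MetaData(), Column("data", ARRAY(Integer)))

    print(t.c.data.contains([1, 2]))      # mytable.data @> :data_1
    print(t.c.data.contained_by([1, 2]))  # mytable.data <@ :data_1
    print(t.c.data.overlap([1, 2]))       # mytable.data && :data_1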
+ + Note that these operations are in addition + to those provided + by the base :class:`.types.ARRAY.Comparator` class, including + :meth:`.types.ARRAY.Comparator.any` and + :meth:`.types.ARRAY.Comparator.all`. + + """ + + def contains(self, other, **kwargs): + """Boolean expression. Test if elements are a superset of the + elements of the argument array expression. + + kwargs may be ignored by this operator but are required for API + conformance. + """ + return self.operate(CONTAINS, other, result_type=sqltypes.Boolean) + + def contained_by(self, other): + """Boolean expression. Test if elements are a proper subset of the + elements of the argument array expression. + """ + return self.operate( + CONTAINED_BY, other, result_type=sqltypes.Boolean + ) + + def overlap(self, other): + """Boolean expression. Test if array has elements in common with + an argument array expression. + """ + return self.operate(OVERLAP, other, result_type=sqltypes.Boolean) + + comparator_factory = Comparator + + @property + def hashable(self): + return self.as_tuple + + @property + def python_type(self): + return list + + def compare_values(self, x, y): + return x == y + + @util.memoized_property + def _against_native_enum(self): + return ( + isinstance(self.item_type, sqltypes.Enum) + and self.item_type.native_enum + ) + + def literal_processor(self, dialect): + item_proc = self.item_type.dialect_impl(dialect).literal_processor( + dialect + ) + if item_proc is None: + return None + + def to_str(elements): + return f"ARRAY[{', '.join(elements)}]" + + def process(value): + inner = self._apply_item_processor( + value, item_proc, self.dimensions, to_str + ) + return inner + + return process + + def bind_processor(self, dialect): + item_proc = self.item_type.dialect_impl(dialect).bind_processor( + dialect + ) + + def process(value): + if value is None: + return value + else: + return self._apply_item_processor( + value, item_proc, self.dimensions, list + ) + + return process + + def result_processor(self, dialect, coltype): + item_proc = self.item_type.dialect_impl(dialect).result_processor( + dialect, coltype + ) + + def process(value): + if value is None: + return value + else: + return self._apply_item_processor( + value, + item_proc, + self.dimensions, + tuple if self.as_tuple else list, + ) + + if self._against_native_enum: + super_rp = process + pattern = re.compile(r"^{(.*)}$") + + def handle_raw_string(value): + inner = pattern.match(value).group(1) + return _split_enum_values(inner) + + def process(value): + if value is None: + return value + # isinstance(value, str) is required to handle + # the case where a TypeDecorator for an Array of Enum is + # used, as was required in sa < 1.3.17 + return super_rp( + handle_raw_string(value) + if isinstance(value, str) + else value + ) + + return process + + +def _split_enum_values(array_string): + if '"' not in array_string: + # no escape char is present so it can just split on the comma + return array_string.split(",") if array_string else [] + + # handles quoted strings from: + # r'abc,"quoted","also\\\\quoted", "quoted, comma", "esc \" quot", qpr' + # returns + # ['abc', 'quoted', 'also\\quoted', 'quoted, comma', 'esc " quot', 'qpr'] + text = array_string.replace(r"\"", "_$ESC_QUOTE$_") + text = text.replace(r"\\", "\\") + result = [] + on_quotes = re.split(r'(")', text) + in_quotes = False + for tok in on_quotes: + if tok == '"': + in_quotes = not in_quotes + elif in_quotes: + result.append(tok.replace("_$ESC_QUOTE$_", '"')) + else: +
result.extend(re.findall(r"([^\s,]+),?", tok)) + return result diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/asyncpg.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/asyncpg.py new file mode 100644 index 00000000..53ebe603 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/asyncpg.py @@ -0,0 +1,1278 @@ +# dialects/postgresql/asyncpg.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +r""" +.. dialect:: postgresql+asyncpg + :name: asyncpg + :dbapi: asyncpg + :connectstring: postgresql+asyncpg://user:password@host:port/dbname[?key=value&key=value...] + :url: https://magicstack.github.io/asyncpg/ + +The asyncpg dialect is SQLAlchemy's first Python asyncio dialect. + +Using a special asyncio mediation layer, the asyncpg dialect is usable +as the backend for the :ref:`SQLAlchemy asyncio <asyncio_toplevel>` +extension package. + +This dialect should normally be used only with the +:func:`_asyncio.create_async_engine` engine creation function:: + + from sqlalchemy.ext.asyncio import create_async_engine + engine = create_async_engine("postgresql+asyncpg://user:pass@hostname/dbname") + +.. versionadded:: 1.4 + +.. note:: + + By default asyncpg does not decode the ``json`` and ``jsonb`` types and + returns them as strings. SQLAlchemy sets a default type decoder for ``json`` + and ``jsonb`` types using the python builtin ``json.loads`` function. + The json implementation used can be changed by setting the attribute + ``json_deserializer`` when creating the engine with + :func:`create_engine` or :func:`create_async_engine`. + +.. _asyncpg_multihost: + +Multihost Connections +-------------------------- + +The asyncpg dialect features support for multiple fallback hosts in the +same way as that of the psycopg2 and psycopg dialects. The +syntax is the same, +using ``host=<host>:<port>`` combinations as additional query string arguments; +however, there is no default port, so all hosts must have a complete port number +present, otherwise an exception is raised:: + + engine = create_async_engine( + "postgresql+asyncpg://user:password@/dbname?host=HostA:5432&host=HostB:5432&host=HostC:5432" + ) + +For complete background on this syntax, see :ref:`psycopg2_multi_host`. + +.. versionadded:: 2.0.18 + +.. seealso:: + + :ref:`psycopg2_multi_host` + +.. _asyncpg_prepared_statement_cache: + +Prepared Statement Cache +-------------------------- + +The asyncpg SQLAlchemy dialect makes use of ``asyncpg.connection.prepare()`` +for all statements. The prepared statement objects are cached after +construction which appears to grant a 10% or more performance improvement for +statement invocation. The cache is on a per-DBAPI connection basis, which +means that the primary storage for prepared statements is within DBAPI +connections pooled within the connection pool.
The size of this cache +defaults to 100 statements per DBAPI connection and may be adjusted using the +``prepared_statement_cache_size`` DBAPI argument (note that while this argument +is implemented by SQLAlchemy, it is part of the DBAPI emulation portion of the +asyncpg dialect, and is therefore handled as a DBAPI argument, not a dialect +argument):: + + + engine = create_async_engine("postgresql+asyncpg://user:pass@hostname/dbname?prepared_statement_cache_size=500") + +To disable the prepared statement cache, use a value of zero:: + + engine = create_async_engine("postgresql+asyncpg://user:pass@hostname/dbname?prepared_statement_cache_size=0") + +.. versionadded:: 1.4.0b2 Added ``prepared_statement_cache_size`` for asyncpg. + + +.. warning:: The ``asyncpg`` database driver necessarily uses caches for + PostgreSQL type OIDs, which become stale when custom PostgreSQL datatypes + such as ``ENUM`` objects are changed via DDL operations. Additionally, + prepared statements themselves which are optionally cached by SQLAlchemy's + driver as described above may also become "stale" when DDL has been emitted + to the PostgreSQL database which modifies the tables or other objects + involved in a particular prepared statement. + + The SQLAlchemy asyncpg dialect will invalidate these caches within its local + process when statements that represent DDL are emitted on a local + connection, but this is only controllable within a single Python process / + database engine. If DDL changes are made from other database engines + and/or processes, a running application may encounter asyncpg exceptions + ``InvalidCachedStatementError`` and/or ``InternalServerError("cache lookup + failed for type <oid>")`` if it refers to pooled database connections which + operated upon the previous structures. The SQLAlchemy asyncpg dialect will + recover from these error cases when the driver raises these exceptions by + clearing its internal caches as well as those of the asyncpg driver in + response to them, but cannot prevent them from being raised in the first + place if the cached prepared statement or asyncpg type caches have gone + stale, nor can it retry the statement as the PostgreSQL transaction is + invalidated when these errors occur. + +.. _asyncpg_prepared_statement_name: + +Prepared Statement Name with PGBouncer +-------------------------------------- + +By default, asyncpg enumerates prepared statements in numeric order, which +can lead to errors if a name has already been taken for another prepared +statement. This issue can arise if your application uses database proxies +such as PgBouncer to handle connections. One possible workaround is to +use dynamic prepared statement names, which asyncpg now supports through +an optional ``name`` value for the statement name. This allows you to +generate your own unique names that won't conflict with existing ones. +To achieve this, you can provide a function that will be called every time +a prepared statement is prepared:: + + from uuid import uuid4 + + engine = create_async_engine( + "postgresql+asyncpg://user:pass@somepgbouncer/dbname", + poolclass=NullPool, + connect_args={ + 'prepared_statement_name_func': lambda: f'__asyncpg_{uuid4()}__', + }, + ) + +.. seealso:: + + https://github.com/MagicStack/asyncpg/issues/837 + + https://github.com/sqlalchemy/sqlalchemy/issues/6467 + +..
warning:: When using PGBouncer, to prevent a buildup of useless prepared statements in + your application, it's important to use the :class:`.NullPool` pool + class, and to configure PgBouncer to use `DISCARD <https://www.postgresql.org/docs/current/sql-discard.html>`_ + when returning connections. The DISCARD command is used to release resources held by the db connection, + including prepared statements. Without proper setup, prepared statements can + accumulate quickly and cause performance issues. + +Disabling the PostgreSQL JIT to improve ENUM datatype handling +--------------------------------------------------------------- + +Asyncpg has an `issue <https://github.com/MagicStack/asyncpg/issues/727>`_ when +using PostgreSQL ENUM datatypes, where upon the creation of new database +connections, an expensive query may be emitted in order to retrieve metadata +regarding custom types which has been shown to negatively affect performance. +To mitigate this issue, the PostgreSQL "jit" setting may be disabled from the +client using this setting passed to :func:`_asyncio.create_async_engine`:: + + engine = create_async_engine( + "postgresql+asyncpg://user:password@localhost/tmp", + connect_args={"server_settings": {"jit": "off"}}, + ) + +.. seealso:: + + https://github.com/MagicStack/asyncpg/issues/727 + +""" # noqa + +from __future__ import annotations + +from collections import deque +import decimal +import json as _py_json +import re +import time + +from . import json +from . import ranges +from .array import ARRAY as PGARRAY +from .base import _DECIMAL_TYPES +from .base import _FLOAT_TYPES +from .base import _INT_TYPES +from .base import ENUM +from .base import INTERVAL +from .base import OID +from .base import PGCompiler +from .base import PGDialect +from .base import PGExecutionContext +from .base import PGIdentifierPreparer +from .base import REGCLASS +from .base import REGCONFIG +from .types import BIT +from .types import BYTEA +from .types import CITEXT +from ... import exc +from ... import pool +from ...
import util +from ...engine import AdaptedConnection +from ...engine import processors +from ...sql import sqltypes +from ...util.concurrency import asyncio +from ...util.concurrency import await_fallback +from ...util.concurrency import await_only + + +class AsyncpgARRAY(PGARRAY): + render_bind_cast = True + + +class AsyncpgString(sqltypes.String): + render_bind_cast = True + + +class AsyncpgREGCONFIG(REGCONFIG): + render_bind_cast = True + + +class AsyncpgTime(sqltypes.Time): + render_bind_cast = True + + +class AsyncpgBit(BIT): + render_bind_cast = True + + +class AsyncpgByteA(BYTEA): + render_bind_cast = True + + +class AsyncpgDate(sqltypes.Date): + render_bind_cast = True + + +class AsyncpgDateTime(sqltypes.DateTime): + render_bind_cast = True + + +class AsyncpgBoolean(sqltypes.Boolean): + render_bind_cast = True + + +class AsyncPgInterval(INTERVAL): + render_bind_cast = True + + @classmethod + def adapt_emulated_to_native(cls, interval, **kw): + return AsyncPgInterval(precision=interval.second_precision) + + +class AsyncPgEnum(ENUM): + render_bind_cast = True + + +class AsyncpgInteger(sqltypes.Integer): + render_bind_cast = True + + +class AsyncpgBigInteger(sqltypes.BigInteger): + render_bind_cast = True + + +class AsyncpgJSON(json.JSON): + render_bind_cast = True + + def result_processor(self, dialect, coltype): + return None + + +class AsyncpgJSONB(json.JSONB): + render_bind_cast = True + + def result_processor(self, dialect, coltype): + return None + + +class AsyncpgJSONIndexType(sqltypes.JSON.JSONIndexType): + pass + + +class AsyncpgJSONIntIndexType(sqltypes.JSON.JSONIntIndexType): + __visit_name__ = "json_int_index" + + render_bind_cast = True + + +class AsyncpgJSONStrIndexType(sqltypes.JSON.JSONStrIndexType): + __visit_name__ = "json_str_index" + + render_bind_cast = True + + +class AsyncpgJSONPathType(json.JSONPathType): + def bind_processor(self, dialect): + def process(value): + if isinstance(value, str): + # If it's already a string assume that it's in json path + # format. 
This allows using cast with json path literals + return value + elif value: + tokens = [str(elem) for elem in value] + return tokens + else: + return [] + + return process + + +class AsyncpgNumeric(sqltypes.Numeric): + render_bind_cast = True + + def bind_processor(self, dialect): + return None + + def result_processor(self, dialect, coltype): + if self.asdecimal: + if coltype in _FLOAT_TYPES: + return processors.to_decimal_processor_factory( + decimal.Decimal, self._effective_decimal_return_scale + ) + elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES: + # asyncpg returns Decimal natively for 1700 + return None + else: + raise exc.InvalidRequestError( + "Unknown PG numeric type: %d" % coltype + ) + else: + if coltype in _FLOAT_TYPES: + # asyncpg returns float natively for 701 + return None + elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES: + return processors.to_float + else: + raise exc.InvalidRequestError( + "Unknown PG numeric type: %d" % coltype + ) + + +class AsyncpgFloat(AsyncpgNumeric, sqltypes.Float): + __visit_name__ = "float" + render_bind_cast = True + + +class AsyncpgREGCLASS(REGCLASS): + render_bind_cast = True + + +class AsyncpgOID(OID): + render_bind_cast = True + + +class AsyncpgCHAR(sqltypes.CHAR): + render_bind_cast = True + + +class _AsyncpgRange(ranges.AbstractSingleRangeImpl): + def bind_processor(self, dialect): + asyncpg_Range = dialect.dbapi.asyncpg.Range + + def to_range(value): + if isinstance(value, ranges.Range): + value = asyncpg_Range( + value.lower, + value.upper, + lower_inc=value.bounds[0] == "[", + upper_inc=value.bounds[1] == "]", + empty=value.empty, + ) + return value + + return to_range + + def result_processor(self, dialect, coltype): + def to_range(value): + if value is not None: + empty = value.isempty + value = ranges.Range( + value.lower, + value.upper, + bounds=f"{'[' if empty or value.lower_inc else '('}" # type: ignore # noqa: E501 + f"{']' if not empty and value.upper_inc else ')'}", + empty=empty, + ) + return value + + return to_range + + +class _AsyncpgMultiRange(ranges.AbstractMultiRangeImpl): + def bind_processor(self, dialect): + asyncpg_Range = dialect.dbapi.asyncpg.Range + + NoneType = type(None) + + def to_range(value): + if isinstance(value, (str, NoneType)): + return value + + def to_range(value): + if isinstance(value, ranges.Range): + value = asyncpg_Range( + value.lower, + value.upper, + lower_inc=value.bounds[0] == "[", + upper_inc=value.bounds[1] == "]", + empty=value.empty, + ) + return value + + return [to_range(element) for element in value] + + return to_range + + def result_processor(self, dialect, coltype): + def to_range_array(value): + def to_range(rvalue): + if rvalue is not None: + empty = rvalue.isempty + rvalue = ranges.Range( + rvalue.lower, + rvalue.upper, + bounds=f"{'[' if empty or rvalue.lower_inc else '('}" # type: ignore # noqa: E501 + f"{']' if not empty and rvalue.upper_inc else ')'}", + empty=empty, + ) + return rvalue + + if value is not None: + value = ranges.MultiRange(to_range(elem) for elem in value) + + return value + + return to_range_array + + +class PGExecutionContext_asyncpg(PGExecutionContext): + def handle_dbapi_exception(self, e): + if isinstance( + e, + ( + self.dialect.dbapi.InvalidCachedStatementError, + self.dialect.dbapi.InternalServerError, + ), + ): + self.dialect._invalidate_schema_cache() + + def pre_exec(self): + if self.isddl: + self.dialect._invalidate_schema_cache() + + self.cursor._invalidate_schema_cache_asof = ( + self.dialect._invalidate_schema_cache_asof + ) + +
if not self.compiled: + return + + def create_server_side_cursor(self): + return self._dbapi_connection.cursor(server_side=True) + + +class PGCompiler_asyncpg(PGCompiler): + pass + + +class PGIdentifierPreparer_asyncpg(PGIdentifierPreparer): + pass + + +class AsyncAdapt_asyncpg_cursor: + __slots__ = ( + "_adapt_connection", + "_connection", + "_rows", + "description", + "arraysize", + "rowcount", + "_cursor", + "_invalidate_schema_cache_asof", + ) + + server_side = False + + def __init__(self, adapt_connection): + self._adapt_connection = adapt_connection + self._connection = adapt_connection._connection + self._rows = deque() + self._cursor = None + self.description = None + self.arraysize = 1 + self.rowcount = -1 + self._invalidate_schema_cache_asof = 0 + + def close(self): + self._rows.clear() + + def _handle_exception(self, error): + self._adapt_connection._handle_exception(error) + + async def _prepare_and_execute(self, operation, parameters): + adapt_connection = self._adapt_connection + + async with adapt_connection._execute_mutex: + if not adapt_connection._started: + await adapt_connection._start_transaction() + + if parameters is None: + parameters = () + + try: + prepared_stmt, attributes = await adapt_connection._prepare( + operation, self._invalidate_schema_cache_asof + ) + + if attributes: + self.description = [ + ( + attr.name, + attr.type.oid, + None, + None, + None, + None, + None, + ) + for attr in attributes + ] + else: + self.description = None + + if self.server_side: + self._cursor = await prepared_stmt.cursor(*parameters) + self.rowcount = -1 + else: + self._rows = deque(await prepared_stmt.fetch(*parameters)) + status = prepared_stmt.get_statusmsg() + + reg = re.match( + r"(?:SELECT|UPDATE|DELETE|INSERT \d+) (\d+)", status + ) + if reg: + self.rowcount = int(reg.group(1)) + else: + self.rowcount = -1 + + except Exception as error: + self._handle_exception(error) + + async def _executemany(self, operation, seq_of_parameters): + adapt_connection = self._adapt_connection + + self.description = None + async with adapt_connection._execute_mutex: + await adapt_connection._check_type_cache_invalidation( + self._invalidate_schema_cache_asof + ) + + if not adapt_connection._started: + await adapt_connection._start_transaction() + + try: + return await self._connection.executemany( + operation, seq_of_parameters + ) + except Exception as error: + self._handle_exception(error) + + def execute(self, operation, parameters=None): + self._adapt_connection.await_( + self._prepare_and_execute(operation, parameters) + ) + + def executemany(self, operation, seq_of_parameters): + return self._adapt_connection.await_( + self._executemany(operation, seq_of_parameters) + ) + + def setinputsizes(self, *inputsizes): + raise NotImplementedError() + + def __iter__(self): + while self._rows: + yield self._rows.popleft() + + def fetchone(self): + if self._rows: + return self._rows.popleft() + else: + return None + + def fetchmany(self, size=None): + if size is None: + size = self.arraysize + + rr = self._rows + return [rr.popleft() for _ in range(min(size, len(rr)))] + + def fetchall(self): + retval = list(self._rows) + self._rows.clear() + return retval + + +class AsyncAdapt_asyncpg_ss_cursor(AsyncAdapt_asyncpg_cursor): + server_side = True + __slots__ = ("_rowbuffer",) + + def __init__(self, adapt_connection): + super().__init__(adapt_connection) + self._rowbuffer = deque() + + def close(self): + self._cursor = None + self._rowbuffer.clear() + + def _buffer_rows(self): + assert self._cursor 
is not None + new_rows = self._adapt_connection.await_(self._cursor.fetch(50)) + self._rowbuffer.extend(new_rows) + + def __aiter__(self): + return self + + async def __anext__(self): + while True: + while self._rowbuffer: + yield self._rowbuffer.popleft() + + self._buffer_rows() + if not self._rowbuffer: + break + + def fetchone(self): + if not self._rowbuffer: + self._buffer_rows() + if not self._rowbuffer: + return None + return self._rowbuffer.popleft() + + def fetchmany(self, size=None): + if size is None: + return self.fetchall() + + if not self._rowbuffer: + self._buffer_rows() + + assert self._cursor is not None + rb = self._rowbuffer + lb = len(rb) + if size > lb: + rb.extend( + self._adapt_connection.await_(self._cursor.fetch(size - lb)) + ) + + return [rb.popleft() for _ in range(min(size, len(rb)))] + + def fetchall(self): + ret = list(self._rowbuffer) + ret.extend(self._adapt_connection.await_(self._all())) + self._rowbuffer.clear() + return ret + + async def _all(self): + rows = [] + + # TODO: looks like we have to hand-roll some kind of batching here. + # hardcoding for the moment but this should be improved. + while True: + batch = await self._cursor.fetch(1000) + if batch: + rows.extend(batch) + continue + else: + break + return rows + + def executemany(self, operation, seq_of_parameters): + raise NotImplementedError( + "server side cursor doesn't support executemany yet" + ) + + +class AsyncAdapt_asyncpg_connection(AdaptedConnection): + __slots__ = ( + "dbapi", + "isolation_level", + "_isolation_setting", + "readonly", + "deferrable", + "_transaction", + "_started", + "_prepared_statement_cache", + "_prepared_statement_name_func", + "_invalidate_schema_cache_asof", + "_execute_mutex", + ) + + await_ = staticmethod(await_only) + + def __init__( + self, + dbapi, + connection, + prepared_statement_cache_size=100, + prepared_statement_name_func=None, + ): + self.dbapi = dbapi + self._connection = connection + self.isolation_level = self._isolation_setting = "read_committed" + self.readonly = False + self.deferrable = False + self._transaction = None + self._started = False + self._invalidate_schema_cache_asof = time.time() + self._execute_mutex = asyncio.Lock() + + if prepared_statement_cache_size: + self._prepared_statement_cache = util.LRUCache( + prepared_statement_cache_size + ) + else: + self._prepared_statement_cache = None + + if prepared_statement_name_func: + self._prepared_statement_name_func = prepared_statement_name_func + else: + self._prepared_statement_name_func = self._default_name_func + + async def _check_type_cache_invalidation(self, invalidate_timestamp): + if invalidate_timestamp > self._invalidate_schema_cache_asof: + await self._connection.reload_schema_state() + self._invalidate_schema_cache_asof = invalidate_timestamp + + async def _prepare(self, operation, invalidate_timestamp): + await self._check_type_cache_invalidation(invalidate_timestamp) + + cache = self._prepared_statement_cache + if cache is None: + prepared_stmt = await self._connection.prepare( + operation, name=self._prepared_statement_name_func() + ) + attributes = prepared_stmt.get_attributes() + return prepared_stmt, attributes + + # asyncpg uses a type cache for the "attributes" which seems to go + # stale independently of the PreparedStatement itself, so place that + # collection in the cache as well. 
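# An illustrative, self-contained model of the caching scheme used by
# _prepare() below (not part of the SQLAlchemy source): an LRU keyed on
# SQL text whose entries carry a creation timestamp that is compared
# against a global invalidation timestamp.
import time
from collections import OrderedDict

class TimestampedLRU:
    def __init__(self, capacity=100):
        self.capacity = capacity
        self._data = OrderedDict()  # key -> (value, cached_at)

    def get(self, key, invalidate_asof):
        if key in self._data:
            value, cached_at = self._data.pop(key)
            if cached_at > invalidate_asof:  # entry is still fresh
                self._data[key] = (value, cached_at)  # mark most recent
                return value
        return None  # missing, or stale relative to invalidate_asof

    def put(self, key, value):
        self._data[key] = (value, time.time())
        if len(self._data) > self.capacity:
            self._data.popitem(last=False)  # evict least recently used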
+ if operation in cache: + prepared_stmt, attributes, cached_timestamp = cache[operation] + + # prepared statements themselves also go stale for certain DDL + # changes such as size of a VARCHAR changing, so there is also + # a cross-connection invalidation timestamp + if cached_timestamp > invalidate_timestamp: + return prepared_stmt, attributes + + prepared_stmt = await self._connection.prepare( + operation, name=self._prepared_statement_name_func() + ) + attributes = prepared_stmt.get_attributes() + cache[operation] = (prepared_stmt, attributes, time.time()) + + return prepared_stmt, attributes + + def _handle_exception(self, error): + if self._connection.is_closed(): + self._transaction = None + self._started = False + + if not isinstance(error, AsyncAdapt_asyncpg_dbapi.Error): + exception_mapping = self.dbapi._asyncpg_error_translate + + for super_ in type(error).__mro__: + if super_ in exception_mapping: + translated_error = exception_mapping[super_]( + "%s: %s" % (type(error), error) + ) + translated_error.pgcode = translated_error.sqlstate = ( + getattr(error, "sqlstate", None) + ) + raise translated_error from error + else: + raise error + else: + raise error + + @property + def autocommit(self): + return self.isolation_level == "autocommit" + + @autocommit.setter + def autocommit(self, value): + if value: + self.isolation_level = "autocommit" + else: + self.isolation_level = self._isolation_setting + + def ping(self): + try: + _ = self.await_(self._async_ping()) + except Exception as error: + self._handle_exception(error) + + async def _async_ping(self): + if self._transaction is None and self.isolation_level != "autocommit": + # create a transaction explicitly to support pgbouncer + # transaction mode. See #10226 + tr = self._connection.transaction() + await tr.start() + try: + await self._connection.fetchrow(";") + finally: + await tr.rollback() + else: + await self._connection.fetchrow(";") + + def set_isolation_level(self, level): + if self._started: + self.rollback() + self.isolation_level = self._isolation_setting = level + + async def _start_transaction(self): + if self.isolation_level == "autocommit": + return + + try: + self._transaction = self._connection.transaction( + isolation=self.isolation_level, + readonly=self.readonly, + deferrable=self.deferrable, + ) + await self._transaction.start() + except Exception as error: + self._handle_exception(error) + else: + self._started = True + + def cursor(self, server_side=False): + if server_side: + return AsyncAdapt_asyncpg_ss_cursor(self) + else: + return AsyncAdapt_asyncpg_cursor(self) + + async def _rollback_and_discard(self): + try: + await self._transaction.rollback() + finally: + # if asyncpg .rollback() was actually called, then whether or + # not it raised or succeeded, the transaction is done, discard it + self._transaction = None + self._started = False + + async def _commit_and_discard(self): + try: + await self._transaction.commit() + finally: + # if asyncpg .commit() was actually called, then whether or + # not it raised or succeeded, the transaction is done, discard it + self._transaction = None + self._started = False + + def rollback(self): + if self._started: + try: + self.await_(self._rollback_and_discard()) + self._transaction = None + self._started = False + except Exception as error: + # don't dereference asyncpg transaction if we didn't + # actually try to call rollback() on it + self._handle_exception(error) + + def commit(self): + if self._started: + try: + self.await_(self._commit_and_discard()) +
self._transaction = None + self._started = False + except Exception as error: + # don't dereference asyncpg transaction if we didn't + # actually try to call commit() on it + self._handle_exception(error) + + def close(self): + self.rollback() + + self.await_(self._connection.close()) + + def terminate(self): + if util.concurrency.in_greenlet(): + # in a greenlet; this is the connection was invalidated + # case. + try: + # try to gracefully close; see #10717 + # timeout added in asyncpg 0.14.0 December 2017 + self.await_(self._connection.close(timeout=2)) + except ( + asyncio.TimeoutError, + asyncio.CancelledError, + OSError, + self.dbapi.asyncpg.PostgresError, + ): + # in the case where we are recycling an old connection + # that may have already been disconnected, close() will + # fail with the above timeout. in this case, terminate + # the connection without any further waiting. + # see issue #8419 + self._connection.terminate() + else: + # not in a greenlet; this is the gc cleanup case + self._connection.terminate() + self._started = False + + @staticmethod + def _default_name_func(): + return None + + +class AsyncAdaptFallback_asyncpg_connection(AsyncAdapt_asyncpg_connection): + __slots__ = () + + await_ = staticmethod(await_fallback) + + +class AsyncAdapt_asyncpg_dbapi: + def __init__(self, asyncpg): + self.asyncpg = asyncpg + self.paramstyle = "numeric_dollar" + + def connect(self, *arg, **kw): + async_fallback = kw.pop("async_fallback", False) + creator_fn = kw.pop("async_creator_fn", self.asyncpg.connect) + prepared_statement_cache_size = kw.pop( + "prepared_statement_cache_size", 100 + ) + prepared_statement_name_func = kw.pop( + "prepared_statement_name_func", None + ) + + if util.asbool(async_fallback): + return AsyncAdaptFallback_asyncpg_connection( + self, + await_fallback(creator_fn(*arg, **kw)), + prepared_statement_cache_size=prepared_statement_cache_size, + prepared_statement_name_func=prepared_statement_name_func, + ) + else: + return AsyncAdapt_asyncpg_connection( + self, + await_only(creator_fn(*arg, **kw)), + prepared_statement_cache_size=prepared_statement_cache_size, + prepared_statement_name_func=prepared_statement_name_func, + ) + + class Error(Exception): + pass + + class Warning(Exception): # noqa + pass + + class InterfaceError(Error): + pass + + class DatabaseError(Error): + pass + + class InternalError(DatabaseError): + pass + + class OperationalError(DatabaseError): + pass + + class ProgrammingError(DatabaseError): + pass + + class IntegrityError(DatabaseError): + pass + + class DataError(DatabaseError): + pass + + class NotSupportedError(DatabaseError): + pass + + class InternalServerError(InternalError): + pass + + class InvalidCachedStatementError(NotSupportedError): + def __init__(self, message): + super().__init__( + message + " (SQLAlchemy asyncpg dialect will now invalidate " + "all prepared caches in response to this exception)", + ) + + # pep-249 datatype placeholders. As of SQLAlchemy 2.0 these aren't + # used, however the test suite looks for these in a few cases. 
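# A generic, self-contained sketch of the __mro__-based translation
# performed by _handle_exception() above; the names below are
# illustrative, not part of SQLAlchemy or asyncpg.
class Error(Exception):
    pass

class OperationalError(Error):
    pass

_translate = {TimeoutError: OperationalError, Exception: Error}

def translate(error):
    # walk the driver exception's MRO so the most specific match wins
    for cls in type(error).__mro__:
        if cls in _translate:
            raise _translate[cls]("%s: %s" % (type(error), error)) from error
    raise error

# translate(TimeoutError("boom")) raises OperationalError, because
# TimeoutError appears in its own MRO before Exception does.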
+ STRING = util.symbol("STRING") + NUMBER = util.symbol("NUMBER") + DATETIME = util.symbol("DATETIME") + + @util.memoized_property + def _asyncpg_error_translate(self): + import asyncpg + + return { + asyncpg.exceptions.IntegrityConstraintViolationError: self.IntegrityError, # noqa: E501 + asyncpg.exceptions.PostgresError: self.Error, + asyncpg.exceptions.SyntaxOrAccessError: self.ProgrammingError, + asyncpg.exceptions.InterfaceError: self.InterfaceError, + asyncpg.exceptions.InvalidCachedStatementError: self.InvalidCachedStatementError, # noqa: E501 + asyncpg.exceptions.InternalServerError: self.InternalServerError, + } + + def Binary(self, value): + return value + + +class PGDialect_asyncpg(PGDialect): + driver = "asyncpg" + supports_statement_cache = True + + supports_server_side_cursors = True + + render_bind_cast = True + has_terminate = True + + default_paramstyle = "numeric_dollar" + supports_sane_multi_rowcount = False + execution_ctx_cls = PGExecutionContext_asyncpg + statement_compiler = PGCompiler_asyncpg + preparer = PGIdentifierPreparer_asyncpg + + colspecs = util.update_copy( + PGDialect.colspecs, + { + sqltypes.String: AsyncpgString, + sqltypes.ARRAY: AsyncpgARRAY, + BIT: AsyncpgBit, + CITEXT: CITEXT, + REGCONFIG: AsyncpgREGCONFIG, + sqltypes.Time: AsyncpgTime, + sqltypes.Date: AsyncpgDate, + sqltypes.DateTime: AsyncpgDateTime, + sqltypes.Interval: AsyncPgInterval, + INTERVAL: AsyncPgInterval, + sqltypes.Boolean: AsyncpgBoolean, + sqltypes.Integer: AsyncpgInteger, + sqltypes.BigInteger: AsyncpgBigInteger, + sqltypes.Numeric: AsyncpgNumeric, + sqltypes.Float: AsyncpgFloat, + sqltypes.JSON: AsyncpgJSON, + sqltypes.LargeBinary: AsyncpgByteA, + json.JSONB: AsyncpgJSONB, + sqltypes.JSON.JSONPathType: AsyncpgJSONPathType, + sqltypes.JSON.JSONIndexType: AsyncpgJSONIndexType, + sqltypes.JSON.JSONIntIndexType: AsyncpgJSONIntIndexType, + sqltypes.JSON.JSONStrIndexType: AsyncpgJSONStrIndexType, + sqltypes.Enum: AsyncPgEnum, + OID: AsyncpgOID, + REGCLASS: AsyncpgREGCLASS, + sqltypes.CHAR: AsyncpgCHAR, + ranges.AbstractSingleRange: _AsyncpgRange, + ranges.AbstractMultiRange: _AsyncpgMultiRange, + }, + ) + is_async = True + _invalidate_schema_cache_asof = 0 + + def _invalidate_schema_cache(self): + self._invalidate_schema_cache_asof = time.time() + + @util.memoized_property + def _dbapi_version(self): + if self.dbapi and hasattr(self.dbapi, "__version__"): + return tuple( + [ + int(x) + for x in re.findall( + r"(\d+)(?:[-\.]?|$)", self.dbapi.__version__ + ) + ] + ) + else: + return (99, 99, 99) + + @classmethod + def import_dbapi(cls): + return AsyncAdapt_asyncpg_dbapi(__import__("asyncpg")) + + @util.memoized_property + def _isolation_lookup(self): + return { + "AUTOCOMMIT": "autocommit", + "READ COMMITTED": "read_committed", + "REPEATABLE READ": "repeatable_read", + "SERIALIZABLE": "serializable", + } + + def get_isolation_level_values(self, dbapi_connection): + return list(self._isolation_lookup) + + def set_isolation_level(self, dbapi_connection, level): + dbapi_connection.set_isolation_level(self._isolation_lookup[level]) + + def set_readonly(self, connection, value): + connection.readonly = value + + def get_readonly(self, connection): + return connection.readonly + + def set_deferrable(self, connection, value): + connection.deferrable = value + + def get_deferrable(self, connection): + return connection.deferrable + + def do_terminate(self, dbapi_connection) -> None: + dbapi_connection.terminate() + + def create_connect_args(self, url): + opts = 
url.translate_connect_args(username="user") + multihosts, multiports = self._split_multihost_from_url(url) + + opts.update(url.query) + + if multihosts: + assert multiports + if len(multihosts) == 1: + opts["host"] = multihosts[0] + if multiports[0] is not None: + opts["port"] = multiports[0] + elif not all(multihosts): + raise exc.ArgumentError( + "All hosts are required to be present" + " for asyncpg multiple host URL" + ) + elif not all(multiports): + raise exc.ArgumentError( + "All ports are required to be present" + " for asyncpg multiple host URL" + ) + else: + opts["host"] = list(multihosts) + opts["port"] = list(multiports) + else: + util.coerce_kw_type(opts, "port", int) + util.coerce_kw_type(opts, "prepared_statement_cache_size", int) + return ([], opts) + + def do_ping(self, dbapi_connection): + dbapi_connection.ping() + return True + + @classmethod + def get_pool_class(cls, url): + async_fallback = url.query.get("async_fallback", False) + + if util.asbool(async_fallback): + return pool.FallbackAsyncAdaptedQueuePool + else: + return pool.AsyncAdaptedQueuePool + + def is_disconnect(self, e, connection, cursor): + if connection: + return connection._connection.is_closed() + else: + return isinstance( + e, self.dbapi.InterfaceError + ) and "connection is closed" in str(e) + + async def setup_asyncpg_json_codec(self, conn): + """set up JSON codec for asyncpg. + + This occurs for all new connections and + can be overridden by third party dialects. + + .. versionadded:: 1.4.27 + + """ + + asyncpg_connection = conn._connection + deserializer = self._json_deserializer or _py_json.loads + + def _json_decoder(bin_value): + return deserializer(bin_value.decode()) + + await asyncpg_connection.set_type_codec( + "json", + encoder=str.encode, + decoder=_json_decoder, + schema="pg_catalog", + format="binary", + ) + + async def setup_asyncpg_jsonb_codec(self, conn): + """set up JSONB codec for asyncpg. + + This occurs for all new connections and + can be overridden by third party dialects. + + .. versionadded:: 1.4.27 + + """ + + asyncpg_connection = conn._connection + deserializer = self._json_deserializer or _py_json.loads + + def _jsonb_encoder(str_value): + # \x01 is the prefix for jsonb used by PostgreSQL. + # asyncpg requires it when format='binary' + return b"\x01" + str_value.encode() + + deserializer = self._json_deserializer or _py_json.loads + + def _jsonb_decoder(bin_value): + # the byte is the \x01 prefix for jsonb used by PostgreSQL. + # asyncpg returns it when format='binary' + return deserializer(bin_value[1:].decode()) + + await asyncpg_connection.set_type_codec( + "jsonb", + encoder=_jsonb_encoder, + decoder=_jsonb_decoder, + schema="pg_catalog", + format="binary", + ) + + async def _disable_asyncpg_inet_codecs(self, conn): + asyncpg_connection = conn._connection + + await asyncpg_connection.set_type_codec( + "inet", + encoder=lambda s: s, + decoder=lambda s: s, + schema="pg_catalog", + format="text", + ) + + await asyncpg_connection.set_type_codec( + "cidr", + encoder=lambda s: s, + decoder=lambda s: s, + schema="pg_catalog", + format="text", + ) + + def on_connect(self): + """on_connect for asyncpg + + A major component of this for asyncpg is to set up type decoders at the + asyncpg level. + + See https://github.com/MagicStack/asyncpg/issues/623 for + notes on JSON/JSONB implementation. 
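The binary jsonb framing used by the codecs above is a single version byte followed by UTF-8 JSON text; a standalone sketch::

    import json

    def jsonb_encode(value) -> bytes:
        # PostgreSQL's binary jsonb format: b"\x01" + UTF-8 JSON text
        return b"\x01" + json.dumps(value).encode()

    def jsonb_decode(buf: bytes):
        assert buf[:1] == b"\x01", "unexpected jsonb version byte"
        return json.loads(buf[1:].decode())

    assert jsonb_decode(jsonb_encode({"a": [1, 2]})) == {"a": [1, 2]}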
+ + """ + + super_connect = super().on_connect() + + def connect(conn): + conn.await_(self.setup_asyncpg_json_codec(conn)) + conn.await_(self.setup_asyncpg_jsonb_codec(conn)) + + if self._native_inet_types is False: + conn.await_(self._disable_asyncpg_inet_codecs(conn)) + if super_connect is not None: + super_connect(conn) + + return connect + + def get_driver_connection(self, connection): + return connection._connection + + +dialect = PGDialect_asyncpg diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/base.py new file mode 100644 index 00000000..4ab3ca24 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/base.py @@ -0,0 +1,5007 @@ +# dialects/postgresql/base.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# <see AUTHORS file> +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +r""" +.. dialect:: postgresql + :name: PostgreSQL + :full_support: 12, 13, 14, 15 + :normal_support: 9.6+ + :best_effort: 9+ + +.. _postgresql_sequences: + +Sequences/SERIAL/IDENTITY +------------------------- + +PostgreSQL supports sequences, and SQLAlchemy uses these as the default means +of creating new primary key values for integer-based primary key columns. When +creating tables, SQLAlchemy will issue the ``SERIAL`` datatype for +integer-based primary key columns, which generates a sequence and server side +default corresponding to the column. + +To specify a specific named sequence to be used for primary key generation, +use the :func:`~sqlalchemy.schema.Sequence` construct:: + + Table( + "sometable", + metadata, + Column( + "id", Integer, Sequence("some_id_seq", start=1), primary_key=True + ) + ) + +When SQLAlchemy issues a single INSERT statement, to fulfill the contract of +having the "last insert identifier" available, a RETURNING clause is added to +the INSERT statement which specifies the primary key columns should be +returned after the statement completes. The RETURNING functionality only takes +place if PostgreSQL 8.2 or later is in use. As a fallback approach, the +sequence, whether specified explicitly or implicitly via ``SERIAL``, is +executed independently beforehand, the returned value to be used in the +subsequent insert. Note that when an +:func:`~sqlalchemy.sql.expression.insert()` construct is executed using +"executemany" semantics, the "last inserted identifier" functionality does not +apply; no RETURNING clause is emitted nor is the sequence pre-executed in this +case. + + +PostgreSQL 10 and above IDENTITY columns +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +PostgreSQL 10 and above have a new IDENTITY feature that supersedes the use +of SERIAL. The :class:`_schema.Identity` construct in a +:class:`_schema.Column` can be used to control its behavior:: + + from sqlalchemy import Table, Column, MetaData, Integer, String, Identity + + metadata = MetaData() + + data = Table( + "data", + metadata, + Column( + 'id', Integer, Identity(start=42, cycle=True), primary_key=True + ), + Column('data', String) + ) + +The CREATE TABLE for the above :class:`_schema.Table` object would be: + +.. sourcecode:: sql + + CREATE TABLE data ( + id INTEGER GENERATED BY DEFAULT AS IDENTITY (START WITH 42 CYCLE), + data VARCHAR, + PRIMARY KEY (id) + ) + +..
versionchanged:: 1.4 Added :class:`_schema.Identity` construct
+   in a :class:`_schema.Column` to specify the option of an autoincrementing
+   column.
+
+.. note::
+
+    Previous versions of SQLAlchemy did not have built-in support for rendering
+    of IDENTITY, and could use the following compilation hook to replace
+    occurrences of SERIAL with IDENTITY::
+
+        from sqlalchemy.schema import CreateColumn
+        from sqlalchemy.ext.compiler import compiles
+
+
+        @compiles(CreateColumn, 'postgresql')
+        def use_identity(element, compiler, **kw):
+            text = compiler.visit_create_column(element, **kw)
+            text = text.replace(
+                "SERIAL", "INT GENERATED BY DEFAULT AS IDENTITY"
+            )
+            return text
+
+    Using the above, a table such as::
+
+        t = Table(
+            't', m,
+            Column('id', Integer, primary_key=True),
+            Column('data', String)
+        )
+
+    will generate on the backing database as::
+
+        CREATE TABLE t (
+            id INT GENERATED BY DEFAULT AS IDENTITY,
+            data VARCHAR,
+            PRIMARY KEY (id)
+        )
+
+.. _postgresql_ss_cursors:
+
+Server Side Cursors
+-------------------
+
+Server-side cursor support is available for the psycopg2 and asyncpg
+dialects and may also be available in others.
+
+Server side cursors are enabled on a per-statement basis by using the
+:paramref:`.Connection.execution_options.stream_results` connection execution
+option::
+
+    with engine.connect() as conn:
+        result = conn.execution_options(stream_results=True).execute(text("select * from table"))
+
+Note that some kinds of SQL statements may not be supported with
+server side cursors; generally, only SQL statements that return rows should be
+used with this option.
+
+.. deprecated:: 1.4  The dialect-level server_side_cursors flag is deprecated
+   and will be removed in a future release.  Please use the
+   :paramref:`_engine.Connection.stream_results` execution option for
+   unbuffered cursor support.
+
+.. seealso::
+
+    :ref:`engine_stream_results`
+
+.. _postgresql_isolation_level:
+
+Transaction Isolation Level
+---------------------------
+
+Most SQLAlchemy dialects support setting of transaction isolation level
+using the :paramref:`_sa.create_engine.isolation_level` parameter
+at the :func:`_sa.create_engine` level, and at the :class:`_engine.Connection`
+level via the :paramref:`.Connection.execution_options.isolation_level`
+parameter.
+
+For PostgreSQL dialects, this feature works either by making use of the
+DBAPI-specific features, such as psycopg2's isolation level flags which will
+embed the isolation level setting inline with the ``"BEGIN"`` statement, or for
+DBAPIs with no direct support by emitting ``SET SESSION CHARACTERISTICS AS
+TRANSACTION ISOLATION LEVEL <level>`` ahead of the ``"BEGIN"`` statement
+emitted by the DBAPI.  For the special AUTOCOMMIT isolation level,
+DBAPI-specific techniques are used, typically an ``.autocommit``
+flag on the DBAPI connection object.
+
+To set isolation level using :func:`_sa.create_engine`::
+
+    engine = create_engine(
+        "postgresql+pg8000://scott:tiger@localhost/test",
+        isolation_level="REPEATABLE READ"
+    )
+
+To set using per-connection execution options::
+
+    with engine.connect() as conn:
+        conn = conn.execution_options(
+            isolation_level="REPEATABLE READ"
+        )
+        with conn.begin():
+            # ... work with transaction
+
+There are also more options for isolation level configurations, such as
+"sub-engine" objects linked to a main :class:`_engine.Engine` which each apply
+different isolation level settings.  See the discussion at
+:ref:`dbapi_autocommit` for background.
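+
+As a minimal sketch of that pattern (the connection URL here is illustrative),
+:meth:`_engine.Engine.execution_options` returns a copy of the parent
+:class:`_engine.Engine` that shares its connection pool while applying the
+given options to each connection it checks out::
+
+    from sqlalchemy import create_engine, text
+
+    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")
+
+    # a "sub-engine" sharing the same pool, with AUTOCOMMIT applied to
+    # each connection checked out from it
+    autocommit_engine = engine.execution_options(isolation_level="AUTOCOMMIT")
+
+    with autocommit_engine.connect() as conn:
+        # runs outside of any transaction block
+        conn.execute(text("VACUUM"))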
+
+Valid values for ``isolation_level`` on most PostgreSQL dialects include:
+
+* ``READ COMMITTED``
+* ``READ UNCOMMITTED``
+* ``REPEATABLE READ``
+* ``SERIALIZABLE``
+* ``AUTOCOMMIT``
+
+.. seealso::
+
+    :ref:`dbapi_autocommit`
+
+    :ref:`postgresql_readonly_deferrable`
+
+    :ref:`psycopg2_isolation_level`
+
+    :ref:`pg8000_isolation_level`
+
+.. _postgresql_readonly_deferrable:
+
+Setting READ ONLY / DEFERRABLE
+------------------------------
+
+Most PostgreSQL dialects support setting the "READ ONLY" and "DEFERRABLE"
+characteristics of the transaction, which is in addition to the isolation
+level setting.  These two attributes can be established either in conjunction
+with or independently of the isolation level by passing the
+``postgresql_readonly`` and ``postgresql_deferrable`` flags with
+:meth:`_engine.Connection.execution_options`.  The example below illustrates
+passing the ``"SERIALIZABLE"`` isolation level at the same time as setting
+"READ ONLY" and "DEFERRABLE"::
+
+    with engine.connect() as conn:
+        conn = conn.execution_options(
+            isolation_level="SERIALIZABLE",
+            postgresql_readonly=True,
+            postgresql_deferrable=True
+        )
+        with conn.begin():
+            # ... work with transaction
+
+Note that some DBAPIs such as asyncpg only support "readonly" with
+SERIALIZABLE isolation.
+
+.. versionadded:: 1.4 added support for the ``postgresql_readonly``
+   and ``postgresql_deferrable`` execution options.
+
+.. _postgresql_reset_on_return:
+
+Temporary Table / Resource Reset for Connection Pooling
+-------------------------------------------------------
+
+The :class:`.QueuePool` connection pool implementation used
+by the SQLAlchemy :class:`.Engine` object includes
+:ref:`reset on return <pool_reset_on_return>` behavior that will invoke
+the DBAPI ``.rollback()`` method when connections are returned to the pool.
+While this rollback will clear out the immediate state used by the previous
+transaction, it does not cover a wider range of session-level state, including
+temporary tables as well as other server state such as prepared statement
+handles and statement caches.  The PostgreSQL database includes a variety
+of commands which may be used to reset this state, including
+``DISCARD``, ``RESET``, ``DEALLOCATE``, and ``UNLISTEN``.
+
+To install one or more of these commands as the means of performing
+reset-on-return, the :meth:`.PoolEvents.reset` event hook may be used, as
+demonstrated in the example below.  The implementation
+will end transactions in progress as well as discard temporary tables
+using the ``CLOSE``, ``RESET`` and ``DISCARD`` commands; see the PostgreSQL
+documentation for background on what each of these statements does.
+
+The :paramref:`_sa.create_engine.pool_reset_on_return` parameter
+is set to ``None`` so that the custom scheme can replace the default behavior
+completely.
The custom hook implementation calls ``.rollback()`` in any case,
+as it's usually important that the DBAPI's own tracking of commit/rollback
+will remain consistent with the state of the transaction::
+
+
+    from sqlalchemy import create_engine
+    from sqlalchemy import event
+
+    postgresql_engine = create_engine(
+        "postgresql+psycopg2://scott:tiger@hostname/dbname",
+
+        # disable default reset-on-return scheme
+        pool_reset_on_return=None,
+    )
+
+
+    @event.listens_for(postgresql_engine, "reset")
+    def _reset_postgresql(dbapi_connection, connection_record, reset_state):
+        if not reset_state.terminate_only:
+            # pep-249 connections have no .execute() method; use a cursor
+            cursor = dbapi_connection.cursor()
+            cursor.execute("CLOSE ALL")
+            cursor.execute("RESET ALL")
+            cursor.execute("DISCARD TEMP")
+            cursor.close()
+
+        # so that the DBAPI itself knows that the connection has been
+        # reset
+        dbapi_connection.rollback()
+
+.. versionchanged:: 2.0.0b3  Added additional state arguments to
+   the :meth:`.PoolEvents.reset` event and additionally ensured the event
+   is invoked for all "reset" occurrences, so that it's appropriate
+   as a place for custom "reset" handlers.  Previous schemes which
+   use the :meth:`.PoolEvents.checkin` handler remain usable as well.
+
+.. seealso::
+
+    :ref:`pool_reset_on_return` - in the :ref:`pooling_toplevel` documentation
+
+.. _postgresql_alternate_search_path:
+
+Setting Alternate Search Paths on Connect
+------------------------------------------
+
+The PostgreSQL ``search_path`` variable refers to the list of schema names
+that will be implicitly referenced when a particular table or other
+object is referenced in a SQL statement.  As detailed in the next section
+:ref:`postgresql_schema_reflection`, SQLAlchemy is generally organized around
+the concept of keeping this variable at its default value of ``public``;
+however, in order to have it set to any arbitrary name or names when
+connections are used automatically, the "SET SESSION search_path" command
+may be invoked for all connections in a pool using the following event
+handler, as discussed at :ref:`schema_set_default_connections`::
+
+    from sqlalchemy import event
+    from sqlalchemy import create_engine
+
+    engine = create_engine("postgresql+psycopg2://scott:tiger@host/dbname")
+
+    @event.listens_for(engine, "connect", insert=True)
+    def set_search_path(dbapi_connection, connection_record):
+        existing_autocommit = dbapi_connection.autocommit
+        dbapi_connection.autocommit = True
+        cursor = dbapi_connection.cursor()
+        cursor.execute("SET SESSION search_path='%s'" % schema_name)
+        cursor.close()
+        dbapi_connection.autocommit = existing_autocommit
+
+The reason the recipe is complicated by use of the ``.autocommit`` DBAPI
+attribute is so that when the ``SET SESSION search_path`` directive is
+invoked, it is invoked outside of the scope of any transaction and therefore
+will not be reverted when the DBAPI connection has a rollback.
+
+.. seealso::
+
+    :ref:`schema_set_default_connections` - in the :ref:`metadata_toplevel` documentation
+
+
+.. _postgresql_schema_reflection:
+
+Remote-Schema Table Introspection and PostgreSQL search_path
+------------------------------------------------------------
+
+.. admonition:: Section Best Practices Summarized
+
+    Keep the ``search_path`` variable set to its default of ``public``, without
+    any other schema names.  Ensure the username used to connect **does not**
+    match remote schemas, or ensure the ``"$user"`` token is **removed** from
+    ``search_path``.  For other schema names, name these explicitly
+    within :class:`_schema.Table` definitions.
+    Alternatively, the
+    ``postgresql_ignore_search_path`` option will cause all reflected
+    :class:`_schema.Table` objects to have a :attr:`_schema.Table.schema`
+    attribute set up.
+
+The PostgreSQL dialect can reflect tables from any schema, as outlined in
+:ref:`metadata_reflection_schemas`.
+
+In all cases, the first thing SQLAlchemy does when reflecting tables is
+to **determine the default schema for the current database connection**.
+It does this using the PostgreSQL ``current_schema()``
+function, illustrated below using a PostgreSQL client session (i.e. using
+the ``psql`` tool)::
+
+    test=> select current_schema();
+     current_schema
+    ----------------
+     public
+    (1 row)
+
+Above we see that on a plain install of PostgreSQL, the default schema name
+is the name ``public``.
+
+However, if your database username **matches the name of a schema**, PostgreSQL's
+default is to then **use that name as the default schema**.  Below, we log in
+using the username ``scott``.  When we create a schema named ``scott``, **it
+implicitly changes the default schema**::
+
+    test=> select current_schema();
+     current_schema
+    ----------------
+     public
+    (1 row)
+
+    test=> create schema scott;
+    CREATE SCHEMA
+    test=> select current_schema();
+     current_schema
+    ----------------
+     scott
+    (1 row)
+
+The behavior of ``current_schema()`` is derived from the
+`PostgreSQL search path
+<https://www.postgresql.org/docs/current/ddl-schemas.html#DDL-SCHEMAS-PATH>`_
+variable ``search_path``, which in modern PostgreSQL versions defaults to this::
+
+    test=> show search_path;
+       search_path
+    -----------------
+     "$user", public
+    (1 row)
+
+Where above, the ``"$user"`` variable will inject the current username as the
+default schema, if one exists.  Otherwise, ``public`` is used.
+
+When a :class:`_schema.Table` object is reflected, if it is present in the
+schema indicated by the ``current_schema()`` function, **the schema name assigned
+to the ".schema" attribute of the Table is the Python "None" value**.  Otherwise, the
+".schema" attribute will be assigned the string name of that schema.
+
+With regard to tables which these :class:`_schema.Table`
+objects refer to via foreign key constraint, a decision must be made as to how
+the ``.schema`` is represented in those remote tables, in the case where that
+remote schema name is also a member of the current ``search_path``.
+
+By default, the PostgreSQL dialect mimics the behavior encouraged by
+PostgreSQL's own ``pg_get_constraintdef()`` builtin procedure.  This function
+returns a sample definition for a particular foreign key constraint,
+omitting the referenced schema name from that definition when the name is
+also in the PostgreSQL schema search path.
The interaction below +illustrates this behavior:: + + test=> CREATE TABLE test_schema.referred(id INTEGER PRIMARY KEY); + CREATE TABLE + test=> CREATE TABLE referring( + test(> id INTEGER PRIMARY KEY, + test(> referred_id INTEGER REFERENCES test_schema.referred(id)); + CREATE TABLE + test=> SET search_path TO public, test_schema; + test=> SELECT pg_catalog.pg_get_constraintdef(r.oid, true) FROM + test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n + test-> ON n.oid = c.relnamespace + test-> JOIN pg_catalog.pg_constraint r ON c.oid = r.conrelid + test-> WHERE c.relname='referring' AND r.contype = 'f' + test-> ; + pg_get_constraintdef + --------------------------------------------------- + FOREIGN KEY (referred_id) REFERENCES referred(id) + (1 row) + +Above, we created a table ``referred`` as a member of the remote schema +``test_schema``, however when we added ``test_schema`` to the +PG ``search_path`` and then asked ``pg_get_constraintdef()`` for the +``FOREIGN KEY`` syntax, ``test_schema`` was not included in the output of +the function. + +On the other hand, if we set the search path back to the typical default +of ``public``:: + + test=> SET search_path TO public; + SET + +The same query against ``pg_get_constraintdef()`` now returns the fully +schema-qualified name for us:: + + test=> SELECT pg_catalog.pg_get_constraintdef(r.oid, true) FROM + test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n + test-> ON n.oid = c.relnamespace + test-> JOIN pg_catalog.pg_constraint r ON c.oid = r.conrelid + test-> WHERE c.relname='referring' AND r.contype = 'f'; + pg_get_constraintdef + --------------------------------------------------------------- + FOREIGN KEY (referred_id) REFERENCES test_schema.referred(id) + (1 row) + +SQLAlchemy will by default use the return value of ``pg_get_constraintdef()`` +in order to determine the remote schema name. That is, if our ``search_path`` +were set to include ``test_schema``, and we invoked a table +reflection process as follows:: + + >>> from sqlalchemy import Table, MetaData, create_engine, text + >>> engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test") + >>> with engine.connect() as conn: + ... conn.execute(text("SET search_path TO test_schema, public")) + ... metadata_obj = MetaData() + ... referring = Table('referring', metadata_obj, + ... autoload_with=conn) + ... + + +The above process would deliver to the :attr:`_schema.MetaData.tables` +collection +``referred`` table named **without** the schema:: + + >>> metadata_obj.tables['referred'].schema is None + True + +To alter the behavior of reflection such that the referred schema is +maintained regardless of the ``search_path`` setting, use the +``postgresql_ignore_search_path`` option, which can be specified as a +dialect-specific argument to both :class:`_schema.Table` as well as +:meth:`_schema.MetaData.reflect`:: + + >>> with engine.connect() as conn: + ... conn.execute(text("SET search_path TO test_schema, public")) + ... metadata_obj = MetaData() + ... referring = Table('referring', metadata_obj, + ... autoload_with=conn, + ... postgresql_ignore_search_path=True) + ... + + +We will now have ``test_schema.referred`` stored as schema-qualified:: + + >>> metadata_obj.tables['test_schema.referred'].schema + 'test_schema' + +.. sidebar:: Best Practices for PostgreSQL Schema reflection + + The description of PostgreSQL schema reflection behavior is complex, and + is the product of many years of dealing with widely varied use cases and + user preferences. 
But in fact, there's no need to understand any of it if
+    you just stick to the simplest use pattern: leave the ``search_path`` set
+    to its default of ``public`` only, never refer to the name ``public`` as
+    an explicit schema name otherwise, and refer to all other schema names
+    explicitly when building up a :class:`_schema.Table` object.  The options
+    described here are only for those users who can't, or prefer not to, stay
+    within these guidelines.
+
+.. seealso::
+
+    :ref:`reflection_schema_qualified_interaction` - discussion of the issue
+    from a backend-agnostic perspective
+
+    `The Schema Search Path
+    <https://www.postgresql.org/docs/current/ddl-schemas.html#DDL-SCHEMAS-PATH>`_
+    - on the PostgreSQL website.
+
+INSERT/UPDATE...RETURNING
+-------------------------
+
+The dialect supports PG 8.2's ``INSERT..RETURNING``, ``UPDATE..RETURNING`` and
+``DELETE..RETURNING`` syntaxes.   ``INSERT..RETURNING`` is used by default
+for single-row INSERT statements in order to fetch newly generated
+primary key identifiers.   To specify an explicit ``RETURNING`` clause,
+use the :meth:`._UpdateBase.returning` method on a per-statement basis::
+
+    # INSERT..RETURNING
+    result = table.insert().returning(table.c.col1, table.c.col2).\
+        values(name='foo')
+    print(result.fetchall())
+
+    # UPDATE..RETURNING
+    result = table.update().returning(table.c.col1, table.c.col2).\
+        where(table.c.name=='foo').values(name='bar')
+    print(result.fetchall())
+
+    # DELETE..RETURNING
+    result = table.delete().returning(table.c.col1, table.c.col2).\
+        where(table.c.name=='foo')
+    print(result.fetchall())
+
+.. _postgresql_insert_on_conflict:
+
+INSERT...ON CONFLICT (Upsert)
+------------------------------
+
+Starting with version 9.5, PostgreSQL allows "upserts" (update or insert) of
+rows into a table via the ``ON CONFLICT`` clause of the ``INSERT`` statement.  A
+candidate row will only be inserted if that row does not violate any unique
+constraints.  In the case of a unique constraint violation, a secondary action
+can occur which can be either "DO UPDATE", indicating that the data in the
+target row should be updated, or "DO NOTHING", which indicates to silently skip
+this row.
+
+Conflicts are determined using existing unique constraints and indexes.  These
+constraints may be identified either using their name as stated in DDL,
+or they may be inferred by stating the columns and conditions that comprise
+the indexes.
+
+SQLAlchemy provides ``ON CONFLICT`` support via the PostgreSQL-specific
+:func:`_postgresql.insert()` function, which provides
+the generative methods :meth:`_postgresql.Insert.on_conflict_do_update`
+and :meth:`~.postgresql.Insert.on_conflict_do_nothing`:
+
+.. sourcecode:: pycon+sql
+
+    >>> from sqlalchemy.dialects.postgresql import insert
+    >>> insert_stmt = insert(my_table).values(
+    ...     id='some_existing_id',
+    ...     data='inserted value')
+    >>> do_nothing_stmt = insert_stmt.on_conflict_do_nothing(
+    ...     index_elements=['id']
+    ... )
+    >>> print(do_nothing_stmt)
+    {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s)
+    ON CONFLICT (id) DO NOTHING
+    {stop}
+
+    >>> do_update_stmt = insert_stmt.on_conflict_do_update(
+    ...     constraint='pk_my_table',
+    ...     set_=dict(data='updated value')
+    ... )
+    >>> print(do_update_stmt)
+    {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s)
+    ON CONFLICT ON CONSTRAINT pk_my_table DO UPDATE SET data = %(param_1)s
+
+.. seealso::
+
+    `INSERT .. ON CONFLICT
+    <https://www.postgresql.org/docs/current/sql-insert.html#SQL-ON-CONFLICT>`_
+    - in the PostgreSQL documentation.
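+
+As a self-contained round trip, a minimal sketch of executing such an upsert
+follows; the table definition, connection URL and values here are
+illustrative only::
+
+    from sqlalchemy import Column, MetaData, String, Table, create_engine
+    from sqlalchemy.dialects.postgresql import insert
+
+    metadata_obj = MetaData()
+    my_table = Table(
+        "my_table",
+        metadata_obj,
+        Column("id", String, primary_key=True),
+        Column("data", String),
+    )
+
+    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")
+    metadata_obj.create_all(engine)
+
+    stmt = insert(my_table).values(id="some_id", data="inserted value")
+
+    # on a primary key collision, update "data" with the value that the
+    # INSERT would have applied, via the "excluded" alias
+    stmt = stmt.on_conflict_do_update(
+        index_elements=[my_table.c.id],
+        set_=dict(data=stmt.excluded.data),
+    )
+
+    with engine.begin() as conn:
+        conn.execute(stmt)  # emits INSERT .. ON CONFLICT DO UPDATE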
+
+Specifying the Target
+^^^^^^^^^^^^^^^^^^^^^
+
+Both methods supply the "target" of the conflict using either a
+named constraint or column inference:
+
+* The :paramref:`_postgresql.Insert.on_conflict_do_update.index_elements` argument
+  specifies a sequence containing string column names, :class:`_schema.Column`
+  objects, and/or SQL expression elements, which would identify a unique
+  index:
+
+  .. sourcecode:: pycon+sql
+
+    >>> do_update_stmt = insert_stmt.on_conflict_do_update(
+    ...     index_elements=['id'],
+    ...     set_=dict(data='updated value')
+    ... )
+    >>> print(do_update_stmt)
+    {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s)
+    ON CONFLICT (id) DO UPDATE SET data = %(param_1)s
+    {stop}
+
+    >>> do_update_stmt = insert_stmt.on_conflict_do_update(
+    ...     index_elements=[my_table.c.id],
+    ...     set_=dict(data='updated value')
+    ... )
+    >>> print(do_update_stmt)
+    {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s)
+    ON CONFLICT (id) DO UPDATE SET data = %(param_1)s
+
+* When using :paramref:`_postgresql.Insert.on_conflict_do_update.index_elements` to
+  infer an index, a partial index can be inferred by also specifying the
+  :paramref:`_postgresql.Insert.on_conflict_do_update.index_where` parameter:
+
+  .. sourcecode:: pycon+sql
+
+    >>> stmt = insert(my_table).values(user_email='a@b.com', data='inserted data')
+    >>> stmt = stmt.on_conflict_do_update(
+    ...     index_elements=[my_table.c.user_email],
+    ...     index_where=my_table.c.user_email.like('%@gmail.com'),
+    ...     set_=dict(data=stmt.excluded.data)
+    ... )
+    >>> print(stmt)
+    {printsql}INSERT INTO my_table (data, user_email)
+    VALUES (%(data)s, %(user_email)s) ON CONFLICT (user_email)
+    WHERE user_email LIKE %(user_email_1)s DO UPDATE SET data = excluded.data
+
+* The :paramref:`_postgresql.Insert.on_conflict_do_update.constraint` argument is
+  used to specify an index directly rather than inferring it.  This can be
+  the name of a UNIQUE constraint, a PRIMARY KEY constraint, or an INDEX:
+
+  .. sourcecode:: pycon+sql
+
+    >>> do_update_stmt = insert_stmt.on_conflict_do_update(
+    ...     constraint='my_table_idx_1',
+    ...     set_=dict(data='updated value')
+    ... )
+    >>> print(do_update_stmt)
+    {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s)
+    ON CONFLICT ON CONSTRAINT my_table_idx_1 DO UPDATE SET data = %(param_1)s
+    {stop}
+
+    >>> do_update_stmt = insert_stmt.on_conflict_do_update(
+    ...     constraint='my_table_pk',
+    ...     set_=dict(data='updated value')
+    ... )
+    >>> print(do_update_stmt)
+    {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s)
+    ON CONFLICT ON CONSTRAINT my_table_pk DO UPDATE SET data = %(param_1)s
+    {stop}
+
+* The :paramref:`_postgresql.Insert.on_conflict_do_update.constraint` argument may
+  also refer to a SQLAlchemy construct representing a constraint,
+  e.g. :class:`.UniqueConstraint`, :class:`.PrimaryKeyConstraint`,
+  :class:`.Index`, or :class:`.ExcludeConstraint`.   In this use,
+  if the constraint has a name, it is used directly.  Otherwise, if the
+  constraint is unnamed, then inference will be used, where the expressions
+  and optional WHERE clause of the constraint will be spelled out in the
+  construct.  This use is especially convenient
+  to refer to the named or unnamed primary key of a :class:`_schema.Table`
+  using the
+  :attr:`_schema.Table.primary_key` attribute:
+
+  .. sourcecode:: pycon+sql
+
+    >>> do_update_stmt = insert_stmt.on_conflict_do_update(
+    ...     constraint=my_table.primary_key,
+    ...     set_=dict(data='updated value')
+    ... )
+    >>> print(do_update_stmt)
+    {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s)
+    ON CONFLICT (id) DO UPDATE SET data = %(param_1)s
+
+The SET Clause
+^^^^^^^^^^^^^^^
+
+``ON CONFLICT...DO UPDATE`` is used to perform an update of the already
+existing row, using any combination of new values as well as values
+from the proposed insertion.  These values are specified using the
+:paramref:`_postgresql.Insert.on_conflict_do_update.set_` parameter.  This
+parameter accepts a dictionary which consists of direct values
+for UPDATE:
+
+.. sourcecode:: pycon+sql
+
+    >>> stmt = insert(my_table).values(id='some_id', data='inserted value')
+    >>> do_update_stmt = stmt.on_conflict_do_update(
+    ...     index_elements=['id'],
+    ...     set_=dict(data='updated value')
+    ... )
+    >>> print(do_update_stmt)
+    {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s)
+    ON CONFLICT (id) DO UPDATE SET data = %(param_1)s
+
+.. warning::
+
+    The :meth:`_expression.Insert.on_conflict_do_update`
+    method does **not** take into
+    account Python-side default UPDATE values or generation functions, e.g.
+    those specified using :paramref:`_schema.Column.onupdate`.
+    These values will not be exercised for an ON CONFLICT style of UPDATE,
+    unless they are manually specified in the
+    :paramref:`_postgresql.Insert.on_conflict_do_update.set_` dictionary.
+
+Updating using the Excluded INSERT Values
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In order to refer to the proposed insertion row, the special alias
+:attr:`~.postgresql.Insert.excluded` is available as an attribute on
+the :class:`_postgresql.Insert` object; this object is a
+:class:`_expression.ColumnCollection` alias which contains all columns of the
+target table:
+
+.. sourcecode:: pycon+sql
+
+    >>> stmt = insert(my_table).values(
+    ...     id='some_id',
+    ...     data='inserted value',
+    ...     author='jlh'
+    ... )
+    >>> do_update_stmt = stmt.on_conflict_do_update(
+    ...     index_elements=['id'],
+    ...     set_=dict(data='updated value', author=stmt.excluded.author)
+    ... )
+    >>> print(do_update_stmt)
+    {printsql}INSERT INTO my_table (id, data, author)
+    VALUES (%(id)s, %(data)s, %(author)s)
+    ON CONFLICT (id) DO UPDATE SET data = %(param_1)s, author = excluded.author
+
+Additional WHERE Criteria
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The :meth:`_expression.Insert.on_conflict_do_update` method also accepts
+a WHERE clause using the :paramref:`_postgresql.Insert.on_conflict_do_update.where`
+parameter, which will limit those rows which receive an UPDATE:
+
+.. sourcecode:: pycon+sql
+
+    >>> stmt = insert(my_table).values(
+    ...     id='some_id',
+    ...     data='inserted value',
+    ...     author='jlh'
+    ... )
+    >>> on_update_stmt = stmt.on_conflict_do_update(
+    ...     index_elements=['id'],
+    ...     set_=dict(data='updated value', author=stmt.excluded.author),
+    ...     where=(my_table.c.status == 2)
+    ... )
+    >>> print(on_update_stmt)
+    {printsql}INSERT INTO my_table (id, data, author)
+    VALUES (%(id)s, %(data)s, %(author)s)
+    ON CONFLICT (id) DO UPDATE SET data = %(param_1)s, author = excluded.author
+    WHERE my_table.status = %(status_1)s
+
+Skipping Rows with DO NOTHING
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+``ON CONFLICT`` may be used to skip inserting a row entirely
+if any conflict with a unique or exclusion constraint occurs; below
+this is illustrated using the
+:meth:`~.postgresql.Insert.on_conflict_do_nothing` method:
+
+.. 
sourcecode:: pycon+sql + + >>> stmt = insert(my_table).values(id='some_id', data='inserted value') + >>> stmt = stmt.on_conflict_do_nothing(index_elements=['id']) + >>> print(stmt) + {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s) + ON CONFLICT (id) DO NOTHING + +If ``DO NOTHING`` is used without specifying any columns or constraint, +it has the effect of skipping the INSERT for any unique or exclusion +constraint violation which occurs: + +.. sourcecode:: pycon+sql + + >>> stmt = insert(my_table).values(id='some_id', data='inserted value') + >>> stmt = stmt.on_conflict_do_nothing() + >>> print(stmt) + {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s) + ON CONFLICT DO NOTHING + +.. _postgresql_match: + +Full Text Search +---------------- + +PostgreSQL's full text search system is available through the use of the +:data:`.func` namespace, combined with the use of custom operators +via the :meth:`.Operators.bool_op` method. For simple cases with some +degree of cross-backend compatibility, the :meth:`.Operators.match` operator +may also be used. + +.. _postgresql_simple_match: + +Simple plain text matching with ``match()`` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The :meth:`.Operators.match` operator provides for cross-compatible simple +text matching. For the PostgreSQL backend, it's hardcoded to generate +an expression using the ``@@`` operator in conjunction with the +``plainto_tsquery()`` PostgreSQL function. + +On the PostgreSQL dialect, an expression like the following:: + + select(sometable.c.text.match("search string")) + +would emit to the database:: + + SELECT text @@ plainto_tsquery('search string') FROM table + +Above, passing a plain string to :meth:`.Operators.match` will automatically +make use of ``plainto_tsquery()`` to specify the type of tsquery. This +establishes basic database cross-compatibility for :meth:`.Operators.match` +with other backends. + +.. versionchanged:: 2.0 The default tsquery generation function used by the + PostgreSQL dialect with :meth:`.Operators.match` is ``plainto_tsquery()``. + + To render exactly what was rendered in 1.4, use the following form:: + + from sqlalchemy import func + + select( + sometable.c.text.bool_op("@@")(func.to_tsquery("search string")) + ) + + Which would emit:: + + SELECT text @@ to_tsquery('search string') FROM table + +Using PostgreSQL full text functions and operators directly +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Text search operations beyond the simple use of :meth:`.Operators.match` +may make use of the :data:`.func` namespace to generate PostgreSQL full-text +functions, in combination with :meth:`.Operators.bool_op` to generate +any boolean operator. + +For example, the query:: + + select( + func.to_tsquery('cat').bool_op("@>")(func.to_tsquery('cat & rat')) + ) + +would generate: + +.. sourcecode:: sql + + SELECT to_tsquery('cat') @> to_tsquery('cat & rat') + + +The :class:`_postgresql.TSVECTOR` type can provide for explicit CAST:: + + from sqlalchemy.dialects.postgresql import TSVECTOR + from sqlalchemy import select, cast + select(cast("some text", TSVECTOR)) + +produces a statement equivalent to:: + + SELECT CAST('some text' AS TSVECTOR) AS anon_1 + +The ``func`` namespace is augmented by the PostgreSQL dialect to set up +correct argument and return types for most full text search functions. 
+These functions are used automatically by the :attr:`_sql.func` namespace
+assuming the ``sqlalchemy.dialects.postgresql`` package has been imported,
+or :func:`_sa.create_engine` has been invoked using a ``postgresql``
+dialect.  These functions are documented at:
+
+* :class:`_postgresql.to_tsvector`
+* :class:`_postgresql.to_tsquery`
+* :class:`_postgresql.plainto_tsquery`
+* :class:`_postgresql.phraseto_tsquery`
+* :class:`_postgresql.websearch_to_tsquery`
+* :class:`_postgresql.ts_headline`
+
+Specifying the "regconfig" with ``match()`` or custom operators
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+PostgreSQL's ``plainto_tsquery()`` function accepts an optional
+"regconfig" argument that is used to instruct PostgreSQL to use a
+particular pre-computed GIN or GiST index in order to perform the search.
+When using :meth:`.Operators.match`, this additional parameter may be
+specified using the ``postgresql_regconfig`` parameter, such as::
+
+    select(mytable.c.id).where(
+        mytable.c.title.match('somestring', postgresql_regconfig='english')
+    )
+
+Which would emit::
+
+    SELECT mytable.id FROM mytable
+    WHERE mytable.title @@ plainto_tsquery('english', 'somestring')
+
+When using other PostgreSQL search functions with :data:`.func`, the
+"regconfig" parameter may be passed directly as the initial argument::
+
+    select(mytable.c.id).where(
+        func.to_tsvector("english", mytable.c.title).bool_op("@@")(
+            func.to_tsquery("english", "somestring")
+        )
+    )
+
+produces a statement equivalent to::
+
+    SELECT mytable.id FROM mytable
+    WHERE to_tsvector('english', mytable.title) @@
+        to_tsquery('english', 'somestring')
+
+It is recommended that you use the ``EXPLAIN ANALYZE...`` tool from
+PostgreSQL to ensure that you are generating queries with SQLAlchemy that
+take full advantage of any indexes you may have created for full text search.
+
+.. seealso::
+
+    `Full Text Search <https://www.postgresql.org/docs/current/textsearch.html>`_ - in the PostgreSQL documentation
+
+
+FROM ONLY ...
+-------------
+
+The dialect supports PostgreSQL's ONLY keyword for targeting only a particular
+table in an inheritance hierarchy.  This can be used to produce the
+``SELECT ... FROM ONLY``, ``UPDATE ONLY ...``, and ``DELETE FROM ONLY ...``
+syntaxes.  It uses SQLAlchemy's hints mechanism::
+
+    # SELECT ... FROM ONLY ...
+    result = table.select().with_hint(table, 'ONLY', 'postgresql')
+    print(result.fetchall())
+
+    # UPDATE ONLY ...
+    table.update(values=dict(foo='bar')).with_hint('ONLY',
+                                                   dialect_name='postgresql')
+
+    # DELETE FROM ONLY ...
+    table.delete().with_hint('ONLY', dialect_name='postgresql')
+
+
+.. _postgresql_indexes:
+
+PostgreSQL-Specific Index Options
+---------------------------------
+
+Several extensions to the :class:`.Index` construct are available, specific
+to the PostgreSQL dialect.
+
+Covering Indexes
+^^^^^^^^^^^^^^^^
+
+The ``postgresql_include`` option renders INCLUDE(colname) for the given
+string names::
+
+    Index("my_index", table.c.x, postgresql_include=['y'])
+
+would render the index as ``CREATE INDEX my_index ON table (x) INCLUDE (y)``.
+
+Note that this feature requires PostgreSQL 11 or later.
+
+.. versionadded:: 1.4
+
+.. _postgresql_partial_indexes:
+
+Partial Indexes
+^^^^^^^^^^^^^^^
+
+Partial indexes add criterion to the index definition so that the index is
+applied to a subset of rows.  These can be specified on :class:`.Index`
+using the ``postgresql_where`` keyword argument::
+
+    Index('my_index', my_table.c.id, postgresql_where=my_table.c.value > 10)
+
+.. _postgresql_operator_classes:
+
+Operator Classes
+^^^^^^^^^^^^^^^^
+
+PostgreSQL allows the specification of an *operator class* for each column of
+an index (see
+https://www.postgresql.org/docs/current/interactive/indexes-opclass.html).
+The :class:`.Index` construct allows these to be specified via the
+``postgresql_ops`` keyword argument::
+
+    Index(
+        'my_index', my_table.c.id, my_table.c.data,
+        postgresql_ops={
+            'data': 'text_pattern_ops',
+            'id': 'int4_ops'
+        })
+
+Note that the keys in the ``postgresql_ops`` dictionaries are the
+"key" name of the :class:`_schema.Column`, i.e. the name used to access it from
+the ``.c`` collection of :class:`_schema.Table`, which can be configured to be
+different than the actual name of the column as expressed in the database.
+
+If ``postgresql_ops`` is to be used against a complex SQL expression such
+as a function call, then to apply to the column it must be given a label
+that is identified in the dictionary by name, e.g.::
+
+    Index(
+        'my_index', my_table.c.id,
+        func.lower(my_table.c.data).label('data_lower'),
+        postgresql_ops={
+            'data_lower': 'text_pattern_ops',
+            'id': 'int4_ops'
+        })
+
+Operator classes are also supported by the
+:class:`_postgresql.ExcludeConstraint` construct using the
+:paramref:`_postgresql.ExcludeConstraint.ops` parameter.  See that parameter for
+details.
+
+.. versionadded:: 1.3.21 added support for operator classes with
+   :class:`_postgresql.ExcludeConstraint`.
+
+
+Index Types
+^^^^^^^^^^^
+
+PostgreSQL provides several index types: B-Tree, Hash, GiST, and GIN, as well
+as the ability for users to create their own (see
+https://www.postgresql.org/docs/current/static/indexes-types.html).  These can be
+specified on :class:`.Index` using the ``postgresql_using`` keyword argument::
+
+    Index('my_index', my_table.c.data, postgresql_using='gin')
+
+The value passed to the keyword argument will be simply passed through to the
+underlying CREATE INDEX command, so it *must* be a valid index type for your
+version of PostgreSQL.
+
+.. _postgresql_index_storage:
+
+Index Storage Parameters
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+PostgreSQL allows storage parameters to be set on indexes.  The storage
+parameters available depend on the index method used by the index.  Storage
+parameters can be specified on :class:`.Index` using the ``postgresql_with``
+keyword argument::
+
+    Index('my_index', my_table.c.data, postgresql_with={"fillfactor": 50})
+
+PostgreSQL also allows defining the tablespace in which to create the index.
+The tablespace can be specified on :class:`.Index` using the
+``postgresql_tablespace`` keyword argument::
+
+    Index('my_index', my_table.c.data, postgresql_tablespace='my_tablespace')
+
+Note that the same option is available on :class:`_schema.Table` as well.
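+
+As a quick way to verify what these index options render, a DDL construct can
+be compiled against the PostgreSQL dialect without any database connection;
+the table and index names below are illustrative only::
+
+    from sqlalchemy import Column, Index, Integer, MetaData, Table
+    from sqlalchemy.dialects import postgresql
+    from sqlalchemy.schema import CreateIndex
+
+    m = MetaData()
+    t = Table("my_table", m, Column("data", postgresql.ARRAY(Integer)))
+    ix = Index("my_index", t.c.data, postgresql_using="gin")
+
+    # renders: CREATE INDEX my_index ON my_table USING gin (data)
+    print(CreateIndex(ix).compile(dialect=postgresql.dialect()))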
+
+.. _postgresql_index_concurrently:
+
+Indexes with CONCURRENTLY
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The PostgreSQL index option CONCURRENTLY is supported by passing the
+flag ``postgresql_concurrently`` to the :class:`.Index` construct::
+
+    tbl = Table('testtbl', m, Column('data', Integer))
+
+    idx1 = Index('test_idx1', tbl.c.data, postgresql_concurrently=True)
+
+The above index construct will render DDL for CREATE INDEX, assuming
+PostgreSQL 8.2 or higher is detected or for a connection-less dialect, as::
+
+    CREATE INDEX CONCURRENTLY test_idx1 ON testtbl (data)
+
+For DROP INDEX, assuming PostgreSQL 9.2 or higher is detected or for
+a connection-less dialect, it will emit::
+
+    DROP INDEX CONCURRENTLY test_idx1
+
+When using CONCURRENTLY, the PostgreSQL database requires that the statement
+be invoked outside of a transaction block.  The Python DBAPI enforces that
+even for a single statement, a transaction is present, so to use this
+construct, the DBAPI's "autocommit" mode must be used::
+
+    metadata = MetaData()
+    table = Table(
+        "foo", metadata,
+        Column("id", String))
+    index = Index(
+        "foo_idx", table.c.id, postgresql_concurrently=True)
+
+    with engine.connect() as conn:
+        with conn.execution_options(isolation_level='AUTOCOMMIT'):
+            table.create(conn)
+
+.. seealso::
+
+    :ref:`postgresql_isolation_level`
+
+.. _postgresql_index_reflection:
+
+PostgreSQL Index Reflection
+---------------------------
+
+The PostgreSQL database creates a UNIQUE INDEX implicitly whenever the
+UNIQUE CONSTRAINT construct is used.  When inspecting a table using
+:class:`_reflection.Inspector`, the :meth:`_reflection.Inspector.get_indexes`
+and the :meth:`_reflection.Inspector.get_unique_constraints`
+will report on these
+two constructs distinctly; in the case of the index, the key
+``duplicates_constraint`` will be present in the index entry if it is
+detected as mirroring a constraint.  When performing reflection using
+``Table(..., autoload_with=engine)``, the UNIQUE INDEX is **not** returned
+in :attr:`_schema.Table.indexes` when it is detected as mirroring a
+:class:`.UniqueConstraint` in the :attr:`_schema.Table.constraints` collection.
+
+Special Reflection Options
+--------------------------
+
+The :class:`_reflection.Inspector`
+used for the PostgreSQL backend is an instance
+of :class:`.PGInspector`, which offers additional methods::
+
+    from sqlalchemy import create_engine, inspect
+
+    engine = create_engine("postgresql+psycopg2://localhost/test")
+    insp = inspect(engine)  # will be a PGInspector
+
+    print(insp.get_enums())
+
+.. autoclass:: PGInspector
+    :members:
+
+.. _postgresql_table_options:
+
+PostgreSQL Table Options
+------------------------
+
+Several options for CREATE TABLE are supported directly by the PostgreSQL
+dialect in conjunction with the :class:`_schema.Table` construct:
+
+* ``INHERITS``::
+
+    Table("some_table", metadata, ..., postgresql_inherits="some_supertable")
+
+    Table("some_table", metadata, ..., postgresql_inherits=("t1", "t2", ...))
+
+* ``ON COMMIT``::
+
+    Table("some_table", metadata, ..., postgresql_on_commit='PRESERVE ROWS')
+
+* ``PARTITION BY``::
+
+    Table("some_table", metadata, ...,
+          postgresql_partition_by='LIST (part_column)')
+
+  .. versionadded:: 1.2.6
+
+* ``TABLESPACE``::
+
+    Table("some_table", metadata, ..., postgresql_tablespace='some_tablespace')
+
+  The above option is also available on the :class:`.Index` construct.
+
+* ``USING``::
+
+    Table("some_table", metadata, ..., postgresql_using='heap')
+
+  .. versionadded:: 2.0.26
+
+* ``WITH OIDS``::
+
+    Table("some_table", metadata, ..., postgresql_with_oids=True)
+
+* ``WITHOUT OIDS``::
+
+    Table("some_table", metadata, ..., postgresql_with_oids=False)
+
+.. seealso::
+
+    `PostgreSQL CREATE TABLE options
+    <https://www.postgresql.org/docs/current/sql-createtable.html>`_ -
+    in the PostgreSQL documentation.
+
+.. _postgresql_constraint_options:
+
+PostgreSQL Constraint Options
+-----------------------------
+
+The following option(s) are supported by the PostgreSQL dialect in conjunction
+with selected constraint constructs:
+
+* ``NOT VALID``:  This option applies towards CHECK and FOREIGN KEY constraints
+  when the constraint is being added to an existing table via ALTER TABLE,
+  and has the effect that existing rows are not scanned during the ALTER
+  operation against the constraint being added.
+
+  When using a SQL migration tool such as `Alembic <https://alembic.sqlalchemy.org>`_
+  that renders ALTER TABLE constructs, the ``postgresql_not_valid`` argument
+  may be specified as an additional keyword argument within the operation
+  that creates the constraint, as in the following Alembic example::
+
+      def upgrade():
+          op.create_foreign_key(
+              "fk_user_address",
+              "address",
+              "user",
+              ["user_id"],
+              ["id"],
+              postgresql_not_valid=True
+          )
+
+  The keyword is ultimately accepted directly by the
+  :class:`_schema.CheckConstraint`, :class:`_schema.ForeignKeyConstraint`
+  and :class:`_schema.ForeignKey` constructs; when using a tool like
+  Alembic, dialect-specific keyword arguments are passed through to
+  these constructs from the migration operation directives::
+
+      CheckConstraint("some_field IS NOT NULL", postgresql_not_valid=True)
+
+      ForeignKeyConstraint(["some_id"], ["some_table.some_id"], postgresql_not_valid=True)
+
+  .. versionadded:: 1.4.32
+
+  .. seealso::
+
+      `PostgreSQL ALTER TABLE options
+      <https://www.postgresql.org/docs/current/sql-altertable.html>`_ -
+      in the PostgreSQL documentation.
+
+.. _postgresql_table_valued_overview:
+
+Table values, Table and Column valued functions, Row and Tuple objects
+-----------------------------------------------------------------------
+
+PostgreSQL makes great use of modern SQL forms such as table-valued functions,
+tables and rows as values.  These constructs are commonly used as part
+of PostgreSQL's support for complex datatypes such as JSON, ARRAY, and other
+datatypes.  SQLAlchemy's SQL expression language has native support for
+most table-valued and row-valued forms.
+
+.. _postgresql_table_valued:
+
+Table-Valued Functions
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Many PostgreSQL built-in functions are intended to be used in the FROM clause
+of a SELECT statement, and are capable of returning table rows or sets of table
+rows.  A large portion of PostgreSQL's JSON functions, for example, such as
+``json_array_elements()``, ``json_object_keys()``, ``json_each_text()``,
+``json_each()``, ``json_to_record()``, ``json_populate_recordset()``, use such
+forms.  These classes of SQL function calling forms in SQLAlchemy are available
+using the :meth:`_functions.FunctionElement.table_valued` method in conjunction
+with :class:`_functions.Function` objects generated from the :data:`_sql.func`
+namespace.
+
+Examples from PostgreSQL's reference documentation follow below:
+
+* ``json_each()``:
+
+  .. sourcecode:: pycon+sql
+
+    >>> from sqlalchemy import select, func
+    >>> stmt = select(func.json_each('{"a":"foo", "b":"bar"}').table_valued("key", "value"))
+    >>> print(stmt)
+    {printsql}SELECT anon_1.key, anon_1.value
+    FROM json_each(:json_each_1) AS anon_1
+
+* ``json_populate_record()``:
+
+  .. 
sourcecode:: pycon+sql + + >>> from sqlalchemy import select, func, literal_column + >>> stmt = select( + ... func.json_populate_record( + ... literal_column("null::myrowtype"), + ... '{"a":1,"b":2}' + ... ).table_valued("a", "b", name="x") + ... ) + >>> print(stmt) + {printsql}SELECT x.a, x.b + FROM json_populate_record(null::myrowtype, :json_populate_record_1) AS x + +* ``json_to_record()`` - this form uses a PostgreSQL specific form of derived + columns in the alias, where we may make use of :func:`_sql.column` elements with + types to produce them. The :meth:`_functions.FunctionElement.table_valued` + method produces a :class:`_sql.TableValuedAlias` construct, and the method + :meth:`_sql.TableValuedAlias.render_derived` method sets up the derived + columns specification: + + .. sourcecode:: pycon+sql + + >>> from sqlalchemy import select, func, column, Integer, Text + >>> stmt = select( + ... func.json_to_record('{"a":1,"b":[1,2,3],"c":"bar"}').table_valued( + ... column("a", Integer), column("b", Text), column("d", Text), + ... ).render_derived(name="x", with_types=True) + ... ) + >>> print(stmt) + {printsql}SELECT x.a, x.b, x.d + FROM json_to_record(:json_to_record_1) AS x(a INTEGER, b TEXT, d TEXT) + +* ``WITH ORDINALITY`` - part of the SQL standard, ``WITH ORDINALITY`` adds an + ordinal counter to the output of a function and is accepted by a limited set + of PostgreSQL functions including ``unnest()`` and ``generate_series()``. The + :meth:`_functions.FunctionElement.table_valued` method accepts a keyword + parameter ``with_ordinality`` for this purpose, which accepts the string name + that will be applied to the "ordinality" column: + + .. sourcecode:: pycon+sql + + >>> from sqlalchemy import select, func + >>> stmt = select( + ... func.generate_series(4, 1, -1). + ... table_valued("value", with_ordinality="ordinality"). + ... render_derived() + ... ) + >>> print(stmt) + {printsql}SELECT anon_1.value, anon_1.ordinality + FROM generate_series(:generate_series_1, :generate_series_2, :generate_series_3) + WITH ORDINALITY AS anon_1(value, ordinality) + +.. versionadded:: 1.4.0b2 + +.. seealso:: + + :ref:`tutorial_functions_table_valued` - in the :ref:`unified_tutorial` + +.. _postgresql_column_valued: + +Column Valued Functions +^^^^^^^^^^^^^^^^^^^^^^^ + +Similar to the table valued function, a column valued function is present +in the FROM clause, but delivers itself to the columns clause as a single +scalar value. PostgreSQL functions such as ``json_array_elements()``, +``unnest()`` and ``generate_series()`` may use this form. Column valued functions are available using the +:meth:`_functions.FunctionElement.column_valued` method of :class:`_functions.FunctionElement`: + +* ``json_array_elements()``: + + .. sourcecode:: pycon+sql + + >>> from sqlalchemy import select, func + >>> stmt = select(func.json_array_elements('["one", "two"]').column_valued("x")) + >>> print(stmt) + {printsql}SELECT x + FROM json_array_elements(:json_array_elements_1) AS x + +* ``unnest()`` - in order to generate a PostgreSQL ARRAY literal, the + :func:`_postgresql.array` construct may be used: + + .. sourcecode:: pycon+sql + + >>> from sqlalchemy.dialects.postgresql import array + >>> from sqlalchemy import select, func + >>> stmt = select(func.unnest(array([1, 2])).column_valued()) + >>> print(stmt) + {printsql}SELECT anon_1 + FROM unnest(ARRAY[%(param_1)s, %(param_2)s]) AS anon_1 + + The function can of course be used against an existing table-bound column + that's of type :class:`_types.ARRAY`: + + .. 
sourcecode:: pycon+sql + + >>> from sqlalchemy import table, column, ARRAY, Integer + >>> from sqlalchemy import select, func + >>> t = table("t", column('value', ARRAY(Integer))) + >>> stmt = select(func.unnest(t.c.value).column_valued("unnested_value")) + >>> print(stmt) + {printsql}SELECT unnested_value + FROM unnest(t.value) AS unnested_value + +.. seealso:: + + :ref:`tutorial_functions_column_valued` - in the :ref:`unified_tutorial` + + +Row Types +^^^^^^^^^ + +Built-in support for rendering a ``ROW`` may be approximated using +``func.ROW`` with the :attr:`_sa.func` namespace, or by using the +:func:`_sql.tuple_` construct: + +.. sourcecode:: pycon+sql + + >>> from sqlalchemy import table, column, func, tuple_ + >>> t = table("t", column("id"), column("fk")) + >>> stmt = t.select().where( + ... tuple_(t.c.id, t.c.fk) > (1,2) + ... ).where( + ... func.ROW(t.c.id, t.c.fk) < func.ROW(3, 7) + ... ) + >>> print(stmt) + {printsql}SELECT t.id, t.fk + FROM t + WHERE (t.id, t.fk) > (:param_1, :param_2) AND ROW(t.id, t.fk) < ROW(:ROW_1, :ROW_2) + +.. seealso:: + + `PostgreSQL Row Constructors + `_ + + `PostgreSQL Row Constructor Comparison + `_ + +Table Types passed to Functions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +PostgreSQL supports passing a table as an argument to a function, which is +known as a "record" type. SQLAlchemy :class:`_sql.FromClause` objects +such as :class:`_schema.Table` support this special form using the +:meth:`_sql.FromClause.table_valued` method, which is comparable to the +:meth:`_functions.FunctionElement.table_valued` method except that the collection +of columns is already established by that of the :class:`_sql.FromClause` +itself: + +.. sourcecode:: pycon+sql + + >>> from sqlalchemy import table, column, func, select + >>> a = table( "a", column("id"), column("x"), column("y")) + >>> stmt = select(func.row_to_json(a.table_valued())) + >>> print(stmt) + {printsql}SELECT row_to_json(a) AS row_to_json_1 + FROM a + +.. versionadded:: 1.4.0b2 + + + +""" # noqa: E501 + +from __future__ import annotations + +from collections import defaultdict +from functools import lru_cache +import re +from typing import Any +from typing import cast +from typing import List +from typing import Optional +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from . import arraylib as _array +from . import json as _json +from . import pg_catalog +from . 
import ranges as _ranges +from .ext import _regconfig_fn +from .ext import aggregate_order_by +from .hstore import HSTORE +from .named_types import CreateDomainType as CreateDomainType # noqa: F401 +from .named_types import CreateEnumType as CreateEnumType # noqa: F401 +from .named_types import DOMAIN as DOMAIN # noqa: F401 +from .named_types import DropDomainType as DropDomainType # noqa: F401 +from .named_types import DropEnumType as DropEnumType # noqa: F401 +from .named_types import ENUM as ENUM # noqa: F401 +from .named_types import NamedType as NamedType # noqa: F401 +from .types import _DECIMAL_TYPES # noqa: F401 +from .types import _FLOAT_TYPES # noqa: F401 +from .types import _INT_TYPES # noqa: F401 +from .types import BIT as BIT +from .types import BYTEA as BYTEA +from .types import CIDR as CIDR +from .types import CITEXT as CITEXT +from .types import INET as INET +from .types import INTERVAL as INTERVAL +from .types import MACADDR as MACADDR +from .types import MACADDR8 as MACADDR8 +from .types import MONEY as MONEY +from .types import OID as OID +from .types import PGBit as PGBit # noqa: F401 +from .types import PGCidr as PGCidr # noqa: F401 +from .types import PGInet as PGInet # noqa: F401 +from .types import PGInterval as PGInterval # noqa: F401 +from .types import PGMacAddr as PGMacAddr # noqa: F401 +from .types import PGMacAddr8 as PGMacAddr8 # noqa: F401 +from .types import PGUuid as PGUuid +from .types import REGCLASS as REGCLASS +from .types import REGCONFIG as REGCONFIG # noqa: F401 +from .types import TIME as TIME +from .types import TIMESTAMP as TIMESTAMP +from .types import TSVECTOR as TSVECTOR +from ... import exc +from ... import schema +from ... import select +from ... import sql +from ... import util +from ...engine import characteristics +from ...engine import default +from ...engine import interfaces +from ...engine import ObjectKind +from ...engine import ObjectScope +from ...engine import reflection +from ...engine import URL +from ...engine.reflection import ReflectionDefaults +from ...sql import bindparam +from ...sql import coercions +from ...sql import compiler +from ...sql import elements +from ...sql import expression +from ...sql import roles +from ...sql import sqltypes +from ...sql import util as sql_util +from ...sql.compiler import InsertmanyvaluesSentinelOpts +from ...sql.visitors import InternalTraversal +from ...types import BIGINT +from ...types import BOOLEAN +from ...types import CHAR +from ...types import DATE +from ...types import DOUBLE_PRECISION +from ...types import FLOAT +from ...types import INTEGER +from ...types import NUMERIC +from ...types import REAL +from ...types import SMALLINT +from ...types import TEXT +from ...types import UUID as UUID +from ...types import VARCHAR +from ...util.typing import TypedDict + +IDX_USING = re.compile(r"^(?:btree|hash|gist|gin|[\w_]+)$", re.I) + +RESERVED_WORDS = { + "all", + "analyse", + "analyze", + "and", + "any", + "array", + "as", + "asc", + "asymmetric", + "both", + "case", + "cast", + "check", + "collate", + "column", + "constraint", + "create", + "current_catalog", + "current_date", + "current_role", + "current_time", + "current_timestamp", + "current_user", + "default", + "deferrable", + "desc", + "distinct", + "do", + "else", + "end", + "except", + "false", + "fetch", + "for", + "foreign", + "from", + "grant", + "group", + "having", + "in", + "initially", + "intersect", + "into", + "leading", + "limit", + "localtime", + "localtimestamp", + "new", + "not", + "null", + "of", + "off", + 
"offset", + "old", + "on", + "only", + "or", + "order", + "placing", + "primary", + "references", + "returning", + "select", + "session_user", + "some", + "symmetric", + "table", + "then", + "to", + "trailing", + "true", + "union", + "unique", + "user", + "using", + "variadic", + "when", + "where", + "window", + "with", + "authorization", + "between", + "binary", + "cross", + "current_schema", + "freeze", + "full", + "ilike", + "inner", + "is", + "isnull", + "join", + "left", + "like", + "natural", + "notnull", + "outer", + "over", + "overlaps", + "right", + "similar", + "verbose", +} + +colspecs = { + sqltypes.ARRAY: _array.ARRAY, + sqltypes.Interval: INTERVAL, + sqltypes.Enum: ENUM, + sqltypes.JSON.JSONPathType: _json.JSONPATH, + sqltypes.JSON: _json.JSON, + sqltypes.Uuid: PGUuid, +} + + +ischema_names = { + "_array": _array.ARRAY, + "hstore": HSTORE, + "json": _json.JSON, + "jsonb": _json.JSONB, + "int4range": _ranges.INT4RANGE, + "int8range": _ranges.INT8RANGE, + "numrange": _ranges.NUMRANGE, + "daterange": _ranges.DATERANGE, + "tsrange": _ranges.TSRANGE, + "tstzrange": _ranges.TSTZRANGE, + "int4multirange": _ranges.INT4MULTIRANGE, + "int8multirange": _ranges.INT8MULTIRANGE, + "nummultirange": _ranges.NUMMULTIRANGE, + "datemultirange": _ranges.DATEMULTIRANGE, + "tsmultirange": _ranges.TSMULTIRANGE, + "tstzmultirange": _ranges.TSTZMULTIRANGE, + "integer": INTEGER, + "bigint": BIGINT, + "smallint": SMALLINT, + "character varying": VARCHAR, + "character": CHAR, + '"char"': sqltypes.String, + "name": sqltypes.String, + "text": TEXT, + "numeric": NUMERIC, + "float": FLOAT, + "real": REAL, + "inet": INET, + "cidr": CIDR, + "citext": CITEXT, + "uuid": UUID, + "bit": BIT, + "bit varying": BIT, + "macaddr": MACADDR, + "macaddr8": MACADDR8, + "money": MONEY, + "oid": OID, + "regclass": REGCLASS, + "double precision": DOUBLE_PRECISION, + "timestamp": TIMESTAMP, + "timestamp with time zone": TIMESTAMP, + "timestamp without time zone": TIMESTAMP, + "time with time zone": TIME, + "time without time zone": TIME, + "date": DATE, + "time": TIME, + "bytea": BYTEA, + "boolean": BOOLEAN, + "interval": INTERVAL, + "tsvector": TSVECTOR, +} + + +class PGCompiler(compiler.SQLCompiler): + def visit_to_tsvector_func(self, element, **kw): + return self._assert_pg_ts_ext(element, **kw) + + def visit_to_tsquery_func(self, element, **kw): + return self._assert_pg_ts_ext(element, **kw) + + def visit_plainto_tsquery_func(self, element, **kw): + return self._assert_pg_ts_ext(element, **kw) + + def visit_phraseto_tsquery_func(self, element, **kw): + return self._assert_pg_ts_ext(element, **kw) + + def visit_websearch_to_tsquery_func(self, element, **kw): + return self._assert_pg_ts_ext(element, **kw) + + def visit_ts_headline_func(self, element, **kw): + return self._assert_pg_ts_ext(element, **kw) + + def _assert_pg_ts_ext(self, element, **kw): + if not isinstance(element, _regconfig_fn): + # other options here include trying to rewrite the function + # with the correct types. however, that means we have to + # "un-SQL-ize" the first argument, which can't work in a + # generalized way. Also, parent compiler class has already added + # the incorrect return type to the result map. So let's just + # make sure the function we want is used up front. + + raise exc.CompileError( + f'Can\'t compile "{element.name}()" full text search ' + f"function construct that does not originate from the " + f'"sqlalchemy.dialects.postgresql" package. 
' + f'Please ensure "import sqlalchemy.dialects.postgresql" is ' + f"called before constructing " + f'"sqlalchemy.func.{element.name}()" to ensure registration ' + f"of the correct argument and return types." + ) + + return f"{element.name}{self.function_argspec(element, **kw)}" + + def render_bind_cast(self, type_, dbapi_type, sqltext): + if dbapi_type._type_affinity is sqltypes.String and dbapi_type.length: + # use VARCHAR with no length for VARCHAR cast. + # see #9511 + dbapi_type = sqltypes.STRINGTYPE + return f"""{sqltext}::{ + self.dialect.type_compiler_instance.process( + dbapi_type, identifier_preparer=self.preparer + ) + }""" + + def visit_array(self, element, **kw): + return "ARRAY[%s]" % self.visit_clauselist(element, **kw) + + def visit_slice(self, element, **kw): + return "%s:%s" % ( + self.process(element.start, **kw), + self.process(element.stop, **kw), + ) + + def visit_bitwise_xor_op_binary(self, binary, operator, **kw): + return self._generate_generic_binary(binary, " # ", **kw) + + def visit_json_getitem_op_binary( + self, binary, operator, _cast_applied=False, **kw + ): + if ( + not _cast_applied + and binary.type._type_affinity is not sqltypes.JSON + ): + kw["_cast_applied"] = True + return self.process(sql.cast(binary, binary.type), **kw) + + kw["eager_grouping"] = True + + return self._generate_generic_binary( + binary, " -> " if not _cast_applied else " ->> ", **kw + ) + + def visit_json_path_getitem_op_binary( + self, binary, operator, _cast_applied=False, **kw + ): + if ( + not _cast_applied + and binary.type._type_affinity is not sqltypes.JSON + ): + kw["_cast_applied"] = True + return self.process(sql.cast(binary, binary.type), **kw) + + kw["eager_grouping"] = True + return self._generate_generic_binary( + binary, " #> " if not _cast_applied else " #>> ", **kw + ) + + def visit_getitem_binary(self, binary, operator, **kw): + return "%s[%s]" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + def visit_aggregate_order_by(self, element, **kw): + return "%s ORDER BY %s" % ( + self.process(element.target, **kw), + self.process(element.order_by, **kw), + ) + + def visit_match_op_binary(self, binary, operator, **kw): + if "postgresql_regconfig" in binary.modifiers: + regconfig = self.render_literal_value( + binary.modifiers["postgresql_regconfig"], sqltypes.STRINGTYPE + ) + if regconfig: + return "%s @@ plainto_tsquery(%s, %s)" % ( + self.process(binary.left, **kw), + regconfig, + self.process(binary.right, **kw), + ) + return "%s @@ plainto_tsquery(%s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + def visit_ilike_case_insensitive_operand(self, element, **kw): + return element.element._compiler_dispatch(self, **kw) + + def visit_ilike_op_binary(self, binary, operator, **kw): + escape = binary.modifiers.get("escape", None) + + return "%s ILIKE %s" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + ( + " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE) + if escape is not None + else "" + ) + + def visit_not_ilike_op_binary(self, binary, operator, **kw): + escape = binary.modifiers.get("escape", None) + return "%s NOT ILIKE %s" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + ( + " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE) + if escape is not None + else "" + ) + + def _regexp_match(self, base_op, binary, operator, kw): + flags = binary.modifiers["flags"] + if flags is None: + return 
self._generate_generic_binary( + binary, " %s " % base_op, **kw + ) + if flags == "i": + return self._generate_generic_binary( + binary, " %s* " % base_op, **kw + ) + return "%s %s CONCAT('(?', %s, ')', %s)" % ( + self.process(binary.left, **kw), + base_op, + self.render_literal_value(flags, sqltypes.STRINGTYPE), + self.process(binary.right, **kw), + ) + + def visit_regexp_match_op_binary(self, binary, operator, **kw): + return self._regexp_match("~", binary, operator, kw) + + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): + return self._regexp_match("!~", binary, operator, kw) + + def visit_regexp_replace_op_binary(self, binary, operator, **kw): + string = self.process(binary.left, **kw) + pattern_replace = self.process(binary.right, **kw) + flags = binary.modifiers["flags"] + if flags is None: + return "REGEXP_REPLACE(%s, %s)" % ( + string, + pattern_replace, + ) + else: + return "REGEXP_REPLACE(%s, %s, %s)" % ( + string, + pattern_replace, + self.render_literal_value(flags, sqltypes.STRINGTYPE), + ) + + def visit_empty_set_expr(self, element_types, **kw): + # cast the empty set to the type we are comparing against. if + # we are comparing against the null type, pick an arbitrary + # datatype for the empty set + return "SELECT %s WHERE 1!=1" % ( + ", ".join( + "CAST(NULL AS %s)" + % self.dialect.type_compiler_instance.process( + INTEGER() if type_._isnull else type_ + ) + for type_ in element_types or [INTEGER()] + ), + ) + + def render_literal_value(self, value, type_): + value = super().render_literal_value(value, type_) + + if self.dialect._backslash_escapes: + value = value.replace("\\", "\\\\") + return value + + def visit_aggregate_strings_func(self, fn, **kw): + return "string_agg%s" % self.function_argspec(fn) + + def visit_sequence(self, seq, **kw): + return "nextval('%s')" % self.preparer.format_sequence(seq) + + def limit_clause(self, select, **kw): + text = "" + if select._limit_clause is not None: + text += " \n LIMIT " + self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + if select._limit_clause is None: + text += "\n LIMIT ALL" + text += " OFFSET " + self.process(select._offset_clause, **kw) + return text + + def format_from_hint_text(self, sqltext, table, hint, iscrud): + if hint.upper() != "ONLY": + raise exc.CompileError("Unrecognized hint: %r" % hint) + return "ONLY " + sqltext + + def get_select_precolumns(self, select, **kw): + # Do not call super().get_select_precolumns because + # it will warn/raise when distinct on is present + if select._distinct or select._distinct_on: + if select._distinct_on: + return ( + "DISTINCT ON (" + + ", ".join( + [ + self.process(col, **kw) + for col in select._distinct_on + ] + ) + + ") " + ) + else: + return "DISTINCT " + else: + return "" + + def for_update_clause(self, select, **kw): + if select._for_update_arg.read: + if select._for_update_arg.key_share: + tmp = " FOR KEY SHARE" + else: + tmp = " FOR SHARE" + elif select._for_update_arg.key_share: + tmp = " FOR NO KEY UPDATE" + else: + tmp = " FOR UPDATE" + + if select._for_update_arg.of: + tables = util.OrderedSet() + for c in select._for_update_arg.of: + tables.update(sql_util.surface_selectables_only(c)) + + tmp += " OF " + ", ".join( + self.process(table, ashint=True, use_schema=False, **kw) + for table in tables + ) + + if select._for_update_arg.nowait: + tmp += " NOWAIT" + if select._for_update_arg.skip_locked: + tmp += " SKIP LOCKED" + + return tmp + + def visit_substring_func(self, func, **kw): + s = 
self.process(func.clauses.clauses[0], **kw) + start = self.process(func.clauses.clauses[1], **kw) + if len(func.clauses.clauses) > 2: + length = self.process(func.clauses.clauses[2], **kw) + return "SUBSTRING(%s FROM %s FOR %s)" % (s, start, length) + else: + return "SUBSTRING(%s FROM %s)" % (s, start) + + def _on_conflict_target(self, clause, **kw): + if clause.constraint_target is not None: + # target may be a name of an Index, UniqueConstraint or + # ExcludeConstraint. While there is a separate + # "max_identifier_length" for indexes, PostgreSQL uses the same + # length for all objects so we can use + # truncate_and_render_constraint_name + target_text = ( + "ON CONSTRAINT %s" + % self.preparer.truncate_and_render_constraint_name( + clause.constraint_target + ) + ) + elif clause.inferred_target_elements is not None: + target_text = "(%s)" % ", ".join( + ( + self.preparer.quote(c) + if isinstance(c, str) + else self.process(c, include_table=False, use_schema=False) + ) + for c in clause.inferred_target_elements + ) + if clause.inferred_target_whereclause is not None: + target_text += " WHERE %s" % self.process( + clause.inferred_target_whereclause, + include_table=False, + use_schema=False, + ) + else: + target_text = "" + + return target_text + + def visit_on_conflict_do_nothing(self, on_conflict, **kw): + target_text = self._on_conflict_target(on_conflict, **kw) + + if target_text: + return "ON CONFLICT %s DO NOTHING" % target_text + else: + return "ON CONFLICT DO NOTHING" + + def visit_on_conflict_do_update(self, on_conflict, **kw): + clause = on_conflict + + target_text = self._on_conflict_target(on_conflict, **kw) + + action_set_ops = [] + + set_parameters = dict(clause.update_values_to_set) + # create a list of column assignment clauses as tuples + + insert_statement = self.stack[-1]["selectable"] + cols = insert_statement.table.c + for c in cols: + col_key = c.key + + if col_key in set_parameters: + value = set_parameters.pop(col_key) + elif c in set_parameters: + value = set_parameters.pop(c) + else: + continue + + if coercions._is_literal(value): + value = elements.BindParameter(None, value, type_=c.type) + + else: + if ( + isinstance(value, elements.BindParameter) + and value.type._isnull + ): + value = value._clone() + value.type = c.type + value_text = self.process(value.self_group(), use_schema=False) + + key_text = self.preparer.quote(c.name) + action_set_ops.append("%s = %s" % (key_text, value_text)) + + # check for names that don't match columns + if set_parameters: + util.warn( + "Additional column names not matching " + "any column keys in table '%s': %s" + % ( + self.current_executable.table.name, + (", ".join("'%s'" % c for c in set_parameters)), + ) + ) + for k, v in set_parameters.items(): + key_text = ( + self.preparer.quote(k) + if isinstance(k, str) + else self.process(k, use_schema=False) + ) + value_text = self.process( + coercions.expect(roles.ExpressionElementRole, v), + use_schema=False, + ) + action_set_ops.append("%s = %s" % (key_text, value_text)) + + action_text = ", ".join(action_set_ops) + if clause.update_whereclause is not None: + action_text += " WHERE %s" % self.process( + clause.update_whereclause, include_table=True, use_schema=False + ) + + return "ON CONFLICT %s DO UPDATE SET %s" % (target_text, action_text) + + def update_from_clause( + self, update_stmt, from_table, extra_froms, from_hints, **kw + ): + kw["asfrom"] = True + return "FROM " + ", ".join( + t._compiler_dispatch(self, fromhints=from_hints, **kw) + for t in extra_froms + ) + + def 
delete_extra_from_clause( + self, delete_stmt, from_table, extra_froms, from_hints, **kw + ): + """Render the DELETE .. USING clause specific to PostgreSQL.""" + kw["asfrom"] = True + return "USING " + ", ".join( + t._compiler_dispatch(self, fromhints=from_hints, **kw) + for t in extra_froms + ) + + def fetch_clause(self, select, **kw): + # pg requires parens for non literal clauses. It's also required for + # bind parameters if a ::type casts is used by the driver (asyncpg), + # so it's easiest to just always add it + text = "" + if select._offset_clause is not None: + text += "\n OFFSET (%s) ROWS" % self.process( + select._offset_clause, **kw + ) + if select._fetch_clause is not None: + text += "\n FETCH FIRST (%s)%s ROWS %s" % ( + self.process(select._fetch_clause, **kw), + " PERCENT" if select._fetch_clause_options["percent"] else "", + ( + "WITH TIES" + if select._fetch_clause_options["with_ties"] + else "ONLY" + ), + ) + return text + + +class PGDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): + colspec = self.preparer.format_column(column) + impl_type = column.type.dialect_impl(self.dialect) + if isinstance(impl_type, sqltypes.TypeDecorator): + impl_type = impl_type.impl + + has_identity = ( + column.identity is not None + and self.dialect.supports_identity_columns + ) + + if ( + column.primary_key + and column is column.table._autoincrement_column + and ( + self.dialect.supports_smallserial + or not isinstance(impl_type, sqltypes.SmallInteger) + ) + and not has_identity + and ( + column.default is None + or ( + isinstance(column.default, schema.Sequence) + and column.default.optional + ) + ) + ): + if isinstance(impl_type, sqltypes.BigInteger): + colspec += " BIGSERIAL" + elif isinstance(impl_type, sqltypes.SmallInteger): + colspec += " SMALLSERIAL" + else: + colspec += " SERIAL" + else: + colspec += " " + self.dialect.type_compiler_instance.process( + column.type, + type_expression=column, + identifier_preparer=self.preparer, + ) + default = self.get_column_default_string(column) + if default is not None: + colspec += " DEFAULT " + default + + if column.computed is not None: + colspec += " " + self.process(column.computed) + if has_identity: + colspec += " " + self.process(column.identity) + + if not column.nullable and not has_identity: + colspec += " NOT NULL" + elif column.nullable and has_identity: + colspec += " NULL" + return colspec + + def _define_constraint_validity(self, constraint): + not_valid = constraint.dialect_options["postgresql"]["not_valid"] + return " NOT VALID" if not_valid else "" + + def visit_check_constraint(self, constraint, **kw): + if constraint._type_bound: + typ = list(constraint.columns)[0].type + if ( + isinstance(typ, sqltypes.ARRAY) + and isinstance(typ.item_type, sqltypes.Enum) + and not typ.item_type.native_enum + ): + raise exc.CompileError( + "PostgreSQL dialect cannot produce the CHECK constraint " + "for ARRAY of non-native ENUM; please specify " + "create_constraint=False on this Enum datatype." 
+ ) + + text = super().visit_check_constraint(constraint) + text += self._define_constraint_validity(constraint) + return text + + def visit_foreign_key_constraint(self, constraint, **kw): + text = super().visit_foreign_key_constraint(constraint) + text += self._define_constraint_validity(constraint) + return text + + def visit_create_enum_type(self, create, **kw): + type_ = create.element + + return "CREATE TYPE %s AS ENUM (%s)" % ( + self.preparer.format_type(type_), + ", ".join( + self.sql_compiler.process(sql.literal(e), literal_binds=True) + for e in type_.enums + ), + ) + + def visit_drop_enum_type(self, drop, **kw): + type_ = drop.element + + return "DROP TYPE %s" % (self.preparer.format_type(type_)) + + def visit_create_domain_type(self, create, **kw): + domain: DOMAIN = create.element + + options = [] + if domain.collation is not None: + options.append(f"COLLATE {self.preparer.quote(domain.collation)}") + if domain.default is not None: + default = self.render_default_string(domain.default) + options.append(f"DEFAULT {default}") + if domain.constraint_name is not None: + name = self.preparer.truncate_and_render_constraint_name( + domain.constraint_name + ) + options.append(f"CONSTRAINT {name}") + if domain.not_null: + options.append("NOT NULL") + if domain.check is not None: + check = self.sql_compiler.process( + domain.check, include_table=False, literal_binds=True + ) + options.append(f"CHECK ({check})") + + return ( + f"CREATE DOMAIN {self.preparer.format_type(domain)} AS " + f"{self.type_compiler.process(domain.data_type)} " + f"{' '.join(options)}" + ) + + def visit_drop_domain_type(self, drop, **kw): + domain = drop.element + return f"DROP DOMAIN {self.preparer.format_type(domain)}" + + def visit_create_index(self, create, **kw): + preparer = self.preparer + index = create.element + self._verify_index_table(index) + text = "CREATE " + if index.unique: + text += "UNIQUE " + + text += "INDEX " + + if self.dialect._supports_create_index_concurrently: + concurrently = index.dialect_options["postgresql"]["concurrently"] + if concurrently: + text += "CONCURRENTLY " + + if create.if_not_exists: + text += "IF NOT EXISTS " + + text += "%s ON %s " % ( + self._prepared_index_name(index, include_schema=False), + preparer.format_table(index.table), + ) + + using = index.dialect_options["postgresql"]["using"] + if using: + text += ( + "USING %s " + % self.preparer.validate_sql_phrase(using, IDX_USING).lower() + ) + + ops = index.dialect_options["postgresql"]["ops"] + text += "(%s)" % ( + ", ".join( + [ + self.sql_compiler.process( + ( + expr.self_group() + if not isinstance(expr, expression.ColumnClause) + else expr + ), + include_table=False, + literal_binds=True, + ) + + ( + (" " + ops[expr.key]) + if hasattr(expr, "key") and expr.key in ops + else "" + ) + for expr in index.expressions + ] + ) + ) + + includeclause = index.dialect_options["postgresql"]["include"] + if includeclause: + inclusions = [ + index.table.c[col] if isinstance(col, str) else col + for col in includeclause + ] + text += " INCLUDE (%s)" % ", ".join( + [preparer.quote(c.name) for c in inclusions] + ) + + nulls_not_distinct = index.dialect_options["postgresql"][ + "nulls_not_distinct" + ] + if nulls_not_distinct is True: + text += " NULLS NOT DISTINCT" + elif nulls_not_distinct is False: + text += " NULLS DISTINCT" + + withclause = index.dialect_options["postgresql"]["with"] + if withclause: + text += " WITH (%s)" % ( + ", ".join( + [ + "%s = %s" % storage_parameter + for storage_parameter in withclause.items() + ] + 
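+ # A usage sketch for the options consumed by visit_create_index
+ # (table/column names here are hypothetical): they arrive as the
+ # ``postgresql_*`` keyword arguments of ``Index``, e.g.
+ #     Index(
+ #         "ix_document_payload",
+ #         document.c.payload,
+ #         postgresql_using="gin",              # -> USING gin
+ #         postgresql_with={"fillfactor": 70},  # -> WITH (fillfactor = 70)
+ #         postgresql_where=document.c.active,  # -> WHERE active
+ #     )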
) + ) + + tablespace_name = index.dialect_options["postgresql"]["tablespace"] + if tablespace_name: + text += " TABLESPACE %s" % preparer.quote(tablespace_name) + + whereclause = index.dialect_options["postgresql"]["where"] + if whereclause is not None: + whereclause = coercions.expect( + roles.DDLExpressionRole, whereclause + ) + + where_compiled = self.sql_compiler.process( + whereclause, include_table=False, literal_binds=True + ) + text += " WHERE " + where_compiled + + return text
+ + def define_unique_constraint_distinct(self, constraint, **kw): + nulls_not_distinct = constraint.dialect_options["postgresql"][ + "nulls_not_distinct" + ] + if nulls_not_distinct is True: + nulls_not_distinct_param = "NULLS NOT DISTINCT " + elif nulls_not_distinct is False: + nulls_not_distinct_param = "NULLS DISTINCT " + else: + nulls_not_distinct_param = "" + return nulls_not_distinct_param
+ + def visit_drop_index(self, drop, **kw): + index = drop.element + + text = "\nDROP INDEX " + + if self.dialect._supports_drop_index_concurrently: + concurrently = index.dialect_options["postgresql"]["concurrently"] + if concurrently: + text += "CONCURRENTLY " + + if drop.if_exists: + text += "IF EXISTS " + + text += self._prepared_index_name(index, include_schema=True) + return text
+ + def visit_exclude_constraint(self, constraint, **kw): + text = "" + if constraint.name is not None: + text += "CONSTRAINT %s " % self.preparer.format_constraint( + constraint + ) + elements = [] + kw["include_table"] = False + kw["literal_binds"] = True + for expr, name, op in constraint._render_exprs: + exclude_element = self.sql_compiler.process(expr, **kw) + ( + (" " + constraint.ops[expr.key]) + if hasattr(expr, "key") and expr.key in constraint.ops + else "" + ) + + elements.append("%s WITH %s" % (exclude_element, op)) + text += "EXCLUDE USING %s (%s)" % ( + self.preparer.validate_sql_phrase( + constraint.using, IDX_USING + ).lower(), + ", ".join(elements), + ) + if constraint.where is not None: + text += " WHERE (%s)" % self.sql_compiler.process( + constraint.where, literal_binds=True + ) + text += self.define_constraint_deferrability(constraint) + return text
+ + def post_create_table(self, table): + table_opts = [] + pg_opts = table.dialect_options["postgresql"] + + inherits = pg_opts.get("inherits") + if inherits is not None: + if not isinstance(inherits, (list, tuple)): + inherits = (inherits,) + table_opts.append( + "\n INHERITS ( " + + ", ".join(self.preparer.quote(name) for name in inherits) + + " )" + ) + + if pg_opts["partition_by"]: + table_opts.append("\n PARTITION BY %s" % pg_opts["partition_by"]) + + if pg_opts["using"]: + table_opts.append("\n USING %s" % pg_opts["using"]) + + if pg_opts["with_oids"] is True: + table_opts.append("\n WITH OIDS") + elif pg_opts["with_oids"] is False: + table_opts.append("\n WITHOUT OIDS") + + if pg_opts["on_commit"]: + on_commit_options = pg_opts["on_commit"].replace("_", " ").upper() + table_opts.append("\n ON COMMIT %s" % on_commit_options) + + if pg_opts["tablespace"]: + tablespace_name = pg_opts["tablespace"] + table_opts.append( + "\n TABLESPACE %s" % self.preparer.quote(tablespace_name) + ) + + return "".join(table_opts)
+ + def visit_computed_column(self, generated, **kw): + if generated.persisted is False: + raise exc.CompileError( + "PostgreSQL computed columns do not support 'virtual' " + "persistence; set the 'persisted' flag to None or True for " + "PostgreSQL support."
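+ # (Reached for e.g. ``Column("area", Integer, Computed("w * h",
+ # persisted=False))``, names hypothetical; PostgreSQL implements only
+ # STORED generated columns, so ``persisted`` must be left as None or
+ # set to True, as the GENERATED ALWAYS AS ... STORED clause rendered
+ # below shows.)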
+ ) + + return "GENERATED ALWAYS AS (%s) STORED" % self.sql_compiler.process( + generated.sqltext, include_table=False, literal_binds=True + ) + + def visit_create_sequence(self, create, **kw): + prefix = None + if create.element.data_type is not None: + prefix = " AS %s" % self.type_compiler.process( + create.element.data_type + ) + + return super().visit_create_sequence(create, prefix=prefix, **kw) + + def _can_comment_on_constraint(self, ddl_instance): + constraint = ddl_instance.element + if constraint.name is None: + raise exc.CompileError( + f"Can't emit COMMENT ON for constraint {constraint!r}: " + "it has no name" + ) + if constraint.table is None: + raise exc.CompileError( + f"Can't emit COMMENT ON for constraint {constraint!r}: " + "it has no associated table" + ) + + def visit_set_constraint_comment(self, create, **kw): + self._can_comment_on_constraint(create) + return "COMMENT ON CONSTRAINT %s ON %s IS %s" % ( + self.preparer.format_constraint(create.element), + self.preparer.format_table(create.element.table), + self.sql_compiler.render_literal_value( + create.element.comment, sqltypes.String() + ), + ) + + def visit_drop_constraint_comment(self, drop, **kw): + self._can_comment_on_constraint(drop) + return "COMMENT ON CONSTRAINT %s ON %s IS NULL" % ( + self.preparer.format_constraint(drop.element), + self.preparer.format_table(drop.element.table), + ) + + +class PGTypeCompiler(compiler.GenericTypeCompiler): + def visit_TSVECTOR(self, type_, **kw): + return "TSVECTOR" + + def visit_TSQUERY(self, type_, **kw): + return "TSQUERY" + + def visit_INET(self, type_, **kw): + return "INET" + + def visit_CIDR(self, type_, **kw): + return "CIDR" + + def visit_CITEXT(self, type_, **kw): + return "CITEXT" + + def visit_MACADDR(self, type_, **kw): + return "MACADDR" + + def visit_MACADDR8(self, type_, **kw): + return "MACADDR8" + + def visit_MONEY(self, type_, **kw): + return "MONEY" + + def visit_OID(self, type_, **kw): + return "OID" + + def visit_REGCONFIG(self, type_, **kw): + return "REGCONFIG" + + def visit_REGCLASS(self, type_, **kw): + return "REGCLASS" + + def visit_FLOAT(self, type_, **kw): + if not type_.precision: + return "FLOAT" + else: + return "FLOAT(%(precision)s)" % {"precision": type_.precision} + + def visit_double(self, type_, **kw): + return self.visit_DOUBLE_PRECISION(type, **kw) + + def visit_BIGINT(self, type_, **kw): + return "BIGINT" + + def visit_HSTORE(self, type_, **kw): + return "HSTORE" + + def visit_JSON(self, type_, **kw): + return "JSON" + + def visit_JSONB(self, type_, **kw): + return "JSONB" + + def visit_INT4MULTIRANGE(self, type_, **kw): + return "INT4MULTIRANGE" + + def visit_INT8MULTIRANGE(self, type_, **kw): + return "INT8MULTIRANGE" + + def visit_NUMMULTIRANGE(self, type_, **kw): + return "NUMMULTIRANGE" + + def visit_DATEMULTIRANGE(self, type_, **kw): + return "DATEMULTIRANGE" + + def visit_TSMULTIRANGE(self, type_, **kw): + return "TSMULTIRANGE" + + def visit_TSTZMULTIRANGE(self, type_, **kw): + return "TSTZMULTIRANGE" + + def visit_INT4RANGE(self, type_, **kw): + return "INT4RANGE" + + def visit_INT8RANGE(self, type_, **kw): + return "INT8RANGE" + + def visit_NUMRANGE(self, type_, **kw): + return "NUMRANGE" + + def visit_DATERANGE(self, type_, **kw): + return "DATERANGE" + + def visit_TSRANGE(self, type_, **kw): + return "TSRANGE" + + def visit_TSTZRANGE(self, type_, **kw): + return "TSTZRANGE" + + def visit_json_int_index(self, type_, **kw): + return "INT" + + def visit_json_str_index(self, type_, **kw): + return "TEXT" + + def 
visit_datetime(self, type_, **kw): + return self.visit_TIMESTAMP(type_, **kw) + + def visit_enum(self, type_, **kw): + if not type_.native_enum or not self.dialect.supports_native_enum: + return super().visit_enum(type_, **kw) + else: + return self.visit_ENUM(type_, **kw) + + def visit_ENUM(self, type_, identifier_preparer=None, **kw): + if identifier_preparer is None: + identifier_preparer = self.dialect.identifier_preparer + return identifier_preparer.format_type(type_) + + def visit_DOMAIN(self, type_, identifier_preparer=None, **kw): + if identifier_preparer is None: + identifier_preparer = self.dialect.identifier_preparer + return identifier_preparer.format_type(type_) + + def visit_TIMESTAMP(self, type_, **kw): + return "TIMESTAMP%s %s" % ( + ( + "(%d)" % type_.precision + if getattr(type_, "precision", None) is not None + else "" + ), + (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE", + ) + + def visit_TIME(self, type_, **kw): + return "TIME%s %s" % ( + ( + "(%d)" % type_.precision + if getattr(type_, "precision", None) is not None + else "" + ), + (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE", + ) + + def visit_INTERVAL(self, type_, **kw): + text = "INTERVAL" + if type_.fields is not None: + text += " " + type_.fields + if type_.precision is not None: + text += " (%d)" % type_.precision + return text + + def visit_BIT(self, type_, **kw): + if type_.varying: + compiled = "BIT VARYING" + if type_.length is not None: + compiled += "(%d)" % type_.length + else: + compiled = "BIT(%d)" % type_.length + return compiled + + def visit_uuid(self, type_, **kw): + if type_.native_uuid: + return self.visit_UUID(type_, **kw) + else: + return super().visit_uuid(type_, **kw) + + def visit_UUID(self, type_, **kw): + return "UUID" + + def visit_large_binary(self, type_, **kw): + return self.visit_BYTEA(type_, **kw) + + def visit_BYTEA(self, type_, **kw): + return "BYTEA" + + def visit_ARRAY(self, type_, **kw): + inner = self.process(type_.item_type, **kw) + return re.sub( + r"((?: COLLATE.*)?)$", + ( + r"%s\1" + % ( + "[]" + * (type_.dimensions if type_.dimensions is not None else 1) + ) + ), + inner, + count=1, + ) + + def visit_json_path(self, type_, **kw): + return self.visit_JSONPATH(type_, **kw) + + def visit_JSONPATH(self, type_, **kw): + return "JSONPATH" + + +class PGIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words = RESERVED_WORDS + + def _unquote_identifier(self, value): + if value[0] == self.initial_quote: + value = value[1:-1].replace( + self.escape_to_quote, self.escape_quote + ) + return value + + def format_type(self, type_, use_schema=True): + if not type_.name: + raise exc.CompileError( + f"PostgreSQL {type_.__class__.__name__} type requires a name." 
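+ # (Reached for e.g. an anonymous ``ENUM("sad", "ok", "happy")``; a
+ # named type such as ``ENUM("sad", "ok", "happy", name="mood",
+ # schema="app")`` is rendered below as ``app.mood``, each part quoted
+ # only when its name requires it.)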
+ ) + + name = self.quote(type_.name) + effective_schema = self.schema_for_object(type_) + + if ( + not self.omit_schema + and use_schema + and effective_schema is not None + ): + name = f"{self.quote_schema(effective_schema)}.{name}" + return name
+ + +class ReflectedNamedType(TypedDict): + """Represents a reflected named type.""" + + name: str + """Name of the type.""" + schema: str + """The schema of the type.""" + visible: bool + """Indicates if this type is in the current search path."""
+ + +class ReflectedDomainConstraint(TypedDict): + """Represents a reflected check constraint of a domain.""" + + name: str + """Name of the constraint.""" + check: str + """The check constraint text."""
+ + +class ReflectedDomain(ReflectedNamedType): + """Represents a reflected domain.""" + + type: str + """The string name of the underlying data type of the domain.""" + nullable: bool + """Indicates if the domain allows null or not.""" + default: Optional[str] + """The string representation of the default value of this domain + or ``None`` if none present. + """ + constraints: List[ReflectedDomainConstraint] + """The constraints defined in the domain, if any. + The constraints are listed in order of evaluation by PostgreSQL. + """ + collation: Optional[str] + """The collation for the domain."""
+ + +class ReflectedEnum(ReflectedNamedType): + """Represents a reflected enum.""" + + labels: List[str] + """The labels that compose the enum."""
+ + +class PGInspector(reflection.Inspector): + dialect: PGDialect + + def get_table_oid( + self, table_name: str, schema: Optional[str] = None + ) -> int: + """Return the OID for the given table name. + + :param table_name: string name of the table. For special quoting, + use :class:`.quoted_name`. + + :param schema: string schema name; if omitted, uses the default schema + of the database connection. For special quoting, + use :class:`.quoted_name`. + + """ + + with self._operation_context() as conn: + return self.dialect.get_table_oid( + conn, table_name, schema, info_cache=self.info_cache + )
+ + def get_domains( + self, schema: Optional[str] = None + ) -> List[ReflectedDomain]: + """Return a list of DOMAIN objects. + + Each member is a dictionary containing these fields: + + * name - name of the domain + * schema - the schema name for the domain. + * visible - boolean, whether or not this domain is visible + in the default search path. + * type - the type defined by this domain. + * nullable - Indicates if this domain can be ``NULL``. + * default - The default value of the domain or ``None`` if the + domain has no default. + * constraints - A list of dicts with the constraints defined by this + domain. Each element contains two keys: ``name`` of the + constraint and ``check`` with the constraint text. + + :param schema: schema name. If None, the default schema + (typically 'public') is used. May also be set to ``'*'`` to + indicate load domains for all schemas. + + .. versionadded:: 2.0 + + """ + with self._operation_context() as conn: + return self.dialect._load_domains( + conn, schema, info_cache=self.info_cache + )
+ + def get_enums(self, schema: Optional[str] = None) -> List[ReflectedEnum]: + """Return a list of ENUM objects. + + Each member is a dictionary containing these fields: + + * name - name of the enum + * schema - the schema name for the enum. + * visible - boolean, whether or not this enum is visible + in the default search path. + * labels - a list of string labels that apply to the enum. + + :param schema: schema name.
If None, the default schema + (typically 'public') is used. May also be set to ``'*'`` to + indicate load enums for all schemas. + + """ + with self._operation_context() as conn: + return self.dialect._load_enums( + conn, schema, info_cache=self.info_cache + ) + + def get_foreign_table_names( + self, schema: Optional[str] = None + ) -> List[str]: + """Return a list of FOREIGN TABLE names. + + Behavior is similar to that of + :meth:`_reflection.Inspector.get_table_names`, + except that the list is limited to those tables that report a + ``relkind`` value of ``f``. + + """ + with self._operation_context() as conn: + return self.dialect._get_foreign_table_names( + conn, schema, info_cache=self.info_cache + ) + + def has_type( + self, type_name: str, schema: Optional[str] = None, **kw: Any + ) -> bool: + """Return if the database has the specified type in the provided + schema. + + :param type_name: the type to check. + :param schema: schema name. If None, the default schema + (typically 'public') is used. May also be set to ``'*'`` to + check in all schemas. + + .. versionadded:: 2.0 + + """ + with self._operation_context() as conn: + return self.dialect.has_type( + conn, type_name, schema, info_cache=self.info_cache + ) + + +class PGExecutionContext(default.DefaultExecutionContext): + def fire_sequence(self, seq, type_): + return self._execute_scalar( + ( + "select nextval('%s')" + % self.identifier_preparer.format_sequence(seq) + ), + type_, + ) + + def get_insert_default(self, column): + if column.primary_key and column is column.table._autoincrement_column: + if column.server_default and column.server_default.has_argument: + # pre-execute passive defaults on primary key columns + return self._execute_scalar( + "select %s" % column.server_default.arg, column.type + ) + + elif column.default is None or ( + column.default.is_sequence and column.default.optional + ): + # execute the sequence associated with a SERIAL primary + # key column. for non-primary-key SERIAL, the ID just + # generates server side. 
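+ # A worked example of the naming fallback below, using hypothetical
+ # names: PostgreSQL names the implicit SERIAL sequence
+ # ``<table>_<column>_seq``, truncated to fit the 63-character
+ # identifier limit; for table "measurement", column "id", the
+ # statement executed is  select nextval('"measurement_id_seq"')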
+ + try: + seq_name = column._postgresql_seq_name + except AttributeError: + tab = column.table.name + col = column.name + tab = tab[0 : 29 + max(0, (29 - len(col)))] + col = col[0 : 29 + max(0, (29 - len(tab)))] + name = "%s_%s_seq" % (tab, col) + column._postgresql_seq_name = seq_name = name + + if column.table is not None: + effective_schema = self.connection.schema_for_object( + column.table + ) + else: + effective_schema = None + + if effective_schema is not None: + exc = 'select nextval(\'"%s"."%s"\')' % ( + effective_schema, + seq_name, + ) + else: + exc = "select nextval('\"%s\"')" % (seq_name,) + + return self._execute_scalar(exc, column.type) + + return super().get_insert_default(column) + + +class PGReadOnlyConnectionCharacteristic( + characteristics.ConnectionCharacteristic +): + transactional = True + + def reset_characteristic(self, dialect, dbapi_conn): + dialect.set_readonly(dbapi_conn, False) + + def set_characteristic(self, dialect, dbapi_conn, value): + dialect.set_readonly(dbapi_conn, value) + + def get_characteristic(self, dialect, dbapi_conn): + return dialect.get_readonly(dbapi_conn) + + +class PGDeferrableConnectionCharacteristic( + characteristics.ConnectionCharacteristic +): + transactional = True + + def reset_characteristic(self, dialect, dbapi_conn): + dialect.set_deferrable(dbapi_conn, False) + + def set_characteristic(self, dialect, dbapi_conn, value): + dialect.set_deferrable(dbapi_conn, value) + + def get_characteristic(self, dialect, dbapi_conn): + return dialect.get_deferrable(dbapi_conn) + + +class PGDialect(default.DefaultDialect): + name = "postgresql" + supports_statement_cache = True + supports_alter = True + max_identifier_length = 63 + supports_sane_rowcount = True + + bind_typing = interfaces.BindTyping.RENDER_CASTS + + supports_native_enum = True + supports_native_boolean = True + supports_native_uuid = True + supports_smallserial = True + + supports_sequences = True + sequences_optional = True + preexecute_autoincrement_sequences = True + postfetch_lastrowid = False + use_insertmanyvalues = True + + returns_native_bytes = True + + insertmanyvalues_implicit_sentinel = ( + InsertmanyvaluesSentinelOpts.ANY_AUTOINCREMENT + | InsertmanyvaluesSentinelOpts.USE_INSERT_FROM_SELECT + | InsertmanyvaluesSentinelOpts.RENDER_SELECT_COL_CASTS + ) + + supports_comments = True + supports_constraint_comments = True + supports_default_values = True + + supports_default_metavalue = True + + supports_empty_insert = False + supports_multivalues_insert = True + + supports_identity_columns = True + + default_paramstyle = "pyformat" + ischema_names = ischema_names + colspecs = colspecs + + statement_compiler = PGCompiler + ddl_compiler = PGDDLCompiler + type_compiler_cls = PGTypeCompiler + preparer = PGIdentifierPreparer + execution_ctx_cls = PGExecutionContext + inspector = PGInspector + + update_returning = True + delete_returning = True + insert_returning = True + update_returning_multifrom = True + delete_returning_multifrom = True + + connection_characteristics = ( + default.DefaultDialect.connection_characteristics + ) + connection_characteristics = connection_characteristics.union( + { + "postgresql_readonly": PGReadOnlyConnectionCharacteristic(), + "postgresql_deferrable": PGDeferrableConnectionCharacteristic(), + } + ) + + construct_arguments = [ + ( + schema.Index, + { + "using": False, + "include": None, + "where": None, + "ops": {}, + "concurrently": False, + "with": {}, + "tablespace": None, + "nulls_not_distinct": None, + }, + ), + ( + schema.Table, + { + 
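+ # These keys surface as ``postgresql_*`` keyword arguments on
+ # ``Table``; a minimal sketch with hypothetical names:
+ #     Table(
+ #         "measurement",
+ #         metadata,
+ #         Column("logdate", Date),
+ #         postgresql_partition_by="RANGE (logdate)",
+ #     )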
"ignore_search_path": False, + "tablespace": None, + "partition_by": None, + "with_oids": None, + "on_commit": None, + "inherits": None, + "using": None, + }, + ), + ( + schema.CheckConstraint, + { + "not_valid": False, + }, + ), + ( + schema.ForeignKeyConstraint, + { + "not_valid": False, + }, + ), + ( + schema.UniqueConstraint, + {"nulls_not_distinct": None}, + ), + ] + + reflection_options = ("postgresql_ignore_search_path",) + + _backslash_escapes = True + _supports_create_index_concurrently = True + _supports_drop_index_concurrently = True + + def __init__( + self, + native_inet_types=None, + json_serializer=None, + json_deserializer=None, + **kwargs, + ): + default.DefaultDialect.__init__(self, **kwargs) + + self._native_inet_types = native_inet_types + self._json_deserializer = json_deserializer + self._json_serializer = json_serializer + + def initialize(self, connection): + super().initialize(connection) + + # https://www.postgresql.org/docs/9.3/static/release-9-2.html#AEN116689 + self.supports_smallserial = self.server_version_info >= (9, 2) + + self._set_backslash_escapes(connection) + + self._supports_drop_index_concurrently = self.server_version_info >= ( + 9, + 2, + ) + self.supports_identity_columns = self.server_version_info >= (10,) + + def get_isolation_level_values(self, dbapi_conn): + # note the generic dialect doesn't have AUTOCOMMIT, however + # all postgresql dialects should include AUTOCOMMIT. + return ( + "SERIALIZABLE", + "READ UNCOMMITTED", + "READ COMMITTED", + "REPEATABLE READ", + ) + + def set_isolation_level(self, dbapi_connection, level): + cursor = dbapi_connection.cursor() + cursor.execute( + "SET SESSION CHARACTERISTICS AS TRANSACTION " + f"ISOLATION LEVEL {level}" + ) + cursor.execute("COMMIT") + cursor.close() + + def get_isolation_level(self, dbapi_connection): + cursor = dbapi_connection.cursor() + cursor.execute("show transaction isolation level") + val = cursor.fetchone()[0] + cursor.close() + return val.upper() + + def set_readonly(self, connection, value): + raise NotImplementedError() + + def get_readonly(self, connection): + raise NotImplementedError() + + def set_deferrable(self, connection, value): + raise NotImplementedError() + + def get_deferrable(self, connection): + raise NotImplementedError() + + def _split_multihost_from_url(self, url: URL) -> Union[ + Tuple[None, None], + Tuple[Tuple[Optional[str], ...], Tuple[Optional[int], ...]], + ]: + hosts: Optional[Tuple[Optional[str], ...]] = None + ports_str: Union[str, Tuple[Optional[str], ...], None] = None + + integrated_multihost = False + + if "host" in url.query: + if isinstance(url.query["host"], (list, tuple)): + integrated_multihost = True + hosts, ports_str = zip( + *[ + token.split(":") if ":" in token else (token, None) + for token in url.query["host"] + ] + ) + + elif isinstance(url.query["host"], str): + hosts = tuple(url.query["host"].split(",")) + + if ( + "port" not in url.query + and len(hosts) == 1 + and ":" in hosts[0] + ): + # internet host is alphanumeric plus dots or hyphens. + # this is essentially rfc1123, which refers to rfc952. 
+ # https://stackoverflow.com/questions/3523028/ + # valid-characters-of-a-hostname + host_port_match = re.match( + r"^([a-zA-Z0-9\-\.]*)(?:\:(\d*))?$", hosts[0] + ) + if host_port_match: + integrated_multihost = True + h, p = host_port_match.group(1, 2) + if TYPE_CHECKING: + assert isinstance(h, str) + assert isinstance(p, str) + hosts = (h,) + ports_str = cast( + "Tuple[Optional[str], ...]", (p,) if p else (None,) + ) + + if "port" in url.query: + if integrated_multihost: + raise exc.ArgumentError( + "Can't mix 'multihost' formats together; use " + '"host=h1,h2,h3&port=p1,p2,p3" or ' + '"host=h1:p1&host=h2:p2&host=h3:p3" separately' + ) + if isinstance(url.query["port"], (list, tuple)): + ports_str = url.query["port"] + elif isinstance(url.query["port"], str): + ports_str = tuple(url.query["port"].split(",")) + + ports: Optional[Tuple[Optional[int], ...]] = None + + if ports_str: + try: + ports = tuple(int(x) if x else None for x in ports_str) + except ValueError: + raise exc.ArgumentError( + f"Received non-integer port arguments: {ports_str}" + ) from None + + if ports and ( + (not hosts and len(ports) > 1) + or ( + hosts + and ports + and len(hosts) != len(ports) + and (len(hosts) > 1 or len(ports) > 1) + ) + ): + raise exc.ArgumentError("number of hosts and ports don't match") + + if hosts is not None: + if ports is None: + ports = tuple(None for _ in hosts) + + return hosts, ports # type: ignore + + def do_begin_twophase(self, connection, xid): + self.do_begin(connection.connection) + + def do_prepare_twophase(self, connection, xid): + connection.exec_driver_sql("PREPARE TRANSACTION '%s'" % xid) + + def do_rollback_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + if is_prepared: + if recover: + # FIXME: ugly hack to get out of transaction + # context when committing recoverable transactions + # Must find out a way how to make the dbapi not + # open a transaction. 
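+ # Sketch of the driving API: ``t = conn.begin_twophase()`` then
+ # ``t.prepare()`` emits PREPARE TRANSACTION '<xid>'; a later
+ # recovery pass lists candidate xids via do_recover_twophase()
+ # and arrives here with is_prepared=True, recover=True.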
+ connection.exec_driver_sql("ROLLBACK") + connection.exec_driver_sql("ROLLBACK PREPARED '%s'" % xid) + connection.exec_driver_sql("BEGIN") + self.do_rollback(connection.connection) + else: + self.do_rollback(connection.connection) + + def do_commit_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + if is_prepared: + if recover: + connection.exec_driver_sql("ROLLBACK") + connection.exec_driver_sql("COMMIT PREPARED '%s'" % xid) + connection.exec_driver_sql("BEGIN") + self.do_rollback(connection.connection) + else: + self.do_commit(connection.connection) + + def do_recover_twophase(self, connection): + return connection.scalars( + sql.text("SELECT gid FROM pg_prepared_xacts") + ).all() + + def _get_default_schema_name(self, connection): + return connection.exec_driver_sql("select current_schema()").scalar() + + @reflection.cache + def has_schema(self, connection, schema, **kw): + query = select(pg_catalog.pg_namespace.c.nspname).where( + pg_catalog.pg_namespace.c.nspname == schema + ) + return bool(connection.scalar(query)) + + def _pg_class_filter_scope_schema( + self, query, schema, scope, pg_class_table=None + ): + if pg_class_table is None: + pg_class_table = pg_catalog.pg_class + query = query.join( + pg_catalog.pg_namespace, + pg_catalog.pg_namespace.c.oid == pg_class_table.c.relnamespace, + ) + + if scope is ObjectScope.DEFAULT: + query = query.where(pg_class_table.c.relpersistence != "t") + elif scope is ObjectScope.TEMPORARY: + query = query.where(pg_class_table.c.relpersistence == "t") + + if schema is None: + query = query.where( + pg_catalog.pg_table_is_visible(pg_class_table.c.oid), + # ignore pg_catalog schema + pg_catalog.pg_namespace.c.nspname != "pg_catalog", + ) + else: + query = query.where(pg_catalog.pg_namespace.c.nspname == schema) + return query + + def _pg_class_relkind_condition(self, relkinds, pg_class_table=None): + if pg_class_table is None: + pg_class_table = pg_catalog.pg_class + # uses the any form instead of in otherwise postgresql complaings + # that 'IN could not convert type character to "char"' + return pg_class_table.c.relkind == sql.any_(_array.array(relkinds)) + + @lru_cache() + def _has_table_query(self, schema): + query = select(pg_catalog.pg_class.c.relname).where( + pg_catalog.pg_class.c.relname == bindparam("table_name"), + self._pg_class_relkind_condition( + pg_catalog.RELKINDS_ALL_TABLE_LIKE + ), + ) + return self._pg_class_filter_scope_schema( + query, schema, scope=ObjectScope.ANY + ) + + @reflection.cache + def has_table(self, connection, table_name, schema=None, **kw): + self._ensure_has_table_connection(connection) + query = self._has_table_query(schema) + return bool(connection.scalar(query, {"table_name": table_name})) + + @reflection.cache + def has_sequence(self, connection, sequence_name, schema=None, **kw): + query = select(pg_catalog.pg_class.c.relname).where( + pg_catalog.pg_class.c.relkind == "S", + pg_catalog.pg_class.c.relname == sequence_name, + ) + query = self._pg_class_filter_scope_schema( + query, schema, scope=ObjectScope.ANY + ) + return bool(connection.scalar(query)) + + @reflection.cache + def has_type(self, connection, type_name, schema=None, **kw): + query = ( + select(pg_catalog.pg_type.c.typname) + .join( + pg_catalog.pg_namespace, + pg_catalog.pg_namespace.c.oid + == pg_catalog.pg_type.c.typnamespace, + ) + .where(pg_catalog.pg_type.c.typname == type_name) + ) + if schema is None: + query = query.where( + pg_catalog.pg_type_is_visible(pg_catalog.pg_type.c.oid), + # ignore pg_catalog schema + 
pg_catalog.pg_namespace.c.nspname != "pg_catalog", + ) + elif schema != "*": + query = query.where(pg_catalog.pg_namespace.c.nspname == schema) + + return bool(connection.scalar(query))
+ + def _get_server_version_info(self, connection): + v = connection.exec_driver_sql("select pg_catalog.version()").scalar() + m = re.match( + r".*(?:PostgreSQL|EnterpriseDB) " + r"(\d+)\.?(\d+)?(?:\.(\d+))?(?:\.\d+)?(?:devel|beta)?", + v, + ) + if not m: + raise AssertionError( + "Could not determine version from string '%s'" % v + ) + return tuple([int(x) for x in m.group(1, 2, 3) if x is not None])
+ + @reflection.cache + def get_table_oid(self, connection, table_name, schema=None, **kw): + """Fetch the oid for schema.table_name.""" + query = select(pg_catalog.pg_class.c.oid).where( + pg_catalog.pg_class.c.relname == table_name, + self._pg_class_relkind_condition( + pg_catalog.RELKINDS_ALL_TABLE_LIKE + ), + ) + query = self._pg_class_filter_scope_schema( + query, schema, scope=ObjectScope.ANY + ) + table_oid = connection.scalar(query) + if table_oid is None: + raise exc.NoSuchTableError( + f"{schema}.{table_name}" if schema else table_name + ) + return table_oid
+ + @reflection.cache + def get_schema_names(self, connection, **kw): + query = ( + select(pg_catalog.pg_namespace.c.nspname) + .where(pg_catalog.pg_namespace.c.nspname.not_like("pg_%")) + .order_by(pg_catalog.pg_namespace.c.nspname) + ) + return connection.scalars(query).all()
+ + def _get_relnames_for_relkinds(self, connection, schema, relkinds, scope): + query = select(pg_catalog.pg_class.c.relname).where( + self._pg_class_relkind_condition(relkinds) + ) + query = self._pg_class_filter_scope_schema(query, schema, scope=scope) + return connection.scalars(query).all()
+ + @reflection.cache + def get_table_names(self, connection, schema=None, **kw): + return self._get_relnames_for_relkinds( + connection, + schema, + pg_catalog.RELKINDS_TABLE_NO_FOREIGN, + scope=ObjectScope.DEFAULT, + )
+ + @reflection.cache + def get_temp_table_names(self, connection, **kw): + return self._get_relnames_for_relkinds( + connection, + schema=None, + relkinds=pg_catalog.RELKINDS_TABLE_NO_FOREIGN, + scope=ObjectScope.TEMPORARY, + )
+ + @reflection.cache + def _get_foreign_table_names(self, connection, schema=None, **kw): + return self._get_relnames_for_relkinds( + connection, schema, relkinds=("f",), scope=ObjectScope.ANY + )
+ + @reflection.cache + def get_view_names(self, connection, schema=None, **kw): + return self._get_relnames_for_relkinds( + connection, + schema, + pg_catalog.RELKINDS_VIEW, + scope=ObjectScope.DEFAULT, + )
+ + @reflection.cache + def get_materialized_view_names(self, connection, schema=None, **kw): + return self._get_relnames_for_relkinds( + connection, + schema, + pg_catalog.RELKINDS_MAT_VIEW, + scope=ObjectScope.DEFAULT, + )
+ + @reflection.cache + def get_temp_view_names(self, connection, schema=None, **kw): + return self._get_relnames_for_relkinds( + connection, + schema, + # NOTE: do not include temp materialized views (that do not + # seem to be a thing at least up to version 14) + pg_catalog.RELKINDS_VIEW, + scope=ObjectScope.TEMPORARY, + )
+ + @reflection.cache + def get_sequence_names(self, connection, schema=None, **kw): + return self._get_relnames_for_relkinds( + connection, schema, relkinds=("S",), scope=ObjectScope.ANY + )
+ + @reflection.cache + def get_view_definition(self, connection, view_name, schema=None, **kw): + query = ( + select(pg_catalog.pg_get_viewdef(pg_catalog.pg_class.c.oid)) + .select_from(pg_catalog.pg_class) +
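+ # pg_get_viewdef() returns the view's SELECT body for plain and
+ # materialized views alike; e.g.
+ # ``inspect(engine).get_view_definition("totals")`` resolves through
+ # this query ("totals" being a hypothetical view name).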
.where( + pg_catalog.pg_class.c.relname == view_name, + self._pg_class_relkind_condition( + pg_catalog.RELKINDS_VIEW + pg_catalog.RELKINDS_MAT_VIEW + ), + ) + ) + query = self._pg_class_filter_scope_schema( + query, schema, scope=ObjectScope.ANY + ) + res = connection.scalar(query) + if res is None: + raise exc.NoSuchTableError( + f"{schema}.{view_name}" if schema else view_name + ) + else: + return res + + def _value_or_raise(self, data, table, schema): + try: + return dict(data)[(schema, table)] + except KeyError: + raise exc.NoSuchTableError( + f"{schema}.{table}" if schema else table + ) from None + + def _prepare_filter_names(self, filter_names): + if filter_names: + return True, {"filter_names": filter_names} + else: + return False, {} + + def _kind_to_relkinds(self, kind: ObjectKind) -> Tuple[str, ...]: + if kind is ObjectKind.ANY: + return pg_catalog.RELKINDS_ALL_TABLE_LIKE + relkinds = () + if ObjectKind.TABLE in kind: + relkinds += pg_catalog.RELKINDS_TABLE + if ObjectKind.VIEW in kind: + relkinds += pg_catalog.RELKINDS_VIEW + if ObjectKind.MATERIALIZED_VIEW in kind: + relkinds += pg_catalog.RELKINDS_MAT_VIEW + return relkinds + + @reflection.cache + def get_columns(self, connection, table_name, schema=None, **kw): + data = self.get_multi_columns( + connection, + schema=schema, + filter_names=[table_name], + scope=ObjectScope.ANY, + kind=ObjectKind.ANY, + **kw, + ) + return self._value_or_raise(data, table_name, schema) + + @lru_cache() + def _columns_query(self, schema, has_filter_names, scope, kind): + # NOTE: the query with the default and identity options scalar + # subquery is faster than trying to use outer joins for them + generated = ( + pg_catalog.pg_attribute.c.attgenerated.label("generated") + if self.server_version_info >= (12,) + else sql.null().label("generated") + ) + if self.server_version_info >= (10,): + # join lateral performs worse (~2x slower) than a scalar_subquery + identity = ( + select( + sql.func.json_build_object( + "always", + pg_catalog.pg_attribute.c.attidentity == "a", + "start", + pg_catalog.pg_sequence.c.seqstart, + "increment", + pg_catalog.pg_sequence.c.seqincrement, + "minvalue", + pg_catalog.pg_sequence.c.seqmin, + "maxvalue", + pg_catalog.pg_sequence.c.seqmax, + "cache", + pg_catalog.pg_sequence.c.seqcache, + "cycle", + pg_catalog.pg_sequence.c.seqcycle, + ) + ) + .select_from(pg_catalog.pg_sequence) + .where( + # attidentity != '' is required or it will reflect also + # serial columns as identity. 
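+ # (attidentity is 'a' for GENERATED ALWAYS, 'd' for GENERATED BY
+ # DEFAULT, and '' otherwise; the json_build_object above becomes the
+ # reflected ``identity`` dict, e.g. {"always": True, "start": 1, ...}.)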
+ pg_catalog.pg_attribute.c.attidentity != "", + pg_catalog.pg_sequence.c.seqrelid + == sql.cast( + sql.cast( + pg_catalog.pg_get_serial_sequence( + sql.cast( + sql.cast( + pg_catalog.pg_attribute.c.attrelid, + REGCLASS, + ), + TEXT, + ), + pg_catalog.pg_attribute.c.attname, + ), + REGCLASS, + ), + OID, + ), + ) + .correlate(pg_catalog.pg_attribute) + .scalar_subquery() + .label("identity_options") + ) + else: + identity = sql.null().label("identity_options") + + # join lateral performs the same as scalar_subquery here + default = ( + select( + pg_catalog.pg_get_expr( + pg_catalog.pg_attrdef.c.adbin, + pg_catalog.pg_attrdef.c.adrelid, + ) + ) + .select_from(pg_catalog.pg_attrdef) + .where( + pg_catalog.pg_attrdef.c.adrelid + == pg_catalog.pg_attribute.c.attrelid, + pg_catalog.pg_attrdef.c.adnum + == pg_catalog.pg_attribute.c.attnum, + pg_catalog.pg_attribute.c.atthasdef, + ) + .correlate(pg_catalog.pg_attribute) + .scalar_subquery() + .label("default") + ) + relkinds = self._kind_to_relkinds(kind) + query = ( + select( + pg_catalog.pg_attribute.c.attname.label("name"), + pg_catalog.format_type( + pg_catalog.pg_attribute.c.atttypid, + pg_catalog.pg_attribute.c.atttypmod, + ).label("format_type"), + default, + pg_catalog.pg_attribute.c.attnotnull.label("not_null"), + pg_catalog.pg_class.c.relname.label("table_name"), + pg_catalog.pg_description.c.description.label("comment"), + generated, + identity, + ) + .select_from(pg_catalog.pg_class) + # NOTE: postgresql support table with no user column, meaning + # there is no row with pg_attribute.attnum > 0. use a left outer + # join to avoid filtering these tables. + .outerjoin( + pg_catalog.pg_attribute, + sql.and_( + pg_catalog.pg_class.c.oid + == pg_catalog.pg_attribute.c.attrelid, + pg_catalog.pg_attribute.c.attnum > 0, + ~pg_catalog.pg_attribute.c.attisdropped, + ), + ) + .outerjoin( + pg_catalog.pg_description, + sql.and_( + pg_catalog.pg_description.c.objoid + == pg_catalog.pg_attribute.c.attrelid, + pg_catalog.pg_description.c.objsubid + == pg_catalog.pg_attribute.c.attnum, + ), + ) + .where(self._pg_class_relkind_condition(relkinds)) + .order_by( + pg_catalog.pg_class.c.relname, pg_catalog.pg_attribute.c.attnum + ) + ) + query = self._pg_class_filter_scope_schema(query, schema, scope=scope) + if has_filter_names: + query = query.where( + pg_catalog.pg_class.c.relname.in_(bindparam("filter_names")) + ) + return query + + def get_multi_columns( + self, connection, schema, filter_names, scope, kind, **kw + ): + has_filter_names, params = self._prepare_filter_names(filter_names) + query = self._columns_query(schema, has_filter_names, scope, kind) + rows = connection.execute(query, params).mappings() + + # dictionary with (name, ) if default search path or (schema, name) + # as keys + domains = { + ((d["schema"], d["name"]) if not d["visible"] else (d["name"],)): d + for d in self._load_domains( + connection, schema="*", info_cache=kw.get("info_cache") + ) + } + + # dictionary with (name, ) if default search path or (schema, name) + # as keys + enums = dict( + ( + ((rec["name"],), rec) + if rec["visible"] + else ((rec["schema"], rec["name"]), rec) + ) + for rec in self._load_enums( + connection, schema="*", info_cache=kw.get("info_cache") + ) + ) + + columns = self._get_columns_info(rows, domains, enums, schema) + + return columns.items() + + _format_type_args_pattern = re.compile(r"\((.*)\)") + _format_type_args_delim = re.compile(r"\s*,\s*") + _format_array_spec_pattern = re.compile(r"((?:\[\])*)$") + + def _reflect_type( + self, + 
format_type: Optional[str], + domains: dict[str, ReflectedDomain], + enums: dict[str, ReflectedEnum], + type_description: str, + ) -> sqltypes.TypeEngine[Any]: + """ + Attempts to reconstruct a column type defined in ischema_names based + on the information available in the format_type. + + If the `format_type` cannot be associated with a known `ischema_names`, + it is treated as a reference to a known PostgreSQL named `ENUM` or + `DOMAIN` type. + """ + type_description = type_description or "unknown type" + if format_type is None: + util.warn( + "PostgreSQL format_type() returned NULL for %s" + % type_description + ) + return sqltypes.NULLTYPE + + attype_args_match = self._format_type_args_pattern.search(format_type) + if attype_args_match and attype_args_match.group(1): + attype_args = self._format_type_args_delim.split( + attype_args_match.group(1) + ) + else: + attype_args = () + + match_array_dim = self._format_array_spec_pattern.search(format_type) + # Each "[]" in array specs corresponds to an array dimension + array_dim = len(match_array_dim.group(1) or "") // 2 + + # Remove all parameters and array specs from format_type to obtain an + # ischema_name candidate + attype = self._format_type_args_pattern.sub("", format_type) + attype = self._format_array_spec_pattern.sub("", attype) + + schema_type = self.ischema_names.get(attype.lower(), None) + args, kwargs = (), {} + + if attype == "numeric": + if len(attype_args) == 2: + precision, scale = map(int, attype_args) + args = (precision, scale) + + elif attype == "double precision": + args = (53,) + + elif attype == "integer": + args = () + + elif attype in ("timestamp with time zone", "time with time zone"): + kwargs["timezone"] = True + if len(attype_args) == 1: + kwargs["precision"] = int(attype_args[0]) + + elif attype in ( + "timestamp without time zone", + "time without time zone", + "time", + ): + kwargs["timezone"] = False + if len(attype_args) == 1: + kwargs["precision"] = int(attype_args[0]) + + elif attype == "bit varying": + kwargs["varying"] = True + if len(attype_args) == 1: + charlen = int(attype_args[0]) + args = (charlen,) + + elif attype.startswith("interval"): + schema_type = INTERVAL + + field_match = re.match(r"interval (.+)", attype) + if field_match: + kwargs["fields"] = field_match.group(1) + + if len(attype_args) == 1: + kwargs["precision"] = int(attype_args[0]) + + else: + enum_or_domain_key = tuple(util.quoted_token_parser(attype)) + + if enum_or_domain_key in enums: + schema_type = ENUM + enum = enums[enum_or_domain_key] + + args = tuple(enum["labels"]) + kwargs["name"] = enum["name"] + + if not enum["visible"]: + kwargs["schema"] = enum["schema"] + args = tuple(enum["labels"]) + elif enum_or_domain_key in domains: + schema_type = DOMAIN + domain = domains[enum_or_domain_key] + + data_type = self._reflect_type( + domain["type"], + domains, + enums, + type_description="DOMAIN '%s'" % domain["name"], + ) + args = (domain["name"], data_type) + + kwargs["collation"] = domain["collation"] + kwargs["default"] = domain["default"] + kwargs["not_null"] = not domain["nullable"] + kwargs["create_type"] = False + + if domain["constraints"]: + # We only support a single constraint + check_constraint = domain["constraints"][0] + + kwargs["constraint_name"] = check_constraint["name"] + kwargs["check"] = check_constraint["check"] + + if not domain["visible"]: + kwargs["schema"] = domain["schema"] + + else: + try: + charlen = int(attype_args[0]) + args = (charlen, *attype_args[1:]) + except (ValueError, IndexError): + args = 
attype_args + + if not schema_type: + util.warn( + "Did not recognize type '%s' of %s" + % (attype, type_description) + ) + return sqltypes.NULLTYPE + + data_type = schema_type(*args, **kwargs) + if array_dim >= 1: + # postgres does not preserve dimensionality or size of array types. + data_type = _array.ARRAY(data_type) + + return data_type + + def _get_columns_info(self, rows, domains, enums, schema): + columns = defaultdict(list) + for row_dict in rows: + # ensure that each table has an entry, even if it has no columns + if row_dict["name"] is None: + columns[(schema, row_dict["table_name"])] = ( + ReflectionDefaults.columns() + ) + continue + table_cols = columns[(schema, row_dict["table_name"])] + + coltype = self._reflect_type( + row_dict["format_type"], + domains, + enums, + type_description="column '%s'" % row_dict["name"], + ) + + default = row_dict["default"] + name = row_dict["name"] + generated = row_dict["generated"] + nullable = not row_dict["not_null"] + + if isinstance(coltype, DOMAIN): + if not default: + # domain can override the default value but + # cant set it to None + if coltype.default is not None: + default = coltype.default + + nullable = nullable and not coltype.not_null + + identity = row_dict["identity_options"] + + # If a zero byte or blank string depending on driver (is also + # absent for older PG versions), then not a generated column. + # Otherwise, s = stored. (Other values might be added in the + # future.) + if generated not in (None, "", b"\x00"): + computed = dict( + sqltext=default, persisted=generated in ("s", b"s") + ) + default = None + else: + computed = None + + # adjust the default value + autoincrement = False + if default is not None: + match = re.search(r"""(nextval\(')([^']+)('.*$)""", default) + if match is not None: + if issubclass(coltype._type_affinity, sqltypes.Integer): + autoincrement = True + # the default is related to a Sequence + if "." not in match.group(2) and schema is not None: + # unconditionally quote the schema name. this could + # later be enhanced to obey quoting rules / + # "quote schema" + default = ( + match.group(1) + + ('"%s"' % schema) + + "." 
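+ # e.g. a reflected default of nextval('measurement_id_seq'::regclass)
+ # on a table in schema "app" is rewritten here to
+ # nextval('"app".measurement_id_seq'::regclass)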
+ + match.group(2) + + match.group(3) + )
+ + column_info = { + "name": name, + "type": coltype, + "nullable": nullable, + "default": default, + "autoincrement": autoincrement or identity is not None, + "comment": row_dict["comment"], + } + if computed is not None: + column_info["computed"] = computed + if identity is not None: + column_info["identity"] = identity + + table_cols.append(column_info) + + return columns
+ + @lru_cache() + def _table_oids_query(self, schema, has_filter_names, scope, kind): + relkinds = self._kind_to_relkinds(kind) + oid_q = select( + pg_catalog.pg_class.c.oid, pg_catalog.pg_class.c.relname + ).where(self._pg_class_relkind_condition(relkinds)) + oid_q = self._pg_class_filter_scope_schema(oid_q, schema, scope=scope) + + if has_filter_names: + oid_q = oid_q.where( + pg_catalog.pg_class.c.relname.in_(bindparam("filter_names")) + ) + return oid_q
+ + @reflection.flexi_cache( + ("schema", InternalTraversal.dp_string), + ("filter_names", InternalTraversal.dp_string_list), + ("kind", InternalTraversal.dp_plain_obj), + ("scope", InternalTraversal.dp_plain_obj), + ) + def _get_table_oids( + self, connection, schema, filter_names, scope, kind, **kw + ): + has_filter_names, params = self._prepare_filter_names(filter_names) + oid_q = self._table_oids_query(schema, has_filter_names, scope, kind) + result = connection.execute(oid_q, params) + return result.all()
+ + @lru_cache() + def _constraint_query(self, is_unique): + con_sq = ( + select( + pg_catalog.pg_constraint.c.conrelid, + pg_catalog.pg_constraint.c.conname, + pg_catalog.pg_constraint.c.conindid, + sql.func.unnest(pg_catalog.pg_constraint.c.conkey).label( + "attnum" + ), + sql.func.generate_subscripts( + pg_catalog.pg_constraint.c.conkey, 1 + ).label("ord"), + pg_catalog.pg_description.c.description, + ) + .outerjoin( + pg_catalog.pg_description, + pg_catalog.pg_description.c.objoid + == pg_catalog.pg_constraint.c.oid, + ) + .where( + pg_catalog.pg_constraint.c.contype == bindparam("contype"), + pg_catalog.pg_constraint.c.conrelid.in_(bindparam("oids")), + ) + .subquery("con") + )
+ + attr_sq = ( + select( + con_sq.c.conrelid, + con_sq.c.conname, + con_sq.c.conindid, + con_sq.c.description, + con_sq.c.ord, + pg_catalog.pg_attribute.c.attname, + ) + .select_from(pg_catalog.pg_attribute) + .join( + con_sq, + sql.and_( + pg_catalog.pg_attribute.c.attnum == con_sq.c.attnum, + pg_catalog.pg_attribute.c.attrelid == con_sq.c.conrelid, + ), + ) + .where( + # NOTE: restate the condition here, since pg15 otherwise + # seems to get confused on psycopg2 sometimes, doing + # a sequential scan of pg_attribute. + # The condition in the con_sq subquery is not actually needed + # in pg15, but it may be needed in older versions. Keeping it + # does not seem to have any impact in any case.
+ con_sq.c.conrelid.in_(bindparam("oids")) + ) + .subquery("attr") + ) + + constraint_query = ( + select( + attr_sq.c.conrelid, + sql.func.array_agg( + # NOTE: cast since some postgresql derivatives may + # not support array_agg on the name type + aggregate_order_by( + attr_sq.c.attname.cast(TEXT), attr_sq.c.ord + ) + ).label("cols"), + attr_sq.c.conname, + sql.func.min(attr_sq.c.description).label("description"), + ) + .group_by(attr_sq.c.conrelid, attr_sq.c.conname) + .order_by(attr_sq.c.conrelid, attr_sq.c.conname) + ) + + if is_unique: + if self.server_version_info >= (15,): + constraint_query = constraint_query.join( + pg_catalog.pg_index, + attr_sq.c.conindid == pg_catalog.pg_index.c.indexrelid, + ).add_columns( + sql.func.bool_and( + pg_catalog.pg_index.c.indnullsnotdistinct + ).label("indnullsnotdistinct") + ) + else: + constraint_query = constraint_query.add_columns( + sql.false().label("indnullsnotdistinct") + ) + else: + constraint_query = constraint_query.add_columns( + sql.null().label("extra") + ) + return constraint_query + + def _reflect_constraint( + self, connection, contype, schema, filter_names, scope, kind, **kw + ): + # used to reflect primary and unique constraint + table_oids = self._get_table_oids( + connection, schema, filter_names, scope, kind, **kw + ) + batches = list(table_oids) + is_unique = contype == "u" + + while batches: + batch = batches[0:3000] + batches[0:3000] = [] + + result = connection.execute( + self._constraint_query(is_unique), + {"oids": [r[0] for r in batch], "contype": contype}, + ) + + result_by_oid = defaultdict(list) + for oid, cols, constraint_name, comment, extra in result: + result_by_oid[oid].append( + (cols, constraint_name, comment, extra) + ) + + for oid, tablename in batch: + for_oid = result_by_oid.get(oid, ()) + if for_oid: + for cols, constraint, comment, extra in for_oid: + if is_unique: + yield tablename, cols, constraint, comment, { + "nullsnotdistinct": extra + } + else: + yield tablename, cols, constraint, comment, None + else: + yield tablename, None, None, None, None + + @reflection.cache + def get_pk_constraint(self, connection, table_name, schema=None, **kw): + data = self.get_multi_pk_constraint( + connection, + schema=schema, + filter_names=[table_name], + scope=ObjectScope.ANY, + kind=ObjectKind.ANY, + **kw, + ) + return self._value_or_raise(data, table_name, schema) + + def get_multi_pk_constraint( + self, connection, schema, filter_names, scope, kind, **kw + ): + result = self._reflect_constraint( + connection, "p", schema, filter_names, scope, kind, **kw + ) + + # only a single pk can be present for each table. 
Return an entry + # even if a table has no primary key + default = ReflectionDefaults.pk_constraint + return ( + ( + (schema, table_name), + ( + { + "constrained_columns": [] if cols is None else cols, + "name": pk_name, + "comment": comment, + } + if pk_name is not None + else default() + ), + ) + for table_name, cols, pk_name, comment, _ in result + ) + + @reflection.cache + def get_foreign_keys( + self, + connection, + table_name, + schema=None, + postgresql_ignore_search_path=False, + **kw, + ): + data = self.get_multi_foreign_keys( + connection, + schema=schema, + filter_names=[table_name], + postgresql_ignore_search_path=postgresql_ignore_search_path, + scope=ObjectScope.ANY, + kind=ObjectKind.ANY, + **kw, + ) + return self._value_or_raise(data, table_name, schema) + + @lru_cache() + def _foreing_key_query(self, schema, has_filter_names, scope, kind): + pg_class_ref = pg_catalog.pg_class.alias("cls_ref") + pg_namespace_ref = pg_catalog.pg_namespace.alias("nsp_ref") + relkinds = self._kind_to_relkinds(kind) + query = ( + select( + pg_catalog.pg_class.c.relname, + pg_catalog.pg_constraint.c.conname, + # NOTE: avoid calling pg_get_constraintdef when not needed + # to speed up the query + sql.case( + ( + pg_catalog.pg_constraint.c.oid.is_not(None), + pg_catalog.pg_get_constraintdef( + pg_catalog.pg_constraint.c.oid, True + ), + ), + else_=None, + ), + pg_namespace_ref.c.nspname, + pg_catalog.pg_description.c.description, + ) + .select_from(pg_catalog.pg_class) + .outerjoin( + pg_catalog.pg_constraint, + sql.and_( + pg_catalog.pg_class.c.oid + == pg_catalog.pg_constraint.c.conrelid, + pg_catalog.pg_constraint.c.contype == "f", + ), + ) + .outerjoin( + pg_class_ref, + pg_class_ref.c.oid == pg_catalog.pg_constraint.c.confrelid, + ) + .outerjoin( + pg_namespace_ref, + pg_class_ref.c.relnamespace == pg_namespace_ref.c.oid, + ) + .outerjoin( + pg_catalog.pg_description, + pg_catalog.pg_description.c.objoid + == pg_catalog.pg_constraint.c.oid, + ) + .order_by( + pg_catalog.pg_class.c.relname, + pg_catalog.pg_constraint.c.conname, + ) + .where(self._pg_class_relkind_condition(relkinds)) + ) + query = self._pg_class_filter_scope_schema(query, schema, scope) + if has_filter_names: + query = query.where( + pg_catalog.pg_class.c.relname.in_(bindparam("filter_names")) + ) + return query + + @util.memoized_property + def _fk_regex_pattern(self): + # optionally quoted token + qtoken = '(?:"[^"]+"|[A-Za-z0-9_]+?)' + + # https://www.postgresql.org/docs/current/static/sql-createtable.html + return re.compile( + r"FOREIGN KEY \((.*?)\) " + rf"REFERENCES (?:({qtoken})\.)?({qtoken})\(((?:{qtoken}(?: *, *)?)+)\)" # noqa: E501 + r"[\s]?(MATCH (FULL|PARTIAL|SIMPLE)+)?" + r"[\s]?(ON UPDATE " + r"(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?" + r"[\s]?(ON DELETE " + r"(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?" + r"[\s]?(DEFERRABLE|NOT DEFERRABLE)?" + r"[\s]?(INITIALLY (DEFERRED|IMMEDIATE)+)?" 
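+            # illustrative constraint text this pattern matches:
+            #   FOREIGN KEY (user_id) REFERENCES auth.users(id)
+            #   ON UPDATE CASCADE ON DELETE SET NULL
+            # the thirteen capture groups are unpacked positionally
+            # in get_multi_foreign_keys() below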
+ ) + + def get_multi_foreign_keys( + self, + connection, + schema, + filter_names, + scope, + kind, + postgresql_ignore_search_path=False, + **kw, + ): + preparer = self.identifier_preparer + + has_filter_names, params = self._prepare_filter_names(filter_names) + query = self._foreing_key_query(schema, has_filter_names, scope, kind) + result = connection.execute(query, params) + + FK_REGEX = self._fk_regex_pattern + + fkeys = defaultdict(list) + default = ReflectionDefaults.foreign_keys + for table_name, conname, condef, conschema, comment in result: + # ensure that each table has an entry, even if it has + # no foreign keys + if conname is None: + fkeys[(schema, table_name)] = default() + continue + table_fks = fkeys[(schema, table_name)] + m = re.search(FK_REGEX, condef).groups() + + ( + constrained_columns, + referred_schema, + referred_table, + referred_columns, + _, + match, + _, + onupdate, + _, + ondelete, + deferrable, + _, + initially, + ) = m + + if deferrable is not None: + deferrable = True if deferrable == "DEFERRABLE" else False + constrained_columns = [ + preparer._unquote_identifier(x) + for x in re.split(r"\s*,\s*", constrained_columns) + ] + + if postgresql_ignore_search_path: + # when ignoring search path, we use the actual schema + # provided it isn't the "default" schema + if conschema != self.default_schema_name: + referred_schema = conschema + else: + referred_schema = schema + elif referred_schema: + # referred_schema is the schema that we regexp'ed from + # pg_get_constraintdef(). If the schema is in the search + # path, pg_get_constraintdef() will give us None. + referred_schema = preparer._unquote_identifier(referred_schema) + elif schema is not None and schema == conschema: + # If the actual schema matches the schema of the table + # we're reflecting, then we will use that. 
+ referred_schema = schema + + referred_table = preparer._unquote_identifier(referred_table) + referred_columns = [ + preparer._unquote_identifier(x) + for x in re.split(r"\s*,\s", referred_columns) + ] + options = { + k: v + for k, v in [ + ("onupdate", onupdate), + ("ondelete", ondelete), + ("initially", initially), + ("deferrable", deferrable), + ("match", match), + ] + if v is not None and v != "NO ACTION" + } + fkey_d = { + "name": conname, + "constrained_columns": constrained_columns, + "referred_schema": referred_schema, + "referred_table": referred_table, + "referred_columns": referred_columns, + "options": options, + "comment": comment, + } + table_fks.append(fkey_d) + return fkeys.items() + + @reflection.cache + def get_indexes(self, connection, table_name, schema=None, **kw): + data = self.get_multi_indexes( + connection, + schema=schema, + filter_names=[table_name], + scope=ObjectScope.ANY, + kind=ObjectKind.ANY, + **kw, + ) + return self._value_or_raise(data, table_name, schema) + + @util.memoized_property + def _index_query(self): + pg_class_index = pg_catalog.pg_class.alias("cls_idx") + # NOTE: repeating oids clause improve query performance + + # subquery to get the columns + idx_sq = ( + select( + pg_catalog.pg_index.c.indexrelid, + pg_catalog.pg_index.c.indrelid, + sql.func.unnest(pg_catalog.pg_index.c.indkey).label("attnum"), + sql.func.generate_subscripts( + pg_catalog.pg_index.c.indkey, 1 + ).label("ord"), + ) + .where( + ~pg_catalog.pg_index.c.indisprimary, + pg_catalog.pg_index.c.indrelid.in_(bindparam("oids")), + ) + .subquery("idx") + ) + + attr_sq = ( + select( + idx_sq.c.indexrelid, + idx_sq.c.indrelid, + idx_sq.c.ord, + # NOTE: always using pg_get_indexdef is too slow so just + # invoke when the element is an expression + sql.case( + ( + idx_sq.c.attnum == 0, + pg_catalog.pg_get_indexdef( + idx_sq.c.indexrelid, idx_sq.c.ord + 1, True + ), + ), + # NOTE: need to cast this since attname is of type "name" + # that's limited to 63 bytes, while pg_get_indexdef + # returns "text" so its output may get cut + else_=pg_catalog.pg_attribute.c.attname.cast(TEXT), + ).label("element"), + (idx_sq.c.attnum == 0).label("is_expr"), + ) + .select_from(idx_sq) + .outerjoin( + # do not remove rows where idx_sq.c.attnum is 0 + pg_catalog.pg_attribute, + sql.and_( + pg_catalog.pg_attribute.c.attnum == idx_sq.c.attnum, + pg_catalog.pg_attribute.c.attrelid == idx_sq.c.indrelid, + ), + ) + .where(idx_sq.c.indrelid.in_(bindparam("oids"))) + .subquery("idx_attr") + ) + + cols_sq = ( + select( + attr_sq.c.indexrelid, + sql.func.min(attr_sq.c.indrelid), + sql.func.array_agg( + aggregate_order_by(attr_sq.c.element, attr_sq.c.ord) + ).label("elements"), + sql.func.array_agg( + aggregate_order_by(attr_sq.c.is_expr, attr_sq.c.ord) + ).label("elements_is_expr"), + ) + .group_by(attr_sq.c.indexrelid) + .subquery("idx_cols") + ) + + if self.server_version_info >= (11, 0): + indnkeyatts = pg_catalog.pg_index.c.indnkeyatts + else: + indnkeyatts = sql.null().label("indnkeyatts") + + if self.server_version_info >= (15,): + nulls_not_distinct = pg_catalog.pg_index.c.indnullsnotdistinct + else: + nulls_not_distinct = sql.false().label("indnullsnotdistinct") + + return ( + select( + pg_catalog.pg_index.c.indrelid, + pg_class_index.c.relname.label("relname_index"), + pg_catalog.pg_index.c.indisunique, + pg_catalog.pg_constraint.c.conrelid.is_not(None).label( + "has_constraint" + ), + pg_catalog.pg_index.c.indoption, + pg_class_index.c.reloptions, + pg_catalog.pg_am.c.amname, + # NOTE: pg_get_expr is very 
fast so this case has almost no + # performance impact + sql.case( + ( + pg_catalog.pg_index.c.indpred.is_not(None), + pg_catalog.pg_get_expr( + pg_catalog.pg_index.c.indpred, + pg_catalog.pg_index.c.indrelid, + ), + ), + else_=None, + ).label("filter_definition"), + indnkeyatts, + nulls_not_distinct, + cols_sq.c.elements, + cols_sq.c.elements_is_expr, + ) + .select_from(pg_catalog.pg_index) + .where( + pg_catalog.pg_index.c.indrelid.in_(bindparam("oids")), + ~pg_catalog.pg_index.c.indisprimary, + ) + .join( + pg_class_index, + pg_catalog.pg_index.c.indexrelid == pg_class_index.c.oid, + ) + .join( + pg_catalog.pg_am, + pg_class_index.c.relam == pg_catalog.pg_am.c.oid, + ) + .outerjoin( + cols_sq, + pg_catalog.pg_index.c.indexrelid == cols_sq.c.indexrelid, + ) + .outerjoin( + pg_catalog.pg_constraint, + sql.and_( + pg_catalog.pg_index.c.indrelid + == pg_catalog.pg_constraint.c.conrelid, + pg_catalog.pg_index.c.indexrelid + == pg_catalog.pg_constraint.c.conindid, + pg_catalog.pg_constraint.c.contype + == sql.any_(_array.array(("p", "u", "x"))), + ), + ) + .order_by(pg_catalog.pg_index.c.indrelid, pg_class_index.c.relname) + ) + + def get_multi_indexes( + self, connection, schema, filter_names, scope, kind, **kw + ): + table_oids = self._get_table_oids( + connection, schema, filter_names, scope, kind, **kw + ) + + indexes = defaultdict(list) + default = ReflectionDefaults.indexes + + batches = list(table_oids) + + while batches: + batch = batches[0:3000] + batches[0:3000] = [] + + result = connection.execute( + self._index_query, {"oids": [r[0] for r in batch]} + ).mappings() + + result_by_oid = defaultdict(list) + for row_dict in result: + result_by_oid[row_dict["indrelid"]].append(row_dict) + + for oid, table_name in batch: + if oid not in result_by_oid: + # ensure that each table has an entry, even if reflection + # is skipped because not supported + indexes[(schema, table_name)] = default() + continue + + for row in result_by_oid[oid]: + index_name = row["relname_index"] + + table_indexes = indexes[(schema, table_name)] + + all_elements = row["elements"] + all_elements_is_expr = row["elements_is_expr"] + indnkeyatts = row["indnkeyatts"] + # "The number of key columns in the index, not counting any + # included columns, which are merely stored and do not + # participate in the index semantics" + if indnkeyatts and len(all_elements) > indnkeyatts: + # this is a "covering index" which has INCLUDE columns + # as well as regular index columns + inc_cols = all_elements[indnkeyatts:] + idx_elements = all_elements[:indnkeyatts] + idx_elements_is_expr = all_elements_is_expr[ + :indnkeyatts + ] + # postgresql does not support expression on included + # columns as of v14: "ERROR: expressions are not + # supported in included columns". + assert all( + not is_expr + for is_expr in all_elements_is_expr[indnkeyatts:] + ) + else: + idx_elements = all_elements + idx_elements_is_expr = all_elements_is_expr + inc_cols = [] + + index = {"name": index_name, "unique": row["indisunique"]} + if any(idx_elements_is_expr): + index["column_names"] = [ + None if is_expr else expr + for expr, is_expr in zip( + idx_elements, idx_elements_is_expr + ) + ] + index["expressions"] = idx_elements + else: + index["column_names"] = idx_elements + + sorting = {} + for col_index, col_flags in enumerate(row["indoption"]): + col_sorting = () + # try to set flags only if they differ from PG + # defaults... 
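+                        # flag bits follow pg_index.indoption as defined
+                        # in PostgreSQL's catalog/pg_index.h:
+                        # 0x01 = INDOPTION_DESC, 0x02 = INDOPTION_NULLS_FIRST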
+ if col_flags & 0x01: + col_sorting += ("desc",) + if not (col_flags & 0x02): + col_sorting += ("nulls_last",) + else: + if col_flags & 0x02: + col_sorting += ("nulls_first",) + if col_sorting: + sorting[idx_elements[col_index]] = col_sorting + if sorting: + index["column_sorting"] = sorting + if row["has_constraint"]: + index["duplicates_constraint"] = index_name + + dialect_options = {} + if row["reloptions"]: + dialect_options["postgresql_with"] = dict( + [option.split("=") for option in row["reloptions"]] + ) + # it *might* be nice to include that this is 'btree' in the + # reflection info. But we don't want an Index object + # to have a ``postgresql_using`` in it that is just the + # default, so for the moment leaving this out. + amname = row["amname"] + if amname != "btree": + dialect_options["postgresql_using"] = row["amname"] + if row["filter_definition"]: + dialect_options["postgresql_where"] = row[ + "filter_definition" + ] + if self.server_version_info >= (11,): + # NOTE: this is legacy, this is part of + # dialect_options now as of #7382 + index["include_columns"] = inc_cols + dialect_options["postgresql_include"] = inc_cols + if row["indnullsnotdistinct"]: + # the default is False, so ignore it. + dialect_options["postgresql_nulls_not_distinct"] = row[ + "indnullsnotdistinct" + ] + + if dialect_options: + index["dialect_options"] = dialect_options + + table_indexes.append(index) + return indexes.items() + + @reflection.cache + def get_unique_constraints( + self, connection, table_name, schema=None, **kw + ): + data = self.get_multi_unique_constraints( + connection, + schema=schema, + filter_names=[table_name], + scope=ObjectScope.ANY, + kind=ObjectKind.ANY, + **kw, + ) + return self._value_or_raise(data, table_name, schema) + + def get_multi_unique_constraints( + self, + connection, + schema, + filter_names, + scope, + kind, + **kw, + ): + result = self._reflect_constraint( + connection, "u", schema, filter_names, scope, kind, **kw + ) + + # each table can have multiple unique constraints + uniques = defaultdict(list) + default = ReflectionDefaults.unique_constraints + for table_name, cols, con_name, comment, options in result: + # ensure a list is created for each table. 
leave it empty if + # the table has no unique cosntraint + if con_name is None: + uniques[(schema, table_name)] = default() + continue + + uc_dict = { + "column_names": cols, + "name": con_name, + "comment": comment, + } + if options: + if options["nullsnotdistinct"]: + uc_dict["dialect_options"] = { + "postgresql_nulls_not_distinct": options[ + "nullsnotdistinct" + ] + } + + uniques[(schema, table_name)].append(uc_dict) + return uniques.items() + + @reflection.cache + def get_table_comment(self, connection, table_name, schema=None, **kw): + data = self.get_multi_table_comment( + connection, + schema, + [table_name], + scope=ObjectScope.ANY, + kind=ObjectKind.ANY, + **kw, + ) + return self._value_or_raise(data, table_name, schema) + + @lru_cache() + def _comment_query(self, schema, has_filter_names, scope, kind): + relkinds = self._kind_to_relkinds(kind) + query = ( + select( + pg_catalog.pg_class.c.relname, + pg_catalog.pg_description.c.description, + ) + .select_from(pg_catalog.pg_class) + .outerjoin( + pg_catalog.pg_description, + sql.and_( + pg_catalog.pg_class.c.oid + == pg_catalog.pg_description.c.objoid, + pg_catalog.pg_description.c.objsubid == 0, + ), + ) + .where(self._pg_class_relkind_condition(relkinds)) + ) + query = self._pg_class_filter_scope_schema(query, schema, scope) + if has_filter_names: + query = query.where( + pg_catalog.pg_class.c.relname.in_(bindparam("filter_names")) + ) + return query + + def get_multi_table_comment( + self, connection, schema, filter_names, scope, kind, **kw + ): + has_filter_names, params = self._prepare_filter_names(filter_names) + query = self._comment_query(schema, has_filter_names, scope, kind) + result = connection.execute(query, params) + + default = ReflectionDefaults.table_comment + return ( + ( + (schema, table), + {"text": comment} if comment is not None else default(), + ) + for table, comment in result + ) + + @reflection.cache + def get_check_constraints(self, connection, table_name, schema=None, **kw): + data = self.get_multi_check_constraints( + connection, + schema, + [table_name], + scope=ObjectScope.ANY, + kind=ObjectKind.ANY, + **kw, + ) + return self._value_or_raise(data, table_name, schema) + + @lru_cache() + def _check_constraint_query(self, schema, has_filter_names, scope, kind): + relkinds = self._kind_to_relkinds(kind) + query = ( + select( + pg_catalog.pg_class.c.relname, + pg_catalog.pg_constraint.c.conname, + # NOTE: avoid calling pg_get_constraintdef when not needed + # to speed up the query + sql.case( + ( + pg_catalog.pg_constraint.c.oid.is_not(None), + pg_catalog.pg_get_constraintdef( + pg_catalog.pg_constraint.c.oid, True + ), + ), + else_=None, + ), + pg_catalog.pg_description.c.description, + ) + .select_from(pg_catalog.pg_class) + .outerjoin( + pg_catalog.pg_constraint, + sql.and_( + pg_catalog.pg_class.c.oid + == pg_catalog.pg_constraint.c.conrelid, + pg_catalog.pg_constraint.c.contype == "c", + ), + ) + .outerjoin( + pg_catalog.pg_description, + pg_catalog.pg_description.c.objoid + == pg_catalog.pg_constraint.c.oid, + ) + .order_by( + pg_catalog.pg_class.c.relname, + pg_catalog.pg_constraint.c.conname, + ) + .where(self._pg_class_relkind_condition(relkinds)) + ) + query = self._pg_class_filter_scope_schema(query, schema, scope) + if has_filter_names: + query = query.where( + pg_catalog.pg_class.c.relname.in_(bindparam("filter_names")) + ) + return query + + def get_multi_check_constraints( + self, connection, schema, filter_names, scope, kind, **kw + ): + has_filter_names, params = 
self._prepare_filter_names(filter_names) + query = self._check_constraint_query( + schema, has_filter_names, scope, kind + ) + result = connection.execute(query, params) + + check_constraints = defaultdict(list) + default = ReflectionDefaults.check_constraints + for table_name, check_name, src, comment in result: + # only two cases for check_name and src: both null or both defined + if check_name is None and src is None: + check_constraints[(schema, table_name)] = default() + continue + # samples: + # "CHECK (((a > 1) AND (a < 5)))" + # "CHECK (((a = 1) OR ((a > 2) AND (a < 5))))" + # "CHECK (((a > 1) AND (a < 5))) NOT VALID" + # "CHECK (some_boolean_function(a))" + # "CHECK (((a\n < 1)\n OR\n (a\n >= 5))\n)" + # "CHECK (a NOT NULL) NO INHERIT" + # "CHECK (a NOT NULL) NO INHERIT NOT VALID" + + m = re.match( + r"^CHECK *\((.+)\)( NO INHERIT)?( NOT VALID)?$", + src, + flags=re.DOTALL, + ) + if not m: + util.warn("Could not parse CHECK constraint text: %r" % src) + sqltext = "" + else: + sqltext = re.compile( + r"^[\s\n]*\((.+)\)[\s\n]*$", flags=re.DOTALL + ).sub(r"\1", m.group(1)) + entry = { + "name": check_name, + "sqltext": sqltext, + "comment": comment, + } + if m: + do = {} + if " NOT VALID" in m.groups(): + do["not_valid"] = True + if " NO INHERIT" in m.groups(): + do["no_inherit"] = True + if do: + entry["dialect_options"] = do + + check_constraints[(schema, table_name)].append(entry) + return check_constraints.items() + + def _pg_type_filter_schema(self, query, schema): + if schema is None: + query = query.where( + pg_catalog.pg_type_is_visible(pg_catalog.pg_type.c.oid), + # ignore pg_catalog schema + pg_catalog.pg_namespace.c.nspname != "pg_catalog", + ) + elif schema != "*": + query = query.where(pg_catalog.pg_namespace.c.nspname == schema) + return query + + @lru_cache() + def _enum_query(self, schema): + lbl_agg_sq = ( + select( + pg_catalog.pg_enum.c.enumtypid, + sql.func.array_agg( + aggregate_order_by( + # NOTE: cast since some postgresql derivatives may + # not support array_agg on the name type + pg_catalog.pg_enum.c.enumlabel.cast(TEXT), + pg_catalog.pg_enum.c.enumsortorder, + ) + ).label("labels"), + ) + .group_by(pg_catalog.pg_enum.c.enumtypid) + .subquery("lbl_agg") + ) + + query = ( + select( + pg_catalog.pg_type.c.typname.label("name"), + pg_catalog.pg_type_is_visible(pg_catalog.pg_type.c.oid).label( + "visible" + ), + pg_catalog.pg_namespace.c.nspname.label("schema"), + lbl_agg_sq.c.labels.label("labels"), + ) + .join( + pg_catalog.pg_namespace, + pg_catalog.pg_namespace.c.oid + == pg_catalog.pg_type.c.typnamespace, + ) + .outerjoin( + lbl_agg_sq, pg_catalog.pg_type.c.oid == lbl_agg_sq.c.enumtypid + ) + .where(pg_catalog.pg_type.c.typtype == "e") + .order_by( + pg_catalog.pg_namespace.c.nspname, pg_catalog.pg_type.c.typname + ) + ) + + return self._pg_type_filter_schema(query, schema) + + @reflection.cache + def _load_enums(self, connection, schema=None, **kw): + if not self.supports_native_enum: + return [] + + result = connection.execute(self._enum_query(schema)) + + enums = [] + for name, visible, schema, labels in result: + enums.append( + { + "name": name, + "schema": schema, + "visible": visible, + "labels": [] if labels is None else labels, + } + ) + return enums + + @lru_cache() + def _domain_query(self, schema): + con_sq = ( + select( + pg_catalog.pg_constraint.c.contypid, + sql.func.array_agg( + pg_catalog.pg_get_constraintdef( + pg_catalog.pg_constraint.c.oid, True + ) + ).label("condefs"), + sql.func.array_agg( + # NOTE: cast since some postgresql 
derivatives may + # not support array_agg on the name type + pg_catalog.pg_constraint.c.conname.cast(TEXT) + ).label("connames"), + ) + # The domain this constraint is on; zero if not a domain constraint + .where(pg_catalog.pg_constraint.c.contypid != 0) + .group_by(pg_catalog.pg_constraint.c.contypid) + .subquery("domain_constraints") + ) + + query = ( + select( + pg_catalog.pg_type.c.typname.label("name"), + pg_catalog.format_type( + pg_catalog.pg_type.c.typbasetype, + pg_catalog.pg_type.c.typtypmod, + ).label("attype"), + (~pg_catalog.pg_type.c.typnotnull).label("nullable"), + pg_catalog.pg_type.c.typdefault.label("default"), + pg_catalog.pg_type_is_visible(pg_catalog.pg_type.c.oid).label( + "visible" + ), + pg_catalog.pg_namespace.c.nspname.label("schema"), + con_sq.c.condefs, + con_sq.c.connames, + pg_catalog.pg_collation.c.collname, + ) + .join( + pg_catalog.pg_namespace, + pg_catalog.pg_namespace.c.oid + == pg_catalog.pg_type.c.typnamespace, + ) + .outerjoin( + pg_catalog.pg_collation, + pg_catalog.pg_type.c.typcollation + == pg_catalog.pg_collation.c.oid, + ) + .outerjoin( + con_sq, + pg_catalog.pg_type.c.oid == con_sq.c.contypid, + ) + .where(pg_catalog.pg_type.c.typtype == "d") + .order_by( + pg_catalog.pg_namespace.c.nspname, pg_catalog.pg_type.c.typname + ) + ) + return self._pg_type_filter_schema(query, schema) + + @reflection.cache + def _load_domains(self, connection, schema=None, **kw): + result = connection.execute(self._domain_query(schema)) + + domains: List[ReflectedDomain] = [] + for domain in result.mappings(): + # strip (30) from character varying(30) + attype = re.search(r"([^\(]+)", domain["attype"]).group(1) + constraints: List[ReflectedDomainConstraint] = [] + if domain["connames"]: + # When a domain has multiple CHECK constraints, they will + # be tested in alphabetical order by name. + sorted_constraints = sorted( + zip(domain["connames"], domain["condefs"]), + key=lambda t: t[0], + ) + for name, def_ in sorted_constraints: + # constraint is in the form "CHECK (expression)". + # remove "CHECK (" and the tailing ")". + check = def_[7:-1] + constraints.append({"name": name, "check": check}) + + domain_rec: ReflectedDomain = { + "name": domain["name"], + "schema": domain["schema"], + "visible": domain["visible"], + "type": attype, + "nullable": domain["nullable"], + "default": domain["default"], + "constraints": constraints, + "collation": domain["collname"], + } + domains.append(domain_rec) + + return domains + + def _set_backslash_escapes(self, connection): + # this method is provided as an override hook for descendant + # dialects (e.g. Redshift), so removing it may break them + std_string = connection.exec_driver_sql( + "show standard_conforming_strings" + ).scalar() + self._backslash_escapes = std_string == "off" diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/dml.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/dml.py new file mode 100644 index 00000000..4404ecd3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/dml.py @@ -0,0 +1,310 @@ +# dialects/postgresql/dml.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from __future__ import annotations + +from typing import Any +from typing import Optional + +from . 
import ext +from .._typing import _OnConflictConstraintT +from .._typing import _OnConflictIndexElementsT +from .._typing import _OnConflictIndexWhereT +from .._typing import _OnConflictSetT +from .._typing import _OnConflictWhereT +from ... import util +from ...sql import coercions +from ...sql import roles +from ...sql import schema +from ...sql._typing import _DMLTableArgument +from ...sql.base import _exclusive_against +from ...sql.base import _generative +from ...sql.base import ColumnCollection +from ...sql.base import ReadOnlyColumnCollection +from ...sql.dml import Insert as StandardInsert +from ...sql.elements import ClauseElement +from ...sql.elements import KeyedColumnElement +from ...sql.expression import alias +from ...util.typing import Self + + +__all__ = ("Insert", "insert") + + +def insert(table: _DMLTableArgument) -> Insert: + """Construct a PostgreSQL-specific variant :class:`_postgresql.Insert` + construct. + + .. container:: inherited_member + + The :func:`sqlalchemy.dialects.postgresql.insert` function creates + a :class:`sqlalchemy.dialects.postgresql.Insert`. This class is based + on the dialect-agnostic :class:`_sql.Insert` construct which may + be constructed using the :func:`_sql.insert` function in + SQLAlchemy Core. + + The :class:`_postgresql.Insert` construct includes additional methods + :meth:`_postgresql.Insert.on_conflict_do_update`, + :meth:`_postgresql.Insert.on_conflict_do_nothing`. + + """ + return Insert(table) + + +class Insert(StandardInsert): + """PostgreSQL-specific implementation of INSERT. + + Adds methods for PG-specific syntaxes such as ON CONFLICT. + + The :class:`_postgresql.Insert` object is created using the + :func:`sqlalchemy.dialects.postgresql.insert` function. + + """ + + stringify_dialect = "postgresql" + inherit_cache = False + + @util.memoized_property + def excluded( + self, + ) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]: + """Provide the ``excluded`` namespace for an ON CONFLICT statement + + PG's ON CONFLICT clause allows reference to the row that would + be inserted, known as ``excluded``. This attribute provides + all columns in this row to be referenceable. + + .. tip:: The :attr:`_postgresql.Insert.excluded` attribute is an + instance of :class:`_expression.ColumnCollection`, which provides + an interface the same as that of the :attr:`_schema.Table.c` + collection described at :ref:`metadata_tables_and_columns`. + With this collection, ordinary names are accessible like attributes + (e.g. ``stmt.excluded.some_column``), but special names and + dictionary method names should be accessed using indexed access, + such as ``stmt.excluded["column name"]`` or + ``stmt.excluded["values"]``. See the docstring for + :class:`_expression.ColumnCollection` for further examples. + + .. seealso:: + + :ref:`postgresql_insert_on_conflict` - example of how + to use :attr:`_expression.Insert.excluded` + + """ + return alias(self.table, name="excluded").columns + + _on_conflict_exclusive = _exclusive_against( + "_post_values_clause", + msgs={ + "_post_values_clause": "This Insert construct already has " + "an ON CONFLICT clause established" + }, + ) + + @_generative + @_on_conflict_exclusive + def on_conflict_do_update( + self, + constraint: _OnConflictConstraintT = None, + index_elements: _OnConflictIndexElementsT = None, + index_where: _OnConflictIndexWhereT = None, + set_: _OnConflictSetT = None, + where: _OnConflictWhereT = None, + ) -> Self: + r""" + Specifies a DO UPDATE SET action for ON CONFLICT clause. 
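+
+        E.g., a minimal sketch of an "upsert" (the table, column, and
+        connection names here are hypothetical)::
+
+            from sqlalchemy.dialects.postgresql import insert
+
+            stmt = insert(users_table).values(id=1, name="new name")
+            stmt = stmt.on_conflict_do_update(
+                index_elements=[users_table.c.id],
+                set_=dict(name=stmt.excluded.name),
+            )
+            conn.execute(stmt)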
+ + Either the ``constraint`` or ``index_elements`` argument is + required, but only one of these can be specified. + + :param constraint: + The name of a unique or exclusion constraint on the table, + or the constraint object itself if it has a .name attribute. + + :param index_elements: + A sequence consisting of string column names, :class:`_schema.Column` + objects, or other column expression objects that will be used + to infer a target index. + + :param index_where: + Additional WHERE criterion that can be used to infer a + conditional target index. + + :param set\_: + A dictionary or other mapping object + where the keys are either names of columns in the target table, + or :class:`_schema.Column` objects or other ORM-mapped columns + matching that of the target table, and expressions or literals + as values, specifying the ``SET`` actions to take. + + .. versionadded:: 1.4 The + :paramref:`_postgresql.Insert.on_conflict_do_update.set_` + parameter supports :class:`_schema.Column` objects from the target + :class:`_schema.Table` as keys. + + .. warning:: This dictionary does **not** take into account + Python-specified default UPDATE values or generation functions, + e.g. those specified using :paramref:`_schema.Column.onupdate`. + These values will not be exercised for an ON CONFLICT style of + UPDATE, unless they are manually specified in the + :paramref:`.Insert.on_conflict_do_update.set_` dictionary. + + :param where: + Optional argument. If present, can be a literal SQL + string or an acceptable expression for a ``WHERE`` clause + that restricts the rows affected by ``DO UPDATE SET``. Rows + not meeting the ``WHERE`` condition will not be updated + (effectively a ``DO NOTHING`` for those rows). + + + .. seealso:: + + :ref:`postgresql_insert_on_conflict` + + """ + self._post_values_clause = OnConflictDoUpdate( + constraint, index_elements, index_where, set_, where + ) + return self + + @_generative + @_on_conflict_exclusive + def on_conflict_do_nothing( + self, + constraint: _OnConflictConstraintT = None, + index_elements: _OnConflictIndexElementsT = None, + index_where: _OnConflictIndexWhereT = None, + ) -> Self: + """ + Specifies a DO NOTHING action for ON CONFLICT clause. + + The ``constraint`` and ``index_elements`` arguments + are optional, but only one of these can be specified. + + :param constraint: + The name of a unique or exclusion constraint on the table, + or the constraint object itself if it has a .name attribute. + + :param index_elements: + A sequence consisting of string column names, :class:`_schema.Column` + objects, or other column expression objects that will be used + to infer a target index. + + :param index_where: + Additional WHERE criterion that can be used to infer a + conditional target index. + + .. 
seealso:: + + :ref:`postgresql_insert_on_conflict` + + """ + self._post_values_clause = OnConflictDoNothing( + constraint, index_elements, index_where + ) + return self + + +class OnConflictClause(ClauseElement): + stringify_dialect = "postgresql" + + constraint_target: Optional[str] + inferred_target_elements: _OnConflictIndexElementsT + inferred_target_whereclause: _OnConflictIndexWhereT + + def __init__( + self, + constraint: _OnConflictConstraintT = None, + index_elements: _OnConflictIndexElementsT = None, + index_where: _OnConflictIndexWhereT = None, + ): + if constraint is not None: + if not isinstance(constraint, str) and isinstance( + constraint, + (schema.Constraint, ext.ExcludeConstraint), + ): + constraint = getattr(constraint, "name") or constraint + + if constraint is not None: + if index_elements is not None: + raise ValueError( + "'constraint' and 'index_elements' are mutually exclusive" + ) + + if isinstance(constraint, str): + self.constraint_target = constraint + self.inferred_target_elements = None + self.inferred_target_whereclause = None + elif isinstance(constraint, schema.Index): + index_elements = constraint.expressions + index_where = constraint.dialect_options["postgresql"].get( + "where" + ) + elif isinstance(constraint, ext.ExcludeConstraint): + index_elements = constraint.columns + index_where = constraint.where + else: + index_elements = constraint.columns + index_where = constraint.dialect_options["postgresql"].get( + "where" + ) + + if index_elements is not None: + self.constraint_target = None + self.inferred_target_elements = index_elements + self.inferred_target_whereclause = index_where + elif constraint is None: + self.constraint_target = self.inferred_target_elements = ( + self.inferred_target_whereclause + ) = None + + +class OnConflictDoNothing(OnConflictClause): + __visit_name__ = "on_conflict_do_nothing" + + +class OnConflictDoUpdate(OnConflictClause): + __visit_name__ = "on_conflict_do_update" + + def __init__( + self, + constraint: _OnConflictConstraintT = None, + index_elements: _OnConflictIndexElementsT = None, + index_where: _OnConflictIndexWhereT = None, + set_: _OnConflictSetT = None, + where: _OnConflictWhereT = None, + ): + super().__init__( + constraint=constraint, + index_elements=index_elements, + index_where=index_where, + ) + + if ( + self.inferred_target_elements is None + and self.constraint_target is None + ): + raise ValueError( + "Either constraint or index_elements, " + "but not both, must be specified unless DO NOTHING" + ) + + if isinstance(set_, dict): + if not set_: + raise ValueError("set parameter dictionary must not be empty") + elif isinstance(set_, ColumnCollection): + set_ = dict(set_) + else: + raise ValueError( + "set parameter must be a non-empty dictionary " + "or a ColumnCollection such as the `.c.` collection " + "of a Table object" + ) + self.update_values_to_set = [ + (coercions.expect(roles.DMLColumnRole, key), value) + for key, value in set_.items() + ] + self.update_whereclause = where diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/ext.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/ext.py new file mode 100644 index 00000000..7fc08953 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/ext.py @@ -0,0 +1,496 @@ +# dialects/postgresql/ext.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: 
https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors +from __future__ import annotations + +from typing import Any +from typing import TYPE_CHECKING +from typing import TypeVar + +from . import types +from .array import ARRAY +from ...sql import coercions +from ...sql import elements +from ...sql import expression +from ...sql import functions +from ...sql import roles +from ...sql import schema +from ...sql.schema import ColumnCollectionConstraint +from ...sql.sqltypes import TEXT +from ...sql.visitors import InternalTraversal + +_T = TypeVar("_T", bound=Any) + +if TYPE_CHECKING: + from ...sql.visitors import _TraverseInternalsType + + +class aggregate_order_by(expression.ColumnElement): + """Represent a PostgreSQL aggregate order by expression. + + E.g.:: + + from sqlalchemy.dialects.postgresql import aggregate_order_by + expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc())) + stmt = select(expr) + + would represent the expression:: + + SELECT array_agg(a ORDER BY b DESC) FROM table; + + Similarly:: + + expr = func.string_agg( + table.c.a, + aggregate_order_by(literal_column("','"), table.c.a) + ) + stmt = select(expr) + + Would represent:: + + SELECT string_agg(a, ',' ORDER BY a) FROM table; + + .. versionchanged:: 1.2.13 - the ORDER BY argument may be multiple terms + + .. seealso:: + + :class:`_functions.array_agg` + + """ + + __visit_name__ = "aggregate_order_by" + + stringify_dialect = "postgresql" + _traverse_internals: _TraverseInternalsType = [ + ("target", InternalTraversal.dp_clauseelement), + ("type", InternalTraversal.dp_type), + ("order_by", InternalTraversal.dp_clauseelement), + ] + + def __init__(self, target, *order_by): + self.target = coercions.expect(roles.ExpressionElementRole, target) + self.type = self.target.type + + _lob = len(order_by) + if _lob == 0: + raise TypeError("at least one ORDER BY element is required") + elif _lob == 1: + self.order_by = coercions.expect( + roles.ExpressionElementRole, order_by[0] + ) + else: + self.order_by = elements.ClauseList( + *order_by, _literal_as_text_role=roles.ExpressionElementRole + ) + + def self_group(self, against=None): + return self + + def get_children(self, **kwargs): + return self.target, self.order_by + + def _copy_internals(self, clone=elements._clone, **kw): + self.target = clone(self.target, **kw) + self.order_by = clone(self.order_by, **kw) + + @property + def _from_objects(self): + return self.target._from_objects + self.order_by._from_objects + + +class ExcludeConstraint(ColumnCollectionConstraint): + """A table-level EXCLUDE constraint. + + Defines an EXCLUDE constraint as described in the `PostgreSQL + documentation`__. + + __ https://www.postgresql.org/docs/current/static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE + + """ # noqa + + __visit_name__ = "exclude_constraint" + + where = None + inherit_cache = False + + create_drop_stringify_dialect = "postgresql" + + @elements._document_text_coercion( + "where", + ":class:`.ExcludeConstraint`", + ":paramref:`.ExcludeConstraint.where`", + ) + def __init__(self, *elements, **kw): + r""" + Create an :class:`.ExcludeConstraint` object. 
+ + E.g.:: + + const = ExcludeConstraint( + (Column('period'), '&&'), + (Column('group'), '='), + where=(Column('group') != 'some group'), + ops={'group': 'my_operator_class'} + ) + + The constraint is normally embedded into the :class:`_schema.Table` + construct + directly, or added later using :meth:`.append_constraint`:: + + some_table = Table( + 'some_table', metadata, + Column('id', Integer, primary_key=True), + Column('period', TSRANGE()), + Column('group', String) + ) + + some_table.append_constraint( + ExcludeConstraint( + (some_table.c.period, '&&'), + (some_table.c.group, '='), + where=some_table.c.group != 'some group', + name='some_table_excl_const', + ops={'group': 'my_operator_class'} + ) + ) + + The exclude constraint defined in this example requires the + ``btree_gist`` extension, that can be created using the + command ``CREATE EXTENSION btree_gist;``. + + :param \*elements: + + A sequence of two tuples of the form ``(column, operator)`` where + "column" is either a :class:`_schema.Column` object, or a SQL + expression element (e.g. ``func.int8range(table.from, table.to)``) + or the name of a column as string, and "operator" is a string + containing the operator to use (e.g. `"&&"` or `"="`). + + In order to specify a column name when a :class:`_schema.Column` + object is not available, while ensuring + that any necessary quoting rules take effect, an ad-hoc + :class:`_schema.Column` or :func:`_expression.column` + object should be used. + The ``column`` may also be a string SQL expression when + passed as :func:`_expression.literal_column` or + :func:`_expression.text` + + :param name: + Optional, the in-database name of this constraint. + + :param deferrable: + Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when + issuing DDL for this constraint. + + :param initially: + Optional string. If set, emit INITIALLY when issuing DDL + for this constraint. + + :param using: + Optional string. If set, emit USING when issuing DDL + for this constraint. Defaults to 'gist'. + + :param where: + Optional SQL expression construct or literal SQL string. + If set, emit WHERE when issuing DDL + for this constraint. + + :param ops: + Optional dictionary. Used to define operator classes for the + elements; works the same way as that of the + :ref:`postgresql_ops ` + parameter specified to the :class:`_schema.Index` construct. + + .. versionadded:: 1.3.21 + + .. seealso:: + + :ref:`postgresql_operator_classes` - general description of how + PostgreSQL operator classes are specified. 
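+
+        For the ``some_table`` example above, the emitted DDL would look
+        roughly like the following (a sketch; exact quoting and formatting
+        may differ)::
+
+            ALTER TABLE some_table ADD CONSTRAINT some_table_excl_const
+            EXCLUDE USING gist (period WITH &&, "group" WITH =)
+            WHERE ("group" != 'some group')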
+ + """ + columns = [] + render_exprs = [] + self.operators = {} + + expressions, operators = zip(*elements) + + for (expr, column, strname, add_element), operator in zip( + coercions.expect_col_expression_collection( + roles.DDLConstraintColumnRole, expressions + ), + operators, + ): + if add_element is not None: + columns.append(add_element) + + name = column.name if column is not None else strname + + if name is not None: + # backwards compat + self.operators[name] = operator + + render_exprs.append((expr, name, operator)) + + self._render_exprs = render_exprs + + ColumnCollectionConstraint.__init__( + self, + *columns, + name=kw.get("name"), + deferrable=kw.get("deferrable"), + initially=kw.get("initially"), + ) + self.using = kw.get("using", "gist") + where = kw.get("where") + if where is not None: + self.where = coercions.expect(roles.StatementOptionRole, where) + + self.ops = kw.get("ops", {}) + + def _set_parent(self, table, **kw): + super()._set_parent(table) + + self._render_exprs = [ + ( + expr if not isinstance(expr, str) else table.c[expr], + name, + operator, + ) + for expr, name, operator in (self._render_exprs) + ] + + def _copy(self, target_table=None, **kw): + elements = [ + ( + schema._copy_expression(expr, self.parent, target_table), + operator, + ) + for expr, _, operator in self._render_exprs + ] + c = self.__class__( + *elements, + name=self.name, + deferrable=self.deferrable, + initially=self.initially, + where=self.where, + using=self.using, + ) + c.dispatch._update(self.dispatch) + return c + + +def array_agg(*arg, **kw): + """PostgreSQL-specific form of :class:`_functions.array_agg`, ensures + return type is :class:`_postgresql.ARRAY` and not + the plain :class:`_types.ARRAY`, unless an explicit ``type_`` + is passed. + + """ + kw["_default_array_type"] = ARRAY + return functions.func.array_agg(*arg, **kw) + + +class _regconfig_fn(functions.GenericFunction[_T]): + inherit_cache = True + + def __init__(self, *args, **kwargs): + args = list(args) + if len(args) > 1: + initial_arg = coercions.expect( + roles.ExpressionElementRole, + args.pop(0), + name=getattr(self, "name", None), + apply_propagate_attrs=self, + type_=types.REGCONFIG, + ) + initial_arg = [initial_arg] + else: + initial_arg = [] + + addtl_args = [ + coercions.expect( + roles.ExpressionElementRole, + c, + name=getattr(self, "name", None), + apply_propagate_attrs=self, + ) + for c in args + ] + super().__init__(*(initial_arg + addtl_args), **kwargs) + + +class to_tsvector(_regconfig_fn): + """The PostgreSQL ``to_tsvector`` SQL function. + + This function applies automatic casting of the REGCONFIG argument + to use the :class:`_postgresql.REGCONFIG` datatype automatically, + and applies a return type of :class:`_postgresql.TSVECTOR`. + + Assuming the PostgreSQL dialect has been imported, either by invoking + ``from sqlalchemy.dialects import postgresql``, or by creating a PostgreSQL + engine using ``create_engine("postgresql...")``, + :class:`_postgresql.to_tsvector` will be used automatically when invoking + ``sqlalchemy.func.to_tsvector()``, ensuring the correct argument and return + type handlers are used at compile and execution time. + + .. versionadded:: 2.0.0rc1 + + """ + + inherit_cache = True + type = types.TSVECTOR + + +class to_tsquery(_regconfig_fn): + """The PostgreSQL ``to_tsquery`` SQL function. 
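+
+    E.g., a short sketch (the ``"english"`` regconfig and the query
+    string are illustrative)::
+
+        from sqlalchemy import func, select
+
+        stmt = select(func.to_tsquery("english", "cat & rat"))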
+ + This function applies automatic casting of the REGCONFIG argument + to use the :class:`_postgresql.REGCONFIG` datatype automatically, + and applies a return type of :class:`_postgresql.TSQUERY`. + + Assuming the PostgreSQL dialect has been imported, either by invoking + ``from sqlalchemy.dialects import postgresql``, or by creating a PostgreSQL + engine using ``create_engine("postgresql...")``, + :class:`_postgresql.to_tsquery` will be used automatically when invoking + ``sqlalchemy.func.to_tsquery()``, ensuring the correct argument and return + type handlers are used at compile and execution time. + + .. versionadded:: 2.0.0rc1 + + """ + + inherit_cache = True + type = types.TSQUERY + + +class plainto_tsquery(_regconfig_fn): + """The PostgreSQL ``plainto_tsquery`` SQL function. + + This function applies automatic casting of the REGCONFIG argument + to use the :class:`_postgresql.REGCONFIG` datatype automatically, + and applies a return type of :class:`_postgresql.TSQUERY`. + + Assuming the PostgreSQL dialect has been imported, either by invoking + ``from sqlalchemy.dialects import postgresql``, or by creating a PostgreSQL + engine using ``create_engine("postgresql...")``, + :class:`_postgresql.plainto_tsquery` will be used automatically when + invoking ``sqlalchemy.func.plainto_tsquery()``, ensuring the correct + argument and return type handlers are used at compile and execution time. + + .. versionadded:: 2.0.0rc1 + + """ + + inherit_cache = True + type = types.TSQUERY + + +class phraseto_tsquery(_regconfig_fn): + """The PostgreSQL ``phraseto_tsquery`` SQL function. + + This function applies automatic casting of the REGCONFIG argument + to use the :class:`_postgresql.REGCONFIG` datatype automatically, + and applies a return type of :class:`_postgresql.TSQUERY`. + + Assuming the PostgreSQL dialect has been imported, either by invoking + ``from sqlalchemy.dialects import postgresql``, or by creating a PostgreSQL + engine using ``create_engine("postgresql...")``, + :class:`_postgresql.phraseto_tsquery` will be used automatically when + invoking ``sqlalchemy.func.phraseto_tsquery()``, ensuring the correct + argument and return type handlers are used at compile and execution time. + + .. versionadded:: 2.0.0rc1 + + """ + + inherit_cache = True + type = types.TSQUERY + + +class websearch_to_tsquery(_regconfig_fn): + """The PostgreSQL ``websearch_to_tsquery`` SQL function. + + This function applies automatic casting of the REGCONFIG argument + to use the :class:`_postgresql.REGCONFIG` datatype automatically, + and applies a return type of :class:`_postgresql.TSQUERY`. + + Assuming the PostgreSQL dialect has been imported, either by invoking + ``from sqlalchemy.dialects import postgresql``, or by creating a PostgreSQL + engine using ``create_engine("postgresql...")``, + :class:`_postgresql.websearch_to_tsquery` will be used automatically when + invoking ``sqlalchemy.func.websearch_to_tsquery()``, ensuring the correct + argument and return type handlers are used at compile and execution time. + + .. versionadded:: 2.0.0rc1 + + """ + + inherit_cache = True + type = types.TSQUERY + + +class ts_headline(_regconfig_fn): + """The PostgreSQL ``ts_headline`` SQL function. + + This function applies automatic casting of the REGCONFIG argument + to use the :class:`_postgresql.REGCONFIG` datatype automatically, + and applies a return type of :class:`_types.TEXT`. 
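+
+    E.g., a short sketch (``articles.c.document`` is a hypothetical
+    text column)::
+
+        from sqlalchemy import func, select
+
+        stmt = select(
+            func.ts_headline(
+                "english",
+                articles.c.document,
+                func.to_tsquery("english", "cat"),
+            )
+        )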
+ + Assuming the PostgreSQL dialect has been imported, either by invoking + ``from sqlalchemy.dialects import postgresql``, or by creating a PostgreSQL + engine using ``create_engine("postgresql...")``, + :class:`_postgresql.ts_headline` will be used automatically when invoking + ``sqlalchemy.func.ts_headline()``, ensuring the correct argument and return + type handlers are used at compile and execution time. + + .. versionadded:: 2.0.0rc1 + + """ + + inherit_cache = True + type = TEXT + + def __init__(self, *args, **kwargs): + args = list(args) + + # parse types according to + # https://www.postgresql.org/docs/current/textsearch-controls.html#TEXTSEARCH-HEADLINE + if len(args) < 2: + # invalid args; don't do anything + has_regconfig = False + elif ( + isinstance(args[1], elements.ColumnElement) + and args[1].type._type_affinity is types.TSQUERY + ): + # tsquery is second argument, no regconfig argument + has_regconfig = False + else: + has_regconfig = True + + if has_regconfig: + initial_arg = coercions.expect( + roles.ExpressionElementRole, + args.pop(0), + apply_propagate_attrs=self, + name=getattr(self, "name", None), + type_=types.REGCONFIG, + ) + initial_arg = [initial_arg] + else: + initial_arg = [] + + addtl_args = [ + coercions.expect( + roles.ExpressionElementRole, + c, + name=getattr(self, "name", None), + apply_propagate_attrs=self, + ) + for c in args + ] + super().__init__(*(initial_arg + addtl_args), **kwargs) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/hstore.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/hstore.py new file mode 100644 index 00000000..04c8cf16 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/hstore.py @@ -0,0 +1,397 @@ +# dialects/postgresql/hstore.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +import re + +from .array import ARRAY +from .operators import CONTAINED_BY +from .operators import CONTAINS +from .operators import GETITEM +from .operators import HAS_ALL +from .operators import HAS_ANY +from .operators import HAS_KEY +from ... import types as sqltypes +from ...sql import functions as sqlfunc + + +__all__ = ("HSTORE", "hstore") + + +class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine): + """Represent the PostgreSQL HSTORE type. + + The :class:`.HSTORE` type stores dictionaries containing strings, e.g.:: + + data_table = Table('data_table', metadata, + Column('id', Integer, primary_key=True), + Column('data', HSTORE) + ) + + with engine.connect() as conn: + conn.execute( + data_table.insert(), + data = {"key1": "value1", "key2": "value2"} + ) + + :class:`.HSTORE` provides for a wide range of operations, including: + + * Index operations:: + + data_table.c.data['some key'] == 'some value' + + * Containment operations:: + + data_table.c.data.has_key('some key') + + data_table.c.data.has_all(['one', 'two', 'three']) + + * Concatenation:: + + data_table.c.data + {"k1": "v1"} + + For a full list of special methods see + :class:`.HSTORE.comparator_factory`. + + .. container:: topic + + **Detecting Changes in HSTORE columns when using the ORM** + + For usage with the SQLAlchemy ORM, it may be desirable to combine the + usage of :class:`.HSTORE` with :class:`.MutableDict` dictionary now + part of the :mod:`sqlalchemy.ext.mutable` extension. 
This extension + will allow "in-place" changes to the dictionary, e.g. addition of new + keys or replacement/removal of existing keys to/from the current + dictionary, to produce events which will be detected by the unit of + work:: + + from sqlalchemy.ext.mutable import MutableDict + + class MyClass(Base): + __tablename__ = 'data_table' + + id = Column(Integer, primary_key=True) + data = Column(MutableDict.as_mutable(HSTORE)) + + my_object = session.query(MyClass).one() + + # in-place mutation, requires Mutable extension + # in order for the ORM to detect + my_object.data['some_key'] = 'some value' + + session.commit() + + When the :mod:`sqlalchemy.ext.mutable` extension is not used, the ORM + will not be alerted to any changes to the contents of an existing + dictionary, unless that dictionary value is re-assigned to the + HSTORE-attribute itself, thus generating a change event. + + .. seealso:: + + :class:`.hstore` - render the PostgreSQL ``hstore()`` function. + + + """ + + __visit_name__ = "HSTORE" + hashable = False + text_type = sqltypes.Text() + + def __init__(self, text_type=None): + """Construct a new :class:`.HSTORE`. + + :param text_type: the type that should be used for indexed values. + Defaults to :class:`_types.Text`. + + """ + if text_type is not None: + self.text_type = text_type + + class Comparator( + sqltypes.Indexable.Comparator, sqltypes.Concatenable.Comparator + ): + """Define comparison operations for :class:`.HSTORE`.""" + + def has_key(self, other): + """Boolean expression. Test for presence of a key. Note that the + key may be a SQLA expression. + """ + return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean) + + def has_all(self, other): + """Boolean expression. Test for presence of all keys in jsonb""" + return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean) + + def has_any(self, other): + """Boolean expression. Test for presence of any key in jsonb""" + return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean) + + def contains(self, other, **kwargs): + """Boolean expression. Test if keys (or array) are a superset + of/contained the keys of the argument jsonb expression. + + kwargs may be ignored by this operator but are required for API + conformance. + """ + return self.operate(CONTAINS, other, result_type=sqltypes.Boolean) + + def contained_by(self, other): + """Boolean expression. Test if keys are a proper subset of the + keys of the argument jsonb expression. + """ + return self.operate( + CONTAINED_BY, other, result_type=sqltypes.Boolean + ) + + def _setup_getitem(self, index): + return GETITEM, index, self.type.text_type + + def defined(self, key): + """Boolean expression. Test for presence of a non-NULL value for + the key. Note that the key may be a SQLA expression. + """ + return _HStoreDefinedFunction(self.expr, key) + + def delete(self, key): + """HStore expression. Returns the contents of this hstore with the + given key deleted. Note that the key may be a SQLA expression. + """ + if isinstance(key, dict): + key = _serialize_hstore(key) + return _HStoreDeleteFunction(self.expr, key) + + def slice(self, array): + """HStore expression. Returns a subset of an hstore defined by + array of keys. + """ + return _HStoreSliceFunction(self.expr, array) + + def keys(self): + """Text array expression. Returns array of keys.""" + return _HStoreKeysFunction(self.expr) + + def vals(self): + """Text array expression. Returns array of values.""" + return _HStoreValsFunction(self.expr) + + def array(self): + """Text array expression. 
Returns array of alternating keys and + values. + """ + return _HStoreArrayFunction(self.expr) + + def matrix(self): + """Text array expression. Returns array of [key, value] pairs.""" + return _HStoreMatrixFunction(self.expr) + + comparator_factory = Comparator + + def bind_processor(self, dialect): + def process(value): + if isinstance(value, dict): + return _serialize_hstore(value) + else: + return value + + return process + + def result_processor(self, dialect, coltype): + def process(value): + if value is not None: + return _parse_hstore(value) + else: + return value + + return process + + +class hstore(sqlfunc.GenericFunction): + """Construct an hstore value within a SQL expression using the + PostgreSQL ``hstore()`` function. + + The :class:`.hstore` function accepts one or two arguments as described + in the PostgreSQL documentation. + + E.g.:: + + from sqlalchemy.dialects.postgresql import array, hstore + + select(hstore('key1', 'value1')) + + select( + hstore( + array(['key1', 'key2', 'key3']), + array(['value1', 'value2', 'value3']) + ) + ) + + .. seealso:: + + :class:`.HSTORE` - the PostgreSQL ``HSTORE`` datatype. + + """ + + type = HSTORE + name = "hstore" + inherit_cache = True + + +class _HStoreDefinedFunction(sqlfunc.GenericFunction): + type = sqltypes.Boolean + name = "defined" + inherit_cache = True + + +class _HStoreDeleteFunction(sqlfunc.GenericFunction): + type = HSTORE + name = "delete" + inherit_cache = True + + +class _HStoreSliceFunction(sqlfunc.GenericFunction): + type = HSTORE + name = "slice" + inherit_cache = True + + +class _HStoreKeysFunction(sqlfunc.GenericFunction): + type = ARRAY(sqltypes.Text) + name = "akeys" + inherit_cache = True + + +class _HStoreValsFunction(sqlfunc.GenericFunction): + type = ARRAY(sqltypes.Text) + name = "avals" + inherit_cache = True + + +class _HStoreArrayFunction(sqlfunc.GenericFunction): + type = ARRAY(sqltypes.Text) + name = "hstore_to_array" + inherit_cache = True + + +class _HStoreMatrixFunction(sqlfunc.GenericFunction): + type = ARRAY(sqltypes.Text) + name = "hstore_to_matrix" + inherit_cache = True + + +# +# parsing. note that none of this is used with the psycopg2 backend, +# which provides its own native extensions. +# + +# My best guess at the parsing rules of hstore literals, since no formal +# grammar is given. This is mostly reverse engineered from PG's input parser +# behavior. +HSTORE_PAIR_RE = re.compile( + r""" +( + "(?P (\\ . | [^"])* )" # Quoted key +) +[ ]* => [ ]* # Pair operator, optional adjoining whitespace +( + (?P NULL ) # NULL value + | "(?P (\\ . | [^"])* )" # Quoted value +) +""", + re.VERBOSE, +) + +HSTORE_DELIMITER_RE = re.compile( + r""" +[ ]* , [ ]* +""", + re.VERBOSE, +) + + +def _parse_error(hstore_str, pos): + """format an unmarshalling error.""" + + ctx = 20 + hslen = len(hstore_str) + + parsed_tail = hstore_str[max(pos - ctx - 1, 0) : min(pos, hslen)] + residual = hstore_str[min(pos, hslen) : min(pos + ctx + 1, hslen)] + + if len(parsed_tail) > ctx: + parsed_tail = "[...]" + parsed_tail[1:] + if len(residual) > ctx: + residual = residual[:-1] + "[...]" + + return "After %r, could not parse residual at position %d: %r" % ( + parsed_tail, + pos, + residual, + ) + + +def _parse_hstore(hstore_str): + """Parse an hstore from its literal string representation. + + Attempts to approximate PG's hstore input parsing rules as closely as + possible. 
Although currently this is not strictly necessary, since the + current implementation of hstore's output syntax is stricter than what it + accepts as input, the documentation makes no guarantees that will always + be the case. + + + + """ + result = {} + pos = 0 + pair_match = HSTORE_PAIR_RE.match(hstore_str) + + while pair_match is not None: + key = pair_match.group("key").replace(r"\"", '"').replace("\\\\", "\\") + if pair_match.group("value_null"): + value = None + else: + value = ( + pair_match.group("value") + .replace(r"\"", '"') + .replace("\\\\", "\\") + ) + result[key] = value + + pos += pair_match.end() + + delim_match = HSTORE_DELIMITER_RE.match(hstore_str[pos:]) + if delim_match is not None: + pos += delim_match.end() + + pair_match = HSTORE_PAIR_RE.match(hstore_str[pos:]) + + if pos != len(hstore_str): + raise ValueError(_parse_error(hstore_str, pos)) + + return result + + +def _serialize_hstore(val): + """Serialize a dictionary into an hstore literal. Keys and values must + both be strings (except None for values). + + """ + + def esc(s, position): + if position == "value" and s is None: + return "NULL" + elif isinstance(s, str): + return '"%s"' % s.replace("\\", "\\\\").replace('"', r"\"") + else: + raise ValueError( + "%r in %s position is not a string." % (s, position) + ) + + return ", ".join( + "%s=>%s" % (esc(k, "key"), esc(v, "value")) for k, v in val.items() + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/json.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/json.py new file mode 100644 index 00000000..1cdafbd0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/json.py @@ -0,0 +1,332 @@ +# dialects/postgresql/json.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +from .array import ARRAY +from .array import array as _pg_array +from .operators import ASTEXT +from .operators import CONTAINED_BY +from .operators import CONTAINS +from .operators import DELETE_PATH +from .operators import HAS_ALL +from .operators import HAS_ANY +from .operators import HAS_KEY +from .operators import JSONPATH_ASTEXT +from .operators import PATH_EXISTS +from .operators import PATH_MATCH +from ... import types as sqltypes +from ...sql import cast + +__all__ = ("JSON", "JSONB") + + +class JSONPathType(sqltypes.JSON.JSONPathType): + def _processor(self, dialect, super_proc): + def process(value): + if isinstance(value, str): + # If it's already a string assume that it's in json path + # format. This allows using cast with json paths literals + return value + elif value: + # If it's already a string assume that it's in json path + # format. This allows using cast with json paths literals + value = "{%s}" % (", ".join(map(str, value))) + else: + value = "{}" + if super_proc: + value = super_proc(value) + return value + + return process + + def bind_processor(self, dialect): + return self._processor(dialect, self.string_bind_processor(dialect)) + + def literal_processor(self, dialect): + return self._processor(dialect, self.string_literal_processor(dialect)) + + +class JSONPATH(JSONPathType): + """JSON Path Type. 
+
+    This is usually required to cast literal values to json path when using
+    json search like function, such as ``jsonb_path_query_array`` or
+    ``jsonb_path_exists``::
+
+        stmt = sa.select(
+            sa.func.jsonb_path_query_array(
+                table.c.jsonb_col, cast("$.address.id", JSONPATH)
+            )
+        )
+
+    """
+
+    __visit_name__ = "JSONPATH"
+
+
+class JSON(sqltypes.JSON):
+    """Represent the PostgreSQL JSON type.
+
+    :class:`_postgresql.JSON` is used automatically whenever the base
+    :class:`_types.JSON` datatype is used against a PostgreSQL backend,
+    however base :class:`_types.JSON` datatype does not provide Python
+    accessors for PostgreSQL-specific comparison methods such as
+    :meth:`_postgresql.JSON.Comparator.astext`; additionally, to use
+    PostgreSQL ``JSONB``, the :class:`_postgresql.JSONB` datatype should
+    be used explicitly.
+
+    .. seealso::
+
+        :class:`_types.JSON` - main documentation for the generic
+        cross-platform JSON datatype.
+
+    The operators provided by the PostgreSQL version of :class:`_types.JSON`
+    include:
+
+    * Index operations (the ``->`` operator)::
+
+        data_table.c.data['some key']
+
+        data_table.c.data[5]
+
+    * Index operations returning text (the ``->>`` operator)::
+
+        data_table.c.data['some key'].astext == 'some value'
+
+      Note that equivalent functionality is available via the
+      :attr:`.JSON.Comparator.as_string` accessor.
+
+    * Index operations with CAST
+      (equivalent to ``CAST(col ->> ['some key'] AS <type>)``)::
+
+        data_table.c.data['some key'].astext.cast(Integer) == 5
+
+      Note that equivalent functionality is available via the
+      :attr:`.JSON.Comparator.as_integer` and similar accessors.
+
+    * Path index operations (the ``#>`` operator)::
+
+        data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')]
+
+    * Path index operations returning text (the ``#>>`` operator)::
+
+        data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')].astext == 'some value'
+
+    Index operations return an expression object whose type defaults to
+    :class:`_types.JSON`, so that further JSON-oriented instructions
+    may be called upon the result type.
+
+    Custom serializers and deserializers are specified at the dialect level,
+    that is using :func:`_sa.create_engine`.  The reason for this is that when
+    using psycopg2, the DBAPI only allows serializers at the per-cursor
+    or per-connection level.  E.g.::
+
+        engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test",
+                               json_serializer=my_serialize_fn,
+                               json_deserializer=my_deserialize_fn
+                               )
+
+    When using the psycopg2 dialect, the json_deserializer is registered
+    against the database using ``psycopg2.extras.register_default_json``.
+
+    .. seealso::
+
+        :class:`_types.JSON` - Core level JSON type
+
+        :class:`_postgresql.JSONB`
+
+    """  # noqa
+
+    astext_type = sqltypes.Text()
+
+    def __init__(self, none_as_null=False, astext_type=None):
+        """Construct a :class:`_types.JSON` type.
+
+        :param none_as_null: if True, persist the value ``None`` as a
+         SQL NULL value, not the JSON encoding of ``null``.  Note that
+         when this flag is False, the :func:`.null` construct can still
+         be used to persist a NULL value::
+
+             from sqlalchemy import null
+             conn.execute(table.insert(), {"data": null()})
+
+         .. seealso::
+
+             :attr:`_types.JSON.NULL`
+
+        :param astext_type: the type to use for the
+         :attr:`.JSON.Comparator.astext`
+         accessor on indexed attributes.  Defaults to :class:`_types.Text`.
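+
+         For example, a hypothetical override that returns indexed values
+         as ``VARCHAR`` rather than ``TEXT``::
+
+             from sqlalchemy import String
+
+             Column("data", JSON(astext_type=String()))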
+ + """ + super().__init__(none_as_null=none_as_null) + if astext_type is not None: + self.astext_type = astext_type + + class Comparator(sqltypes.JSON.Comparator): + """Define comparison operations for :class:`_types.JSON`.""" + + @property + def astext(self): + """On an indexed expression, use the "astext" (e.g. "->>") + conversion when rendered in SQL. + + E.g.:: + + select(data_table.c.data['some key'].astext) + + .. seealso:: + + :meth:`_expression.ColumnElement.cast` + + """ + if isinstance(self.expr.right.type, sqltypes.JSON.JSONPathType): + return self.expr.left.operate( + JSONPATH_ASTEXT, + self.expr.right, + result_type=self.type.astext_type, + ) + else: + return self.expr.left.operate( + ASTEXT, self.expr.right, result_type=self.type.astext_type + ) + + comparator_factory = Comparator + + +class JSONB(JSON): + """Represent the PostgreSQL JSONB type. + + The :class:`_postgresql.JSONB` type stores arbitrary JSONB format data, + e.g.:: + + data_table = Table('data_table', metadata, + Column('id', Integer, primary_key=True), + Column('data', JSONB) + ) + + with engine.connect() as conn: + conn.execute( + data_table.insert(), + data = {"key1": "value1", "key2": "value2"} + ) + + The :class:`_postgresql.JSONB` type includes all operations provided by + :class:`_types.JSON`, including the same behaviors for indexing + operations. + It also adds additional operators specific to JSONB, including + :meth:`.JSONB.Comparator.has_key`, :meth:`.JSONB.Comparator.has_all`, + :meth:`.JSONB.Comparator.has_any`, :meth:`.JSONB.Comparator.contains`, + :meth:`.JSONB.Comparator.contained_by`, + :meth:`.JSONB.Comparator.delete_path`, + :meth:`.JSONB.Comparator.path_exists` and + :meth:`.JSONB.Comparator.path_match`. + + Like the :class:`_types.JSON` type, the :class:`_postgresql.JSONB` + type does not detect + in-place changes when used with the ORM, unless the + :mod:`sqlalchemy.ext.mutable` extension is used. + + Custom serializers and deserializers + are shared with the :class:`_types.JSON` class, + using the ``json_serializer`` + and ``json_deserializer`` keyword arguments. These must be specified + at the dialect level using :func:`_sa.create_engine`. When using + psycopg2, the serializers are associated with the jsonb type using + ``psycopg2.extras.register_default_jsonb`` on a per-connection basis, + in the same way that ``psycopg2.extras.register_default_json`` is used + to register these handlers with the json type. + + .. seealso:: + + :class:`_types.JSON` + + """ + + __visit_name__ = "JSONB" + + class Comparator(JSON.Comparator): + """Define comparison operations for :class:`_types.JSON`.""" + + def has_key(self, other): + """Boolean expression. Test for presence of a key (equivalent of + the ``?`` operator). Note that the key may be a SQLA expression. + """ + return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean) + + def has_all(self, other): + """Boolean expression. Test for presence of all keys in jsonb + (equivalent of the ``?&`` operator) + """ + return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean) + + def has_any(self, other): + """Boolean expression. Test for presence of any key in jsonb + (equivalent of the ``?|`` operator) + """ + return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean) + + def contains(self, other, **kwargs): + """Boolean expression. Test if keys (or array) are a superset + of/contained the keys of the argument jsonb expression + (equivalent of the ``@>`` operator). 
+ + kwargs may be ignored by this operator but are required for API + conformance. + """ + return self.operate(CONTAINS, other, result_type=sqltypes.Boolean) + + def contained_by(self, other): + """Boolean expression. Test if keys are a proper subset of the + keys of the argument jsonb expression + (equivalent of the ``<@`` operator). + """ + return self.operate( + CONTAINED_BY, other, result_type=sqltypes.Boolean + ) + + def delete_path(self, array): + """JSONB expression. Deletes field or array element specified in + the argument array (equivalent of the ``#-`` operator). + + The input may be a list of strings that will be coerced to an + ``ARRAY`` or an instance of :meth:`_postgres.array`. + + .. versionadded:: 2.0 + """ + if not isinstance(array, _pg_array): + array = _pg_array(array) + right_side = cast(array, ARRAY(sqltypes.TEXT)) + return self.operate(DELETE_PATH, right_side, result_type=JSONB) + + def path_exists(self, other): + """Boolean expression. Test for presence of item given by the + argument JSONPath expression (equivalent of the ``@?`` operator). + + .. versionadded:: 2.0 + """ + return self.operate( + PATH_EXISTS, other, result_type=sqltypes.Boolean + ) + + def path_match(self, other): + """Boolean expression. Test if JSONPath predicate given by the + argument JSONPath expression matches + (equivalent of the ``@@`` operator). + + Only the first item of the result is taken into account. + + .. versionadded:: 2.0 + """ + return self.operate( + PATH_MATCH, other, result_type=sqltypes.Boolean + ) + + comparator_factory = Comparator diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/named_types.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/named_types.py new file mode 100644 index 00000000..16e5c867 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/named_types.py @@ -0,0 +1,509 @@ +# dialects/postgresql/named_types.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors +from __future__ import annotations + +from typing import Any +from typing import Optional +from typing import Type +from typing import TYPE_CHECKING +from typing import Union + +from ... import schema +from ... import util +from ...sql import coercions +from ...sql import elements +from ...sql import roles +from ...sql import sqltypes +from ...sql import type_api +from ...sql.base import _NoArg +from ...sql.ddl import InvokeCreateDDLBase +from ...sql.ddl import InvokeDropDDLBase + +if TYPE_CHECKING: + from ...sql._typing import _TypeEngineArgument + + +class NamedType(sqltypes.TypeEngine): + """Base for named types.""" + + __abstract__ = True + DDLGenerator: Type[NamedTypeGenerator] + DDLDropper: Type[NamedTypeDropper] + create_type: bool + + def create(self, bind, checkfirst=True, **kw): + """Emit ``CREATE`` DDL for this type. + + :param bind: a connectable :class:`_engine.Engine`, + :class:`_engine.Connection`, or similar object to emit + SQL. + :param checkfirst: if ``True``, a query against + the PG catalog will be first performed to see + if the type does not exist already before + creating. + + """ + bind._run_ddl_visitor(self.DDLGenerator, self, checkfirst=checkfirst) + + def drop(self, bind, checkfirst=True, **kw): + """Emit ``DROP`` DDL for this type. 
+ + :param bind: a connectable :class:`_engine.Engine`, + :class:`_engine.Connection`, or similar object to emit + SQL. + :param checkfirst: if ``True``, a query against + the PG catalog will be first performed to see + if the type actually exists before dropping. + + """ + bind._run_ddl_visitor(self.DDLDropper, self, checkfirst=checkfirst) + + def _check_for_name_in_memos(self, checkfirst, kw): + """Look in the 'ddl runner' for 'memos', then + note our name in that collection. + + This to ensure a particular named type is operated + upon only once within any kind of create/drop + sequence without relying upon "checkfirst". + + """ + if not self.create_type: + return True + if "_ddl_runner" in kw: + ddl_runner = kw["_ddl_runner"] + type_name = f"pg_{self.__visit_name__}" + if type_name in ddl_runner.memo: + existing = ddl_runner.memo[type_name] + else: + existing = ddl_runner.memo[type_name] = set() + present = (self.schema, self.name) in existing + existing.add((self.schema, self.name)) + return present + else: + return False + + def _on_table_create(self, target, bind, checkfirst=False, **kw): + if ( + checkfirst + or ( + not self.metadata + and not kw.get("_is_metadata_operation", False) + ) + ) and not self._check_for_name_in_memos(checkfirst, kw): + self.create(bind=bind, checkfirst=checkfirst) + + def _on_table_drop(self, target, bind, checkfirst=False, **kw): + if ( + not self.metadata + and not kw.get("_is_metadata_operation", False) + and not self._check_for_name_in_memos(checkfirst, kw) + ): + self.drop(bind=bind, checkfirst=checkfirst) + + def _on_metadata_create(self, target, bind, checkfirst=False, **kw): + if not self._check_for_name_in_memos(checkfirst, kw): + self.create(bind=bind, checkfirst=checkfirst) + + def _on_metadata_drop(self, target, bind, checkfirst=False, **kw): + if not self._check_for_name_in_memos(checkfirst, kw): + self.drop(bind=bind, checkfirst=checkfirst) + + +class NamedTypeGenerator(InvokeCreateDDLBase): + def __init__(self, dialect, connection, checkfirst=False, **kwargs): + super().__init__(connection, **kwargs) + self.checkfirst = checkfirst + + def _can_create_type(self, type_): + if not self.checkfirst: + return True + + effective_schema = self.connection.schema_for_object(type_) + return not self.connection.dialect.has_type( + self.connection, type_.name, schema=effective_schema + ) + + +class NamedTypeDropper(InvokeDropDDLBase): + def __init__(self, dialect, connection, checkfirst=False, **kwargs): + super().__init__(connection, **kwargs) + self.checkfirst = checkfirst + + def _can_drop_type(self, type_): + if not self.checkfirst: + return True + + effective_schema = self.connection.schema_for_object(type_) + return self.connection.dialect.has_type( + self.connection, type_.name, schema=effective_schema + ) + + +class EnumGenerator(NamedTypeGenerator): + def visit_enum(self, enum): + if not self._can_create_type(enum): + return + + with self.with_ddl_events(enum): + self.connection.execute(CreateEnumType(enum)) + + +class EnumDropper(NamedTypeDropper): + def visit_enum(self, enum): + if not self._can_drop_type(enum): + return + + with self.with_ddl_events(enum): + self.connection.execute(DropEnumType(enum)) + + +class ENUM(NamedType, type_api.NativeForEmulated, sqltypes.Enum): + """PostgreSQL ENUM type. + + This is a subclass of :class:`_types.Enum` which includes + support for PG's ``CREATE TYPE`` and ``DROP TYPE``. 
+ + When the builtin type :class:`_types.Enum` is used and the + :paramref:`.Enum.native_enum` flag is left at its default of + True, the PostgreSQL backend will use a :class:`_postgresql.ENUM` + type as the implementation, so the special create/drop rules + will be used. + + The create/drop behavior of ENUM is necessarily intricate, due to the + awkward relationship the ENUM type has in relationship to the + parent table, in that it may be "owned" by just a single table, or + may be shared among many tables. + + When using :class:`_types.Enum` or :class:`_postgresql.ENUM` + in an "inline" fashion, the ``CREATE TYPE`` and ``DROP TYPE`` is emitted + corresponding to when the :meth:`_schema.Table.create` and + :meth:`_schema.Table.drop` + methods are called:: + + table = Table('sometable', metadata, + Column('some_enum', ENUM('a', 'b', 'c', name='myenum')) + ) + + table.create(engine) # will emit CREATE ENUM and CREATE TABLE + table.drop(engine) # will emit DROP TABLE and DROP ENUM + + To use a common enumerated type between multiple tables, the best + practice is to declare the :class:`_types.Enum` or + :class:`_postgresql.ENUM` independently, and associate it with the + :class:`_schema.MetaData` object itself:: + + my_enum = ENUM('a', 'b', 'c', name='myenum', metadata=metadata) + + t1 = Table('sometable_one', metadata, + Column('some_enum', myenum) + ) + + t2 = Table('sometable_two', metadata, + Column('some_enum', myenum) + ) + + When this pattern is used, care must still be taken at the level + of individual table creates. Emitting CREATE TABLE without also + specifying ``checkfirst=True`` will still cause issues:: + + t1.create(engine) # will fail: no such type 'myenum' + + If we specify ``checkfirst=True``, the individual table-level create + operation will check for the ``ENUM`` and create if not exists:: + + # will check if enum exists, and emit CREATE TYPE if not + t1.create(engine, checkfirst=True) + + When using a metadata-level ENUM type, the type will always be created + and dropped if either the metadata-wide create/drop is called:: + + metadata.create_all(engine) # will emit CREATE TYPE + metadata.drop_all(engine) # will emit DROP TYPE + + The type can also be created and dropped directly:: + + my_enum.create(engine) + my_enum.drop(engine) + + """ + + native_enum = True + DDLGenerator = EnumGenerator + DDLDropper = EnumDropper + + def __init__( + self, + *enums, + name: Union[str, _NoArg, None] = _NoArg.NO_ARG, + create_type: bool = True, + **kw, + ): + """Construct an :class:`_postgresql.ENUM`. + + Arguments are the same as that of + :class:`_types.Enum`, but also including + the following parameters. + + :param create_type: Defaults to True. + Indicates that ``CREATE TYPE`` should be + emitted, after optionally checking for the + presence of the type, when the parent + table is being created; and additionally + that ``DROP TYPE`` is called when the table + is dropped. When ``False``, no check + will be performed and no ``CREATE TYPE`` + or ``DROP TYPE`` is emitted, unless + :meth:`~.postgresql.ENUM.create` + or :meth:`~.postgresql.ENUM.drop` + are called directly. + Setting to ``False`` is helpful + when invoking a creation scheme to a SQL file + without access to the actual database - + the :meth:`~.postgresql.ENUM.create` and + :meth:`~.postgresql.ENUM.drop` methods can + be used to emit SQL to a target bind. 
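+
+         For example, a sketch of managing the type's DDL manually with
+         ``create_type=False`` (the ``engine`` object here is assumed)::
+
+             my_enum = ENUM('a', 'b', 'c', name='myenum', create_type=False)
+             my_enum.create(engine, checkfirst=True)
+             my_enum.drop(engine, checkfirst=True)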
+ + """ + native_enum = kw.pop("native_enum", None) + if native_enum is False: + util.warn( + "the native_enum flag does not apply to the " + "sqlalchemy.dialects.postgresql.ENUM datatype; this type " + "always refers to ENUM. Use sqlalchemy.types.Enum for " + "non-native enum." + ) + self.create_type = create_type + if name is not _NoArg.NO_ARG: + kw["name"] = name + super().__init__(*enums, **kw) + + def coerce_compared_value(self, op, value): + super_coerced_type = super().coerce_compared_value(op, value) + if ( + super_coerced_type._type_affinity + is type_api.STRINGTYPE._type_affinity + ): + return self + else: + return super_coerced_type + + @classmethod + def __test_init__(cls): + return cls(name="name") + + @classmethod + def adapt_emulated_to_native(cls, impl, **kw): + """Produce a PostgreSQL native :class:`_postgresql.ENUM` from plain + :class:`.Enum`. + + """ + kw.setdefault("validate_strings", impl.validate_strings) + kw.setdefault("name", impl.name) + kw.setdefault("schema", impl.schema) + kw.setdefault("inherit_schema", impl.inherit_schema) + kw.setdefault("metadata", impl.metadata) + kw.setdefault("_create_events", False) + kw.setdefault("values_callable", impl.values_callable) + kw.setdefault("omit_aliases", impl._omit_aliases) + kw.setdefault("_adapted_from", impl) + if type_api._is_native_for_emulated(impl.__class__): + kw.setdefault("create_type", impl.create_type) + + return cls(**kw) + + def create(self, bind=None, checkfirst=True): + """Emit ``CREATE TYPE`` for this + :class:`_postgresql.ENUM`. + + If the underlying dialect does not support + PostgreSQL CREATE TYPE, no action is taken. + + :param bind: a connectable :class:`_engine.Engine`, + :class:`_engine.Connection`, or similar object to emit + SQL. + :param checkfirst: if ``True``, a query against + the PG catalog will be first performed to see + if the type does not exist already before + creating. + + """ + if not bind.dialect.supports_native_enum: + return + + super().create(bind, checkfirst=checkfirst) + + def drop(self, bind=None, checkfirst=True): + """Emit ``DROP TYPE`` for this + :class:`_postgresql.ENUM`. + + If the underlying dialect does not support + PostgreSQL DROP TYPE, no action is taken. + + :param bind: a connectable :class:`_engine.Engine`, + :class:`_engine.Connection`, or similar object to emit + SQL. + :param checkfirst: if ``True``, a query against + the PG catalog will be first performed to see + if the type actually exists before dropping. + + """ + if not bind.dialect.supports_native_enum: + return + + super().drop(bind, checkfirst=checkfirst) + + def get_dbapi_type(self, dbapi): + """dont return dbapi.STRING for ENUM in PostgreSQL, since that's + a different type""" + + return None + + +class DomainGenerator(NamedTypeGenerator): + def visit_DOMAIN(self, domain): + if not self._can_create_type(domain): + return + with self.with_ddl_events(domain): + self.connection.execute(CreateDomainType(domain)) + + +class DomainDropper(NamedTypeDropper): + def visit_DOMAIN(self, domain): + if not self._can_drop_type(domain): + return + + with self.with_ddl_events(domain): + self.connection.execute(DropDomainType(domain)) + + +class DOMAIN(NamedType, sqltypes.SchemaType): + r"""Represent the DOMAIN PostgreSQL type. + + A domain is essentially a data type with optional constraints + that restrict the allowed set of values. 
E.g.::
+
+        PositiveInt = DOMAIN(
+            "pos_int", Integer, check="VALUE > 0", not_null=True
+        )
+
+        UsPostalCode = DOMAIN(
+            "us_postal_code",
+            Text,
+            check="VALUE ~ '^\d{5}$' OR VALUE ~ '^\d{5}-\d{4}$'"
+        )
+
+    See the `PostgreSQL documentation`__ for additional details.
+
+    __ https://www.postgresql.org/docs/current/sql-createdomain.html
+
+    .. versionadded:: 2.0
+
+    """
+
+    DDLGenerator = DomainGenerator
+    DDLDropper = DomainDropper
+
+    __visit_name__ = "DOMAIN"
+
+    def __init__(
+        self,
+        name: str,
+        data_type: _TypeEngineArgument[Any],
+        *,
+        collation: Optional[str] = None,
+        default: Union[elements.TextClause, str, None] = None,
+        constraint_name: Optional[str] = None,
+        not_null: Optional[bool] = None,
+        check: Union[elements.TextClause, str, None] = None,
+        create_type: bool = True,
+        **kw: Any,
+    ):
+        """
+        Construct a DOMAIN.
+
+        :param name: the name of the domain
+        :param data_type: The underlying data type of the domain.
+         This can include array specifiers.
+        :param collation: An optional collation for the domain.
+         If no collation is specified, the underlying data type's default
+         collation is used.  The underlying type must be collatable if
+         ``collation`` is specified.
+        :param default: The DEFAULT clause specifies a default value for
+         columns of the domain data type.  The default should be a string
+         or a :func:`_expression.text` value.
+         If no default value is specified, then the default value is
+         the null value.
+        :param constraint_name: An optional name for a constraint.
+         If not specified, the backend generates a name.
+        :param not_null: Values of this domain are prevented from being null.
+         By default domains are allowed to be null.  If not specified,
+         no nullability clause will be emitted.
+        :param check: A CHECK clause specifying an integrity constraint or
+         test which values of the domain must satisfy.  A constraint must be
+         an expression producing a Boolean result that can use the key
+         word VALUE to refer to the value being tested.
+         Differently from PostgreSQL, only a single check clause is
+         currently allowed in SQLAlchemy.
+        :param schema: optional schema name
+        :param metadata: optional :class:`_schema.MetaData` object which
+         this :class:`_postgresql.DOMAIN` will be directly associated with
+        :param create_type: Defaults to True.
+         Indicates that ``CREATE TYPE`` should be emitted, after optionally
+         checking for the presence of the type, when the parent table is
+         being created; and additionally that ``DROP TYPE`` is called
+         when the table is dropped.
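+
+         As with :class:`_postgresql.ENUM`, a sketch of creating and
+         dropping the type directly (an ``engine`` object is assumed)::
+
+             PositiveInt.create(engine, checkfirst=True)
+             PositiveInt.drop(engine, checkfirst=True)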
+ + """ + self.data_type = type_api.to_instance(data_type) + self.default = default + self.collation = collation + self.constraint_name = constraint_name + self.not_null = bool(not_null) + if check is not None: + check = coercions.expect(roles.DDLExpressionRole, check) + self.check = check + self.create_type = create_type + super().__init__(name=name, **kw) + + @classmethod + def __test_init__(cls): + return cls("name", sqltypes.Integer) + + def adapt(self, impl, **kw): + if self.default: + kw["default"] = self.default + if self.constraint_name is not None: + kw["constraint_name"] = self.constraint_name + if self.not_null: + kw["not_null"] = self.not_null + if self.check is not None: + kw["check"] = str(self.check) + if self.create_type: + kw["create_type"] = self.create_type + + return super().adapt(impl, **kw) + + +class CreateEnumType(schema._CreateDropBase): + __visit_name__ = "create_enum_type" + + +class DropEnumType(schema._CreateDropBase): + __visit_name__ = "drop_enum_type" + + +class CreateDomainType(schema._CreateDropBase): + """Represent a CREATE DOMAIN statement.""" + + __visit_name__ = "create_domain_type" + + +class DropDomainType(schema._CreateDropBase): + """Represent a DROP DOMAIN statement.""" + + __visit_name__ = "drop_domain_type" diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/operators.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/operators.py new file mode 100644 index 00000000..53e175f9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/operators.py @@ -0,0 +1,129 @@ +# dialects/postgresql/operators.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors +from ...sql import operators + + +_getitem_precedence = operators._PRECEDENCE[operators.json_getitem_op] +_eq_precedence = operators._PRECEDENCE[operators.eq] + +# JSON + JSONB +ASTEXT = operators.custom_op( + "->>", + precedence=_getitem_precedence, + natural_self_precedent=True, + eager_grouping=True, +) + +JSONPATH_ASTEXT = operators.custom_op( + "#>>", + precedence=_getitem_precedence, + natural_self_precedent=True, + eager_grouping=True, +) + +# JSONB + HSTORE +HAS_KEY = operators.custom_op( + "?", + precedence=_eq_precedence, + natural_self_precedent=True, + eager_grouping=True, + is_comparison=True, +) + +HAS_ALL = operators.custom_op( + "?&", + precedence=_eq_precedence, + natural_self_precedent=True, + eager_grouping=True, + is_comparison=True, +) + +HAS_ANY = operators.custom_op( + "?|", + precedence=_eq_precedence, + natural_self_precedent=True, + eager_grouping=True, + is_comparison=True, +) + +# JSONB +DELETE_PATH = operators.custom_op( + "#-", + precedence=_getitem_precedence, + natural_self_precedent=True, + eager_grouping=True, +) + +PATH_EXISTS = operators.custom_op( + "@?", + precedence=_eq_precedence, + natural_self_precedent=True, + eager_grouping=True, + is_comparison=True, +) + +PATH_MATCH = operators.custom_op( + "@@", + precedence=_eq_precedence, + natural_self_precedent=True, + eager_grouping=True, + is_comparison=True, +) + +# JSONB + ARRAY + HSTORE + RANGE +CONTAINS = operators.custom_op( + "@>", + precedence=_eq_precedence, + natural_self_precedent=True, + eager_grouping=True, + is_comparison=True, +) + +CONTAINED_BY = operators.custom_op( + "<@", + precedence=_eq_precedence, + natural_self_precedent=True, + 
eager_grouping=True, + is_comparison=True, +) + +# ARRAY + RANGE +OVERLAP = operators.custom_op( + "&&", + precedence=_eq_precedence, + is_comparison=True, +) + +# RANGE +STRICTLY_LEFT_OF = operators.custom_op( + "<<", precedence=_eq_precedence, is_comparison=True +) + +STRICTLY_RIGHT_OF = operators.custom_op( + ">>", precedence=_eq_precedence, is_comparison=True +) + +NOT_EXTEND_RIGHT_OF = operators.custom_op( + "&<", precedence=_eq_precedence, is_comparison=True +) + +NOT_EXTEND_LEFT_OF = operators.custom_op( + "&>", precedence=_eq_precedence, is_comparison=True +) + +ADJACENT_TO = operators.custom_op( + "-|-", precedence=_eq_precedence, is_comparison=True +) + +# HSTORE +GETITEM = operators.custom_op( + "->", + precedence=_getitem_precedence, + natural_self_precedent=True, + eager_grouping=True, +) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/pg8000.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/pg8000.py new file mode 100644 index 00000000..0151be02 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/pg8000.py @@ -0,0 +1,662 @@ +# dialects/postgresql/pg8000.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +r""" +.. dialect:: postgresql+pg8000 + :name: pg8000 + :dbapi: pg8000 + :connectstring: postgresql+pg8000://user:password@host:port/dbname[?key=value&key=value...] + :url: https://pypi.org/project/pg8000/ + +.. versionchanged:: 1.4 The pg8000 dialect has been updated for version + 1.16.6 and higher, and is again part of SQLAlchemy's continuous integration + with full feature support. + +.. _pg8000_unicode: + +Unicode +------- + +pg8000 will encode / decode string values between it and the server using the +PostgreSQL ``client_encoding`` parameter; by default this is the value in +the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``. +Typically, this can be changed to ``utf-8``, as a more useful default:: + + #client_encoding = sql_ascii # actually, defaults to database + # encoding + client_encoding = utf8 + +The ``client_encoding`` can be overridden for a session by executing the SQL: + +SET CLIENT_ENCODING TO 'utf8'; + +SQLAlchemy will execute this SQL on all new connections based on the value +passed to :func:`_sa.create_engine` using the ``client_encoding`` parameter:: + + engine = create_engine( + "postgresql+pg8000://user:pass@host/dbname", client_encoding='utf8') + +.. _pg8000_ssl: + +SSL Connections +--------------- + +pg8000 accepts a Python ``SSLContext`` object which may be specified using the +:paramref:`_sa.create_engine.connect_args` dictionary:: + + import ssl + ssl_context = ssl.create_default_context() + engine = sa.create_engine( + "postgresql+pg8000://scott:tiger@192.168.0.199/test", + connect_args={"ssl_context": ssl_context}, + ) + +If the server uses an automatically-generated certificate that is self-signed +or does not match the host name (as seen from the client), it may also be +necessary to disable hostname checking:: + + import ssl + ssl_context = ssl.create_default_context() + ssl_context.check_hostname = False + ssl_context.verify_mode = ssl.CERT_NONE + engine = sa.create_engine( + "postgresql+pg8000://scott:tiger@192.168.0.199/test", + connect_args={"ssl_context": ssl_context}, + ) + +.. 
_pg8000_isolation_level: + +pg8000 Transaction Isolation Level +------------------------------------- + +The pg8000 dialect offers the same isolation level settings as that +of the :ref:`psycopg2 ` dialect: + +* ``READ COMMITTED`` +* ``READ UNCOMMITTED`` +* ``REPEATABLE READ`` +* ``SERIALIZABLE`` +* ``AUTOCOMMIT`` + +.. seealso:: + + :ref:`postgresql_isolation_level` + + :ref:`psycopg2_isolation_level` + + +""" # noqa +import decimal +import re + +from . import ranges +from .array import ARRAY as PGARRAY +from .base import _DECIMAL_TYPES +from .base import _FLOAT_TYPES +from .base import _INT_TYPES +from .base import ENUM +from .base import INTERVAL +from .base import PGCompiler +from .base import PGDialect +from .base import PGExecutionContext +from .base import PGIdentifierPreparer +from .json import JSON +from .json import JSONB +from .json import JSONPathType +from .pg_catalog import _SpaceVector +from .pg_catalog import OIDVECTOR +from .types import CITEXT +from ... import exc +from ... import util +from ...engine import processors +from ...sql import sqltypes +from ...sql.elements import quoted_name + + +class _PGString(sqltypes.String): + render_bind_cast = True + + +class _PGNumeric(sqltypes.Numeric): + render_bind_cast = True + + def result_processor(self, dialect, coltype): + if self.asdecimal: + if coltype in _FLOAT_TYPES: + return processors.to_decimal_processor_factory( + decimal.Decimal, self._effective_decimal_return_scale + ) + elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES: + # pg8000 returns Decimal natively for 1700 + return None + else: + raise exc.InvalidRequestError( + "Unknown PG numeric type: %d" % coltype + ) + else: + if coltype in _FLOAT_TYPES: + # pg8000 returns float natively for 701 + return None + elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES: + return processors.to_float + else: + raise exc.InvalidRequestError( + "Unknown PG numeric type: %d" % coltype + ) + + +class _PGFloat(_PGNumeric, sqltypes.Float): + __visit_name__ = "float" + render_bind_cast = True + + +class _PGNumericNoBind(_PGNumeric): + def bind_processor(self, dialect): + return None + + +class _PGJSON(JSON): + render_bind_cast = True + + def result_processor(self, dialect, coltype): + return None + + +class _PGJSONB(JSONB): + render_bind_cast = True + + def result_processor(self, dialect, coltype): + return None + + +class _PGJSONIndexType(sqltypes.JSON.JSONIndexType): + def get_dbapi_type(self, dbapi): + raise NotImplementedError("should not be here") + + +class _PGJSONIntIndexType(sqltypes.JSON.JSONIntIndexType): + __visit_name__ = "json_int_index" + + render_bind_cast = True + + +class _PGJSONStrIndexType(sqltypes.JSON.JSONStrIndexType): + __visit_name__ = "json_str_index" + + render_bind_cast = True + + +class _PGJSONPathType(JSONPathType): + pass + + # DBAPI type 1009 + + +class _PGEnum(ENUM): + def get_dbapi_type(self, dbapi): + return dbapi.UNKNOWN + + +class _PGInterval(INTERVAL): + render_bind_cast = True + + def get_dbapi_type(self, dbapi): + return dbapi.INTERVAL + + @classmethod + def adapt_emulated_to_native(cls, interval, **kw): + return _PGInterval(precision=interval.second_precision) + + +class _PGTimeStamp(sqltypes.DateTime): + render_bind_cast = True + + +class _PGDate(sqltypes.Date): + render_bind_cast = True + + +class _PGTime(sqltypes.Time): + render_bind_cast = True + + +class _PGInteger(sqltypes.Integer): + render_bind_cast = True + + +class _PGSmallInteger(sqltypes.SmallInteger): + render_bind_cast = True + + +class _PGNullType(sqltypes.NullType): + 
pass + + +class _PGBigInteger(sqltypes.BigInteger): + render_bind_cast = True + + +class _PGBoolean(sqltypes.Boolean): + render_bind_cast = True + + +class _PGARRAY(PGARRAY): + render_bind_cast = True + + +class _PGOIDVECTOR(_SpaceVector, OIDVECTOR): + pass + + +class _Pg8000Range(ranges.AbstractSingleRangeImpl): + def bind_processor(self, dialect): + pg8000_Range = dialect.dbapi.Range + + def to_range(value): + if isinstance(value, ranges.Range): + value = pg8000_Range( + value.lower, value.upper, value.bounds, value.empty + ) + return value + + return to_range + + def result_processor(self, dialect, coltype): + def to_range(value): + if value is not None: + value = ranges.Range( + value.lower, + value.upper, + bounds=value.bounds, + empty=value.is_empty, + ) + return value + + return to_range + + +class _Pg8000MultiRange(ranges.AbstractMultiRangeImpl): + def bind_processor(self, dialect): + pg8000_Range = dialect.dbapi.Range + + def to_multirange(value): + if isinstance(value, list): + mr = [] + for v in value: + if isinstance(v, ranges.Range): + mr.append( + pg8000_Range(v.lower, v.upper, v.bounds, v.empty) + ) + else: + mr.append(v) + return mr + else: + return value + + return to_multirange + + def result_processor(self, dialect, coltype): + def to_multirange(value): + if value is None: + return None + else: + return ranges.MultiRange( + ranges.Range( + v.lower, v.upper, bounds=v.bounds, empty=v.is_empty + ) + for v in value + ) + + return to_multirange + + +_server_side_id = util.counter() + + +class PGExecutionContext_pg8000(PGExecutionContext): + def create_server_side_cursor(self): + ident = "c_%s_%s" % (hex(id(self))[2:], hex(_server_side_id())[2:]) + return ServerSideCursor(self._dbapi_connection.cursor(), ident) + + def pre_exec(self): + if not self.compiled: + return + + +class ServerSideCursor: + server_side = True + + def __init__(self, cursor, ident): + self.ident = ident + self.cursor = cursor + + @property + def connection(self): + return self.cursor.connection + + @property + def rowcount(self): + return self.cursor.rowcount + + @property + def description(self): + return self.cursor.description + + def execute(self, operation, args=(), stream=None): + op = "DECLARE " + self.ident + " NO SCROLL CURSOR FOR " + operation + self.cursor.execute(op, args, stream=stream) + return self + + def executemany(self, operation, param_sets): + self.cursor.executemany(operation, param_sets) + return self + + def fetchone(self): + self.cursor.execute("FETCH FORWARD 1 FROM " + self.ident) + return self.cursor.fetchone() + + def fetchmany(self, num=None): + if num is None: + return self.fetchall() + else: + self.cursor.execute( + "FETCH FORWARD " + str(int(num)) + " FROM " + self.ident + ) + return self.cursor.fetchall() + + def fetchall(self): + self.cursor.execute("FETCH FORWARD ALL FROM " + self.ident) + return self.cursor.fetchall() + + def close(self): + self.cursor.execute("CLOSE " + self.ident) + self.cursor.close() + + def setinputsizes(self, *sizes): + self.cursor.setinputsizes(*sizes) + + def setoutputsize(self, size, column=None): + pass + + +class PGCompiler_pg8000(PGCompiler): + def visit_mod_binary(self, binary, operator, **kw): + return ( + self.process(binary.left, **kw) + + " %% " + + self.process(binary.right, **kw) + ) + + +class PGIdentifierPreparer_pg8000(PGIdentifierPreparer): + def __init__(self, *args, **kwargs): + PGIdentifierPreparer.__init__(self, *args, **kwargs) + self._double_percents = False + + +class PGDialect_pg8000(PGDialect): + driver = "pg8000" + 
supports_statement_cache = True
+
+    supports_unicode_statements = True
+
+    supports_unicode_binds = True
+
+    default_paramstyle = "format"
+    supports_sane_multi_rowcount = True
+    execution_ctx_cls = PGExecutionContext_pg8000
+    statement_compiler = PGCompiler_pg8000
+    preparer = PGIdentifierPreparer_pg8000
+    supports_server_side_cursors = True
+
+    render_bind_cast = True
+
+    # reversed as of pg8000 1.16.6.  1.16.5 and lower
+    # are no longer compatible
+    description_encoding = None
+    # description_encoding = "use_encoding"
+
+    colspecs = util.update_copy(
+        PGDialect.colspecs,
+        {
+            sqltypes.String: _PGString,
+            sqltypes.Numeric: _PGNumericNoBind,
+            sqltypes.Float: _PGFloat,
+            sqltypes.JSON: _PGJSON,
+            sqltypes.Boolean: _PGBoolean,
+            sqltypes.NullType: _PGNullType,
+            JSONB: _PGJSONB,
+            CITEXT: CITEXT,
+            sqltypes.JSON.JSONPathType: _PGJSONPathType,
+            sqltypes.JSON.JSONIndexType: _PGJSONIndexType,
+            sqltypes.JSON.JSONIntIndexType: _PGJSONIntIndexType,
+            sqltypes.JSON.JSONStrIndexType: _PGJSONStrIndexType,
+            sqltypes.Interval: _PGInterval,
+            INTERVAL: _PGInterval,
+            sqltypes.DateTime: _PGTimeStamp,
+            sqltypes.Date: _PGDate,
+            sqltypes.Time: _PGTime,
+            sqltypes.Integer: _PGInteger,
+            sqltypes.SmallInteger: _PGSmallInteger,
+            sqltypes.BigInteger: _PGBigInteger,
+            sqltypes.Enum: _PGEnum,
+            sqltypes.ARRAY: _PGARRAY,
+            OIDVECTOR: _PGOIDVECTOR,
+            ranges.INT4RANGE: _Pg8000Range,
+            ranges.INT8RANGE: _Pg8000Range,
+            ranges.NUMRANGE: _Pg8000Range,
+            ranges.DATERANGE: _Pg8000Range,
+            ranges.TSRANGE: _Pg8000Range,
+            ranges.TSTZRANGE: _Pg8000Range,
+            ranges.INT4MULTIRANGE: _Pg8000MultiRange,
+            ranges.INT8MULTIRANGE: _Pg8000MultiRange,
+            ranges.NUMMULTIRANGE: _Pg8000MultiRange,
+            ranges.DATEMULTIRANGE: _Pg8000MultiRange,
+            ranges.TSMULTIRANGE: _Pg8000MultiRange,
+            ranges.TSTZMULTIRANGE: _Pg8000MultiRange,
+        },
+    )
+
+    def __init__(self, client_encoding=None, **kwargs):
+        PGDialect.__init__(self, **kwargs)
+        self.client_encoding = client_encoding
+
+        if self._dbapi_version < (1, 16, 6):
+            raise NotImplementedError("pg8000 1.16.6 or greater is required")
+
+        if self._native_inet_types:
+            raise NotImplementedError(
+                "The pg8000 dialect does not fully implement "
+                "ipaddress type handling; INET is supported by default, "
+                "CIDR is not"
+            )
+
+    @util.memoized_property
+    def _dbapi_version(self):
+        if self.dbapi and hasattr(self.dbapi, "__version__"):
+            return tuple(
+                [
+                    int(x)
+                    for x in re.findall(
+                        r"(\d+)(?:[-\.]?|$)", self.dbapi.__version__
+                    )
+                ]
+            )
+        else:
+            return (99, 99, 99)
+
+    @classmethod
+    def import_dbapi(cls):
+        return __import__("pg8000")
+
+    def create_connect_args(self, url):
+        opts = url.translate_connect_args(username="user")
+        if "port" in opts:
+            opts["port"] = int(opts["port"])
+        opts.update(url.query)
+        return ([], opts)
+
+    def is_disconnect(self, e, connection, cursor):
+        if isinstance(e, self.dbapi.InterfaceError) and "network error" in str(
+            e
+        ):
+            # new as of pg8000 1.19.0 for broken connections
+            return True
+
+        # connection was closed normally
+        return "connection is closed" in str(e)
+
+    def get_isolation_level_values(self, dbapi_connection):
+        return (
+            "AUTOCOMMIT",
+            "READ COMMITTED",
+            "READ UNCOMMITTED",
+            "REPEATABLE READ",
+            "SERIALIZABLE",
+        )
+
+    def set_isolation_level(self, dbapi_connection, level):
+        level = level.replace("_", " ")
+
+        if level == "AUTOCOMMIT":
+            dbapi_connection.autocommit = True
+        else:
+            dbapi_connection.autocommit = False
+            cursor = dbapi_connection.cursor()
+            cursor.execute(
+                "SET SESSION 
CHARACTERISTICS AS TRANSACTION " + f"ISOLATION LEVEL {level}" + ) + cursor.execute("COMMIT") + cursor.close() + + def set_readonly(self, connection, value): + cursor = connection.cursor() + try: + cursor.execute( + "SET SESSION CHARACTERISTICS AS TRANSACTION %s" + % ("READ ONLY" if value else "READ WRITE") + ) + cursor.execute("COMMIT") + finally: + cursor.close() + + def get_readonly(self, connection): + cursor = connection.cursor() + try: + cursor.execute("show transaction_read_only") + val = cursor.fetchone()[0] + finally: + cursor.close() + + return val == "on" + + def set_deferrable(self, connection, value): + cursor = connection.cursor() + try: + cursor.execute( + "SET SESSION CHARACTERISTICS AS TRANSACTION %s" + % ("DEFERRABLE" if value else "NOT DEFERRABLE") + ) + cursor.execute("COMMIT") + finally: + cursor.close() + + def get_deferrable(self, connection): + cursor = connection.cursor() + try: + cursor.execute("show transaction_deferrable") + val = cursor.fetchone()[0] + finally: + cursor.close() + + return val == "on" + + def _set_client_encoding(self, dbapi_connection, client_encoding): + cursor = dbapi_connection.cursor() + cursor.execute( + f"""SET CLIENT_ENCODING TO '{ + client_encoding.replace("'", "''") + }'""" + ) + cursor.execute("COMMIT") + cursor.close() + + def do_begin_twophase(self, connection, xid): + connection.connection.tpc_begin((0, xid, "")) + + def do_prepare_twophase(self, connection, xid): + connection.connection.tpc_prepare() + + def do_rollback_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + connection.connection.tpc_rollback((0, xid, "")) + + def do_commit_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + connection.connection.tpc_commit((0, xid, "")) + + def do_recover_twophase(self, connection): + return [row[1] for row in connection.connection.tpc_recover()] + + def on_connect(self): + fns = [] + + def on_connect(conn): + conn.py_types[quoted_name] = conn.py_types[str] + + fns.append(on_connect) + + if self.client_encoding is not None: + + def on_connect(conn): + self._set_client_encoding(conn, self.client_encoding) + + fns.append(on_connect) + + if self._native_inet_types is False: + + def on_connect(conn): + # inet + conn.register_in_adapter(869, lambda s: s) + + # cidr + conn.register_in_adapter(650, lambda s: s) + + fns.append(on_connect) + + if self._json_deserializer: + + def on_connect(conn): + # json + conn.register_in_adapter(114, self._json_deserializer) + + # jsonb + conn.register_in_adapter(3802, self._json_deserializer) + + fns.append(on_connect) + + if len(fns) > 0: + + def on_connect(conn): + for fn in fns: + fn(conn) + + return on_connect + else: + return None + + @util.memoized_property + def _dialect_specific_select_one(self): + return ";" + + +dialect = PGDialect_pg8000 diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/pg_catalog.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/pg_catalog.py new file mode 100644 index 00000000..9b5562c1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/pg_catalog.py @@ -0,0 +1,300 @@ +# dialects/postgresql/pg_catalog.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +from .array import ARRAY +from .types import OID +from .types import REGCLASS +from ... import Column +from ... 
import func +from ... import MetaData +from ... import Table +from ...types import BigInteger +from ...types import Boolean +from ...types import CHAR +from ...types import Float +from ...types import Integer +from ...types import SmallInteger +from ...types import String +from ...types import Text +from ...types import TypeDecorator + + +# types +class NAME(TypeDecorator): + impl = String(64, collation="C") + cache_ok = True + + +class PG_NODE_TREE(TypeDecorator): + impl = Text(collation="C") + cache_ok = True + + +class INT2VECTOR(TypeDecorator): + impl = ARRAY(SmallInteger) + cache_ok = True + + +class OIDVECTOR(TypeDecorator): + impl = ARRAY(OID) + cache_ok = True + + +class _SpaceVector: + def result_processor(self, dialect, coltype): + def process(value): + if value is None: + return value + return [int(p) for p in value.split(" ")] + + return process + + +REGPROC = REGCLASS # seems an alias + +# functions +_pg_cat = func.pg_catalog +quote_ident = _pg_cat.quote_ident +pg_table_is_visible = _pg_cat.pg_table_is_visible +pg_type_is_visible = _pg_cat.pg_type_is_visible +pg_get_viewdef = _pg_cat.pg_get_viewdef +pg_get_serial_sequence = _pg_cat.pg_get_serial_sequence +format_type = _pg_cat.format_type +pg_get_expr = _pg_cat.pg_get_expr +pg_get_constraintdef = _pg_cat.pg_get_constraintdef +pg_get_indexdef = _pg_cat.pg_get_indexdef + +# constants +RELKINDS_TABLE_NO_FOREIGN = ("r", "p") +RELKINDS_TABLE = RELKINDS_TABLE_NO_FOREIGN + ("f",) +RELKINDS_VIEW = ("v",) +RELKINDS_MAT_VIEW = ("m",) +RELKINDS_ALL_TABLE_LIKE = RELKINDS_TABLE + RELKINDS_VIEW + RELKINDS_MAT_VIEW + +# tables +pg_catalog_meta = MetaData(schema="pg_catalog") + +pg_namespace = Table( + "pg_namespace", + pg_catalog_meta, + Column("oid", OID), + Column("nspname", NAME), + Column("nspowner", OID), +) + +pg_class = Table( + "pg_class", + pg_catalog_meta, + Column("oid", OID, info={"server_version": (9, 3)}), + Column("relname", NAME), + Column("relnamespace", OID), + Column("reltype", OID), + Column("reloftype", OID), + Column("relowner", OID), + Column("relam", OID), + Column("relfilenode", OID), + Column("reltablespace", OID), + Column("relpages", Integer), + Column("reltuples", Float), + Column("relallvisible", Integer, info={"server_version": (9, 2)}), + Column("reltoastrelid", OID), + Column("relhasindex", Boolean), + Column("relisshared", Boolean), + Column("relpersistence", CHAR, info={"server_version": (9, 1)}), + Column("relkind", CHAR), + Column("relnatts", SmallInteger), + Column("relchecks", SmallInteger), + Column("relhasrules", Boolean), + Column("relhastriggers", Boolean), + Column("relhassubclass", Boolean), + Column("relrowsecurity", Boolean), + Column("relforcerowsecurity", Boolean, info={"server_version": (9, 5)}), + Column("relispopulated", Boolean, info={"server_version": (9, 3)}), + Column("relreplident", CHAR, info={"server_version": (9, 4)}), + Column("relispartition", Boolean, info={"server_version": (10,)}), + Column("relrewrite", OID, info={"server_version": (11,)}), + Column("reloptions", ARRAY(Text)), +) + +pg_type = Table( + "pg_type", + pg_catalog_meta, + Column("oid", OID, info={"server_version": (9, 3)}), + Column("typname", NAME), + Column("typnamespace", OID), + Column("typowner", OID), + Column("typlen", SmallInteger), + Column("typbyval", Boolean), + Column("typtype", CHAR), + Column("typcategory", CHAR), + Column("typispreferred", Boolean), + Column("typisdefined", Boolean), + Column("typdelim", CHAR), + Column("typrelid", OID), + Column("typelem", OID), + Column("typarray", OID), + 
Column("typinput", REGPROC), + Column("typoutput", REGPROC), + Column("typreceive", REGPROC), + Column("typsend", REGPROC), + Column("typmodin", REGPROC), + Column("typmodout", REGPROC), + Column("typanalyze", REGPROC), + Column("typalign", CHAR), + Column("typstorage", CHAR), + Column("typnotnull", Boolean), + Column("typbasetype", OID), + Column("typtypmod", Integer), + Column("typndims", Integer), + Column("typcollation", OID, info={"server_version": (9, 1)}), + Column("typdefault", Text), +) + +pg_index = Table( + "pg_index", + pg_catalog_meta, + Column("indexrelid", OID), + Column("indrelid", OID), + Column("indnatts", SmallInteger), + Column("indnkeyatts", SmallInteger, info={"server_version": (11,)}), + Column("indisunique", Boolean), + Column("indnullsnotdistinct", Boolean, info={"server_version": (15,)}), + Column("indisprimary", Boolean), + Column("indisexclusion", Boolean, info={"server_version": (9, 1)}), + Column("indimmediate", Boolean), + Column("indisclustered", Boolean), + Column("indisvalid", Boolean), + Column("indcheckxmin", Boolean), + Column("indisready", Boolean), + Column("indislive", Boolean, info={"server_version": (9, 3)}), # 9.3 + Column("indisreplident", Boolean), + Column("indkey", INT2VECTOR), + Column("indcollation", OIDVECTOR, info={"server_version": (9, 1)}), # 9.1 + Column("indclass", OIDVECTOR), + Column("indoption", INT2VECTOR), + Column("indexprs", PG_NODE_TREE), + Column("indpred", PG_NODE_TREE), +) + +pg_attribute = Table( + "pg_attribute", + pg_catalog_meta, + Column("attrelid", OID), + Column("attname", NAME), + Column("atttypid", OID), + Column("attstattarget", Integer), + Column("attlen", SmallInteger), + Column("attnum", SmallInteger), + Column("attndims", Integer), + Column("attcacheoff", Integer), + Column("atttypmod", Integer), + Column("attbyval", Boolean), + Column("attstorage", CHAR), + Column("attalign", CHAR), + Column("attnotnull", Boolean), + Column("atthasdef", Boolean), + Column("atthasmissing", Boolean, info={"server_version": (11,)}), + Column("attidentity", CHAR, info={"server_version": (10,)}), + Column("attgenerated", CHAR, info={"server_version": (12,)}), + Column("attisdropped", Boolean), + Column("attislocal", Boolean), + Column("attinhcount", Integer), + Column("attcollation", OID, info={"server_version": (9, 1)}), +) + +pg_constraint = Table( + "pg_constraint", + pg_catalog_meta, + Column("oid", OID), # 9.3 + Column("conname", NAME), + Column("connamespace", OID), + Column("contype", CHAR), + Column("condeferrable", Boolean), + Column("condeferred", Boolean), + Column("convalidated", Boolean, info={"server_version": (9, 1)}), + Column("conrelid", OID), + Column("contypid", OID), + Column("conindid", OID), + Column("conparentid", OID, info={"server_version": (11,)}), + Column("confrelid", OID), + Column("confupdtype", CHAR), + Column("confdeltype", CHAR), + Column("confmatchtype", CHAR), + Column("conislocal", Boolean), + Column("coninhcount", Integer), + Column("connoinherit", Boolean, info={"server_version": (9, 2)}), + Column("conkey", ARRAY(SmallInteger)), + Column("confkey", ARRAY(SmallInteger)), +) + +pg_sequence = Table( + "pg_sequence", + pg_catalog_meta, + Column("seqrelid", OID), + Column("seqtypid", OID), + Column("seqstart", BigInteger), + Column("seqincrement", BigInteger), + Column("seqmax", BigInteger), + Column("seqmin", BigInteger), + Column("seqcache", BigInteger), + Column("seqcycle", Boolean), + info={"server_version": (10,)}, +) + +pg_attrdef = Table( + "pg_attrdef", + pg_catalog_meta, + Column("oid", 
OID, info={"server_version": (9, 3)}), + Column("adrelid", OID), + Column("adnum", SmallInteger), + Column("adbin", PG_NODE_TREE), +) + +pg_description = Table( + "pg_description", + pg_catalog_meta, + Column("objoid", OID), + Column("classoid", OID), + Column("objsubid", Integer), + Column("description", Text(collation="C")), +) + +pg_enum = Table( + "pg_enum", + pg_catalog_meta, + Column("oid", OID, info={"server_version": (9, 3)}), + Column("enumtypid", OID), + Column("enumsortorder", Float(), info={"server_version": (9, 1)}), + Column("enumlabel", NAME), +) + +pg_am = Table( + "pg_am", + pg_catalog_meta, + Column("oid", OID, info={"server_version": (9, 3)}), + Column("amname", NAME), + Column("amhandler", REGPROC, info={"server_version": (9, 6)}), + Column("amtype", CHAR, info={"server_version": (9, 6)}), +) + +pg_collation = Table( + "pg_collation", + pg_catalog_meta, + Column("oid", OID, info={"server_version": (9, 3)}), + Column("collname", NAME), + Column("collnamespace", OID), + Column("collowner", OID), + Column("collprovider", CHAR, info={"server_version": (10,)}), + Column("collisdeterministic", Boolean, info={"server_version": (12,)}), + Column("collencoding", Integer), + Column("collcollate", Text), + Column("collctype", Text), + Column("colliculocale", Text), + Column("collicurules", Text, info={"server_version": (16,)}), + Column("collversion", Text, info={"server_version": (10,)}), +) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/provision.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/provision.py new file mode 100644 index 00000000..38573c77 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/provision.py @@ -0,0 +1,175 @@ +# dialects/postgresql/provision.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +import time + +from ... import exc +from ... import inspect +from ... 
import text +from ...testing import warn_test_suite +from ...testing.provision import create_db +from ...testing.provision import drop_all_schema_objects_post_tables +from ...testing.provision import drop_all_schema_objects_pre_tables +from ...testing.provision import drop_db +from ...testing.provision import log +from ...testing.provision import post_configure_engine +from ...testing.provision import prepare_for_drop_tables +from ...testing.provision import set_default_schema_on_connection +from ...testing.provision import temp_table_keyword_args +from ...testing.provision import upsert + + +@create_db.for_db("postgresql") +def _pg_create_db(cfg, eng, ident): + template_db = cfg.options.postgresql_templatedb + + with eng.execution_options(isolation_level="AUTOCOMMIT").begin() as conn: + if not template_db: + template_db = conn.exec_driver_sql( + "select current_database()" + ).scalar() + + attempt = 0 + while True: + try: + conn.exec_driver_sql( + "CREATE DATABASE %s TEMPLATE %s" % (ident, template_db) + ) + except exc.OperationalError as err: + attempt += 1 + if attempt >= 3: + raise + if "accessed by other users" in str(err): + log.info( + "Waiting to create %s, URI %r, " + "template DB %s is in use sleeping for .5", + ident, + eng.url, + template_db, + ) + time.sleep(0.5) + except: + raise + else: + break + + +@drop_db.for_db("postgresql") +def _pg_drop_db(cfg, eng, ident): + with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn: + with conn.begin(): + conn.execute( + text( + "select pg_terminate_backend(pid) from pg_stat_activity " + "where usename=current_user and pid != pg_backend_pid() " + "and datname=:dname" + ), + dict(dname=ident), + ) + conn.exec_driver_sql("DROP DATABASE %s" % ident) + + +@temp_table_keyword_args.for_db("postgresql") +def _postgresql_temp_table_keyword_args(cfg, eng): + return {"prefixes": ["TEMPORARY"]} + + +@set_default_schema_on_connection.for_db("postgresql") +def _postgresql_set_default_schema_on_connection( + cfg, dbapi_connection, schema_name +): + existing_autocommit = dbapi_connection.autocommit + dbapi_connection.autocommit = True + cursor = dbapi_connection.cursor() + cursor.execute("SET SESSION search_path='%s'" % schema_name) + cursor.close() + dbapi_connection.autocommit = existing_autocommit + + +@drop_all_schema_objects_pre_tables.for_db("postgresql") +def drop_all_schema_objects_pre_tables(cfg, eng): + with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn: + for xid in conn.exec_driver_sql( + "select gid from pg_prepared_xacts" + ).scalars(): + conn.exec_driver_sql("ROLLBACK PREPARED '%s'" % xid) + + +@drop_all_schema_objects_post_tables.for_db("postgresql") +def drop_all_schema_objects_post_tables(cfg, eng): + from sqlalchemy.dialects import postgresql + + inspector = inspect(eng) + with eng.begin() as conn: + for enum in inspector.get_enums("*"): + conn.execute( + postgresql.DropEnumType( + postgresql.ENUM(name=enum["name"], schema=enum["schema"]) + ) + ) + + +@prepare_for_drop_tables.for_db("postgresql") +def prepare_for_drop_tables(config, connection): + """Ensure there are no locks on the current username/database.""" + + result = connection.exec_driver_sql( + "select pid, state, wait_event_type, query " + # "select pg_terminate_backend(pid), state, wait_event_type " + "from pg_stat_activity where " + "usename=current_user " + "and datname=current_database() and state='idle in transaction' " + "and pid != pg_backend_pid()" + ) + rows = result.all() # noqa + if rows: + warn_test_suite( + 
"PostgreSQL may not be able to DROP tables due to " + "idle in transaction: %s" + % ("; ".join(row._mapping["query"] for row in rows)) + ) + + +@upsert.for_db("postgresql") +def _upsert( + cfg, table, returning, *, set_lambda=None, sort_by_parameter_order=False +): + from sqlalchemy.dialects.postgresql import insert + + stmt = insert(table) + + table_pk = inspect(table).selectable + + if set_lambda: + stmt = stmt.on_conflict_do_update( + index_elements=table_pk.primary_key, set_=set_lambda(stmt.excluded) + ) + else: + stmt = stmt.on_conflict_do_nothing() + + stmt = stmt.returning( + *returning, sort_by_parameter_order=sort_by_parameter_order + ) + return stmt + + +_extensions = [ + ("citext", (13,)), + ("hstore", (13,)), +] + + +@post_configure_engine.for_db("postgresql") +def _create_citext_extension(url, engine, follower_ident): + with engine.connect() as conn: + for extension, min_version in _extensions: + if conn.dialect.server_version_info >= min_version: + conn.execute( + text(f"CREATE EXTENSION IF NOT EXISTS {extension}") + ) + conn.commit() diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/psycopg.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/psycopg.py new file mode 100644 index 00000000..b8c0087d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/psycopg.py @@ -0,0 +1,776 @@ +# dialects/postgresql/psycopg.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +r""" +.. dialect:: postgresql+psycopg + :name: psycopg (a.k.a. psycopg 3) + :dbapi: psycopg + :connectstring: postgresql+psycopg://user:password@host:port/dbname[?key=value&key=value...] + :url: https://pypi.org/project/psycopg/ + +``psycopg`` is the package and module name for version 3 of the ``psycopg`` +database driver, formerly known as ``psycopg2``. This driver is different +enough from its ``psycopg2`` predecessor that SQLAlchemy supports it +via a totally separate dialect; support for ``psycopg2`` is expected to remain +for as long as that package continues to function for modern Python versions, +and also remains the default dialect for the ``postgresql://`` dialect +series. + +The SQLAlchemy ``psycopg`` dialect provides both a sync and an async +implementation under the same dialect name. The proper version is +selected depending on how the engine is created: + +* calling :func:`_sa.create_engine` with ``postgresql+psycopg://...`` will + automatically select the sync version, e.g.:: + + from sqlalchemy import create_engine + sync_engine = create_engine("postgresql+psycopg://scott:tiger@localhost/test") + +* calling :func:`_asyncio.create_async_engine` with + ``postgresql+psycopg://...`` will automatically select the async version, + e.g.:: + + from sqlalchemy.ext.asyncio import create_async_engine + asyncio_engine = create_async_engine("postgresql+psycopg://scott:tiger@localhost/test") + +The asyncio version of the dialect may also be specified explicitly using the +``psycopg_async`` suffix, as:: + + from sqlalchemy.ext.asyncio import create_async_engine + asyncio_engine = create_async_engine("postgresql+psycopg_async://scott:tiger@localhost/test") + +.. seealso:: + + :ref:`postgresql_psycopg2` - The SQLAlchemy ``psycopg`` + dialect shares most of its behavior with the ``psycopg2`` dialect. + Further documentation is available there. 
+ +Using a different Cursor class +------------------------------ + +One of the differences between ``psycopg`` and the older ``psycopg2`` +is how bound parameters are handled: ``psycopg2`` would bind them +client side, while ``psycopg`` by default will bind them server side. + +It's possible to configure ``psycopg`` to do client side binding by +specifying the ``cursor_factory`` to be ``ClientCursor`` when creating +the engine:: + + from psycopg import ClientCursor + + client_side_engine = create_engine( + "postgresql+psycopg://...", + connect_args={"cursor_factory": ClientCursor}, + ) + +Similarly when using an async engine the ``AsyncClientCursor`` can be +specified:: + + from psycopg import AsyncClientCursor + + client_side_engine = create_async_engine( + "postgresql+psycopg://...", + connect_args={"cursor_factory": AsyncClientCursor}, + ) + +.. seealso:: + + `Client-side-binding cursors `_ + +""" # noqa +from __future__ import annotations + +from collections import deque +import logging +import re +from typing import cast +from typing import TYPE_CHECKING + +from . import ranges +from ._psycopg_common import _PGDialect_common_psycopg +from ._psycopg_common import _PGExecutionContext_common_psycopg +from .base import INTERVAL +from .base import PGCompiler +from .base import PGIdentifierPreparer +from .base import REGCONFIG +from .json import JSON +from .json import JSONB +from .json import JSONPathType +from .types import CITEXT +from ... import pool +from ... import util +from ...engine import AdaptedConnection +from ...sql import sqltypes +from ...util.concurrency import await_fallback +from ...util.concurrency import await_only + +if TYPE_CHECKING: + from typing import Iterable + + from psycopg import AsyncConnection + +logger = logging.getLogger("sqlalchemy.dialects.postgresql") + + +class _PGString(sqltypes.String): + render_bind_cast = True + + +class _PGREGCONFIG(REGCONFIG): + render_bind_cast = True + + +class _PGJSON(JSON): + render_bind_cast = True + + def bind_processor(self, dialect): + return self._make_bind_processor(None, dialect._psycopg_Json) + + def result_processor(self, dialect, coltype): + return None + + +class _PGJSONB(JSONB): + render_bind_cast = True + + def bind_processor(self, dialect): + return self._make_bind_processor(None, dialect._psycopg_Jsonb) + + def result_processor(self, dialect, coltype): + return None + + +class _PGJSONIntIndexType(sqltypes.JSON.JSONIntIndexType): + __visit_name__ = "json_int_index" + + render_bind_cast = True + + +class _PGJSONStrIndexType(sqltypes.JSON.JSONStrIndexType): + __visit_name__ = "json_str_index" + + render_bind_cast = True + + +class _PGJSONPathType(JSONPathType): + pass + + +class _PGInterval(INTERVAL): + render_bind_cast = True + + +class _PGTimeStamp(sqltypes.DateTime): + render_bind_cast = True + + +class _PGDate(sqltypes.Date): + render_bind_cast = True + + +class _PGTime(sqltypes.Time): + render_bind_cast = True + + +class _PGInteger(sqltypes.Integer): + render_bind_cast = True + + +class _PGSmallInteger(sqltypes.SmallInteger): + render_bind_cast = True + + +class _PGNullType(sqltypes.NullType): + render_bind_cast = True + + +class _PGBigInteger(sqltypes.BigInteger): + render_bind_cast = True + + +class _PGBoolean(sqltypes.Boolean): + render_bind_cast = True + + +class _PsycopgRange(ranges.AbstractSingleRangeImpl): + def bind_processor(self, dialect): + psycopg_Range = cast(PGDialect_psycopg, dialect)._psycopg_Range + + def to_range(value): + if isinstance(value, ranges.Range): + value = psycopg_Range( + 
value.lower, value.upper, value.bounds, value.empty + ) + return value + + return to_range + + def result_processor(self, dialect, coltype): + def to_range(value): + if value is not None: + value = ranges.Range( + value._lower, + value._upper, + bounds=value._bounds if value._bounds else "[)", + empty=not value._bounds, + ) + return value + + return to_range + + +class _PsycopgMultiRange(ranges.AbstractMultiRangeImpl): + def bind_processor(self, dialect): + psycopg_Range = cast(PGDialect_psycopg, dialect)._psycopg_Range + psycopg_Multirange = cast( + PGDialect_psycopg, dialect + )._psycopg_Multirange + + NoneType = type(None) + + def to_range(value): + if isinstance(value, (str, NoneType, psycopg_Multirange)): + return value + + return psycopg_Multirange( + [ + psycopg_Range( + element.lower, + element.upper, + element.bounds, + element.empty, + ) + for element in cast("Iterable[ranges.Range]", value) + ] + ) + + return to_range + + def result_processor(self, dialect, coltype): + def to_range(value): + if value is None: + return None + else: + return ranges.MultiRange( + ranges.Range( + elem._lower, + elem._upper, + bounds=elem._bounds if elem._bounds else "[)", + empty=not elem._bounds, + ) + for elem in value + ) + + return to_range + + +class PGExecutionContext_psycopg(_PGExecutionContext_common_psycopg): + pass + + +class PGCompiler_psycopg(PGCompiler): + pass + + +class PGIdentifierPreparer_psycopg(PGIdentifierPreparer): + pass + + +def _log_notices(diagnostic): + logger.info("%s: %s", diagnostic.severity, diagnostic.message_primary) + + +class PGDialect_psycopg(_PGDialect_common_psycopg): + driver = "psycopg" + + supports_statement_cache = True + supports_server_side_cursors = True + default_paramstyle = "pyformat" + supports_sane_multi_rowcount = True + + execution_ctx_cls = PGExecutionContext_psycopg + statement_compiler = PGCompiler_psycopg + preparer = PGIdentifierPreparer_psycopg + psycopg_version = (0, 0) + + _has_native_hstore = True + _psycopg_adapters_map = None + + colspecs = util.update_copy( + _PGDialect_common_psycopg.colspecs, + { + sqltypes.String: _PGString, + REGCONFIG: _PGREGCONFIG, + JSON: _PGJSON, + CITEXT: CITEXT, + sqltypes.JSON: _PGJSON, + JSONB: _PGJSONB, + sqltypes.JSON.JSONPathType: _PGJSONPathType, + sqltypes.JSON.JSONIntIndexType: _PGJSONIntIndexType, + sqltypes.JSON.JSONStrIndexType: _PGJSONStrIndexType, + sqltypes.Interval: _PGInterval, + INTERVAL: _PGInterval, + sqltypes.Date: _PGDate, + sqltypes.DateTime: _PGTimeStamp, + sqltypes.Time: _PGTime, + sqltypes.Integer: _PGInteger, + sqltypes.SmallInteger: _PGSmallInteger, + sqltypes.BigInteger: _PGBigInteger, + ranges.AbstractSingleRange: _PsycopgRange, + ranges.AbstractMultiRange: _PsycopgMultiRange, + }, + ) + + def __init__(self, **kwargs): + super().__init__(**kwargs) + + if self.dbapi: + m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", self.dbapi.__version__) + if m: + self.psycopg_version = tuple( + int(x) for x in m.group(1, 2, 3) if x is not None + ) + + if self.psycopg_version < (3, 0, 2): + raise ImportError( + "psycopg version 3.0.2 or higher is required." 
+ ) + + from psycopg.adapt import AdaptersMap + + self._psycopg_adapters_map = adapters_map = AdaptersMap( + self.dbapi.adapters + ) + + if self._native_inet_types is False: + import psycopg.types.string + + adapters_map.register_loader( + "inet", psycopg.types.string.TextLoader + ) + adapters_map.register_loader( + "cidr", psycopg.types.string.TextLoader + ) + + if self._json_deserializer: + from psycopg.types.json import set_json_loads + + set_json_loads(self._json_deserializer, adapters_map) + + if self._json_serializer: + from psycopg.types.json import set_json_dumps + + set_json_dumps(self._json_serializer, adapters_map) + + def create_connect_args(self, url): + # see https://github.com/psycopg/psycopg/issues/83 + cargs, cparams = super().create_connect_args(url) + + if self._psycopg_adapters_map: + cparams["context"] = self._psycopg_adapters_map + if self.client_encoding is not None: + cparams["client_encoding"] = self.client_encoding + return cargs, cparams + + def _type_info_fetch(self, connection, name): + from psycopg.types import TypeInfo + + return TypeInfo.fetch(connection.connection.driver_connection, name) + + def initialize(self, connection): + super().initialize(connection) + + # PGDialect.initialize() checks server version for <= 8.2 and sets + # this flag to False if so + if not self.insert_returning: + self.insert_executemany_returning = False + + # HSTORE can't be registered until we have a connection so that + # we can look up its OID, so we set up this adapter in + # initialize() + if self.use_native_hstore: + info = self._type_info_fetch(connection, "hstore") + self._has_native_hstore = info is not None + if self._has_native_hstore: + from psycopg.types.hstore import register_hstore + + # register the adapter for connections made subsequent to + # this one + register_hstore(info, self._psycopg_adapters_map) + + # register the adapter for this connection + register_hstore(info, connection.connection) + + @classmethod + def import_dbapi(cls): + import psycopg + + return psycopg + + @classmethod + def get_async_dialect_cls(cls, url): + return PGDialectAsync_psycopg + + @util.memoized_property + def _isolation_lookup(self): + return { + "READ COMMITTED": self.dbapi.IsolationLevel.READ_COMMITTED, + "READ UNCOMMITTED": self.dbapi.IsolationLevel.READ_UNCOMMITTED, + "REPEATABLE READ": self.dbapi.IsolationLevel.REPEATABLE_READ, + "SERIALIZABLE": self.dbapi.IsolationLevel.SERIALIZABLE, + } + + @util.memoized_property + def _psycopg_Json(self): + from psycopg.types import json + + return json.Json + + @util.memoized_property + def _psycopg_Jsonb(self): + from psycopg.types import json + + return json.Jsonb + + @util.memoized_property + def _psycopg_TransactionStatus(self): + from psycopg.pq import TransactionStatus + + return TransactionStatus + + @util.memoized_property + def _psycopg_Range(self): + from psycopg.types.range import Range + + return Range + + @util.memoized_property + def _psycopg_Multirange(self): + from psycopg.types.multirange import Multirange + + return Multirange + + def _do_isolation_level(self, connection, autocommit, isolation_level): + connection.autocommit = autocommit + connection.isolation_level = isolation_level + + def get_isolation_level(self, dbapi_connection): + status_before = dbapi_connection.info.transaction_status + value = super().get_isolation_level(dbapi_connection) + + # don't rely on psycopg providing enum symbols, compare with + # eq/ne + if status_before == self._psycopg_TransactionStatus.IDLE: + dbapi_connection.rollback() + return 
value + + def set_isolation_level(self, dbapi_connection, level): + if level == "AUTOCOMMIT": + self._do_isolation_level( + dbapi_connection, autocommit=True, isolation_level=None + ) + else: + self._do_isolation_level( + dbapi_connection, + autocommit=False, + isolation_level=self._isolation_lookup[level], + ) + + def set_readonly(self, connection, value): + connection.read_only = value + + def get_readonly(self, connection): + return connection.read_only + + def on_connect(self): + def notices(conn): + conn.add_notice_handler(_log_notices) + + fns = [notices] + + if self.isolation_level is not None: + + def on_connect(conn): + self.set_isolation_level(conn, self.isolation_level) + + fns.append(on_connect) + + # fns always has the notices function + def on_connect(conn): + for fn in fns: + fn(conn) + + return on_connect + + def is_disconnect(self, e, connection, cursor): + if isinstance(e, self.dbapi.Error) and connection is not None: + if connection.closed or connection.broken: + return True + return False + + def _do_prepared_twophase(self, connection, command, recover=False): + dbapi_conn = connection.connection.dbapi_connection + if ( + recover + # don't rely on psycopg providing enum symbols, compare with + # eq/ne + or dbapi_conn.info.transaction_status + != self._psycopg_TransactionStatus.IDLE + ): + dbapi_conn.rollback() + before_autocommit = dbapi_conn.autocommit + try: + if not before_autocommit: + self._do_autocommit(dbapi_conn, True) + dbapi_conn.execute(command) + finally: + if not before_autocommit: + self._do_autocommit(dbapi_conn, before_autocommit) + + def do_rollback_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + if is_prepared: + self._do_prepared_twophase( + connection, f"ROLLBACK PREPARED '{xid}'", recover=recover + ) + else: + self.do_rollback(connection.connection) + + def do_commit_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + if is_prepared: + self._do_prepared_twophase( + connection, f"COMMIT PREPARED '{xid}'", recover=recover + ) + else: + self.do_commit(connection.connection) + + @util.memoized_property + def _dialect_specific_select_one(self): + return ";" + + +class AsyncAdapt_psycopg_cursor: + __slots__ = ("_cursor", "await_", "_rows") + + _psycopg_ExecStatus = None + + def __init__(self, cursor, await_) -> None: + self._cursor = cursor + self.await_ = await_ + self._rows = deque() + + def __getattr__(self, name): + return getattr(self._cursor, name) + + @property + def arraysize(self): + return self._cursor.arraysize + + @arraysize.setter + def arraysize(self, value): + self._cursor.arraysize = value + + def close(self): + self._rows.clear() + # Normal cursor just call _close() in a non-sync way. 
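+        # (the server-side cursor subclass overrides close() to await
+        # the driver's async close() instead)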
+ self._cursor._close() + + def execute(self, query, params=None, **kw): + result = self.await_(self._cursor.execute(query, params, **kw)) + # sqlalchemy result is not async, so need to pull all rows here + res = self._cursor.pgresult + + # don't rely on psycopg providing enum symbols, compare with + # eq/ne + if res and res.status == self._psycopg_ExecStatus.TUPLES_OK: + rows = self.await_(self._cursor.fetchall()) + self._rows = deque(rows) + return result + + def executemany(self, query, params_seq): + return self.await_(self._cursor.executemany(query, params_seq)) + + def __iter__(self): + while self._rows: + yield self._rows.popleft() + + def fetchone(self): + if self._rows: + return self._rows.popleft() + else: + return None + + def fetchmany(self, size=None): + if size is None: + size = self._cursor.arraysize + + rr = self._rows + return [rr.popleft() for _ in range(min(size, len(rr)))] + + def fetchall(self): + retval = list(self._rows) + self._rows.clear() + return retval + + +class AsyncAdapt_psycopg_ss_cursor(AsyncAdapt_psycopg_cursor): + def execute(self, query, params=None, **kw): + self.await_(self._cursor.execute(query, params, **kw)) + return self + + def close(self): + self.await_(self._cursor.close()) + + def fetchone(self): + return self.await_(self._cursor.fetchone()) + + def fetchmany(self, size=0): + return self.await_(self._cursor.fetchmany(size)) + + def fetchall(self): + return self.await_(self._cursor.fetchall()) + + def __iter__(self): + iterator = self._cursor.__aiter__() + while True: + try: + yield self.await_(iterator.__anext__()) + except StopAsyncIteration: + break + + +class AsyncAdapt_psycopg_connection(AdaptedConnection): + _connection: AsyncConnection + __slots__ = () + await_ = staticmethod(await_only) + + def __init__(self, connection) -> None: + self._connection = connection + + def __getattr__(self, name): + return getattr(self._connection, name) + + def execute(self, query, params=None, **kw): + cursor = self.await_(self._connection.execute(query, params, **kw)) + return AsyncAdapt_psycopg_cursor(cursor, self.await_) + + def cursor(self, *args, **kw): + cursor = self._connection.cursor(*args, **kw) + if hasattr(cursor, "name"): + return AsyncAdapt_psycopg_ss_cursor(cursor, self.await_) + else: + return AsyncAdapt_psycopg_cursor(cursor, self.await_) + + def commit(self): + self.await_(self._connection.commit()) + + def rollback(self): + self.await_(self._connection.rollback()) + + def close(self): + self.await_(self._connection.close()) + + @property + def autocommit(self): + return self._connection.autocommit + + @autocommit.setter + def autocommit(self, value): + self.set_autocommit(value) + + def set_autocommit(self, value): + self.await_(self._connection.set_autocommit(value)) + + def set_isolation_level(self, value): + self.await_(self._connection.set_isolation_level(value)) + + def set_read_only(self, value): + self.await_(self._connection.set_read_only(value)) + + def set_deferrable(self, value): + self.await_(self._connection.set_deferrable(value)) + + +class AsyncAdaptFallback_psycopg_connection(AsyncAdapt_psycopg_connection): + __slots__ = () + await_ = staticmethod(await_fallback) + + +class PsycopgAdaptDBAPI: + def __init__(self, psycopg) -> None: + self.psycopg = psycopg + + for k, v in self.psycopg.__dict__.items(): + if k != "connect": + self.__dict__[k] = v + + def connect(self, *arg, **kw): + async_fallback = kw.pop("async_fallback", False) + creator_fn = kw.pop( + "async_creator_fn", self.psycopg.AsyncConnection.connect + ) + if 
util.asbool(async_fallback): + return AsyncAdaptFallback_psycopg_connection( + await_fallback(creator_fn(*arg, **kw)) + ) + else: + return AsyncAdapt_psycopg_connection( + await_only(creator_fn(*arg, **kw)) + ) + + +class PGDialectAsync_psycopg(PGDialect_psycopg): + is_async = True + supports_statement_cache = True + + @classmethod + def import_dbapi(cls): + import psycopg + from psycopg.pq import ExecStatus + + AsyncAdapt_psycopg_cursor._psycopg_ExecStatus = ExecStatus + + return PsycopgAdaptDBAPI(psycopg) + + @classmethod + def get_pool_class(cls, url): + async_fallback = url.query.get("async_fallback", False) + + if util.asbool(async_fallback): + return pool.FallbackAsyncAdaptedQueuePool + else: + return pool.AsyncAdaptedQueuePool + + def _type_info_fetch(self, connection, name): + from psycopg.types import TypeInfo + + adapted = connection.connection + return adapted.await_(TypeInfo.fetch(adapted.driver_connection, name)) + + def _do_isolation_level(self, connection, autocommit, isolation_level): + connection.set_autocommit(autocommit) + connection.set_isolation_level(isolation_level) + + def _do_autocommit(self, connection, value): + connection.set_autocommit(value) + + def set_readonly(self, connection, value): + connection.set_read_only(value) + + def set_deferrable(self, connection, value): + connection.set_deferrable(value) + + def get_driver_connection(self, connection): + return connection._connection + + +dialect = PGDialect_psycopg +dialect_async = PGDialectAsync_psycopg diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/psycopg2.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/psycopg2.py new file mode 100644 index 00000000..fc05aca9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/psycopg2.py @@ -0,0 +1,886 @@ +# dialects/postgresql/psycopg2.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +r""" +.. dialect:: postgresql+psycopg2 + :name: psycopg2 + :dbapi: psycopg2 + :connectstring: postgresql+psycopg2://user:password@host:port/dbname[?key=value&key=value...] + :url: https://pypi.org/project/psycopg2/ + +.. _psycopg2_toplevel: + +psycopg2 Connect Arguments +-------------------------- + +Keyword arguments that are specific to the SQLAlchemy psycopg2 dialect +may be passed to :func:`_sa.create_engine()`, and include the following: + + +* ``isolation_level``: This option, available for all PostgreSQL dialects, + includes the ``AUTOCOMMIT`` isolation level when using the psycopg2 + dialect. This option sets the **default** isolation level for the + connection that is set immediately upon connection to the database before + the connection is pooled. This option is generally superseded by the more + modern :paramref:`_engine.Connection.execution_options.isolation_level` + execution option, detailed at :ref:`dbapi_autocommit`. + + .. seealso:: + + :ref:`psycopg2_isolation_level` + + :ref:`dbapi_autocommit` + + +* ``client_encoding``: sets the client encoding in a libpq-agnostic way, + using psycopg2's ``set_client_encoding()`` method. + + .. seealso:: + + :ref:`psycopg2_unicode` + + +* ``executemany_mode``, ``executemany_batch_page_size``, + ``executemany_values_page_size``: Allows use of psycopg2 + extensions for optimizing "executemany"-style queries. See the referenced + section below for details. + + .. 
seealso:: + + :ref:`psycopg2_executemany_mode` + +.. tip:: + + The above keyword arguments are **dialect** keyword arguments, meaning + that they are passed as explicit keyword arguments to :func:`_sa.create_engine()`:: + + engine = create_engine( + "postgresql+psycopg2://scott:tiger@localhost/test", + isolation_level="SERIALIZABLE", + ) + + These should not be confused with **DBAPI** connect arguments, which + are passed as part of the :paramref:`_sa.create_engine.connect_args` + dictionary and/or are passed in the URL query string, as detailed in + the section :ref:`custom_dbapi_args`. + +.. _psycopg2_ssl: + +SSL Connections +--------------- + +The psycopg2 module has a connection argument named ``sslmode`` for +controlling its behavior regarding secure (SSL) connections. The default is +``sslmode=prefer``; it will attempt an SSL connection and if that fails it +will fall back to an unencrypted connection. ``sslmode=require`` may be used +to ensure that only secure connections are established. Consult the +psycopg2 / libpq documentation for further options that are available. + +Note that ``sslmode`` is specific to psycopg2 so it is included in the +connection URI:: + + engine = sa.create_engine( + "postgresql+psycopg2://scott:tiger@192.168.0.199:5432/test?sslmode=require" + ) + + +Unix Domain Connections +------------------------ + +psycopg2 supports connecting via Unix domain connections. When the ``host`` +portion of the URL is omitted, SQLAlchemy passes ``None`` to psycopg2, +which specifies Unix-domain communication rather than TCP/IP communication:: + + create_engine("postgresql+psycopg2://user:password@/dbname") + +By default, the socket file used is to connect to a Unix-domain socket +in ``/tmp``, or whatever socket directory was specified when PostgreSQL +was built. This value can be overridden by passing a pathname to psycopg2, +using ``host`` as an additional keyword argument:: + + create_engine("postgresql+psycopg2://user:password@/dbname?host=/var/lib/postgresql") + +.. warning:: The format accepted here allows for a hostname in the main URL + in addition to the "host" query string argument. **When using this URL + format, the initial host is silently ignored**. That is, this URL:: + + engine = create_engine("postgresql+psycopg2://user:password@myhost1/dbname?host=myhost2") + + Above, the hostname ``myhost1`` is **silently ignored and discarded.** The + host which is connected is the ``myhost2`` host. + + This is to maintain some degree of compatibility with PostgreSQL's own URL + format which has been tested to behave the same way and for which tools like + PifPaf hardcode two hostnames. + +.. seealso:: + + `PQconnectdbParams \ + `_ + +.. _psycopg2_multi_host: + +Specifying multiple fallback hosts +----------------------------------- + +psycopg2 supports multiple connection points in the connection string. +When the ``host`` parameter is used multiple times in the query section of +the URL, SQLAlchemy will create a single string of the host and port +information provided to make the connections. Tokens may consist of +``host::port`` or just ``host``; in the latter case, the default port +is selected by libpq. 
In the example below, three host connections +are specified, for ``HostA::PortA``, ``HostB`` connecting to the default port, +and ``HostC::PortC``:: + + create_engine( + "postgresql+psycopg2://user:password@/dbname?host=HostA:PortA&host=HostB&host=HostC:PortC" + ) + +As an alternative, libpq query string format also may be used; this specifies +``host`` and ``port`` as single query string arguments with comma-separated +lists - the default port can be chosen by indicating an empty value +in the comma separated list:: + + create_engine( + "postgresql+psycopg2://user:password@/dbname?host=HostA,HostB,HostC&port=PortA,,PortC" + ) + +With either URL style, connections to each host is attempted based on a +configurable strategy, which may be configured using the libpq +``target_session_attrs`` parameter. Per libpq this defaults to ``any`` +which indicates a connection to each host is then attempted until a connection is successful. +Other strategies include ``primary``, ``prefer-standby``, etc. The complete +list is documented by PostgreSQL at +`libpq connection strings `_. + +For example, to indicate two hosts using the ``primary`` strategy:: + + create_engine( + "postgresql+psycopg2://user:password@/dbname?host=HostA:PortA&host=HostB&host=HostC:PortC&target_session_attrs=primary" + ) + +.. versionchanged:: 1.4.40 Port specification in psycopg2 multiple host format + is repaired, previously ports were not correctly interpreted in this context. + libpq comma-separated format is also now supported. + +.. versionadded:: 1.3.20 Support for multiple hosts in PostgreSQL connection + string. + +.. seealso:: + + `libpq connection strings `_ - please refer + to this section in the libpq documentation for complete background on multiple host support. + + +Empty DSN Connections / Environment Variable Connections +--------------------------------------------------------- + +The psycopg2 DBAPI can connect to PostgreSQL by passing an empty DSN to the +libpq client library, which by default indicates to connect to a localhost +PostgreSQL database that is open for "trust" connections. This behavior can be +further tailored using a particular set of environment variables which are +prefixed with ``PG_...``, which are consumed by ``libpq`` to take the place of +any or all elements of the connection string. + +For this form, the URL can be passed without any elements other than the +initial scheme:: + + engine = create_engine('postgresql+psycopg2://') + +In the above form, a blank "dsn" string is passed to the ``psycopg2.connect()`` +function which in turn represents an empty DSN passed to libpq. + +.. versionadded:: 1.3.2 support for parameter-less connections with psycopg2. + +.. seealso:: + + `Environment Variables\ + `_ - + PostgreSQL documentation on how to use ``PG_...`` + environment variables for connections. + +.. _psycopg2_execution_options: + +Per-Statement/Connection Execution Options +------------------------------------------- + +The following DBAPI-specific options are respected when used with +:meth:`_engine.Connection.execution_options`, +:meth:`.Executable.execution_options`, +:meth:`_query.Query.execution_options`, +in addition to those not specific to DBAPIs: + +* ``isolation_level`` - Set the transaction isolation level for the lifespan + of a :class:`_engine.Connection` (can only be set on a connection, + not a statement + or query). See :ref:`psycopg2_isolation_level`. 
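+
+Both modes take effect for "executemany" style executions, i.e. a single
+statement invoked against a list of parameter sets.  A minimal sketch (the
+table and column names are placeholders)::
+
+    with engine.begin() as conn:
+        conn.execute(
+            mytable.insert(),
+            [{"x": 1}, {"x": 2}, {"x": 3}],
+        )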
+ +* ``stream_results`` - Enable or disable usage of psycopg2 server side + cursors - this feature makes use of "named" cursors in combination with + special result handling methods so that result rows are not fully buffered. + Defaults to False, meaning cursors are buffered by default. + +* ``max_row_buffer`` - when using ``stream_results``, an integer value that + specifies the maximum number of rows to buffer at a time. This is + interpreted by the :class:`.BufferedRowCursorResult`, and if omitted the + buffer will grow to ultimately store 1000 rows at a time. + + .. versionchanged:: 1.4 The ``max_row_buffer`` size can now be greater than + 1000, and the buffer will grow to that size. + +.. _psycopg2_batch_mode: + +.. _psycopg2_executemany_mode: + +Psycopg2 Fast Execution Helpers +------------------------------- + +Modern versions of psycopg2 include a feature known as +`Fast Execution Helpers \ +`_, which +have been shown in benchmarking to improve psycopg2's executemany() +performance, primarily with INSERT statements, by at least +an order of magnitude. + +SQLAlchemy implements a native form of the "insert many values" +handler that will rewrite a single-row INSERT statement to accommodate for +many values at once within an extended VALUES clause; this handler is +equivalent to psycopg2's ``execute_values()`` handler; an overview of this +feature and its configuration are at :ref:`engine_insertmanyvalues`. + +.. versionadded:: 2.0 Replaced psycopg2's ``execute_values()`` fast execution + helper with a native SQLAlchemy mechanism known as + :ref:`insertmanyvalues `. + +The psycopg2 dialect retains the ability to use the psycopg2-specific +``execute_batch()`` feature, although it is not expected that this is a widely +used feature. The use of this extension may be enabled using the +``executemany_mode`` flag which may be passed to :func:`_sa.create_engine`:: + + engine = create_engine( + "postgresql+psycopg2://scott:tiger@host/dbname", + executemany_mode='values_plus_batch') + + +Possible options for ``executemany_mode`` include: + +* ``values_only`` - this is the default value. SQLAlchemy's native + :ref:`insertmanyvalues ` handler is used for qualifying + INSERT statements, assuming + :paramref:`_sa.create_engine.use_insertmanyvalues` is left at + its default value of ``True``. This handler rewrites simple + INSERT statements to include multiple VALUES clauses so that many + parameter sets can be inserted with one statement. + +* ``'values_plus_batch'``- SQLAlchemy's native + :ref:`insertmanyvalues ` handler is used for qualifying + INSERT statements, assuming + :paramref:`_sa.create_engine.use_insertmanyvalues` is left at its default + value of ``True``. Then, psycopg2's ``execute_batch()`` handler is used for + qualifying UPDATE and DELETE statements when executed with multiple parameter + sets. When using this mode, the :attr:`_engine.CursorResult.rowcount` + attribute will not contain a value for executemany-style executions against + UPDATE and DELETE statements. + +.. versionchanged:: 2.0 Removed the ``'batch'`` and ``'None'`` options + from psycopg2 ``executemany_mode``. Control over batching for INSERT + statements is now configured via the + :paramref:`_sa.create_engine.use_insertmanyvalues` engine-level parameter. 
+ +The term "qualifying statements" refers to the statement being executed +being a Core :func:`_expression.insert`, :func:`_expression.update` +or :func:`_expression.delete` construct, and **not** a plain textual SQL +string or one constructed using :func:`_expression.text`. It also may **not** be +a special "extension" statement such as an "ON CONFLICT" "upsert" statement. +When using the ORM, all insert/update/delete statements used by the ORM flush process +are qualifying. + +The "page size" for the psycopg2 "batch" strategy can be affected +by using the ``executemany_batch_page_size`` parameter, which defaults to +100. + +For the "insertmanyvalues" feature, the page size can be controlled using the +:paramref:`_sa.create_engine.insertmanyvalues_page_size` parameter, +which defaults to 1000. An example of modifying both parameters +is below:: + + engine = create_engine( + "postgresql+psycopg2://scott:tiger@host/dbname", + executemany_mode='values_plus_batch', + insertmanyvalues_page_size=5000, executemany_batch_page_size=500) + +.. seealso:: + + :ref:`engine_insertmanyvalues` - background on "insertmanyvalues" + + :ref:`tutorial_multiple_parameters` - General information on using the + :class:`_engine.Connection` + object to execute statements in such a way as to make + use of the DBAPI ``.executemany()`` method. + + +.. _psycopg2_unicode: + +Unicode with Psycopg2 +---------------------- + +The psycopg2 DBAPI driver supports Unicode data transparently. + +The client character encoding can be controlled for the psycopg2 dialect +in the following ways: + +* For PostgreSQL 9.1 and above, the ``client_encoding`` parameter may be + passed in the database URL; this parameter is consumed by the underlying + ``libpq`` PostgreSQL client library:: + + engine = create_engine("postgresql+psycopg2://user:pass@host/dbname?client_encoding=utf8") + + Alternatively, the above ``client_encoding`` value may be passed using + :paramref:`_sa.create_engine.connect_args` for programmatic establishment with + ``libpq``:: + + engine = create_engine( + "postgresql+psycopg2://user:pass@host/dbname", + connect_args={'client_encoding': 'utf8'} + ) + +* For all PostgreSQL versions, psycopg2 supports a client-side encoding + value that will be passed to database connections when they are first + established. The SQLAlchemy psycopg2 dialect supports this using the + ``client_encoding`` parameter passed to :func:`_sa.create_engine`:: + + engine = create_engine( + "postgresql+psycopg2://user:pass@host/dbname", + client_encoding="utf8" + ) + + .. tip:: The above ``client_encoding`` parameter admittedly is very similar + in appearance to usage of the parameter within the + :paramref:`_sa.create_engine.connect_args` dictionary; the difference + above is that the parameter is consumed by psycopg2 and is + passed to the database connection using ``SET client_encoding TO + 'utf8'``; in the previously mentioned style, the parameter is instead + passed through psycopg2 and consumed by the ``libpq`` library. + +* A common way to set up client encoding with PostgreSQL databases is to + ensure it is configured within the server-side postgresql.conf file; + this is the recommended way to set encoding for a server that is + consistently of one encoding in all databases:: + + # postgresql.conf file + + # client_encoding = sql_ascii # actually, defaults to database + # encoding + client_encoding = utf8 + + + +Transactions +------------ + +The psycopg2 dialect fully supports SAVEPOINT and two-phase commit operations. + +.. 
_psycopg2_isolation_level: + +Psycopg2 Transaction Isolation Level +------------------------------------- + +As discussed in :ref:`postgresql_isolation_level`, +all PostgreSQL dialects support setting of transaction isolation level +both via the ``isolation_level`` parameter passed to :func:`_sa.create_engine` +, +as well as the ``isolation_level`` argument used by +:meth:`_engine.Connection.execution_options`. When using the psycopg2 dialect +, these +options make use of psycopg2's ``set_isolation_level()`` connection method, +rather than emitting a PostgreSQL directive; this is because psycopg2's +API-level setting is always emitted at the start of each transaction in any +case. + +The psycopg2 dialect supports these constants for isolation level: + +* ``READ COMMITTED`` +* ``READ UNCOMMITTED`` +* ``REPEATABLE READ`` +* ``SERIALIZABLE`` +* ``AUTOCOMMIT`` + +.. seealso:: + + :ref:`postgresql_isolation_level` + + :ref:`pg8000_isolation_level` + + +NOTICE logging +--------------- + +The psycopg2 dialect will log PostgreSQL NOTICE messages +via the ``sqlalchemy.dialects.postgresql`` logger. When this logger +is set to the ``logging.INFO`` level, notice messages will be logged:: + + import logging + + logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO) + +Above, it is assumed that logging is configured externally. If this is not +the case, configuration such as ``logging.basicConfig()`` must be utilized:: + + import logging + + logging.basicConfig() # log messages to stdout + logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO) + +.. seealso:: + + `Logging HOWTO `_ - on the python.org website + +.. _psycopg2_hstore: + +HSTORE type +------------ + +The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of +the HSTORE type. The SQLAlchemy psycopg2 dialect will enable this extension +by default when psycopg2 version 2.4 or greater is used, and +it is detected that the target database has the HSTORE type set up for use. +In other words, when the dialect makes the first +connection, a sequence like the following is performed: + +1. Request the available HSTORE oids using + ``psycopg2.extras.HstoreAdapter.get_oids()``. + If this function returns a list of HSTORE identifiers, we then determine + that the ``HSTORE`` extension is present. + This function is **skipped** if the version of psycopg2 installed is + less than version 2.4. + +2. If the ``use_native_hstore`` flag is at its default of ``True``, and + we've detected that ``HSTORE`` oids are available, the + ``psycopg2.extensions.register_hstore()`` extension is invoked for all + connections. + +The ``register_hstore()`` extension has the effect of **all Python +dictionaries being accepted as parameters regardless of the type of target +column in SQL**. The dictionaries are converted by this extension into a +textual HSTORE expression. If this behavior is not desired, disable the +use of the hstore extension by setting ``use_native_hstore`` to ``False`` as +follows:: + + engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test", + use_native_hstore=False) + +The ``HSTORE`` type is **still supported** when the +``psycopg2.extensions.register_hstore()`` extension is not used. It merely +means that the coercion between Python dictionaries and the HSTORE +string format, on both the parameter side and the result side, will take +place within SQLAlchemy's own marshalling logic, and not that of ``psycopg2`` +which may be more performant. 
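+
+As a minimal sketch of round-tripping a Python dictionary through an
+``HSTORE`` column, reusing an ``engine`` from the examples above (the table
+and column names are placeholders, and the hstore extension must already be
+installed in the target database)::
+
+    from sqlalchemy import Column, Integer, MetaData, Table
+    from sqlalchemy.dialects.postgresql import HSTORE
+
+    metadata = MetaData()
+    mytable = Table(
+        "mytable",
+        metadata,
+        Column("id", Integer, primary_key=True),
+        Column("data", HSTORE),
+    )
+
+    metadata.create_all(engine)
+
+    with engine.begin() as conn:
+        conn.execute(mytable.insert(), {"data": {"key1": "value1"}})
+        row = conn.execute(mytable.select()).first()
+        # row.data comes back as a plain Python dict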
+ +""" # noqa +from __future__ import annotations + +import collections.abc as collections_abc +import logging +import re +from typing import cast + +from . import ranges +from ._psycopg_common import _PGDialect_common_psycopg +from ._psycopg_common import _PGExecutionContext_common_psycopg +from .base import PGIdentifierPreparer +from .json import JSON +from .json import JSONB +from ... import types as sqltypes +from ... import util +from ...util import FastIntFlag +from ...util import parse_user_argument_for_enum + +logger = logging.getLogger("sqlalchemy.dialects.postgresql") + + +class _PGJSON(JSON): + def result_processor(self, dialect, coltype): + return None + + +class _PGJSONB(JSONB): + def result_processor(self, dialect, coltype): + return None + + +class _Psycopg2Range(ranges.AbstractSingleRangeImpl): + _psycopg2_range_cls = "none" + + def bind_processor(self, dialect): + psycopg2_Range = getattr( + cast(PGDialect_psycopg2, dialect)._psycopg2_extras, + self._psycopg2_range_cls, + ) + + def to_range(value): + if isinstance(value, ranges.Range): + value = psycopg2_Range( + value.lower, value.upper, value.bounds, value.empty + ) + return value + + return to_range + + def result_processor(self, dialect, coltype): + def to_range(value): + if value is not None: + value = ranges.Range( + value._lower, + value._upper, + bounds=value._bounds if value._bounds else "[)", + empty=not value._bounds, + ) + return value + + return to_range + + +class _Psycopg2NumericRange(_Psycopg2Range): + _psycopg2_range_cls = "NumericRange" + + +class _Psycopg2DateRange(_Psycopg2Range): + _psycopg2_range_cls = "DateRange" + + +class _Psycopg2DateTimeRange(_Psycopg2Range): + _psycopg2_range_cls = "DateTimeRange" + + +class _Psycopg2DateTimeTZRange(_Psycopg2Range): + _psycopg2_range_cls = "DateTimeTZRange" + + +class PGExecutionContext_psycopg2(_PGExecutionContext_common_psycopg): + _psycopg2_fetched_rows = None + + def post_exec(self): + self._log_notices(self.cursor) + + def _log_notices(self, cursor): + # check also that notices is an iterable, after it's already + # established that we will be iterating through it. 
This is to get + # around test suites such as SQLAlchemy's using a Mock object for + # cursor + if not cursor.connection.notices or not isinstance( + cursor.connection.notices, collections_abc.Iterable + ): + return + + for notice in cursor.connection.notices: + # NOTICE messages have a + # newline character at the end + logger.info(notice.rstrip()) + + cursor.connection.notices[:] = [] + + +class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer): + pass + + +class ExecutemanyMode(FastIntFlag): + EXECUTEMANY_VALUES = 0 + EXECUTEMANY_VALUES_PLUS_BATCH = 1 + + +( + EXECUTEMANY_VALUES, + EXECUTEMANY_VALUES_PLUS_BATCH, +) = ExecutemanyMode.__members__.values() + + +class PGDialect_psycopg2(_PGDialect_common_psycopg): + driver = "psycopg2" + + supports_statement_cache = True + supports_server_side_cursors = True + + default_paramstyle = "pyformat" + # set to true based on psycopg2 version + supports_sane_multi_rowcount = False + execution_ctx_cls = PGExecutionContext_psycopg2 + preparer = PGIdentifierPreparer_psycopg2 + psycopg2_version = (0, 0) + use_insertmanyvalues_wo_returning = True + + returns_native_bytes = False + + _has_native_hstore = True + + colspecs = util.update_copy( + _PGDialect_common_psycopg.colspecs, + { + JSON: _PGJSON, + sqltypes.JSON: _PGJSON, + JSONB: _PGJSONB, + ranges.INT4RANGE: _Psycopg2NumericRange, + ranges.INT8RANGE: _Psycopg2NumericRange, + ranges.NUMRANGE: _Psycopg2NumericRange, + ranges.DATERANGE: _Psycopg2DateRange, + ranges.TSRANGE: _Psycopg2DateTimeRange, + ranges.TSTZRANGE: _Psycopg2DateTimeTZRange, + }, + ) + + def __init__( + self, + executemany_mode="values_only", + executemany_batch_page_size=100, + **kwargs, + ): + _PGDialect_common_psycopg.__init__(self, **kwargs) + + if self._native_inet_types: + raise NotImplementedError( + "The psycopg2 dialect does not implement " + "ipaddress type handling; native_inet_types cannot be set " + "to ``True`` when using this dialect." + ) + + # Parse executemany_mode argument, allowing it to be only one of the + # symbol names + self.executemany_mode = parse_user_argument_for_enum( + executemany_mode, + { + EXECUTEMANY_VALUES: ["values_only"], + EXECUTEMANY_VALUES_PLUS_BATCH: ["values_plus_batch"], + }, + "executemany_mode", + ) + + self.executemany_batch_page_size = executemany_batch_page_size + + if self.dbapi and hasattr(self.dbapi, "__version__"): + m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", self.dbapi.__version__) + if m: + self.psycopg2_version = tuple( + int(x) for x in m.group(1, 2, 3) if x is not None + ) + + if self.psycopg2_version < (2, 7): + raise ImportError( + "psycopg2 version 2.7 or higher is required." 
+ ) + + def initialize(self, connection): + super().initialize(connection) + self._has_native_hstore = ( + self.use_native_hstore + and self._hstore_oids(connection.connection.dbapi_connection) + is not None + ) + + self.supports_sane_multi_rowcount = ( + self.executemany_mode is not EXECUTEMANY_VALUES_PLUS_BATCH + ) + + @classmethod + def import_dbapi(cls): + import psycopg2 + + return psycopg2 + + @util.memoized_property + def _psycopg2_extensions(cls): + from psycopg2 import extensions + + return extensions + + @util.memoized_property + def _psycopg2_extras(cls): + from psycopg2 import extras + + return extras + + @util.memoized_property + def _isolation_lookup(self): + extensions = self._psycopg2_extensions + return { + "AUTOCOMMIT": extensions.ISOLATION_LEVEL_AUTOCOMMIT, + "READ COMMITTED": extensions.ISOLATION_LEVEL_READ_COMMITTED, + "READ UNCOMMITTED": extensions.ISOLATION_LEVEL_READ_UNCOMMITTED, + "REPEATABLE READ": extensions.ISOLATION_LEVEL_REPEATABLE_READ, + "SERIALIZABLE": extensions.ISOLATION_LEVEL_SERIALIZABLE, + } + + def set_isolation_level(self, dbapi_connection, level): + dbapi_connection.set_isolation_level(self._isolation_lookup[level]) + + def set_readonly(self, connection, value): + connection.readonly = value + + def get_readonly(self, connection): + return connection.readonly + + def set_deferrable(self, connection, value): + connection.deferrable = value + + def get_deferrable(self, connection): + return connection.deferrable + + def on_connect(self): + extras = self._psycopg2_extras + + fns = [] + if self.client_encoding is not None: + + def on_connect(dbapi_conn): + dbapi_conn.set_client_encoding(self.client_encoding) + + fns.append(on_connect) + + if self.dbapi: + + def on_connect(dbapi_conn): + extras.register_uuid(None, dbapi_conn) + + fns.append(on_connect) + + if self.dbapi and self.use_native_hstore: + + def on_connect(dbapi_conn): + hstore_oids = self._hstore_oids(dbapi_conn) + if hstore_oids is not None: + oid, array_oid = hstore_oids + kw = {"oid": oid} + kw["array_oid"] = array_oid + extras.register_hstore(dbapi_conn, **kw) + + fns.append(on_connect) + + if self.dbapi and self._json_deserializer: + + def on_connect(dbapi_conn): + extras.register_default_json( + dbapi_conn, loads=self._json_deserializer + ) + extras.register_default_jsonb( + dbapi_conn, loads=self._json_deserializer + ) + + fns.append(on_connect) + + if fns: + + def on_connect(dbapi_conn): + for fn in fns: + fn(dbapi_conn) + + return on_connect + else: + return None + + def do_executemany(self, cursor, statement, parameters, context=None): + if self.executemany_mode is EXECUTEMANY_VALUES_PLUS_BATCH: + if self.executemany_batch_page_size: + kwargs = {"page_size": self.executemany_batch_page_size} + else: + kwargs = {} + self._psycopg2_extras.execute_batch( + cursor, statement, parameters, **kwargs + ) + else: + cursor.executemany(statement, parameters) + + def do_begin_twophase(self, connection, xid): + connection.connection.tpc_begin(xid) + + def do_prepare_twophase(self, connection, xid): + connection.connection.tpc_prepare() + + def _do_twophase(self, dbapi_conn, operation, xid, recover=False): + if recover: + if dbapi_conn.status != self._psycopg2_extensions.STATUS_READY: + dbapi_conn.rollback() + operation(xid) + else: + operation() + + def do_rollback_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + dbapi_conn = connection.connection.dbapi_connection + self._do_twophase( + dbapi_conn, dbapi_conn.tpc_rollback, xid, recover=recover + ) + + def 
do_commit_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + dbapi_conn = connection.connection.dbapi_connection + self._do_twophase( + dbapi_conn, dbapi_conn.tpc_commit, xid, recover=recover + ) + + @util.memoized_instancemethod + def _hstore_oids(self, dbapi_connection): + extras = self._psycopg2_extras + oids = extras.HstoreAdapter.get_oids(dbapi_connection) + if oids is not None and oids[0]: + return oids[0:2] + else: + return None + + def is_disconnect(self, e, connection, cursor): + if isinstance(e, self.dbapi.Error): + # check the "closed" flag. this might not be + # present on old psycopg2 versions. Also, + # this flag doesn't actually help in a lot of disconnect + # situations, so don't rely on it. + if getattr(connection, "closed", False): + return True + + # checks based on strings. in the case that .closed + # didn't cut it, fall back onto these. + str_e = str(e).partition("\n")[0] + for msg in self._is_disconnect_messages: + idx = str_e.find(msg) + if idx >= 0 and '"' not in str_e[:idx]: + return True + return False + + @util.memoized_property + def _is_disconnect_messages(self): + return ( + # these error messages from libpq: interfaces/libpq/fe-misc.c + # and interfaces/libpq/fe-secure.c. + "terminating connection", + "closed the connection", + "connection not open", + "could not receive data from server", + "could not send data to server", + # psycopg2 client errors, psycopg2/connection.h, + # psycopg2/cursor.h + "connection already closed", + "cursor already closed", + # not sure where this path is originally from, it may + # be obsolete. It really says "losed", not "closed". + "losed the connection unexpectedly", + # these can occur in newer SSL + "connection has been closed unexpectedly", + "SSL error: decryption failed or bad record mac", + "SSL SYSCALL error: Bad file descriptor", + "SSL SYSCALL error: EOF detected", + "SSL SYSCALL error: Operation timed out", + "SSL SYSCALL error: Bad address", + # This can occur in OpenSSL 1 when an unexpected EOF occurs. + # https://www.openssl.org/docs/man1.1.1/man3/SSL_get_error.html#BUGS + # It may also occur in newer OpenSSL for a non-recoverable I/O + # error as a result of a system call that does not set 'errno' + # in libc. + "SSL SYSCALL error: Success", + ) + + +dialect = PGDialect_psycopg2 diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/psycopg2cffi.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/psycopg2cffi.py new file mode 100644 index 00000000..3cc3b69f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/psycopg2cffi.py @@ -0,0 +1,61 @@ +# dialects/postgresql/psycopg2cffi.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +r""" +.. dialect:: postgresql+psycopg2cffi + :name: psycopg2cffi + :dbapi: psycopg2cffi + :connectstring: postgresql+psycopg2cffi://user:password@host:port/dbname[?key=value&key=value...] + :url: https://pypi.org/project/psycopg2cffi/ + +``psycopg2cffi`` is an adaptation of ``psycopg2``, using CFFI for the C +layer. This makes it suitable for use in e.g. PyPy. Documentation +is as per ``psycopg2``. + +.. seealso:: + + :mod:`sqlalchemy.dialects.postgresql.psycopg2` + +""" # noqa +from .psycopg2 import PGDialect_psycopg2 +from ... 
import util + + +class PGDialect_psycopg2cffi(PGDialect_psycopg2): + driver = "psycopg2cffi" + supports_unicode_statements = True + supports_statement_cache = True + + # psycopg2cffi's first release is 2.5.0, but reports + # __version__ as 2.4.4. Subsequent releases seem to have + # fixed this. + + FEATURE_VERSION_MAP = dict( + native_json=(2, 4, 4), + native_jsonb=(2, 7, 1), + sane_multi_rowcount=(2, 4, 4), + array_oid=(2, 4, 4), + hstore_adapter=(2, 4, 4), + ) + + @classmethod + def import_dbapi(cls): + return __import__("psycopg2cffi") + + @util.memoized_property + def _psycopg2_extensions(cls): + root = __import__("psycopg2cffi", fromlist=["extensions"]) + return root.extensions + + @util.memoized_property + def _psycopg2_extras(cls): + root = __import__("psycopg2cffi", fromlist=["extras"]) + return root.extras + + +dialect = PGDialect_psycopg2cffi diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/ranges.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/ranges.py new file mode 100644 index 00000000..b793ca49 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/ranges.py @@ -0,0 +1,1029 @@ +# dialects/postgresql/ranges.py +# Copyright (C) 2013-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import dataclasses +from datetime import date +from datetime import datetime +from datetime import timedelta +from decimal import Decimal +from typing import Any +from typing import cast +from typing import Generic +from typing import List +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from .operators import ADJACENT_TO +from .operators import CONTAINED_BY +from .operators import CONTAINS +from .operators import NOT_EXTEND_LEFT_OF +from .operators import NOT_EXTEND_RIGHT_OF +from .operators import OVERLAP +from .operators import STRICTLY_LEFT_OF +from .operators import STRICTLY_RIGHT_OF +from ... import types as sqltypes +from ...sql import operators +from ...sql.type_api import TypeEngine +from ...util import py310 +from ...util.typing import Literal + +if TYPE_CHECKING: + from ...sql.elements import ColumnElement + from ...sql.type_api import _TE + from ...sql.type_api import TypeEngineMixin + +_T = TypeVar("_T", bound=Any) + +_BoundsType = Literal["()", "[)", "(]", "[]"] + +if py310: + dc_slots = {"slots": True} + dc_kwonly = {"kw_only": True} +else: + dc_slots = {} + dc_kwonly = {} + + +@dataclasses.dataclass(frozen=True, **dc_slots) +class Range(Generic[_T]): + """Represent a PostgreSQL range. + + E.g.:: + + r = Range(10, 50, bounds="()") + + The calling style is similar to that of psycopg and psycopg2, in part + to allow easier migration from previous SQLAlchemy versions that used + these objects directly. + + :param lower: Lower bound value, or None + :param upper: Upper bound value, or None + :param bounds: keyword-only, optional string value that is one of + ``"()"``, ``"[)"``, ``"(]"``, ``"[]"``. Defaults to ``"[)"``. + :param empty: keyword-only, optional bool indicating this is an "empty" + range + + .. 
versionadded:: 2.0
+
+    """
+
+    lower: Optional[_T] = None
+    """the lower bound"""
+
+    upper: Optional[_T] = None
+    """the upper bound"""
+
+    if TYPE_CHECKING:
+        bounds: _BoundsType = dataclasses.field(default="[)")
+        empty: bool = dataclasses.field(default=False)
+    else:
+        bounds: _BoundsType = dataclasses.field(default="[)", **dc_kwonly)
+        empty: bool = dataclasses.field(default=False, **dc_kwonly)
+
+    if not py310:
+
+        def __init__(
+            self,
+            lower: Optional[_T] = None,
+            upper: Optional[_T] = None,
+            *,
+            bounds: _BoundsType = "[)",
+            empty: bool = False,
+        ):
+            # no __slots__ either so we can update dict
+            self.__dict__.update(
+                {
+                    "lower": lower,
+                    "upper": upper,
+                    "bounds": bounds,
+                    "empty": empty,
+                }
+            )
+
+    def __bool__(self) -> bool:
+        return not self.empty
+
+    @property
+    def isempty(self) -> bool:
+        "A synonym for the 'empty' attribute."
+
+        return self.empty
+
+    @property
+    def is_empty(self) -> bool:
+        "A synonym for the 'empty' attribute."
+
+        return self.empty
+
+    @property
+    def lower_inc(self) -> bool:
+        """Return True if the lower bound is inclusive."""
+
+        return self.bounds[0] == "["
+
+    @property
+    def lower_inf(self) -> bool:
+        """Return True if this range is non-empty and the lower bound is
+        infinite."""
+
+        return not self.empty and self.lower is None
+
+    @property
+    def upper_inc(self) -> bool:
+        """Return True if the upper bound is inclusive."""
+
+        return self.bounds[1] == "]"
+
+    @property
+    def upper_inf(self) -> bool:
+        """Return True if this range is non-empty and the upper bound is
+        infinite."""
+
+        return not self.empty and self.upper is None
+
+    @property
+    def __sa_type_engine__(self) -> AbstractSingleRange[_T]:
+        return AbstractSingleRange()
+
+    def _contains_value(self, value: _T) -> bool:
+        """Return True if this range contains the given value."""
+
+        if self.empty:
+            return False
+
+        if self.lower is None:
+            return self.upper is None or (
+                value < self.upper
+                if self.bounds[1] == ")"
+                else value <= self.upper
+            )
+
+        if self.upper is None:
+            return (  # type: ignore
+                value > self.lower
+                if self.bounds[0] == "("
+                else value >= self.lower
+            )
+
+        return (  # type: ignore
+            value > self.lower
+            if self.bounds[0] == "("
+            else value >= self.lower
+        ) and (
+            value < self.upper
+            if self.bounds[1] == ")"
+            else value <= self.upper
+        )
+
+    def _get_discrete_step(self) -> Any:
+        "Determine the “step” for this range, if it is a discrete one."
+
+        # See
+        # https://www.postgresql.org/docs/current/rangetypes.html#RANGETYPES-DISCRETE
+        # for the rationale
+
+        if isinstance(self.lower, int) or isinstance(self.upper, int):
+            return 1
+        elif isinstance(self.lower, datetime) or isinstance(
+            self.upper, datetime
+        ):
+            # This check is required because `isinstance(datetime.now(), date)`
+            # is also True
+            return None
+        elif isinstance(self.lower, date) or isinstance(self.upper, date):
+            return timedelta(days=1)
+        else:
+            return None
+
+    def _compare_edges(
+        self,
+        value1: Optional[_T],
+        bound1: str,
+        value2: Optional[_T],
+        bound2: str,
+        only_values: bool = False,
+    ) -> int:
+        """Compare two range bounds.
+
+        Return -1, 0 or 1 respectively when `value1` is less than,
+        equal to or greater than `value2`.
+
+        When `only_values` is ``True``, do not consider the *inclusivity*
+        of the edges, just their values.
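+
+        For example, with a discrete (integer) range, the exclusive lower
+        edge ``(0`` and the inclusive lower edge ``[1`` compare as equal,
+        since both denote a range starting at 1.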
+ """ + + value1_is_lower_bound = bound1 in {"[", "("} + value2_is_lower_bound = bound2 in {"[", "("} + + # Infinite edges are equal when they are on the same side, + # otherwise a lower edge is considered less than the upper end + if value1 is value2 is None: + if value1_is_lower_bound == value2_is_lower_bound: + return 0 + else: + return -1 if value1_is_lower_bound else 1 + elif value1 is None: + return -1 if value1_is_lower_bound else 1 + elif value2 is None: + return 1 if value2_is_lower_bound else -1 + + # Short path for trivial case + if bound1 == bound2 and value1 == value2: + return 0 + + value1_inc = bound1 in {"[", "]"} + value2_inc = bound2 in {"[", "]"} + step = self._get_discrete_step() + + if step is not None: + # "Normalize" the two edges as '[)', to simplify successive + # logic when the range is discrete: otherwise we would need + # to handle the comparison between ``(0`` and ``[1`` that + # are equal when dealing with integers while for floats the + # former is lesser than the latter + + if value1_is_lower_bound: + if not value1_inc: + value1 += step + value1_inc = True + else: + if value1_inc: + value1 += step + value1_inc = False + if value2_is_lower_bound: + if not value2_inc: + value2 += step + value2_inc = True + else: + if value2_inc: + value2 += step + value2_inc = False + + if value1 < value2: # type: ignore + return -1 + elif value1 > value2: # type: ignore + return 1 + elif only_values: + return 0 + else: + # Neither one is infinite but are equal, so we + # need to consider the respective inclusive/exclusive + # flag + + if value1_inc and value2_inc: + return 0 + elif not value1_inc and not value2_inc: + if value1_is_lower_bound == value2_is_lower_bound: + return 0 + else: + return 1 if value1_is_lower_bound else -1 + elif not value1_inc: + return 1 if value1_is_lower_bound else -1 + elif not value2_inc: + return -1 if value2_is_lower_bound else 1 + else: + return 0 + + def __eq__(self, other: Any) -> bool: + """Compare this range to the `other` taking into account + bounds inclusivity, returning ``True`` if they are equal. + """ + + if not isinstance(other, Range): + return NotImplemented + + if self.empty and other.empty: + return True + elif self.empty != other.empty: + return False + + slower = self.lower + slower_b = self.bounds[0] + olower = other.lower + olower_b = other.bounds[0] + supper = self.upper + supper_b = self.bounds[1] + oupper = other.upper + oupper_b = other.bounds[1] + + return ( + self._compare_edges(slower, slower_b, olower, olower_b) == 0 + and self._compare_edges(supper, supper_b, oupper, oupper_b) == 0 + ) + + def contained_by(self, other: Range[_T]) -> bool: + "Determine whether this range is a contained by `other`." + + # Any range contains the empty one + if self.empty: + return True + + # An empty range does not contain any range except the empty one + if other.empty: + return False + + slower = self.lower + slower_b = self.bounds[0] + olower = other.lower + olower_b = other.bounds[0] + + if self._compare_edges(slower, slower_b, olower, olower_b) < 0: + return False + + supper = self.upper + supper_b = self.bounds[1] + oupper = other.upper + oupper_b = other.bounds[1] + + if self._compare_edges(supper, supper_b, oupper, oupper_b) > 0: + return False + + return True + + def contains(self, value: Union[_T, Range[_T]]) -> bool: + "Determine whether this range contains `value`." 
+ + if isinstance(value, Range): + return value.contained_by(self) + else: + return self._contains_value(value) + + def overlaps(self, other: Range[_T]) -> bool: + "Determine whether this range overlaps with `other`." + + # Empty ranges never overlap with any other range + if self.empty or other.empty: + return False + + slower = self.lower + slower_b = self.bounds[0] + supper = self.upper + supper_b = self.bounds[1] + olower = other.lower + olower_b = other.bounds[0] + oupper = other.upper + oupper_b = other.bounds[1] + + # Check whether this lower bound is contained in the other range + if ( + self._compare_edges(slower, slower_b, olower, olower_b) >= 0 + and self._compare_edges(slower, slower_b, oupper, oupper_b) <= 0 + ): + return True + + # Check whether other lower bound is contained in this range + if ( + self._compare_edges(olower, olower_b, slower, slower_b) >= 0 + and self._compare_edges(olower, olower_b, supper, supper_b) <= 0 + ): + return True + + return False + + def strictly_left_of(self, other: Range[_T]) -> bool: + "Determine whether this range is completely to the left of `other`." + + # Empty ranges are neither to left nor to the right of any other range + if self.empty or other.empty: + return False + + supper = self.upper + supper_b = self.bounds[1] + olower = other.lower + olower_b = other.bounds[0] + + # Check whether this upper edge is less than other's lower end + return self._compare_edges(supper, supper_b, olower, olower_b) < 0 + + __lshift__ = strictly_left_of + + def strictly_right_of(self, other: Range[_T]) -> bool: + "Determine whether this range is completely to the right of `other`." + + # Empty ranges are neither to left nor to the right of any other range + if self.empty or other.empty: + return False + + slower = self.lower + slower_b = self.bounds[0] + oupper = other.upper + oupper_b = other.bounds[1] + + # Check whether this lower edge is greater than other's upper end + return self._compare_edges(slower, slower_b, oupper, oupper_b) > 0 + + __rshift__ = strictly_right_of + + def not_extend_left_of(self, other: Range[_T]) -> bool: + "Determine whether this does not extend to the left of `other`." + + # Empty ranges are neither to left nor to the right of any other range + if self.empty or other.empty: + return False + + slower = self.lower + slower_b = self.bounds[0] + olower = other.lower + olower_b = other.bounds[0] + + # Check whether this lower edge is not less than other's lower end + return self._compare_edges(slower, slower_b, olower, olower_b) >= 0 + + def not_extend_right_of(self, other: Range[_T]) -> bool: + "Determine whether this does not extend to the right of `other`." 
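+        # A short sketch of how the relation and arithmetic helpers
+        # compose, assuming integer ranges with the default "[)" bounds:
+        #
+        #     Range(1, 3) << Range(5, 9)          # strictly_left_of: True
+        #     Range(5, 9) >> Range(1, 3)          # strictly_right_of: True
+        #     Range(1, 5).overlaps(Range(4, 9))   # True, both contain 4
+        #     Range(1, 3) + Range(3, 5)           # adjacent, union -> Range(1, 5)
+        #     Range(1, 8) * Range(5, 9)           # intersection -> Range(5, 8)
+        #     Range(1, 8) - Range(5, 9)           # difference -> Range(1, 5)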
+ + # Empty ranges are neither to left nor to the right of any other range + if self.empty or other.empty: + return False + + supper = self.upper + supper_b = self.bounds[1] + oupper = other.upper + oupper_b = other.bounds[1] + + # Check whether this upper edge is not greater than other's upper end + return self._compare_edges(supper, supper_b, oupper, oupper_b) <= 0 + + def _upper_edge_adjacent_to_lower( + self, + value1: Optional[_T], + bound1: str, + value2: Optional[_T], + bound2: str, + ) -> bool: + """Determine whether an upper bound is immediately successive to a + lower bound.""" + + # Since we need a peculiar way to handle the bounds inclusivity, + # just do a comparison by value here + res = self._compare_edges(value1, bound1, value2, bound2, True) + if res == -1: + step = self._get_discrete_step() + if step is None: + return False + if bound1 == "]": + if bound2 == "[": + return value1 == value2 - step # type: ignore + else: + return value1 == value2 + else: + if bound2 == "[": + return value1 == value2 + else: + return value1 == value2 - step # type: ignore + elif res == 0: + # Cover cases like [0,0] -|- [1,] and [0,2) -|- (1,3] + if ( + bound1 == "]" + and bound2 == "[" + or bound1 == ")" + and bound2 == "(" + ): + step = self._get_discrete_step() + if step is not None: + return True + return ( + bound1 == ")" + and bound2 == "[" + or bound1 == "]" + and bound2 == "(" + ) + else: + return False + + def adjacent_to(self, other: Range[_T]) -> bool: + "Determine whether this range is adjacent to the `other`." + + # Empty ranges are not adjacent to any other range + if self.empty or other.empty: + return False + + slower = self.lower + slower_b = self.bounds[0] + supper = self.upper + supper_b = self.bounds[1] + olower = other.lower + olower_b = other.bounds[0] + oupper = other.upper + oupper_b = other.bounds[1] + + return self._upper_edge_adjacent_to_lower( + supper, supper_b, olower, olower_b + ) or self._upper_edge_adjacent_to_lower( + oupper, oupper_b, slower, slower_b + ) + + def union(self, other: Range[_T]) -> Range[_T]: + """Compute the union of this range with the `other`. + + This raises a ``ValueError`` exception if the two ranges are + "disjunct", that is neither adjacent nor overlapping. + """ + + # Empty ranges are "additive identities" + if self.empty: + return other + if other.empty: + return self + + if not self.overlaps(other) and not self.adjacent_to(other): + raise ValueError( + "Adding non-overlapping and non-adjacent" + " ranges is not implemented" + ) + + slower = self.lower + slower_b = self.bounds[0] + supper = self.upper + supper_b = self.bounds[1] + olower = other.lower + olower_b = other.bounds[0] + oupper = other.upper + oupper_b = other.bounds[1] + + if self._compare_edges(slower, slower_b, olower, olower_b) < 0: + rlower = slower + rlower_b = slower_b + else: + rlower = olower + rlower_b = olower_b + + if self._compare_edges(supper, supper_b, oupper, oupper_b) > 0: + rupper = supper + rupper_b = supper_b + else: + rupper = oupper + rupper_b = oupper_b + + return Range( + rlower, rupper, bounds=cast(_BoundsType, rlower_b + rupper_b) + ) + + def __add__(self, other: Range[_T]) -> Range[_T]: + return self.union(other) + + def difference(self, other: Range[_T]) -> Range[_T]: + """Compute the difference between this range and the `other`. + + This raises a ``ValueError`` exception if the two ranges are + "disjunct", that is neither adjacent nor overlapping. 
+ """ + + # Subtracting an empty range is a no-op + if self.empty or other.empty: + return self + + slower = self.lower + slower_b = self.bounds[0] + supper = self.upper + supper_b = self.bounds[1] + olower = other.lower + olower_b = other.bounds[0] + oupper = other.upper + oupper_b = other.bounds[1] + + sl_vs_ol = self._compare_edges(slower, slower_b, olower, olower_b) + su_vs_ou = self._compare_edges(supper, supper_b, oupper, oupper_b) + if sl_vs_ol < 0 and su_vs_ou > 0: + raise ValueError( + "Subtracting a strictly inner range is not implemented" + ) + + sl_vs_ou = self._compare_edges(slower, slower_b, oupper, oupper_b) + su_vs_ol = self._compare_edges(supper, supper_b, olower, olower_b) + + # If the ranges do not overlap, result is simply the first + if sl_vs_ou > 0 or su_vs_ol < 0: + return self + + # If this range is completely contained by the other, result is empty + if sl_vs_ol >= 0 and su_vs_ou <= 0: + return Range(None, None, empty=True) + + # If this range extends to the left of the other and ends in its + # middle + if sl_vs_ol <= 0 and su_vs_ol >= 0 and su_vs_ou <= 0: + rupper_b = ")" if olower_b == "[" else "]" + if ( + slower_b != "[" + and rupper_b != "]" + and self._compare_edges(slower, slower_b, olower, rupper_b) + == 0 + ): + return Range(None, None, empty=True) + else: + return Range( + slower, + olower, + bounds=cast(_BoundsType, slower_b + rupper_b), + ) + + # If this range starts in the middle of the other and extends to its + # right + if sl_vs_ol >= 0 and su_vs_ou >= 0 and sl_vs_ou <= 0: + rlower_b = "(" if oupper_b == "]" else "[" + if ( + rlower_b != "[" + and supper_b != "]" + and self._compare_edges(oupper, rlower_b, supper, supper_b) + == 0 + ): + return Range(None, None, empty=True) + else: + return Range( + oupper, + supper, + bounds=cast(_BoundsType, rlower_b + supper_b), + ) + + assert False, f"Unhandled case computing {self} - {other}" + + def __sub__(self, other: Range[_T]) -> Range[_T]: + return self.difference(other) + + def intersection(self, other: Range[_T]) -> Range[_T]: + """Compute the intersection of this range with the `other`. + + .. versionadded:: 2.0.10 + + """ + if self.empty or other.empty or not self.overlaps(other): + return Range(None, None, empty=True) + + slower = self.lower + slower_b = self.bounds[0] + supper = self.upper + supper_b = self.bounds[1] + olower = other.lower + olower_b = other.bounds[0] + oupper = other.upper + oupper_b = other.bounds[1] + + if self._compare_edges(slower, slower_b, olower, olower_b) < 0: + rlower = olower + rlower_b = olower_b + else: + rlower = slower + rlower_b = slower_b + + if self._compare_edges(supper, supper_b, oupper, oupper_b) > 0: + rupper = oupper + rupper_b = oupper_b + else: + rupper = supper + rupper_b = supper_b + + return Range( + rlower, + rupper, + bounds=cast(_BoundsType, rlower_b + rupper_b), + ) + + def __mul__(self, other: Range[_T]) -> Range[_T]: + return self.intersection(other) + + def __str__(self) -> str: + return self._stringify() + + def _stringify(self) -> str: + if self.empty: + return "empty" + + l, r = self.lower, self.upper + l = "" if l is None else l # type: ignore + r = "" if r is None else r # type: ignore + + b0, b1 = cast("Tuple[str, str]", self.bounds) + + return f"{b0}{l},{r}{b1}" + + +class MultiRange(List[Range[_T]]): + """Represents a multirange sequence. + + This list subclass is an utility to allow automatic type inference of + the proper multi-range SQL type depending on the single range values. 
+ This is useful when operating on literal multi-ranges:: + + import sqlalchemy as sa + from sqlalchemy.dialects.postgresql import MultiRange, Range + + value = literal(MultiRange([Range(2, 4)])) + + select(tbl).where(tbl.c.value.op("@")(MultiRange([Range(-3, 7)]))) + + .. versionadded:: 2.0.26 + + .. seealso:: + + - :ref:`postgresql_multirange_list_use`. + """ + + @property + def __sa_type_engine__(self) -> AbstractMultiRange[_T]: + return AbstractMultiRange() + + +class AbstractRange(sqltypes.TypeEngine[_T]): + """Base class for single and multi Range SQL types.""" + + render_bind_cast = True + + __abstract__ = True + + @overload + def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ... + + @overload + def adapt( + self, cls: Type[TypeEngineMixin], **kw: Any + ) -> TypeEngine[Any]: ... + + def adapt( + self, + cls: Type[Union[TypeEngine[Any], TypeEngineMixin]], + **kw: Any, + ) -> TypeEngine[Any]: + """Dynamically adapt a range type to an abstract impl. + + For example ``INT4RANGE().adapt(_Psycopg2NumericRange)`` should + produce a type that will have ``_Psycopg2NumericRange`` behaviors + and also render as ``INT4RANGE`` in SQL and DDL. + + """ + if ( + issubclass(cls, (AbstractSingleRangeImpl, AbstractMultiRangeImpl)) + and cls is not self.__class__ + ): + # two ways to do this are: 1. create a new type on the fly + # or 2. have AbstractRangeImpl(visit_name) constructor and a + # visit_abstract_range_impl() method in the PG compiler. + # I'm choosing #1 as the resulting type object + # will then make use of the same mechanics + # as if we had made all these sub-types explicitly, and will + # also look more obvious under pdb etc. + # The adapt() operation here is cached per type-class-per-dialect, + # so is not much of a performance concern + visit_name = self.__visit_name__ + return type( # type: ignore + f"{visit_name}RangeImpl", + (cls, self.__class__), + {"__visit_name__": visit_name}, + )() + else: + return super().adapt(cls) + + class comparator_factory(TypeEngine.Comparator[Range[Any]]): + """Define comparison operations for range types.""" + + def contains(self, other: Any, **kw: Any) -> ColumnElement[bool]: + """Boolean expression. Returns true if the right hand operand, + which can be an element or a range, is contained within the + column. + + kwargs may be ignored by this operator but are required for API + conformance. + """ + return self.expr.operate(CONTAINS, other) + + def contained_by(self, other: Any) -> ColumnElement[bool]: + """Boolean expression. Returns true if the column is contained + within the right hand operand. + """ + return self.expr.operate(CONTAINED_BY, other) + + def overlaps(self, other: Any) -> ColumnElement[bool]: + """Boolean expression. Returns true if the column overlaps + (has points in common with) the right hand operand. + """ + return self.expr.operate(OVERLAP, other) + + def strictly_left_of(self, other: Any) -> ColumnElement[bool]: + """Boolean expression. Returns true if the column is strictly + left of the right hand operand. + """ + return self.expr.operate(STRICTLY_LEFT_OF, other) + + __lshift__ = strictly_left_of + + def strictly_right_of(self, other: Any) -> ColumnElement[bool]: + """Boolean expression. Returns true if the column is strictly + right of the right hand operand. + """ + return self.expr.operate(STRICTLY_RIGHT_OF, other) + + __rshift__ = strictly_right_of + + def not_extend_right_of(self, other: Any) -> ColumnElement[bool]: + """Boolean expression. 
Returns true if the range in the column
+            does not extend right of the range in the operand.
+            """
+            return self.expr.operate(NOT_EXTEND_RIGHT_OF, other)
+
+        def not_extend_left_of(self, other: Any) -> ColumnElement[bool]:
+            """Boolean expression. Returns true if the range in the column
+            does not extend left of the range in the operand.
+            """
+            return self.expr.operate(NOT_EXTEND_LEFT_OF, other)
+
+        def adjacent_to(self, other: Any) -> ColumnElement[bool]:
+            """Boolean expression. Returns true if the range in the column
+            is adjacent to the range in the operand.
+            """
+            return self.expr.operate(ADJACENT_TO, other)
+
+        def union(self, other: Any) -> ColumnElement[bool]:
+            """Range expression. Returns the union of the two ranges.
+            Will raise an exception if the resulting range is not
+            contiguous.
+            """
+            return self.expr.operate(operators.add, other)
+
+        def difference(self, other: Any) -> ColumnElement[bool]:
+            """Range expression. Returns the difference of the two ranges.
+            Will raise an exception if the resulting range is not
+            contiguous.
+            """
+            return self.expr.operate(operators.sub, other)
+
+        def intersection(self, other: Any) -> ColumnElement[Range[_T]]:
+            """Range expression. Returns the intersection of the two ranges.
+            Will raise an exception if the resulting range is not
+            contiguous.
+            """
+            return self.expr.operate(operators.mul, other)
+
+
+class AbstractSingleRange(AbstractRange[Range[_T]]):
+    """Base for PostgreSQL RANGE types.
+
+    These are types that return a single :class:`_postgresql.Range` object.
+
+    .. seealso::
+
+        `PostgreSQL range functions <https://www.postgresql.org/docs/current/static/functions-range.html>`_
+
+    """  # noqa: E501
+
+    __abstract__ = True
+
+    def _resolve_for_literal(self, value: Range[Any]) -> Any:
+        spec = value.lower if value.lower is not None else value.upper
+
+        if isinstance(spec, int):
+            # pg is unreasonably picky here: the query
+            # "select 1::INTEGER <@ '[1, 4)'::INT8RANGE" raises
+            # "operator does not exist: integer <@ int8range" as of pg 16
+            if _is_int32(value):
+                return INT4RANGE()
+            else:
+                return INT8RANGE()
+        elif isinstance(spec, (Decimal, float)):
+            return NUMRANGE()
+        elif isinstance(spec, datetime):
+            return TSRANGE() if not spec.tzinfo else TSTZRANGE()
+        elif isinstance(spec, date):
+            return DATERANGE()
+        else:
+            # empty Range, SQL datatype can't be determined here
+            return sqltypes.NULLTYPE
+
+
+class AbstractSingleRangeImpl(AbstractSingleRange[_T]):
+    """Marker for AbstractSingleRange that will apply a subclass-specific
+    adaptation"""
+
+
+class AbstractMultiRange(AbstractRange[Sequence[Range[_T]]]):
+    """Base for PostgreSQL MULTIRANGE types.
+
+    These are types that return a sequence of :class:`_postgresql.Range`
+    objects.
+ + """ + + __abstract__ = True + + def _resolve_for_literal(self, value: Sequence[Range[Any]]) -> Any: + if not value: + # empty MultiRange, SQL datatype can't be determined here + return sqltypes.NULLTYPE + first = value[0] + spec = first.lower if first.lower is not None else first.upper + + if isinstance(spec, int): + # pg is unreasonably picky here: the query + # "select 1::INTEGER <@ '{[1, 4),[6,19)}'::INT8MULTIRANGE" raises + # "operator does not exist: integer <@ int8multirange" as of pg 16 + if all(_is_int32(r) for r in value): + return INT4MULTIRANGE() + else: + return INT8MULTIRANGE() + elif isinstance(spec, (Decimal, float)): + return NUMMULTIRANGE() + elif isinstance(spec, datetime): + return TSMULTIRANGE() if not spec.tzinfo else TSTZMULTIRANGE() + elif isinstance(spec, date): + return DATEMULTIRANGE() + else: + # empty Range, SQL datatype can't be determined here + return sqltypes.NULLTYPE + + +class AbstractMultiRangeImpl(AbstractMultiRange[_T]): + """Marker for AbstractMultiRange that will apply a subclass-specific + adaptation""" + + +class INT4RANGE(AbstractSingleRange[int]): + """Represent the PostgreSQL INT4RANGE type.""" + + __visit_name__ = "INT4RANGE" + + +class INT8RANGE(AbstractSingleRange[int]): + """Represent the PostgreSQL INT8RANGE type.""" + + __visit_name__ = "INT8RANGE" + + +class NUMRANGE(AbstractSingleRange[Decimal]): + """Represent the PostgreSQL NUMRANGE type.""" + + __visit_name__ = "NUMRANGE" + + +class DATERANGE(AbstractSingleRange[date]): + """Represent the PostgreSQL DATERANGE type.""" + + __visit_name__ = "DATERANGE" + + +class TSRANGE(AbstractSingleRange[datetime]): + """Represent the PostgreSQL TSRANGE type.""" + + __visit_name__ = "TSRANGE" + + +class TSTZRANGE(AbstractSingleRange[datetime]): + """Represent the PostgreSQL TSTZRANGE type.""" + + __visit_name__ = "TSTZRANGE" + + +class INT4MULTIRANGE(AbstractMultiRange[int]): + """Represent the PostgreSQL INT4MULTIRANGE type.""" + + __visit_name__ = "INT4MULTIRANGE" + + +class INT8MULTIRANGE(AbstractMultiRange[int]): + """Represent the PostgreSQL INT8MULTIRANGE type.""" + + __visit_name__ = "INT8MULTIRANGE" + + +class NUMMULTIRANGE(AbstractMultiRange[Decimal]): + """Represent the PostgreSQL NUMMULTIRANGE type.""" + + __visit_name__ = "NUMMULTIRANGE" + + +class DATEMULTIRANGE(AbstractMultiRange[date]): + """Represent the PostgreSQL DATEMULTIRANGE type.""" + + __visit_name__ = "DATEMULTIRANGE" + + +class TSMULTIRANGE(AbstractMultiRange[datetime]): + """Represent the PostgreSQL TSRANGE type.""" + + __visit_name__ = "TSMULTIRANGE" + + +class TSTZMULTIRANGE(AbstractMultiRange[datetime]): + """Represent the PostgreSQL TSTZRANGE type.""" + + __visit_name__ = "TSTZMULTIRANGE" + + +_max_int_32 = 2**31 - 1 +_min_int_32 = -(2**31) + + +def _is_int32(r: Range[int]) -> bool: + return (r.lower is None or _min_int_32 <= r.lower <= _max_int_32) and ( + r.upper is None or _min_int_32 <= r.upper <= _max_int_32 + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/types.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/types.py new file mode 100644 index 00000000..2acf63be --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/types.py @@ -0,0 +1,303 @@ +# dialects/postgresql/types.py +# Copyright (C) 2013-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from __future__ import annotations + +import 
datetime as dt +from typing import Any +from typing import Optional +from typing import overload +from typing import Type +from typing import TYPE_CHECKING +from uuid import UUID as _python_UUID + +from ...sql import sqltypes +from ...sql import type_api +from ...util.typing import Literal + +if TYPE_CHECKING: + from ...engine.interfaces import Dialect + from ...sql.operators import OperatorType + from ...sql.type_api import _LiteralProcessorType + from ...sql.type_api import TypeEngine + +_DECIMAL_TYPES = (1231, 1700) +_FLOAT_TYPES = (700, 701, 1021, 1022) +_INT_TYPES = (20, 21, 23, 26, 1005, 1007, 1016) + + +class PGUuid(sqltypes.UUID[sqltypes._UUID_RETURN]): + render_bind_cast = True + render_literal_cast = True + + if TYPE_CHECKING: + + @overload + def __init__( + self: PGUuid[_python_UUID], as_uuid: Literal[True] = ... + ) -> None: ... + + @overload + def __init__( + self: PGUuid[str], as_uuid: Literal[False] = ... + ) -> None: ... + + def __init__(self, as_uuid: bool = True) -> None: ... + + +class BYTEA(sqltypes.LargeBinary): + __visit_name__ = "BYTEA" + + +class INET(sqltypes.TypeEngine[str]): + __visit_name__ = "INET" + + +PGInet = INET + + +class CIDR(sqltypes.TypeEngine[str]): + __visit_name__ = "CIDR" + + +PGCidr = CIDR + + +class MACADDR(sqltypes.TypeEngine[str]): + __visit_name__ = "MACADDR" + + +PGMacAddr = MACADDR + + +class MACADDR8(sqltypes.TypeEngine[str]): + __visit_name__ = "MACADDR8" + + +PGMacAddr8 = MACADDR8 + + +class MONEY(sqltypes.TypeEngine[str]): + r"""Provide the PostgreSQL MONEY type. + + Depending on driver, result rows using this type may return a + string value which includes currency symbols. + + For this reason, it may be preferable to provide conversion to a + numerically-based currency datatype using :class:`_types.TypeDecorator`:: + + import re + import decimal + from sqlalchemy import Dialect + from sqlalchemy import TypeDecorator + + class NumericMoney(TypeDecorator): + impl = MONEY + + def process_result_value( + self, value: Any, dialect: Dialect + ) -> None: + if value is not None: + # adjust this for the currency and numeric + m = re.match(r"\$([\d.]+)", value) + if m: + value = decimal.Decimal(m.group(1)) + return value + + Alternatively, the conversion may be applied as a CAST using + the :meth:`_types.TypeDecorator.column_expression` method as follows:: + + import decimal + from sqlalchemy import cast + from sqlalchemy import TypeDecorator + + class NumericMoney(TypeDecorator): + impl = MONEY + + def column_expression(self, column: Any): + return cast(column, Numeric()) + + .. versionadded:: 1.2 + + """ + + __visit_name__ = "MONEY" + + +class OID(sqltypes.TypeEngine[int]): + """Provide the PostgreSQL OID type.""" + + __visit_name__ = "OID" + + +class REGCONFIG(sqltypes.TypeEngine[str]): + """Provide the PostgreSQL REGCONFIG type. + + .. versionadded:: 2.0.0rc1 + + """ + + __visit_name__ = "REGCONFIG" + + +class TSQUERY(sqltypes.TypeEngine[str]): + """Provide the PostgreSQL TSQUERY type. + + .. versionadded:: 2.0.0rc1 + + """ + + __visit_name__ = "TSQUERY" + + +class REGCLASS(sqltypes.TypeEngine[str]): + """Provide the PostgreSQL REGCLASS type. + + .. versionadded:: 1.2.7 + + """ + + __visit_name__ = "REGCLASS" + + +class TIMESTAMP(sqltypes.TIMESTAMP): + """Provide the PostgreSQL TIMESTAMP type.""" + + __visit_name__ = "TIMESTAMP" + + def __init__( + self, timezone: bool = False, precision: Optional[int] = None + ) -> None: + """Construct a TIMESTAMP. 
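+        For example (a sketch; the exact DDL depends on the dialect's
+        compiler), ``TIMESTAMP(timezone=True, precision=6)`` would
+        render as ``TIMESTAMP(6) WITH TIME ZONE``.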
+ + :param timezone: boolean value if timezone present, default False + :param precision: optional integer precision value + + .. versionadded:: 1.4 + + """ + super().__init__(timezone=timezone) + self.precision = precision + + +class TIME(sqltypes.TIME): + """PostgreSQL TIME type.""" + + __visit_name__ = "TIME" + + def __init__( + self, timezone: bool = False, precision: Optional[int] = None + ) -> None: + """Construct a TIME. + + :param timezone: boolean value if timezone present, default False + :param precision: optional integer precision value + + .. versionadded:: 1.4 + + """ + super().__init__(timezone=timezone) + self.precision = precision + + +class INTERVAL(type_api.NativeForEmulated, sqltypes._AbstractInterval): + """PostgreSQL INTERVAL type.""" + + __visit_name__ = "INTERVAL" + native = True + + def __init__( + self, precision: Optional[int] = None, fields: Optional[str] = None + ) -> None: + """Construct an INTERVAL. + + :param precision: optional integer precision value + :param fields: string fields specifier. allows storage of fields + to be limited, such as ``"YEAR"``, ``"MONTH"``, ``"DAY TO HOUR"``, + etc. + + .. versionadded:: 1.2 + + """ + self.precision = precision + self.fields = fields + + @classmethod + def adapt_emulated_to_native( + cls, interval: sqltypes.Interval, **kw: Any # type: ignore[override] + ) -> INTERVAL: + return INTERVAL(precision=interval.second_precision) + + @property + def _type_affinity(self) -> Type[sqltypes.Interval]: + return sqltypes.Interval + + def as_generic(self, allow_nulltype: bool = False) -> sqltypes.Interval: + return sqltypes.Interval(native=True, second_precision=self.precision) + + @property + def python_type(self) -> Type[dt.timedelta]: + return dt.timedelta + + def literal_processor( + self, dialect: Dialect + ) -> Optional[_LiteralProcessorType[dt.timedelta]]: + def process(value: dt.timedelta) -> str: + return f"make_interval(secs=>{value.total_seconds()})" + + return process + + +PGInterval = INTERVAL + + +class BIT(sqltypes.TypeEngine[int]): + __visit_name__ = "BIT" + + def __init__( + self, length: Optional[int] = None, varying: bool = False + ) -> None: + if varying: + # BIT VARYING can be unlimited-length, so no default + self.length = length + else: + # BIT without VARYING defaults to length 1 + self.length = length or 1 + self.varying = varying + + +PGBit = BIT + + +class TSVECTOR(sqltypes.TypeEngine[str]): + """The :class:`_postgresql.TSVECTOR` type implements the PostgreSQL + text search type TSVECTOR. + + It can be used to do full text queries on natural language + documents. + + .. seealso:: + + :ref:`postgresql_match` + + """ + + __visit_name__ = "TSVECTOR" + + +class CITEXT(sqltypes.TEXT): + """Provide the PostgreSQL CITEXT type. + + .. versionadded:: 2.0.7 + + """ + + __visit_name__ = "CITEXT" + + def coerce_compared_value( + self, op: Optional[OperatorType], value: Any + ) -> TypeEngine[Any]: + return self diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__init__.py new file mode 100644 index 00000000..45f088e2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__init__.py @@ -0,0 +1,57 @@ +# dialects/sqlite/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +from . 
import aiosqlite # noqa +from . import base # noqa +from . import pysqlcipher # noqa +from . import pysqlite # noqa +from .base import BLOB +from .base import BOOLEAN +from .base import CHAR +from .base import DATE +from .base import DATETIME +from .base import DECIMAL +from .base import FLOAT +from .base import INTEGER +from .base import JSON +from .base import NUMERIC +from .base import REAL +from .base import SMALLINT +from .base import TEXT +from .base import TIME +from .base import TIMESTAMP +from .base import VARCHAR +from .dml import Insert +from .dml import insert + +# default dialect +base.dialect = dialect = pysqlite.dialect + + +__all__ = ( + "BLOB", + "BOOLEAN", + "CHAR", + "DATE", + "DATETIME", + "DECIMAL", + "FLOAT", + "INTEGER", + "JSON", + "NUMERIC", + "SMALLINT", + "TEXT", + "TIME", + "TIMESTAMP", + "VARCHAR", + "REAL", + "Insert", + "insert", + "dialect", +) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..ed832c10 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-312.pyc new file mode 100644 index 00000000..ebf41e17 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/base.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/base.cpython-312.pyc new file mode 100644 index 00000000..d078ee9e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/base.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-312.pyc new file mode 100644 index 00000000..303e5ce1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/json.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/json.cpython-312.pyc new file mode 100644 index 00000000..50668eab Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/json.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-312.pyc new file mode 100644 index 00000000..d77b8b84 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-312.pyc new file mode 100644 index 00000000..f444bc68 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-312.pyc new file mode 100644 index 00000000..99e3e85a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py new file mode 100644 index 00000000..796a80cf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py @@ -0,0 +1,396 @@ +# dialects/sqlite/aiosqlite.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +r""" + +.. dialect:: sqlite+aiosqlite + :name: aiosqlite + :dbapi: aiosqlite + :connectstring: sqlite+aiosqlite:///file_path + :url: https://pypi.org/project/aiosqlite/ + +The aiosqlite dialect provides support for the SQLAlchemy asyncio interface +running on top of pysqlite. + +aiosqlite is a wrapper around pysqlite that uses a background thread for +each connection. It does not actually use non-blocking IO, as SQLite +databases are not socket-based. However it does provide a working asyncio +interface that's useful for testing and prototyping purposes. + +Using a special asyncio mediation layer, the aiosqlite dialect is usable +as the backend for the :ref:`SQLAlchemy asyncio ` +extension package. + +This dialect should normally be used only with the +:func:`_asyncio.create_async_engine` engine creation function:: + + from sqlalchemy.ext.asyncio import create_async_engine + engine = create_async_engine("sqlite+aiosqlite:///filename") + +The URL passes through all arguments to the ``pysqlite`` driver, so all +connection arguments are the same as they are for that of :ref:`pysqlite`. + +.. _aiosqlite_udfs: + +User-Defined Functions +---------------------- + +aiosqlite extends pysqlite to support async, so we can create our own user-defined functions (UDFs) +in Python and use them directly in SQLite queries as described here: :ref:`pysqlite_udfs`. + +.. _aiosqlite_serializable: + +Serializable isolation / Savepoints / Transactional DDL (asyncio version) +------------------------------------------------------------------------- + +Similarly to pysqlite, aiosqlite does not support SAVEPOINT feature. + +The solution is similar to :ref:`pysqlite_serializable`. This is achieved by the event listeners in async:: + + from sqlalchemy import create_engine, event + from sqlalchemy.ext.asyncio import create_async_engine + + engine = create_async_engine("sqlite+aiosqlite:///myfile.db") + + @event.listens_for(engine.sync_engine, "connect") + def do_connect(dbapi_connection, connection_record): + # disable aiosqlite's emitting of the BEGIN statement entirely. + # also stops it from emitting COMMIT before any DDL. + dbapi_connection.isolation_level = None + + @event.listens_for(engine.sync_engine, "begin") + def do_begin(conn): + # emit our own BEGIN + conn.exec_driver_sql("BEGIN") + +.. 
warning:: When using the above recipe, it is advised to not use the + :paramref:`.Connection.execution_options.isolation_level` setting on + :class:`_engine.Connection` and :func:`_sa.create_engine` + with the SQLite driver, + as this function necessarily will also alter the ".isolation_level" setting. + +""" # noqa + +import asyncio +from collections import deque +from functools import partial + +from .base import SQLiteExecutionContext +from .pysqlite import SQLiteDialect_pysqlite +from ... import pool +from ... import util +from ...engine import AdaptedConnection +from ...util.concurrency import await_fallback +from ...util.concurrency import await_only + + +class AsyncAdapt_aiosqlite_cursor: + # TODO: base on connectors/asyncio.py + # see #10415 + + __slots__ = ( + "_adapt_connection", + "_connection", + "description", + "await_", + "_rows", + "arraysize", + "rowcount", + "lastrowid", + ) + + server_side = False + + def __init__(self, adapt_connection): + self._adapt_connection = adapt_connection + self._connection = adapt_connection._connection + self.await_ = adapt_connection.await_ + self.arraysize = 1 + self.rowcount = -1 + self.description = None + self._rows = deque() + + def close(self): + self._rows.clear() + + def execute(self, operation, parameters=None): + try: + _cursor = self.await_(self._connection.cursor()) + + if parameters is None: + self.await_(_cursor.execute(operation)) + else: + self.await_(_cursor.execute(operation, parameters)) + + if _cursor.description: + self.description = _cursor.description + self.lastrowid = self.rowcount = -1 + + if not self.server_side: + self._rows = deque(self.await_(_cursor.fetchall())) + else: + self.description = None + self.lastrowid = _cursor.lastrowid + self.rowcount = _cursor.rowcount + + if not self.server_side: + self.await_(_cursor.close()) + else: + self._cursor = _cursor + except Exception as error: + self._adapt_connection._handle_exception(error) + + def executemany(self, operation, seq_of_parameters): + try: + _cursor = self.await_(self._connection.cursor()) + self.await_(_cursor.executemany(operation, seq_of_parameters)) + self.description = None + self.lastrowid = _cursor.lastrowid + self.rowcount = _cursor.rowcount + self.await_(_cursor.close()) + except Exception as error: + self._adapt_connection._handle_exception(error) + + def setinputsizes(self, *inputsizes): + pass + + def __iter__(self): + while self._rows: + yield self._rows.popleft() + + def fetchone(self): + if self._rows: + return self._rows.popleft() + else: + return None + + def fetchmany(self, size=None): + if size is None: + size = self.arraysize + + rr = self._rows + return [rr.popleft() for _ in range(min(size, len(rr)))] + + def fetchall(self): + retval = list(self._rows) + self._rows.clear() + return retval + + +class AsyncAdapt_aiosqlite_ss_cursor(AsyncAdapt_aiosqlite_cursor): + # TODO: base on connectors/asyncio.py + # see #10415 + __slots__ = "_cursor" + + server_side = True + + def __init__(self, *arg, **kw): + super().__init__(*arg, **kw) + self._cursor = None + + def close(self): + if self._cursor is not None: + self.await_(self._cursor.close()) + self._cursor = None + + def fetchone(self): + return self.await_(self._cursor.fetchone()) + + def fetchmany(self, size=None): + if size is None: + size = self.arraysize + return self.await_(self._cursor.fetchmany(size=size)) + + def fetchall(self): + return self.await_(self._cursor.fetchall()) + + +class AsyncAdapt_aiosqlite_connection(AdaptedConnection): + await_ = staticmethod(await_only) + 
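+    # ``await_`` drives awaitables returned by aiosqlite to completion on
+    # behalf of the synchronous DBAPI-style methods below, via SQLAlchemy's
+    # greenlet-based mediation layer; ``await_only`` raises if called
+    # outside of that context, while the fallback subclass further down
+    # substitutes ``await_fallback``.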
__slots__ = ("dbapi",) + + def __init__(self, dbapi, connection): + self.dbapi = dbapi + self._connection = connection + + @property + def isolation_level(self): + return self._connection.isolation_level + + @isolation_level.setter + def isolation_level(self, value): + # aiosqlite's isolation_level setter works outside the Thread + # that it's supposed to, necessitating setting check_same_thread=False. + # for improved stability, we instead invent our own awaitable version + # using aiosqlite's async queue directly. + + def set_iso(connection, value): + connection.isolation_level = value + + function = partial(set_iso, self._connection._conn, value) + future = asyncio.get_event_loop().create_future() + + self._connection._tx.put_nowait((future, function)) + + try: + return self.await_(future) + except Exception as error: + self._handle_exception(error) + + def create_function(self, *args, **kw): + try: + self.await_(self._connection.create_function(*args, **kw)) + except Exception as error: + self._handle_exception(error) + + def cursor(self, server_side=False): + if server_side: + return AsyncAdapt_aiosqlite_ss_cursor(self) + else: + return AsyncAdapt_aiosqlite_cursor(self) + + def execute(self, *args, **kw): + return self.await_(self._connection.execute(*args, **kw)) + + def rollback(self): + try: + self.await_(self._connection.rollback()) + except Exception as error: + self._handle_exception(error) + + def commit(self): + try: + self.await_(self._connection.commit()) + except Exception as error: + self._handle_exception(error) + + def close(self): + try: + self.await_(self._connection.close()) + except ValueError: + # this is undocumented for aiosqlite, that ValueError + # was raised if .close() was called more than once, which is + # both not customary for DBAPI and is also not a DBAPI.Error + # exception. This is now fixed in aiosqlite via my PR + # https://github.com/omnilib/aiosqlite/pull/238, so we can be + # assured this will not become some other kind of exception, + # since it doesn't raise anymore. + + pass + except Exception as error: + self._handle_exception(error) + + def _handle_exception(self, error): + if ( + isinstance(error, ValueError) + and error.args[0] == "no active connection" + ): + raise self.dbapi.sqlite.OperationalError( + "no active connection" + ) from error + else: + raise error + + +class AsyncAdaptFallback_aiosqlite_connection(AsyncAdapt_aiosqlite_connection): + __slots__ = () + + await_ = staticmethod(await_fallback) + + +class AsyncAdapt_aiosqlite_dbapi: + def __init__(self, aiosqlite, sqlite): + self.aiosqlite = aiosqlite + self.sqlite = sqlite + self.paramstyle = "qmark" + self._init_dbapi_attributes() + + def _init_dbapi_attributes(self): + for name in ( + "DatabaseError", + "Error", + "IntegrityError", + "NotSupportedError", + "OperationalError", + "ProgrammingError", + "sqlite_version", + "sqlite_version_info", + ): + setattr(self, name, getattr(self.aiosqlite, name)) + + for name in ("PARSE_COLNAMES", "PARSE_DECLTYPES"): + setattr(self, name, getattr(self.sqlite, name)) + + for name in ("Binary",): + setattr(self, name, getattr(self.sqlite, name)) + + def connect(self, *arg, **kw): + async_fallback = kw.pop("async_fallback", False) + + creator_fn = kw.pop("async_creator_fn", None) + if creator_fn: + connection = creator_fn(*arg, **kw) + else: + connection = self.aiosqlite.connect(*arg, **kw) + # it's a Thread. 
you'll thank us later + connection.daemon = True + + if util.asbool(async_fallback): + return AsyncAdaptFallback_aiosqlite_connection( + self, + await_fallback(connection), + ) + else: + return AsyncAdapt_aiosqlite_connection( + self, + await_only(connection), + ) + + +class SQLiteExecutionContext_aiosqlite(SQLiteExecutionContext): + def create_server_side_cursor(self): + return self._dbapi_connection.cursor(server_side=True) + + +class SQLiteDialect_aiosqlite(SQLiteDialect_pysqlite): + driver = "aiosqlite" + supports_statement_cache = True + + is_async = True + + supports_server_side_cursors = True + + execution_ctx_cls = SQLiteExecutionContext_aiosqlite + + @classmethod + def import_dbapi(cls): + return AsyncAdapt_aiosqlite_dbapi( + __import__("aiosqlite"), __import__("sqlite3") + ) + + @classmethod + def get_pool_class(cls, url): + if cls._is_url_file_db(url): + return pool.NullPool + else: + return pool.StaticPool + + def is_disconnect(self, e, connection, cursor): + if isinstance( + e, self.dbapi.OperationalError + ) and "no active connection" in str(e): + return True + + return super().is_disconnect(e, connection, cursor) + + def get_driver_connection(self, connection): + return connection._connection + + +dialect = SQLiteDialect_aiosqlite diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/base.py new file mode 100644 index 00000000..0e2dc3b6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/base.py @@ -0,0 +1,2806 @@ +# dialects/sqlite/base.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +r""" +.. dialect:: sqlite + :name: SQLite + :full_support: 3.36.0 + :normal_support: 3.12+ + :best_effort: 3.7.16+ + +.. _sqlite_datetime: + +Date and Time Types +------------------- + +SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does +not provide out of the box functionality for translating values between Python +`datetime` objects and a SQLite-supported format. SQLAlchemy's own +:class:`~sqlalchemy.types.DateTime` and related types provide date formatting +and parsing functionality when SQLite is used. The implementation classes are +:class:`_sqlite.DATETIME`, :class:`_sqlite.DATE` and :class:`_sqlite.TIME`. +These types represent dates and times as ISO formatted strings, which also +nicely support ordering. There's no reliance on typical "libc" internals for +these functions so historical dates are fully supported. + +Ensuring Text affinity +^^^^^^^^^^^^^^^^^^^^^^ + +The DDL rendered for these types is the standard ``DATE``, ``TIME`` +and ``DATETIME`` indicators. However, custom storage formats can also be +applied to these types. When the +storage format is detected as containing no alpha characters, the DDL for +these types is rendered as ``DATE_CHAR``, ``TIME_CHAR``, and ``DATETIME_CHAR``, +so that the column continues to have textual affinity. + +.. seealso:: + + `Type Affinity `_ - + in the SQLite documentation + +.. 
_sqlite_autoincrement: + +SQLite Auto Incrementing Behavior +---------------------------------- + +Background on SQLite's autoincrement is at: https://sqlite.org/autoinc.html + +Key concepts: + +* SQLite has an implicit "auto increment" feature that takes place for any + non-composite primary-key column that is specifically created using + "INTEGER PRIMARY KEY" for the type + primary key. + +* SQLite also has an explicit "AUTOINCREMENT" keyword, that is **not** + equivalent to the implicit autoincrement feature; this keyword is not + recommended for general use. SQLAlchemy does not render this keyword + unless a special SQLite-specific directive is used (see below). However, + it still requires that the column's type is named "INTEGER". + +Using the AUTOINCREMENT Keyword +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To specifically render the AUTOINCREMENT keyword on the primary key column +when rendering DDL, add the flag ``sqlite_autoincrement=True`` to the Table +construct:: + + Table('sometable', metadata, + Column('id', Integer, primary_key=True), + sqlite_autoincrement=True) + +Allowing autoincrement behavior SQLAlchemy types other than Integer/INTEGER +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +SQLite's typing model is based on naming conventions. Among other things, this +means that any type name which contains the substring ``"INT"`` will be +determined to be of "integer affinity". A type named ``"BIGINT"``, +``"SPECIAL_INT"`` or even ``"XYZINTQPR"``, will be considered by SQLite to be +of "integer" affinity. However, **the SQLite autoincrement feature, whether +implicitly or explicitly enabled, requires that the name of the column's type +is exactly the string "INTEGER"**. Therefore, if an application uses a type +like :class:`.BigInteger` for a primary key, on SQLite this type will need to +be rendered as the name ``"INTEGER"`` when emitting the initial ``CREATE +TABLE`` statement in order for the autoincrement behavior to be available. + +One approach to achieve this is to use :class:`.Integer` on SQLite +only using :meth:`.TypeEngine.with_variant`:: + + table = Table( + "my_table", metadata, + Column("id", BigInteger().with_variant(Integer, "sqlite"), primary_key=True) + ) + +Another is to use a subclass of :class:`.BigInteger` that overrides its DDL +name to be ``INTEGER`` when compiled against SQLite:: + + from sqlalchemy import BigInteger + from sqlalchemy.ext.compiler import compiles + + class SLBigInteger(BigInteger): + pass + + @compiles(SLBigInteger, 'sqlite') + def bi_c(element, compiler, **kw): + return "INTEGER" + + @compiles(SLBigInteger) + def bi_c(element, compiler, **kw): + return compiler.visit_BIGINT(element, **kw) + + + table = Table( + "my_table", metadata, + Column("id", SLBigInteger(), primary_key=True) + ) + +.. seealso:: + + :meth:`.TypeEngine.with_variant` + + :ref:`sqlalchemy.ext.compiler_toplevel` + + `Datatypes In SQLite Version 3 `_ + +.. _sqlite_concurrency: + +Database Locking Behavior / Concurrency +--------------------------------------- + +SQLite is not designed for a high level of write concurrency. The database +itself, being a file, is locked completely during write operations within +transactions, meaning exactly one "connection" (in reality a file handle) +has exclusive access to the database during this period - all other +"connections" will be blocked during this time. 
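+One mitigating knob is the pysqlite ``timeout`` connect argument (a sketch
+below, assuming the stock ``sqlite3`` driver), which lengthens how long a
+blocked connection waits on the file lock before raising
+"database is locked"::
+
+    from sqlalchemy import create_engine
+
+    # wait up to 30 seconds for the write lock; the driver default is ~5
+    engine = create_engine(
+        "sqlite:///myfile.db", connect_args={"timeout": 30}
+    )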
+ +The Python DBAPI specification also calls for a connection model that is +always in a transaction; there is no ``connection.begin()`` method, +only ``connection.commit()`` and ``connection.rollback()``, upon which a +new transaction is to be begun immediately. This may seem to imply +that the SQLite driver would in theory allow only a single filehandle on a +particular database file at any time; however, there are several +factors both within SQLite itself as well as within the pysqlite driver +which loosen this restriction significantly. + +However, no matter what locking modes are used, SQLite will still always +lock the database file once a transaction is started and DML (e.g. INSERT, +UPDATE, DELETE) has at least been emitted, and this will block +other transactions at least at the point that they also attempt to emit DML. +By default, the length of time on this block is very short before it times out +with an error. + +This behavior becomes more critical when used in conjunction with the +SQLAlchemy ORM. SQLAlchemy's :class:`.Session` object by default runs +within a transaction, and with its autoflush model, may emit DML preceding +any SELECT statement. This may lead to a SQLite database that locks +more quickly than is expected. The locking mode of SQLite and the pysqlite +driver can be manipulated to some degree, however it should be noted that +achieving a high degree of write-concurrency with SQLite is a losing battle. + +For more information on SQLite's lack of write concurrency by design, please +see +`Situations Where Another RDBMS May Work Better - High Concurrency +`_ near the bottom of the page. + +The following subsections introduce areas that are impacted by SQLite's +file-based architecture and additionally will usually require workarounds to +work when using the pysqlite driver. + +.. _sqlite_isolation_level: + +Transaction Isolation Level / Autocommit +---------------------------------------- + +SQLite supports "transaction isolation" in a non-standard way, along two +axes. One is that of the +`PRAGMA read_uncommitted `_ +instruction. This setting can essentially switch SQLite between its +default mode of ``SERIALIZABLE`` isolation, and a "dirty read" isolation +mode normally referred to as ``READ UNCOMMITTED``. + +SQLAlchemy ties into this PRAGMA statement using the +:paramref:`_sa.create_engine.isolation_level` parameter of +:func:`_sa.create_engine`. +Valid values for this parameter when used with SQLite are ``"SERIALIZABLE"`` +and ``"READ UNCOMMITTED"`` corresponding to a value of 0 and 1, respectively. +SQLite defaults to ``SERIALIZABLE``, however its behavior is impacted by +the pysqlite driver's default behavior. + +When using the pysqlite driver, the ``"AUTOCOMMIT"`` isolation level is also +available, which will alter the pysqlite connection using the ``.isolation_level`` +attribute on the DBAPI connection and set it to None for the duration +of the setting. + +.. versionadded:: 1.3.16 added support for SQLite AUTOCOMMIT isolation level + when using the pysqlite / sqlite3 SQLite driver. + + +The other axis along which SQLite's transactional locking is impacted is +via the nature of the ``BEGIN`` statement used. The three varieties +are "deferred", "immediate", and "exclusive", as described at +`BEGIN TRANSACTION `_. A straight +``BEGIN`` statement uses the "deferred" mode, where the database file is +not locked until the first read or write operation, and read access remains +open to other transactions until the first write operation. 
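+As an illustration, an application that does not want the "deferred"
+behavior just described can emit the stronger "immediate" mode by hand
+from the ``"begin"`` event; this sketch assumes the pysqlite BEGIN
+workaround discussed below is also acceptable::
+
+    from sqlalchemy import create_engine, event
+
+    engine = create_engine("sqlite:///myfile.db")
+
+    @event.listens_for(engine, "connect")
+    def do_connect(dbapi_connection, connection_record):
+        # keep pysqlite from emitting BEGIN on its own schedule
+        dbapi_connection.isolation_level = None
+
+    @event.listens_for(engine, "begin")
+    def do_begin(conn):
+        # take the database lock up front rather than on the first write
+        conn.exec_driver_sql("BEGIN IMMEDIATE")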
But again, +it is critical to note that the pysqlite driver interferes with this behavior +by *not even emitting BEGIN* until the first write operation. + +.. warning:: + + SQLite's transactional scope is impacted by unresolved + issues in the pysqlite driver, which defers BEGIN statements to a greater + degree than is often feasible. See the section :ref:`pysqlite_serializable` + or :ref:`aiosqlite_serializable` for techniques to work around this behavior. + +.. seealso:: + + :ref:`dbapi_autocommit` + +INSERT/UPDATE/DELETE...RETURNING +--------------------------------- + +The SQLite dialect supports SQLite 3.35's ``INSERT|UPDATE|DELETE..RETURNING`` +syntax. ``INSERT..RETURNING`` may be used +automatically in some cases in order to fetch newly generated identifiers in +place of the traditional approach of using ``cursor.lastrowid``, however +``cursor.lastrowid`` is currently still preferred for simple single-statement +cases for its better performance. + +To specify an explicit ``RETURNING`` clause, use the +:meth:`._UpdateBase.returning` method on a per-statement basis:: + + # INSERT..RETURNING + result = connection.execute( + table.insert(). + values(name='foo'). + returning(table.c.col1, table.c.col2) + ) + print(result.all()) + + # UPDATE..RETURNING + result = connection.execute( + table.update(). + where(table.c.name=='foo'). + values(name='bar'). + returning(table.c.col1, table.c.col2) + ) + print(result.all()) + + # DELETE..RETURNING + result = connection.execute( + table.delete(). + where(table.c.name=='foo'). + returning(table.c.col1, table.c.col2) + ) + print(result.all()) + +.. versionadded:: 2.0 Added support for SQLite RETURNING + +SAVEPOINT Support +---------------------------- + +SQLite supports SAVEPOINTs, which only function once a transaction is +begun. SQLAlchemy's SAVEPOINT support is available using the +:meth:`_engine.Connection.begin_nested` method at the Core level, and +:meth:`.Session.begin_nested` at the ORM level. However, SAVEPOINTs +won't work at all with pysqlite unless workarounds are taken. + +.. warning:: + + SQLite's SAVEPOINT feature is impacted by unresolved + issues in the pysqlite and aiosqlite drivers, which defer BEGIN statements + to a greater degree than is often feasible. See the sections + :ref:`pysqlite_serializable` and :ref:`aiosqlite_serializable` + for techniques to work around this behavior. + +Transactional DDL +---------------------------- + +The SQLite database supports transactional :term:`DDL` as well. +In this case, the pysqlite driver is not only failing to start transactions, +it also is ending any existing transaction when DDL is detected, so again, +workarounds are required. + +.. warning:: + + SQLite's transactional DDL is impacted by unresolved issues + in the pysqlite driver, which fails to emit BEGIN and additionally + forces a COMMIT to cancel any transaction when DDL is encountered. + See the section :ref:`pysqlite_serializable` + for techniques to work around this behavior. + +.. _sqlite_foreign_keys: + +Foreign Key Support +------------------- + +SQLite supports FOREIGN KEY syntax when emitting CREATE statements for tables, +however by default these constraints have no effect on the operation of the +table. + +Constraint checking on SQLite has three prerequisites: + +* At least version 3.6.19 of SQLite must be in use +* The SQLite library must be compiled *without* the SQLITE_OMIT_FOREIGN_KEY + or SQLITE_OMIT_TRIGGER symbols enabled. 
+* The ``PRAGMA foreign_keys = ON`` statement must be emitted on all + connections before use -- including the initial call to + :meth:`sqlalchemy.schema.MetaData.create_all`. + +SQLAlchemy allows for the ``PRAGMA`` statement to be emitted automatically for +new connections through the usage of events:: + + from sqlalchemy.engine import Engine + from sqlalchemy import event + + @event.listens_for(Engine, "connect") + def set_sqlite_pragma(dbapi_connection, connection_record): + cursor = dbapi_connection.cursor() + cursor.execute("PRAGMA foreign_keys=ON") + cursor.close() + +.. warning:: + + When SQLite foreign keys are enabled, it is **not possible** + to emit CREATE or DROP statements for tables that contain + mutually-dependent foreign key constraints; + to emit the DDL for these tables requires that ALTER TABLE be used to + create or drop these constraints separately, for which SQLite has + no support. + +.. seealso:: + + `SQLite Foreign Key Support `_ + - on the SQLite web site. + + :ref:`event_toplevel` - SQLAlchemy event API. + + :ref:`use_alter` - more information on SQLAlchemy's facilities for handling + mutually-dependent foreign key constraints. + +.. _sqlite_on_conflict_ddl: + +ON CONFLICT support for constraints +----------------------------------- + +.. seealso:: This section describes the :term:`DDL` version of "ON CONFLICT" for + SQLite, which occurs within a CREATE TABLE statement. For "ON CONFLICT" as + applied to an INSERT statement, see :ref:`sqlite_on_conflict_insert`. + +SQLite supports a non-standard DDL clause known as ON CONFLICT which can be applied +to primary key, unique, check, and not null constraints. In DDL, it is +rendered either within the "CONSTRAINT" clause or within the column definition +itself depending on the location of the target constraint. To render this +clause within DDL, the extension parameter ``sqlite_on_conflict`` can be +specified with a string conflict resolution algorithm within the +:class:`.PrimaryKeyConstraint`, :class:`.UniqueConstraint`, +:class:`.CheckConstraint` objects. Within the :class:`_schema.Column` object, +there +are individual parameters ``sqlite_on_conflict_not_null``, +``sqlite_on_conflict_primary_key``, ``sqlite_on_conflict_unique`` which each +correspond to the three types of relevant constraint types that can be +indicated from a :class:`_schema.Column` object. + +.. seealso:: + + `ON CONFLICT `_ - in the SQLite + documentation + +.. versionadded:: 1.3 + + +The ``sqlite_on_conflict`` parameters accept a string argument which is just +the resolution name to be chosen, which on SQLite can be one of ROLLBACK, +ABORT, FAIL, IGNORE, and REPLACE. 
For example, to add a UNIQUE constraint +that specifies the IGNORE algorithm:: + + some_table = Table( + 'some_table', metadata, + Column('id', Integer, primary_key=True), + Column('data', Integer), + UniqueConstraint('id', 'data', sqlite_on_conflict='IGNORE') + ) + +The above renders CREATE TABLE DDL as:: + + CREATE TABLE some_table ( + id INTEGER NOT NULL, + data INTEGER, + PRIMARY KEY (id), + UNIQUE (id, data) ON CONFLICT IGNORE + ) + + +When using the :paramref:`_schema.Column.unique` +flag to add a UNIQUE constraint +to a single column, the ``sqlite_on_conflict_unique`` parameter can +be added to the :class:`_schema.Column` as well, which will be added to the +UNIQUE constraint in the DDL:: + + some_table = Table( + 'some_table', metadata, + Column('id', Integer, primary_key=True), + Column('data', Integer, unique=True, + sqlite_on_conflict_unique='IGNORE') + ) + +rendering:: + + CREATE TABLE some_table ( + id INTEGER NOT NULL, + data INTEGER, + PRIMARY KEY (id), + UNIQUE (data) ON CONFLICT IGNORE + ) + +To apply the FAIL algorithm for a NOT NULL constraint, +``sqlite_on_conflict_not_null`` is used:: + + some_table = Table( + 'some_table', metadata, + Column('id', Integer, primary_key=True), + Column('data', Integer, nullable=False, + sqlite_on_conflict_not_null='FAIL') + ) + +this renders the column inline ON CONFLICT phrase:: + + CREATE TABLE some_table ( + id INTEGER NOT NULL, + data INTEGER NOT NULL ON CONFLICT FAIL, + PRIMARY KEY (id) + ) + + +Similarly, for an inline primary key, use ``sqlite_on_conflict_primary_key``:: + + some_table = Table( + 'some_table', metadata, + Column('id', Integer, primary_key=True, + sqlite_on_conflict_primary_key='FAIL') + ) + +SQLAlchemy renders the PRIMARY KEY constraint separately, so the conflict +resolution algorithm is applied to the constraint itself:: + + CREATE TABLE some_table ( + id INTEGER NOT NULL, + PRIMARY KEY (id) ON CONFLICT FAIL + ) + +.. _sqlite_on_conflict_insert: + +INSERT...ON CONFLICT (Upsert) +----------------------------------- + +.. seealso:: This section describes the :term:`DML` version of "ON CONFLICT" for + SQLite, which occurs within an INSERT statement. For "ON CONFLICT" as + applied to a CREATE TABLE statement, see :ref:`sqlite_on_conflict_ddl`. + +From version 3.24.0 onwards, SQLite supports "upserts" (update or insert) +of rows into a table via the ``ON CONFLICT`` clause of the ``INSERT`` +statement. A candidate row will only be inserted if that row does not violate +any unique or primary key constraints. In the case of a unique constraint violation, a +secondary action can occur which can be either "DO UPDATE", indicating that +the data in the target row should be updated, or "DO NOTHING", which indicates +to silently skip this row. + +Conflicts are determined using columns that are part of existing unique +constraints and indexes. These constraints are identified by stating the +columns and conditions that comprise the indexes. + +SQLAlchemy provides ``ON CONFLICT`` support via the SQLite-specific +:func:`_sqlite.insert()` function, which provides +the generative methods :meth:`_sqlite.Insert.on_conflict_do_update` +and :meth:`_sqlite.Insert.on_conflict_do_nothing`: + +.. sourcecode:: pycon+sql + + >>> from sqlalchemy.dialects.sqlite import insert + + >>> insert_stmt = insert(my_table).values( + ... id='some_existing_id', + ... data='inserted value') + + >>> do_update_stmt = insert_stmt.on_conflict_do_update( + ... index_elements=['id'], + ... set_=dict(data='updated value') + ... 
) + + >>> print(do_update_stmt) + {printsql}INSERT INTO my_table (id, data) VALUES (?, ?) + ON CONFLICT (id) DO UPDATE SET data = ?{stop} + + >>> do_nothing_stmt = insert_stmt.on_conflict_do_nothing( + ... index_elements=['id'] + ... ) + + >>> print(do_nothing_stmt) + {printsql}INSERT INTO my_table (id, data) VALUES (?, ?) + ON CONFLICT (id) DO NOTHING + +.. versionadded:: 1.4 + +.. seealso:: + + `Upsert + `_ + - in the SQLite documentation. + + +Specifying the Target +^^^^^^^^^^^^^^^^^^^^^ + +Both methods supply the "target" of the conflict using column inference: + +* The :paramref:`_sqlite.Insert.on_conflict_do_update.index_elements` argument + specifies a sequence containing string column names, :class:`_schema.Column` + objects, and/or SQL expression elements, which would identify a unique index + or unique constraint. + +* When using :paramref:`_sqlite.Insert.on_conflict_do_update.index_elements` + to infer an index, a partial index can be inferred by also specifying the + :paramref:`_sqlite.Insert.on_conflict_do_update.index_where` parameter: + + .. sourcecode:: pycon+sql + + >>> stmt = insert(my_table).values(user_email='a@b.com', data='inserted data') + + >>> do_update_stmt = stmt.on_conflict_do_update( + ... index_elements=[my_table.c.user_email], + ... index_where=my_table.c.user_email.like('%@gmail.com'), + ... set_=dict(data=stmt.excluded.data) + ... ) + + >>> print(do_update_stmt) + {printsql}INSERT INTO my_table (data, user_email) VALUES (?, ?) + ON CONFLICT (user_email) + WHERE user_email LIKE '%@gmail.com' + DO UPDATE SET data = excluded.data + +The SET Clause +^^^^^^^^^^^^^^^ + +``ON CONFLICT...DO UPDATE`` is used to perform an update of the already +existing row, using any combination of new values as well as values +from the proposed insertion. These values are specified using the +:paramref:`_sqlite.Insert.on_conflict_do_update.set_` parameter. This +parameter accepts a dictionary which consists of direct values +for UPDATE: + +.. sourcecode:: pycon+sql + + >>> stmt = insert(my_table).values(id='some_id', data='inserted value') + + >>> do_update_stmt = stmt.on_conflict_do_update( + ... index_elements=['id'], + ... set_=dict(data='updated value') + ... ) + + >>> print(do_update_stmt) + {printsql}INSERT INTO my_table (id, data) VALUES (?, ?) + ON CONFLICT (id) DO UPDATE SET data = ? + +.. warning:: + + The :meth:`_sqlite.Insert.on_conflict_do_update` method does **not** take + into account Python-side default UPDATE values or generation functions, + e.g. those specified using :paramref:`_schema.Column.onupdate`. These + values will not be exercised for an ON CONFLICT style of UPDATE, unless + they are manually specified in the + :paramref:`_sqlite.Insert.on_conflict_do_update.set_` dictionary. + +Updating using the Excluded INSERT Values +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In order to refer to the proposed insertion row, the special alias +:attr:`~.sqlite.Insert.excluded` is available as an attribute on +the :class:`_sqlite.Insert` object; this object creates an "excluded." prefix +on a column, that informs the DO UPDATE to update the row with the value that +would have been inserted had the constraint not failed: + +.. sourcecode:: pycon+sql + + >>> stmt = insert(my_table).values( + ... id='some_id', + ... data='inserted value', + ... author='jlh' + ... ) + + >>> do_update_stmt = stmt.on_conflict_do_update( + ... index_elements=['id'], + ... set_=dict(data='updated value', author=stmt.excluded.author) + ... 
) + + >>> print(do_update_stmt) + {printsql}INSERT INTO my_table (id, data, author) VALUES (?, ?, ?) + ON CONFLICT (id) DO UPDATE SET data = ?, author = excluded.author + +Additional WHERE Criteria +^^^^^^^^^^^^^^^^^^^^^^^^^ + +The :meth:`_sqlite.Insert.on_conflict_do_update` method also accepts +a WHERE clause using the :paramref:`_sqlite.Insert.on_conflict_do_update.where` +parameter, which will limit those rows which receive an UPDATE: + +.. sourcecode:: pycon+sql + + >>> stmt = insert(my_table).values( + ... id='some_id', + ... data='inserted value', + ... author='jlh' + ... ) + + >>> on_update_stmt = stmt.on_conflict_do_update( + ... index_elements=['id'], + ... set_=dict(data='updated value', author=stmt.excluded.author), + ... where=(my_table.c.status == 2) + ... ) + >>> print(on_update_stmt) + {printsql}INSERT INTO my_table (id, data, author) VALUES (?, ?, ?) + ON CONFLICT (id) DO UPDATE SET data = ?, author = excluded.author + WHERE my_table.status = ? + + +Skipping Rows with DO NOTHING +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +``ON CONFLICT`` may be used to skip inserting a row entirely +if any conflict with a unique constraint occurs; below this is illustrated +using the :meth:`_sqlite.Insert.on_conflict_do_nothing` method: + +.. sourcecode:: pycon+sql + + >>> stmt = insert(my_table).values(id='some_id', data='inserted value') + >>> stmt = stmt.on_conflict_do_nothing(index_elements=['id']) + >>> print(stmt) + {printsql}INSERT INTO my_table (id, data) VALUES (?, ?) ON CONFLICT (id) DO NOTHING + + +If ``DO NOTHING`` is used without specifying any columns or constraint, +it has the effect of skipping the INSERT for any unique violation which +occurs: + +.. sourcecode:: pycon+sql + + >>> stmt = insert(my_table).values(id='some_id', data='inserted value') + >>> stmt = stmt.on_conflict_do_nothing() + >>> print(stmt) + {printsql}INSERT INTO my_table (id, data) VALUES (?, ?) ON CONFLICT DO NOTHING + +.. _sqlite_type_reflection: + +Type Reflection +--------------- + +SQLite types are unlike those of most other database backends, in that +the string name of the type usually does not correspond to a "type" in a +one-to-one fashion. Instead, SQLite links per-column typing behavior +to one of five so-called "type affinities" based on a string matching +pattern for the type. + +SQLAlchemy's reflection process, when inspecting types, uses a simple +lookup table to link the keywords returned to provided SQLAlchemy types. +This lookup table is present within the SQLite dialect as it is for all +other dialects. However, the SQLite dialect has a different "fallback" +routine for when a particular type name is not located in the lookup map; +it instead implements the SQLite "type affinity" scheme located at +https://www.sqlite.org/datatype3.html section 2.1. 
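+
+As a brief sketch of the effect (the table and type names here are invented
+for illustration), a column declared with a type name unknown to the lookup
+table still reflects to a usable type via affinity::
+
+    from sqlalchemy import create_engine, inspect
+
+    engine = create_engine("sqlite://")
+    with engine.begin() as conn:
+        # "SHORTINT" is not in the lookup table, but contains "INT",
+        # so it reflects with INTEGER affinity
+        conn.exec_driver_sql("CREATE TABLE t (x SHORTINT)")
+
+    print(inspect(engine).get_columns("t"))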
+
+The provided typemap will make direct associations from an exact string
+name match for the following types:
+
+:class:`_types.BIGINT`, :class:`_types.BLOB`,
+:class:`_types.BOOLEAN`, :class:`_types.CHAR`,
+:class:`_types.DATE`, :class:`_types.DATETIME`,
+:class:`_types.DECIMAL`, :class:`_types.FLOAT`,
+:class:`_types.INTEGER`, :class:`_types.NUMERIC`,
+:class:`_types.REAL`, :class:`_types.SMALLINT`,
+:class:`_types.TEXT`, :class:`_types.TIME`,
+:class:`_types.TIMESTAMP`, :class:`_types.VARCHAR`,
+:class:`_types.NVARCHAR`, :class:`_types.NCHAR`
+
+When a type name does not match one of the above types, the "type affinity"
+lookup is used instead:
+
+* :class:`_types.INTEGER` is returned if the type name includes the
+  string ``INT``
+* :class:`_types.TEXT` is returned if the type name includes the
+  string ``CHAR``, ``CLOB`` or ``TEXT``
+* :class:`_types.NullType` is returned if the type name includes the
+  string ``BLOB``
+* :class:`_types.REAL` is returned if the type name includes the string
+  ``REAL``, ``FLOA`` or ``DOUB``.
+* Otherwise, the :class:`_types.NUMERIC` type is used.
+
+.. _sqlite_partial_index:
+
+Partial Indexes
+---------------
+
+A partial index, e.g. one which uses a WHERE clause, can be specified
+with the DDL system using the argument ``sqlite_where``::
+
+    tbl = Table('testtbl', m, Column('data', Integer))
+    idx = Index('test_idx1', tbl.c.data,
+                sqlite_where=and_(tbl.c.data > 5, tbl.c.data < 10))
+
+The index will be rendered at create time as::
+
+    CREATE INDEX test_idx1 ON testtbl (data)
+    WHERE data > 5 AND data < 10
+
+.. _sqlite_dotted_column_names:
+
+Dotted Column Names
+-------------------
+
+Using table or column names that explicitly have periods in them is
+**not recommended**. While this is a bad idea for relational databases in
+general, as the dot is a syntactically significant character, SQLite
+versions prior to **3.10.0** additionally have a bug which requires that
+SQLAlchemy filter out these dots in result sets.
+
+The bug, entirely outside of SQLAlchemy, can be illustrated thusly::
+
+    import sqlite3
+
+    assert sqlite3.sqlite_version_info < (3, 10, 0), "bug is fixed in this version"
+
+    conn = sqlite3.connect(":memory:")
+    cursor = conn.cursor()
+
+    cursor.execute("create table x (a integer, b integer)")
+    cursor.execute("insert into x (a, b) values (1, 1)")
+    cursor.execute("insert into x (a, b) values (2, 2)")
+
+    cursor.execute("select x.a, x.b from x")
+    assert [c[0] for c in cursor.description] == ['a', 'b']
+
+    cursor.execute('''
+        select x.a, x.b from x where a=1
+        union
+        select x.a, x.b from x where a=2
+    ''')
+    assert [c[0] for c in cursor.description] == ['a', 'b'], \
+        [c[0] for c in cursor.description]
+
+The second assertion fails::
+
+    Traceback (most recent call last):
+      File "test.py", line 19, in <module>
+        [c[0] for c in cursor.description]
+    AssertionError: ['x.a', 'x.b']
+
+Where above, the driver incorrectly reports the names of the columns
+including the name of the table, which is entirely inconsistent with
+the case when the UNION is not present.
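+
+In simplified sketch form (a hypothetical helper mirroring what
+``SQLiteExecutionContext._translate_colname`` does further down in this
+module), the translation applied is::
+
+    def strip_dotted(colname):
+        # "x.a" (or "db.x.a") as reported in cursor.description -> "a"
+        return colname.split(".")[-1]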
+ +SQLAlchemy relies upon column names being predictable in how they match +to the original statement, so the SQLAlchemy dialect has no choice but +to filter these out:: + + + from sqlalchemy import create_engine + + eng = create_engine("sqlite://") + conn = eng.connect() + + conn.exec_driver_sql("create table x (a integer, b integer)") + conn.exec_driver_sql("insert into x (a, b) values (1, 1)") + conn.exec_driver_sql("insert into x (a, b) values (2, 2)") + + result = conn.exec_driver_sql("select x.a, x.b from x") + assert result.keys() == ["a", "b"] + + result = conn.exec_driver_sql(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert result.keys() == ["a", "b"] + +Note that above, even though SQLAlchemy filters out the dots, *both +names are still addressable*:: + + >>> row = result.first() + >>> row["a"] + 1 + >>> row["x.a"] + 1 + >>> row["b"] + 1 + >>> row["x.b"] + 1 + +Therefore, the workaround applied by SQLAlchemy only impacts +:meth:`_engine.CursorResult.keys` and :meth:`.Row.keys()` in the public API. In +the very specific case where an application is forced to use column names that +contain dots, and the functionality of :meth:`_engine.CursorResult.keys` and +:meth:`.Row.keys()` is required to return these dotted names unmodified, +the ``sqlite_raw_colnames`` execution option may be provided, either on a +per-:class:`_engine.Connection` basis:: + + result = conn.execution_options(sqlite_raw_colnames=True).exec_driver_sql(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert result.keys() == ["x.a", "x.b"] + +or on a per-:class:`_engine.Engine` basis:: + + engine = create_engine("sqlite://", execution_options={"sqlite_raw_colnames": True}) + +When using the per-:class:`_engine.Engine` execution option, note that +**Core and ORM queries that use UNION may not function properly**. + +SQLite-specific table options +----------------------------- + +One option for CREATE TABLE is supported directly by the SQLite +dialect in conjunction with the :class:`_schema.Table` construct: + +* ``WITHOUT ROWID``:: + + Table("some_table", metadata, ..., sqlite_with_rowid=False) + +.. seealso:: + + `SQLite CREATE TABLE options + `_ + + +.. _sqlite_include_internal: + +Reflecting internal schema tables +---------------------------------- + +Reflection methods that return lists of tables will omit so-called +"SQLite internal schema object" names, which are considered by SQLite +as any object name that is prefixed with ``sqlite_``. An example of +such an object is the ``sqlite_sequence`` table that's generated when +the ``AUTOINCREMENT`` column parameter is used. In order to return +these objects, the parameter ``sqlite_include_internal=True`` may be +passed to methods such as :meth:`_schema.MetaData.reflect` or +:meth:`.Inspector.get_table_names`. + +.. versionadded:: 2.0 Added the ``sqlite_include_internal=True`` parameter. + Previously, these tables were not ignored by SQLAlchemy reflection + methods. + +.. note:: + + The ``sqlite_include_internal`` parameter does not refer to the + "system" tables that are present in schemas such as ``sqlite_master``. + +.. seealso:: + + `SQLite Internal Schema Objects `_ - in the SQLite + documentation. + +""" # noqa +from __future__ import annotations + +import datetime +import numbers +import re +from typing import Optional + +from .json import JSON +from .json import JSONIndexType +from .json import JSONPathType +from ... import exc +from ... 
import schema as sa_schema +from ... import sql +from ... import text +from ... import types as sqltypes +from ... import util +from ...engine import default +from ...engine import processors +from ...engine import reflection +from ...engine.reflection import ReflectionDefaults +from ...sql import coercions +from ...sql import ColumnElement +from ...sql import compiler +from ...sql import elements +from ...sql import roles +from ...sql import schema +from ...types import BLOB # noqa +from ...types import BOOLEAN # noqa +from ...types import CHAR # noqa +from ...types import DECIMAL # noqa +from ...types import FLOAT # noqa +from ...types import INTEGER # noqa +from ...types import NUMERIC # noqa +from ...types import REAL # noqa +from ...types import SMALLINT # noqa +from ...types import TEXT # noqa +from ...types import TIMESTAMP # noqa +from ...types import VARCHAR # noqa + + +class _SQliteJson(JSON): + def result_processor(self, dialect, coltype): + default_processor = super().result_processor(dialect, coltype) + + def process(value): + try: + return default_processor(value) + except TypeError: + if isinstance(value, numbers.Number): + return value + else: + raise + + return process + + +class _DateTimeMixin: + _reg = None + _storage_format = None + + def __init__(self, storage_format=None, regexp=None, **kw): + super().__init__(**kw) + if regexp is not None: + self._reg = re.compile(regexp) + if storage_format is not None: + self._storage_format = storage_format + + @property + def format_is_text_affinity(self): + """return True if the storage format will automatically imply + a TEXT affinity. + + If the storage format contains no non-numeric characters, + it will imply a NUMERIC storage format on SQLite; in this case, + the type will generate its DDL as DATE_CHAR, DATETIME_CHAR, + TIME_CHAR. + + """ + spec = self._storage_format % { + "year": 0, + "month": 0, + "day": 0, + "hour": 0, + "minute": 0, + "second": 0, + "microsecond": 0, + } + return bool(re.search(r"[^0-9]", spec)) + + def adapt(self, cls, **kw): + if issubclass(cls, _DateTimeMixin): + if self._storage_format: + kw["storage_format"] = self._storage_format + if self._reg: + kw["regexp"] = self._reg + return super().adapt(cls, **kw) + + def literal_processor(self, dialect): + bp = self.bind_processor(dialect) + + def process(value): + return "'%s'" % bp(value) + + return process + + +class DATETIME(_DateTimeMixin, sqltypes.DateTime): + r"""Represent a Python datetime object in SQLite using a string. + + The default string storage format is:: + + "%(year)04d-%(month)02d-%(day)02d %(hour)02d:%(minute)02d:%(second)02d.%(microsecond)06d" + + e.g.:: + + 2021-03-15 12:05:57.105542 + + The incoming storage format is by default parsed using the + Python ``datetime.fromisoformat()`` function. + + .. versionchanged:: 2.0 ``datetime.fromisoformat()`` is used for default + datetime string parsing. + + The storage format can be customized to some degree using the + ``storage_format`` and ``regexp`` parameters, such as:: + + import re + from sqlalchemy.dialects.sqlite import DATETIME + + dt = DATETIME(storage_format="%(year)04d/%(month)02d/%(day)02d " + "%(hour)02d:%(minute)02d:%(second)02d", + regexp=r"(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)" + ) + + :param storage_format: format string which will be applied to the dict + with keys year, month, day, hour, minute, second, and microsecond. 
+
+    :param regexp: regular expression which will be applied to incoming result
+     rows, replacing the use of ``datetime.fromisoformat()`` to parse incoming
+     strings. If the regexp contains named groups, the resulting match dict is
+     applied to the Python datetime() constructor as keyword arguments.
+     Otherwise, if positional groups are used, the datetime() constructor
+     is called with positional arguments via
+     ``*map(int, match_obj.groups(0))``.
+
+    """ # noqa
+
+    _storage_format = (
+        "%(year)04d-%(month)02d-%(day)02d "
+        "%(hour)02d:%(minute)02d:%(second)02d.%(microsecond)06d"
+    )
+
+    def __init__(self, *args, **kwargs):
+        truncate_microseconds = kwargs.pop("truncate_microseconds", False)
+        super().__init__(*args, **kwargs)
+        if truncate_microseconds:
+            assert "storage_format" not in kwargs, (
+                "You can specify only "
+                "one of truncate_microseconds or storage_format."
+            )
+            assert "regexp" not in kwargs, (
+                "You can specify only one of "
+                "truncate_microseconds or regexp."
+            )
+            self._storage_format = (
+                "%(year)04d-%(month)02d-%(day)02d "
+                "%(hour)02d:%(minute)02d:%(second)02d"
+            )
+
+    def bind_processor(self, dialect):
+        datetime_datetime = datetime.datetime
+        datetime_date = datetime.date
+        format_ = self._storage_format
+
+        def process(value):
+            if value is None:
+                return None
+            elif isinstance(value, datetime_datetime):
+                return format_ % {
+                    "year": value.year,
+                    "month": value.month,
+                    "day": value.day,
+                    "hour": value.hour,
+                    "minute": value.minute,
+                    "second": value.second,
+                    "microsecond": value.microsecond,
+                }
+            elif isinstance(value, datetime_date):
+                return format_ % {
+                    "year": value.year,
+                    "month": value.month,
+                    "day": value.day,
+                    "hour": 0,
+                    "minute": 0,
+                    "second": 0,
+                    "microsecond": 0,
+                }
+            else:
+                raise TypeError(
+                    "SQLite DateTime type only accepts Python "
+                    "datetime and date objects as input."
+                )
+
+        return process
+
+    def result_processor(self, dialect, coltype):
+        if self._reg:
+            return processors.str_to_datetime_processor_factory(
+                self._reg, datetime.datetime
+            )
+        else:
+            return processors.str_to_datetime
+
+
+class DATE(_DateTimeMixin, sqltypes.Date):
+    r"""Represent a Python date object in SQLite using a string.
+
+    The default string storage format is::
+
+        "%(year)04d-%(month)02d-%(day)02d"
+
+    e.g.::
+
+        2011-03-15
+
+    The incoming storage format is by default parsed using the
+    Python ``date.fromisoformat()`` function.
+
+    .. versionchanged:: 2.0 ``date.fromisoformat()`` is used for default
+       date string parsing.
+
+    The storage format can be customized to some degree using the
+    ``storage_format`` and ``regexp`` parameters, such as::
+
+        import re
+        from sqlalchemy.dialects.sqlite import DATE
+
+        d = DATE(
+            storage_format="%(month)02d/%(day)02d/%(year)04d",
+            regexp=re.compile("(?P<month>\d+)/(?P<day>\d+)/(?P<year>\d+)")
+        )
+
+    :param storage_format: format string which will be applied to the
+     dict with keys year, month, and day.
+
+    :param regexp: regular expression which will be applied to
+     incoming result rows, replacing the use of ``date.fromisoformat()`` to
+     parse incoming strings. If the regexp contains named groups, the resulting
+     match dict is applied to the Python date() constructor as keyword
+     arguments. Otherwise, if positional groups are used, the date()
+     constructor is called with positional arguments via
+     ``*map(int, match_obj.groups(0))``.
+ + """ + + _storage_format = "%(year)04d-%(month)02d-%(day)02d" + + def bind_processor(self, dialect): + datetime_date = datetime.date + format_ = self._storage_format + + def process(value): + if value is None: + return None + elif isinstance(value, datetime_date): + return format_ % { + "year": value.year, + "month": value.month, + "day": value.day, + } + else: + raise TypeError( + "SQLite Date type only accepts Python " + "date objects as input." + ) + + return process + + def result_processor(self, dialect, coltype): + if self._reg: + return processors.str_to_datetime_processor_factory( + self._reg, datetime.date + ) + else: + return processors.str_to_date + + +class TIME(_DateTimeMixin, sqltypes.Time): + r"""Represent a Python time object in SQLite using a string. + + The default string storage format is:: + + "%(hour)02d:%(minute)02d:%(second)02d.%(microsecond)06d" + + e.g.:: + + 12:05:57.10558 + + The incoming storage format is by default parsed using the + Python ``time.fromisoformat()`` function. + + .. versionchanged:: 2.0 ``time.fromisoformat()`` is used for default + time string parsing. + + The storage format can be customized to some degree using the + ``storage_format`` and ``regexp`` parameters, such as:: + + import re + from sqlalchemy.dialects.sqlite import TIME + + t = TIME(storage_format="%(hour)02d-%(minute)02d-" + "%(second)02d-%(microsecond)06d", + regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?") + ) + + :param storage_format: format string which will be applied to the dict + with keys hour, minute, second, and microsecond. + + :param regexp: regular expression which will be applied to incoming result + rows, replacing the use of ``datetime.fromisoformat()`` to parse incoming + strings. If the regexp contains named groups, the resulting match dict is + applied to the Python time() constructor as keyword arguments. Otherwise, + if positional groups are used, the time() constructor is called with + positional arguments via ``*map(int, match_obj.groups(0))``. + + """ + + _storage_format = "%(hour)02d:%(minute)02d:%(second)02d.%(microsecond)06d" + + def __init__(self, *args, **kwargs): + truncate_microseconds = kwargs.pop("truncate_microseconds", False) + super().__init__(*args, **kwargs) + if truncate_microseconds: + assert "storage_format" not in kwargs, ( + "You can specify only " + "one of truncate_microseconds or storage_format." + ) + assert "regexp" not in kwargs, ( + "You can specify only one of " + "truncate_microseconds or regexp." + ) + self._storage_format = "%(hour)02d:%(minute)02d:%(second)02d" + + def bind_processor(self, dialect): + datetime_time = datetime.time + format_ = self._storage_format + + def process(value): + if value is None: + return None + elif isinstance(value, datetime_time): + return format_ % { + "hour": value.hour, + "minute": value.minute, + "second": value.second, + "microsecond": value.microsecond, + } + else: + raise TypeError( + "SQLite Time type only accepts Python " + "time objects as input." 
+ ) + + return process + + def result_processor(self, dialect, coltype): + if self._reg: + return processors.str_to_datetime_processor_factory( + self._reg, datetime.time + ) + else: + return processors.str_to_time + + +colspecs = { + sqltypes.Date: DATE, + sqltypes.DateTime: DATETIME, + sqltypes.JSON: _SQliteJson, + sqltypes.JSON.JSONIndexType: JSONIndexType, + sqltypes.JSON.JSONPathType: JSONPathType, + sqltypes.Time: TIME, +} + +ischema_names = { + "BIGINT": sqltypes.BIGINT, + "BLOB": sqltypes.BLOB, + "BOOL": sqltypes.BOOLEAN, + "BOOLEAN": sqltypes.BOOLEAN, + "CHAR": sqltypes.CHAR, + "DATE": sqltypes.DATE, + "DATE_CHAR": sqltypes.DATE, + "DATETIME": sqltypes.DATETIME, + "DATETIME_CHAR": sqltypes.DATETIME, + "DOUBLE": sqltypes.DOUBLE, + "DECIMAL": sqltypes.DECIMAL, + "FLOAT": sqltypes.FLOAT, + "INT": sqltypes.INTEGER, + "INTEGER": sqltypes.INTEGER, + "JSON": JSON, + "NUMERIC": sqltypes.NUMERIC, + "REAL": sqltypes.REAL, + "SMALLINT": sqltypes.SMALLINT, + "TEXT": sqltypes.TEXT, + "TIME": sqltypes.TIME, + "TIME_CHAR": sqltypes.TIME, + "TIMESTAMP": sqltypes.TIMESTAMP, + "VARCHAR": sqltypes.VARCHAR, + "NVARCHAR": sqltypes.NVARCHAR, + "NCHAR": sqltypes.NCHAR, +} + + +class SQLiteCompiler(compiler.SQLCompiler): + extract_map = util.update_copy( + compiler.SQLCompiler.extract_map, + { + "month": "%m", + "day": "%d", + "year": "%Y", + "second": "%S", + "hour": "%H", + "doy": "%j", + "minute": "%M", + "epoch": "%s", + "dow": "%w", + "week": "%W", + }, + ) + + def visit_truediv_binary(self, binary, operator, **kw): + return ( + self.process(binary.left, **kw) + + " / " + + "(%s + 0.0)" % self.process(binary.right, **kw) + ) + + def visit_now_func(self, fn, **kw): + return "CURRENT_TIMESTAMP" + + def visit_localtimestamp_func(self, func, **kw): + return 'DATETIME(CURRENT_TIMESTAMP, "localtime")' + + def visit_true(self, expr, **kw): + return "1" + + def visit_false(self, expr, **kw): + return "0" + + def visit_char_length_func(self, fn, **kw): + return "length%s" % self.function_argspec(fn) + + def visit_aggregate_strings_func(self, fn, **kw): + return "group_concat%s" % self.function_argspec(fn) + + def visit_cast(self, cast, **kwargs): + if self.dialect.supports_cast: + return super().visit_cast(cast, **kwargs) + else: + return self.process(cast.clause, **kwargs) + + def visit_extract(self, extract, **kw): + try: + return "CAST(STRFTIME('%s', %s) AS INTEGER)" % ( + self.extract_map[extract.field], + self.process(extract.expr, **kw), + ) + except KeyError as err: + raise exc.CompileError( + "%s is not a valid extract argument." 
% extract.field + ) from err + + def returning_clause( + self, + stmt, + returning_cols, + *, + populate_result_map, + **kw, + ): + kw["include_table"] = False + return super().returning_clause( + stmt, returning_cols, populate_result_map=populate_result_map, **kw + ) + + def limit_clause(self, select, **kw): + text = "" + if select._limit_clause is not None: + text += "\n LIMIT " + self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + if select._limit_clause is None: + text += "\n LIMIT " + self.process(sql.literal(-1)) + text += " OFFSET " + self.process(select._offset_clause, **kw) + else: + text += " OFFSET " + self.process(sql.literal(0), **kw) + return text + + def for_update_clause(self, select, **kw): + # sqlite has no "FOR UPDATE" AFAICT + return "" + + def update_from_clause( + self, update_stmt, from_table, extra_froms, from_hints, **kw + ): + kw["asfrom"] = True + return "FROM " + ", ".join( + t._compiler_dispatch(self, fromhints=from_hints, **kw) + for t in extra_froms + ) + + def visit_is_distinct_from_binary(self, binary, operator, **kw): + return "%s IS NOT %s" % ( + self.process(binary.left), + self.process(binary.right), + ) + + def visit_is_not_distinct_from_binary(self, binary, operator, **kw): + return "%s IS %s" % ( + self.process(binary.left), + self.process(binary.right), + ) + + def visit_json_getitem_op_binary(self, binary, operator, **kw): + if binary.type._type_affinity is sqltypes.JSON: + expr = "JSON_QUOTE(JSON_EXTRACT(%s, %s))" + else: + expr = "JSON_EXTRACT(%s, %s)" + + return expr % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + def visit_json_path_getitem_op_binary(self, binary, operator, **kw): + if binary.type._type_affinity is sqltypes.JSON: + expr = "JSON_QUOTE(JSON_EXTRACT(%s, %s))" + else: + expr = "JSON_EXTRACT(%s, %s)" + + return expr % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw), + ) + + def visit_empty_set_op_expr(self, type_, expand_op, **kw): + # slightly old SQLite versions don't seem to be able to handle + # the empty set impl + return self.visit_empty_set_expr(type_) + + def visit_empty_set_expr(self, element_types, **kw): + return "SELECT %s FROM (SELECT %s) WHERE 1!=1" % ( + ", ".join("1" for type_ in element_types or [INTEGER()]), + ", ".join("1" for type_ in element_types or [INTEGER()]), + ) + + def visit_regexp_match_op_binary(self, binary, operator, **kw): + return self._generate_generic_binary(binary, " REGEXP ", **kw) + + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): + return self._generate_generic_binary(binary, " NOT REGEXP ", **kw) + + def _on_conflict_target(self, clause, **kw): + if clause.constraint_target is not None: + target_text = "(%s)" % clause.constraint_target + elif clause.inferred_target_elements is not None: + target_text = "(%s)" % ", ".join( + ( + self.preparer.quote(c) + if isinstance(c, str) + else self.process(c, include_table=False, use_schema=False) + ) + for c in clause.inferred_target_elements + ) + if clause.inferred_target_whereclause is not None: + target_text += " WHERE %s" % self.process( + clause.inferred_target_whereclause, + include_table=False, + use_schema=False, + literal_binds=True, + ) + + else: + target_text = "" + + return target_text + + def visit_on_conflict_do_nothing(self, on_conflict, **kw): + target_text = self._on_conflict_target(on_conflict, **kw) + + if target_text: + return "ON CONFLICT %s DO NOTHING" % target_text + else: + return "ON CONFLICT DO NOTHING" + + def 
visit_on_conflict_do_update(self, on_conflict, **kw): + clause = on_conflict + + target_text = self._on_conflict_target(on_conflict, **kw) + + action_set_ops = [] + + set_parameters = dict(clause.update_values_to_set) + # create a list of column assignment clauses as tuples + + insert_statement = self.stack[-1]["selectable"] + cols = insert_statement.table.c + for c in cols: + col_key = c.key + + if col_key in set_parameters: + value = set_parameters.pop(col_key) + elif c in set_parameters: + value = set_parameters.pop(c) + else: + continue + + if coercions._is_literal(value): + value = elements.BindParameter(None, value, type_=c.type) + + else: + if ( + isinstance(value, elements.BindParameter) + and value.type._isnull + ): + value = value._clone() + value.type = c.type + value_text = self.process(value.self_group(), use_schema=False) + + key_text = self.preparer.quote(c.name) + action_set_ops.append("%s = %s" % (key_text, value_text)) + + # check for names that don't match columns + if set_parameters: + util.warn( + "Additional column names not matching " + "any column keys in table '%s': %s" + % ( + self.current_executable.table.name, + (", ".join("'%s'" % c for c in set_parameters)), + ) + ) + for k, v in set_parameters.items(): + key_text = ( + self.preparer.quote(k) + if isinstance(k, str) + else self.process(k, use_schema=False) + ) + value_text = self.process( + coercions.expect(roles.ExpressionElementRole, v), + use_schema=False, + ) + action_set_ops.append("%s = %s" % (key_text, value_text)) + + action_text = ", ".join(action_set_ops) + if clause.update_whereclause is not None: + action_text += " WHERE %s" % self.process( + clause.update_whereclause, include_table=True, use_schema=False + ) + + return "ON CONFLICT %s DO UPDATE SET %s" % (target_text, action_text) + + def visit_bitwise_xor_op_binary(self, binary, operator, **kw): + # sqlite has no xor. Use "a XOR b" = "(a | b) - (a & b)". 
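+        # the identity holds bitwise: wherever (a & b) has a 1 bit,
+        # (a | b) also has a 1, so the subtraction never borrows and each
+        # result bit is the OR bit minus the AND bit, i.e. exactly XOR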
+ kw["eager_grouping"] = True + or_ = self._generate_generic_binary(binary, " | ", **kw) + and_ = self._generate_generic_binary(binary, " & ", **kw) + return f"({or_} - {and_})" + + +class SQLiteDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): + coltype = self.dialect.type_compiler_instance.process( + column.type, type_expression=column + ) + colspec = self.preparer.format_column(column) + " " + coltype + default = self.get_column_default_string(column) + if default is not None: + if isinstance(column.server_default.arg, ColumnElement): + default = "(" + default + ")" + colspec += " DEFAULT " + default + + if not column.nullable: + colspec += " NOT NULL" + + on_conflict_clause = column.dialect_options["sqlite"][ + "on_conflict_not_null" + ] + if on_conflict_clause is not None: + colspec += " ON CONFLICT " + on_conflict_clause + + if column.primary_key: + if ( + column.autoincrement is True + and len(column.table.primary_key.columns) != 1 + ): + raise exc.CompileError( + "SQLite does not support autoincrement for " + "composite primary keys" + ) + + if ( + column.table.dialect_options["sqlite"]["autoincrement"] + and len(column.table.primary_key.columns) == 1 + and issubclass(column.type._type_affinity, sqltypes.Integer) + and not column.foreign_keys + ): + colspec += " PRIMARY KEY" + + on_conflict_clause = column.dialect_options["sqlite"][ + "on_conflict_primary_key" + ] + if on_conflict_clause is not None: + colspec += " ON CONFLICT " + on_conflict_clause + + colspec += " AUTOINCREMENT" + + if column.computed is not None: + colspec += " " + self.process(column.computed) + + return colspec + + def visit_primary_key_constraint(self, constraint, **kw): + # for columns with sqlite_autoincrement=True, + # the PRIMARY KEY constraint can only be inline + # with the column itself. 
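+        # SQLite only accepts AUTOINCREMENT as part of an inline
+        # "INTEGER PRIMARY KEY AUTOINCREMENT" column definition, not on a
+        # table-level PRIMARY KEY constraint; returning None below skips
+        # the separate constraint so the inline form is rendered instead.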
+ if len(constraint.columns) == 1: + c = list(constraint)[0] + if ( + c.primary_key + and c.table.dialect_options["sqlite"]["autoincrement"] + and issubclass(c.type._type_affinity, sqltypes.Integer) + and not c.foreign_keys + ): + return None + + text = super().visit_primary_key_constraint(constraint) + + on_conflict_clause = constraint.dialect_options["sqlite"][ + "on_conflict" + ] + if on_conflict_clause is None and len(constraint.columns) == 1: + on_conflict_clause = list(constraint)[0].dialect_options["sqlite"][ + "on_conflict_primary_key" + ] + + if on_conflict_clause is not None: + text += " ON CONFLICT " + on_conflict_clause + + return text + + def visit_unique_constraint(self, constraint, **kw): + text = super().visit_unique_constraint(constraint) + + on_conflict_clause = constraint.dialect_options["sqlite"][ + "on_conflict" + ] + if on_conflict_clause is None and len(constraint.columns) == 1: + col1 = list(constraint)[0] + if isinstance(col1, schema.SchemaItem): + on_conflict_clause = list(constraint)[0].dialect_options[ + "sqlite" + ]["on_conflict_unique"] + + if on_conflict_clause is not None: + text += " ON CONFLICT " + on_conflict_clause + + return text + + def visit_check_constraint(self, constraint, **kw): + text = super().visit_check_constraint(constraint) + + on_conflict_clause = constraint.dialect_options["sqlite"][ + "on_conflict" + ] + + if on_conflict_clause is not None: + text += " ON CONFLICT " + on_conflict_clause + + return text + + def visit_column_check_constraint(self, constraint, **kw): + text = super().visit_column_check_constraint(constraint) + + if constraint.dialect_options["sqlite"]["on_conflict"] is not None: + raise exc.CompileError( + "SQLite does not support on conflict clause for " + "column check constraint" + ) + + return text + + def visit_foreign_key_constraint(self, constraint, **kw): + local_table = constraint.elements[0].parent.table + remote_table = constraint.elements[0].column.table + + if local_table.schema != remote_table.schema: + return None + else: + return super().visit_foreign_key_constraint(constraint) + + def define_constraint_remote_table(self, constraint, table, preparer): + """Format the remote table clause of a CREATE CONSTRAINT clause.""" + + return preparer.format_table(table, use_schema=False) + + def visit_create_index( + self, create, include_schema=False, include_table_schema=True, **kw + ): + index = create.element + self._verify_index_table(index) + preparer = self.preparer + text = "CREATE " + if index.unique: + text += "UNIQUE " + + text += "INDEX " + + if create.if_not_exists: + text += "IF NOT EXISTS " + + text += "%s ON %s (%s)" % ( + self._prepared_index_name(index, include_schema=True), + preparer.format_table(index.table, use_schema=False), + ", ".join( + self.sql_compiler.process( + expr, include_table=False, literal_binds=True + ) + for expr in index.expressions + ), + ) + + whereclause = index.dialect_options["sqlite"]["where"] + if whereclause is not None: + where_compiled = self.sql_compiler.process( + whereclause, include_table=False, literal_binds=True + ) + text += " WHERE " + where_compiled + + return text + + def post_create_table(self, table): + if table.dialect_options["sqlite"]["with_rowid"] is False: + return "\n WITHOUT ROWID" + return "" + + +class SQLiteTypeCompiler(compiler.GenericTypeCompiler): + def visit_large_binary(self, type_, **kw): + return self.visit_BLOB(type_) + + def visit_DATETIME(self, type_, **kw): + if ( + not isinstance(type_, _DateTimeMixin) + or type_.format_is_text_affinity + 
): + return super().visit_DATETIME(type_) + else: + return "DATETIME_CHAR" + + def visit_DATE(self, type_, **kw): + if ( + not isinstance(type_, _DateTimeMixin) + or type_.format_is_text_affinity + ): + return super().visit_DATE(type_) + else: + return "DATE_CHAR" + + def visit_TIME(self, type_, **kw): + if ( + not isinstance(type_, _DateTimeMixin) + or type_.format_is_text_affinity + ): + return super().visit_TIME(type_) + else: + return "TIME_CHAR" + + def visit_JSON(self, type_, **kw): + # note this name provides NUMERIC affinity, not TEXT. + # should not be an issue unless the JSON value consists of a single + # numeric value. JSONTEXT can be used if this case is required. + return "JSON" + + +class SQLiteIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words = { + "add", + "after", + "all", + "alter", + "analyze", + "and", + "as", + "asc", + "attach", + "autoincrement", + "before", + "begin", + "between", + "by", + "cascade", + "case", + "cast", + "check", + "collate", + "column", + "commit", + "conflict", + "constraint", + "create", + "cross", + "current_date", + "current_time", + "current_timestamp", + "database", + "default", + "deferrable", + "deferred", + "delete", + "desc", + "detach", + "distinct", + "drop", + "each", + "else", + "end", + "escape", + "except", + "exclusive", + "exists", + "explain", + "false", + "fail", + "for", + "foreign", + "from", + "full", + "glob", + "group", + "having", + "if", + "ignore", + "immediate", + "in", + "index", + "indexed", + "initially", + "inner", + "insert", + "instead", + "intersect", + "into", + "is", + "isnull", + "join", + "key", + "left", + "like", + "limit", + "match", + "natural", + "not", + "notnull", + "null", + "of", + "offset", + "on", + "or", + "order", + "outer", + "plan", + "pragma", + "primary", + "query", + "raise", + "references", + "reindex", + "rename", + "replace", + "restrict", + "right", + "rollback", + "row", + "select", + "set", + "table", + "temp", + "temporary", + "then", + "to", + "transaction", + "trigger", + "true", + "union", + "unique", + "update", + "using", + "vacuum", + "values", + "view", + "virtual", + "when", + "where", + } + + +class SQLiteExecutionContext(default.DefaultExecutionContext): + @util.memoized_property + def _preserve_raw_colnames(self): + return ( + not self.dialect._broken_dotted_colnames + or self.execution_options.get("sqlite_raw_colnames", False) + ) + + def _translate_colname(self, colname): + # TODO: detect SQLite version 3.10.0 or greater; + # see [ticket:3633] + + # adjust for dotted column names. SQLite + # in the case of UNION may store col names as + # "tablename.colname", or if using an attached database, + # "database.tablename.colname", in cursor.description + if not self._preserve_raw_colnames and "." 
in colname:
+            return colname.split(".")[-1], colname
+        else:
+            return colname, None
+
+
+class SQLiteDialect(default.DefaultDialect):
+    name = "sqlite"
+    supports_alter = False
+
+    # SQLite supports "DEFAULT VALUES", but not the "DEFAULT" keyword
+    # within a VALUES clause; supports_default_metavalue below is paired
+    # with default_metavalue_token = "NULL" so that NULL is rendered in
+    # its place
+    supports_default_values = True
+
+    # sqlite issue:
+    # https://github.com/python/cpython/issues/93421
+    # note this parameter is no longer used by the ORM or default dialect
+    # see #9414
+    supports_sane_rowcount_returning = False
+
+    supports_empty_insert = False
+    supports_cast = True
+    supports_multivalues_insert = True
+    use_insertmanyvalues = True
+    tuple_in_values = True
+    supports_statement_cache = True
+    insert_null_pk_still_autoincrements = True
+    insert_returning = True
+    update_returning = True
+    update_returning_multifrom = True
+    delete_returning = True
+
+    supports_default_metavalue = True
+    """dialect supports INSERT... VALUES (DEFAULT) syntax"""
+
+    default_metavalue_token = "NULL"
+    """for INSERT... VALUES (DEFAULT) syntax, the token to put in the
+    parenthesis."""
+
+    default_paramstyle = "qmark"
+    execution_ctx_cls = SQLiteExecutionContext
+    statement_compiler = SQLiteCompiler
+    ddl_compiler = SQLiteDDLCompiler
+    type_compiler_cls = SQLiteTypeCompiler
+    preparer = SQLiteIdentifierPreparer
+    ischema_names = ischema_names
+    colspecs = colspecs
+
+    construct_arguments = [
+        (
+            sa_schema.Table,
+            {
+                "autoincrement": False,
+                "with_rowid": True,
+            },
+        ),
+        (sa_schema.Index, {"where": None}),
+        (
+            sa_schema.Column,
+            {
+                "on_conflict_primary_key": None,
+                "on_conflict_not_null": None,
+                "on_conflict_unique": None,
+            },
+        ),
+        (sa_schema.Constraint, {"on_conflict": None}),
+    ]
+
+    _broken_fk_pragma_quotes = False
+    _broken_dotted_colnames = False
+
+    @util.deprecated_params(
+        _json_serializer=(
+            "1.3.7",
+            "The _json_serializer argument to the SQLite dialect has "
+            "been renamed to the correct name of json_serializer.  The old "
+            "argument name will be removed in a future release.",
+        ),
+        _json_deserializer=(
+            "1.3.7",
+            "The _json_deserializer argument to the SQLite dialect has "
+            "been renamed to the correct name of json_deserializer.  The old "
+            "argument name will be removed in a future release.",
+        ),
+    )
+    def __init__(
+        self,
+        native_datetime=False,
+        json_serializer=None,
+        json_deserializer=None,
+        _json_serializer=None,
+        _json_deserializer=None,
+        **kwargs,
+    ):
+        default.DefaultDialect.__init__(self, **kwargs)
+
+        if _json_serializer:
+            json_serializer = _json_serializer
+        if _json_deserializer:
+            json_deserializer = _json_deserializer
+        self._json_serializer = json_serializer
+        self._json_deserializer = json_deserializer
+
+        # this flag is used by the pysqlite dialect, and perhaps others in
+        # the future, to indicate the driver is handling date/timestamp
+        # conversions (and perhaps datetime/time as well on some hypothetical
+        # driver ?)
+        self.native_datetime = native_datetime
+
+        if self.dbapi is not None:
+            if self.dbapi.sqlite_version_info < (3, 7, 16):
+                util.warn(
+                    "SQLite version %s is older than 3.7.16, and will not "
+                    "support right nested joins, as are sometimes used in "
+                    "more complex ORM scenarios.  SQLAlchemy 1.4 and above "
+                    "no longer tries to rewrite these joins."
+                    % (self.dbapi.sqlite_version_info,)
+                )
+
+            # NOTE: python 3.7 on fedora for me has SQLite 3.34.1.  These
+            # version checks are getting very stale.
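+            # each assignment below toggles a capability flag or a
+            # workaround based on the runtime libsqlite version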
+ self._broken_dotted_colnames = self.dbapi.sqlite_version_info < ( + 3, + 10, + 0, + ) + self.supports_default_values = self.dbapi.sqlite_version_info >= ( + 3, + 3, + 8, + ) + self.supports_cast = self.dbapi.sqlite_version_info >= (3, 2, 3) + self.supports_multivalues_insert = ( + # https://www.sqlite.org/releaselog/3_7_11.html + self.dbapi.sqlite_version_info + >= (3, 7, 11) + ) + # see https://www.sqlalchemy.org/trac/ticket/2568 + # as well as https://www.sqlite.org/src/info/600482d161 + self._broken_fk_pragma_quotes = self.dbapi.sqlite_version_info < ( + 3, + 6, + 14, + ) + + if self.dbapi.sqlite_version_info < (3, 35) or util.pypy: + self.update_returning = self.delete_returning = ( + self.insert_returning + ) = False + + if self.dbapi.sqlite_version_info < (3, 32, 0): + # https://www.sqlite.org/limits.html + self.insertmanyvalues_max_parameters = 999 + + _isolation_lookup = util.immutabledict( + {"READ UNCOMMITTED": 1, "SERIALIZABLE": 0} + ) + + def get_isolation_level_values(self, dbapi_connection): + return list(self._isolation_lookup) + + def set_isolation_level(self, dbapi_connection, level): + isolation_level = self._isolation_lookup[level] + + cursor = dbapi_connection.cursor() + cursor.execute(f"PRAGMA read_uncommitted = {isolation_level}") + cursor.close() + + def get_isolation_level(self, dbapi_connection): + cursor = dbapi_connection.cursor() + cursor.execute("PRAGMA read_uncommitted") + res = cursor.fetchone() + if res: + value = res[0] + else: + # https://www.sqlite.org/changes.html#version_3_3_3 + # "Optional READ UNCOMMITTED isolation (instead of the + # default isolation level of SERIALIZABLE) and + # table level locking when database connections + # share a common cache."" + # pre-SQLite 3.3.0 default to 0 + value = 0 + cursor.close() + if value == 0: + return "SERIALIZABLE" + elif value == 1: + return "READ UNCOMMITTED" + else: + assert False, "Unknown isolation level %s" % value + + @reflection.cache + def get_schema_names(self, connection, **kw): + s = "PRAGMA database_list" + dl = connection.exec_driver_sql(s) + + return [db[1] for db in dl if db[1] != "temp"] + + def _format_schema(self, schema, table_name): + if schema is not None: + qschema = self.identifier_preparer.quote_identifier(schema) + name = f"{qschema}.{table_name}" + else: + name = table_name + return name + + def _sqlite_main_query( + self, + table: str, + type_: str, + schema: Optional[str], + sqlite_include_internal: bool, + ): + main = self._format_schema(schema, table) + if not sqlite_include_internal: + filter_table = " AND name NOT LIKE 'sqlite~_%' ESCAPE '~'" + else: + filter_table = "" + query = ( + f"SELECT name FROM {main} " + f"WHERE type='{type_}'{filter_table} " + "ORDER BY name" + ) + return query + + @reflection.cache + def get_table_names( + self, connection, schema=None, sqlite_include_internal=False, **kw + ): + query = self._sqlite_main_query( + "sqlite_master", "table", schema, sqlite_include_internal + ) + names = connection.exec_driver_sql(query).scalars().all() + return names + + @reflection.cache + def get_temp_table_names( + self, connection, sqlite_include_internal=False, **kw + ): + query = self._sqlite_main_query( + "sqlite_temp_master", "table", None, sqlite_include_internal + ) + names = connection.exec_driver_sql(query).scalars().all() + return names + + @reflection.cache + def get_temp_view_names( + self, connection, sqlite_include_internal=False, **kw + ): + query = self._sqlite_main_query( + "sqlite_temp_master", "view", None, sqlite_include_internal + ) + names = 
connection.exec_driver_sql(query).scalars().all() + return names + + @reflection.cache + def has_table(self, connection, table_name, schema=None, **kw): + self._ensure_has_table_connection(connection) + + if schema is not None and schema not in self.get_schema_names( + connection, **kw + ): + return False + + info = self._get_table_pragma( + connection, "table_info", table_name, schema=schema + ) + return bool(info) + + def _get_default_schema_name(self, connection): + return "main" + + @reflection.cache + def get_view_names( + self, connection, schema=None, sqlite_include_internal=False, **kw + ): + query = self._sqlite_main_query( + "sqlite_master", "view", schema, sqlite_include_internal + ) + names = connection.exec_driver_sql(query).scalars().all() + return names + + @reflection.cache + def get_view_definition(self, connection, view_name, schema=None, **kw): + if schema is not None: + qschema = self.identifier_preparer.quote_identifier(schema) + master = f"{qschema}.sqlite_master" + s = ("SELECT sql FROM %s WHERE name = ? AND type='view'") % ( + master, + ) + rs = connection.exec_driver_sql(s, (view_name,)) + else: + try: + s = ( + "SELECT sql FROM " + " (SELECT * FROM sqlite_master UNION ALL " + " SELECT * FROM sqlite_temp_master) " + "WHERE name = ? " + "AND type='view'" + ) + rs = connection.exec_driver_sql(s, (view_name,)) + except exc.DBAPIError: + s = ( + "SELECT sql FROM sqlite_master WHERE name = ? " + "AND type='view'" + ) + rs = connection.exec_driver_sql(s, (view_name,)) + + result = rs.fetchall() + if result: + return result[0].sql + else: + raise exc.NoSuchTableError( + f"{schema}.{view_name}" if schema else view_name + ) + + @reflection.cache + def get_columns(self, connection, table_name, schema=None, **kw): + pragma = "table_info" + # computed columns are threaded as hidden, they require table_xinfo + if self.server_version_info >= (3, 31): + pragma = "table_xinfo" + info = self._get_table_pragma( + connection, pragma, table_name, schema=schema + ) + columns = [] + tablesql = None + for row in info: + name = row[1] + type_ = row[2].upper() + nullable = not row[3] + default = row[4] + primary_key = row[5] + hidden = row[6] if pragma == "table_xinfo" else 0 + + # hidden has value 0 for normal columns, 1 for hidden columns, + # 2 for computed virtual columns and 3 for computed stored columns + # https://www.sqlite.org/src/info/069351b85f9a706f60d3e98fbc8aaf40c374356b967c0464aede30ead3d9d18b + if hidden == 1: + continue + + generated = bool(hidden) + persisted = hidden == 3 + + if tablesql is None and generated: + tablesql = self._get_table_sql( + connection, table_name, schema, **kw + ) + # remove create table + match = re.match( + r"create table .*?\((.*)\)$", + tablesql.strip(), + re.DOTALL | re.IGNORECASE, + ) + assert match, f"create table not found in {tablesql}" + tablesql = match.group(1).strip() + + columns.append( + self._get_column_info( + name, + type_, + nullable, + default, + primary_key, + generated, + persisted, + tablesql, + ) + ) + if columns: + return columns + elif not self.has_table(connection, table_name, schema): + raise exc.NoSuchTableError( + f"{schema}.{table_name}" if schema else table_name + ) + else: + return ReflectionDefaults.columns() + + def _get_column_info( + self, + name, + type_, + nullable, + default, + primary_key, + generated, + persisted, + tablesql, + ): + if generated: + # the type of a column "cc INTEGER GENERATED ALWAYS AS (1 + 42)" + # somehow is "INTEGER GENERATED ALWAYS" + type_ = re.sub("generated", "", type_, 
flags=re.IGNORECASE) + type_ = re.sub("always", "", type_, flags=re.IGNORECASE).strip() + + coltype = self._resolve_type_affinity(type_) + + if default is not None: + default = str(default) + + colspec = { + "name": name, + "type": coltype, + "nullable": nullable, + "default": default, + "primary_key": primary_key, + } + if generated: + sqltext = "" + if tablesql: + pattern = ( + r"[^,]*\s+GENERATED\s+ALWAYS\s+AS" + r"\s+\((.*)\)\s*(?:virtual|stored)?" + ) + match = re.search( + re.escape(name) + pattern, tablesql, re.IGNORECASE + ) + if match: + sqltext = match.group(1) + colspec["computed"] = {"sqltext": sqltext, "persisted": persisted} + return colspec + + def _resolve_type_affinity(self, type_): + """Return a data type from a reflected column, using affinity rules. + + SQLite's goal for universal compatibility introduces some complexity + during reflection, as a column's defined type might not actually be a + type that SQLite understands - or indeed, my not be defined *at all*. + Internally, SQLite handles this with a 'data type affinity' for each + column definition, mapping to one of 'TEXT', 'NUMERIC', 'INTEGER', + 'REAL', or 'NONE' (raw bits). The algorithm that determines this is + listed in https://www.sqlite.org/datatype3.html section 2.1. + + This method allows SQLAlchemy to support that algorithm, while still + providing access to smarter reflection utilities by recognizing + column definitions that SQLite only supports through affinity (like + DATE and DOUBLE). + + """ + match = re.match(r"([\w ]+)(\(.*?\))?", type_) + if match: + coltype = match.group(1) + args = match.group(2) + else: + coltype = "" + args = "" + + if coltype in self.ischema_names: + coltype = self.ischema_names[coltype] + elif "INT" in coltype: + coltype = sqltypes.INTEGER + elif "CHAR" in coltype or "CLOB" in coltype or "TEXT" in coltype: + coltype = sqltypes.TEXT + elif "BLOB" in coltype or not coltype: + coltype = sqltypes.NullType + elif "REAL" in coltype or "FLOA" in coltype or "DOUB" in coltype: + coltype = sqltypes.REAL + else: + coltype = sqltypes.NUMERIC + + if args is not None: + args = re.findall(r"(\d+)", args) + try: + coltype = coltype(*[int(a) for a in args]) + except TypeError: + util.warn( + "Could not instantiate type %s with " + "reflected arguments %s; using no arguments." + % (coltype, args) + ) + coltype = coltype() + else: + coltype = coltype() + + return coltype + + @reflection.cache + def get_pk_constraint(self, connection, table_name, schema=None, **kw): + constraint_name = None + table_data = self._get_table_sql(connection, table_name, schema=schema) + if table_data: + PK_PATTERN = r"CONSTRAINT (\w+) PRIMARY KEY" + result = re.search(PK_PATTERN, table_data, re.I) + constraint_name = result.group(1) if result else None + + cols = self.get_columns(connection, table_name, schema, **kw) + # consider only pk columns. This also avoids sorting the cached + # value returned by get_columns + cols = [col for col in cols if col.get("primary_key", 0) > 0] + cols.sort(key=lambda col: col.get("primary_key")) + pkeys = [col["name"] for col in cols] + + if pkeys: + return {"constrained_columns": pkeys, "name": constraint_name} + else: + return ReflectionDefaults.pk_constraint() + + @reflection.cache + def get_foreign_keys(self, connection, table_name, schema=None, **kw): + # sqlite makes this *extremely difficult*. + # First, use the pragma to get the actual FKs. 
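+        # Second, further below, the CREATE TABLE DDL is regex-parsed to
+        # recover constraint names and options that the pragma does not
+        # report.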
+ pragma_fks = self._get_table_pragma( + connection, "foreign_key_list", table_name, schema=schema + ) + + fks = {} + + for row in pragma_fks: + (numerical_id, rtbl, lcol, rcol) = (row[0], row[2], row[3], row[4]) + + if not rcol: + # no referred column, which means it was not named in the + # original DDL. The referred columns of the foreign key + # constraint are therefore the primary key of the referred + # table. + try: + referred_pk = self.get_pk_constraint( + connection, rtbl, schema=schema, **kw + ) + referred_columns = referred_pk["constrained_columns"] + except exc.NoSuchTableError: + # ignore not existing parents + referred_columns = [] + else: + # note we use this list only if this is the first column + # in the constraint. for subsequent columns we ignore the + # list and append "rcol" if present. + referred_columns = [] + + if self._broken_fk_pragma_quotes: + rtbl = re.sub(r"^[\"\[`\']|[\"\]`\']$", "", rtbl) + + if numerical_id in fks: + fk = fks[numerical_id] + else: + fk = fks[numerical_id] = { + "name": None, + "constrained_columns": [], + "referred_schema": schema, + "referred_table": rtbl, + "referred_columns": referred_columns, + "options": {}, + } + fks[numerical_id] = fk + + fk["constrained_columns"].append(lcol) + + if rcol: + fk["referred_columns"].append(rcol) + + def fk_sig(constrained_columns, referred_table, referred_columns): + return ( + tuple(constrained_columns) + + (referred_table,) + + tuple(referred_columns) + ) + + # then, parse the actual SQL and attempt to find DDL that matches + # the names as well. SQLite saves the DDL in whatever format + # it was typed in as, so need to be liberal here. + + keys_by_signature = { + fk_sig( + fk["constrained_columns"], + fk["referred_table"], + fk["referred_columns"], + ): fk + for fk in fks.values() + } + + table_data = self._get_table_sql(connection, table_name, schema=schema) + + def parse_fks(): + if table_data is None: + # system tables, etc. + return + + # note that we already have the FKs from PRAGMA above. This whole + # regexp thing is trying to locate additional detail about the + # FKs, namely the name of the constraint and other options. + # so parsing the columns is really about matching it up to what + # we already have. + FK_PATTERN = ( + r"(?:CONSTRAINT (\w+) +)?" + r"FOREIGN KEY *\( *(.+?) *\) +" + r'REFERENCES +(?:(?:"(.+?)")|([a-z0-9_]+)) *\( *((?:(?:"[^"]+"|[a-z0-9_]+) *(?:, *)?)+)\) *' # noqa: E501 + r"((?:ON (?:DELETE|UPDATE) " + r"(?:SET NULL|SET DEFAULT|CASCADE|RESTRICT|NO ACTION) *)*)" + r"((?:NOT +)?DEFERRABLE)?" + r"(?: +INITIALLY +(DEFERRED|IMMEDIATE))?" 
+ ) + for match in re.finditer(FK_PATTERN, table_data, re.I): + ( + constraint_name, + constrained_columns, + referred_quoted_name, + referred_name, + referred_columns, + onupdatedelete, + deferrable, + initially, + ) = match.group(1, 2, 3, 4, 5, 6, 7, 8) + constrained_columns = list( + self._find_cols_in_sig(constrained_columns) + ) + if not referred_columns: + referred_columns = constrained_columns + else: + referred_columns = list( + self._find_cols_in_sig(referred_columns) + ) + referred_name = referred_quoted_name or referred_name + options = {} + + for token in re.split(r" *\bON\b *", onupdatedelete.upper()): + if token.startswith("DELETE"): + ondelete = token[6:].strip() + if ondelete and ondelete != "NO ACTION": + options["ondelete"] = ondelete + elif token.startswith("UPDATE"): + onupdate = token[6:].strip() + if onupdate and onupdate != "NO ACTION": + options["onupdate"] = onupdate + + if deferrable: + options["deferrable"] = "NOT" not in deferrable.upper() + if initially: + options["initially"] = initially.upper() + + yield ( + constraint_name, + constrained_columns, + referred_name, + referred_columns, + options, + ) + + fkeys = [] + + for ( + constraint_name, + constrained_columns, + referred_name, + referred_columns, + options, + ) in parse_fks(): + sig = fk_sig(constrained_columns, referred_name, referred_columns) + if sig not in keys_by_signature: + util.warn( + "WARNING: SQL-parsed foreign key constraint " + "'%s' could not be located in PRAGMA " + "foreign_keys for table %s" % (sig, table_name) + ) + continue + key = keys_by_signature.pop(sig) + key["name"] = constraint_name + key["options"] = options + fkeys.append(key) + # assume the remainders are the unnamed, inline constraints, just + # use them as is as it's extremely difficult to parse inline + # constraints + fkeys.extend(keys_by_signature.values()) + if fkeys: + return fkeys + else: + return ReflectionDefaults.foreign_keys() + + def _find_cols_in_sig(self, sig): + for match in re.finditer(r'(?:"(.+?)")|([a-z0-9_]+)', sig, re.I): + yield match.group(1) or match.group(2) + + @reflection.cache + def get_unique_constraints( + self, connection, table_name, schema=None, **kw + ): + auto_index_by_sig = {} + for idx in self.get_indexes( + connection, + table_name, + schema=schema, + include_auto_indexes=True, + **kw, + ): + if not idx["name"].startswith("sqlite_autoindex"): + continue + sig = tuple(idx["column_names"]) + auto_index_by_sig[sig] = idx + + table_data = self._get_table_sql( + connection, table_name, schema=schema, **kw + ) + unique_constraints = [] + + def parse_uqs(): + if table_data is None: + return + UNIQUE_PATTERN = r'(?:CONSTRAINT "?(.+?)"? 
+)?UNIQUE *\((.+?)\)' + INLINE_UNIQUE_PATTERN = ( + r'(?:(".+?")|(?:[\[`])?([a-z0-9_]+)(?:[\]`])?)[\t ]' + r"+[a-z0-9_ ]+?[\t ]+UNIQUE" + ) + + for match in re.finditer(UNIQUE_PATTERN, table_data, re.I): + name, cols = match.group(1, 2) + yield name, list(self._find_cols_in_sig(cols)) + + # we need to match inlines as well, as we seek to differentiate + # a UNIQUE constraint from a UNIQUE INDEX, even though these + # are kind of the same thing :) + for match in re.finditer(INLINE_UNIQUE_PATTERN, table_data, re.I): + cols = list( + self._find_cols_in_sig(match.group(1) or match.group(2)) + ) + yield None, cols + + for name, cols in parse_uqs(): + sig = tuple(cols) + if sig in auto_index_by_sig: + auto_index_by_sig.pop(sig) + parsed_constraint = {"name": name, "column_names": cols} + unique_constraints.append(parsed_constraint) + # NOTE: auto_index_by_sig might not be empty here, + # the PRIMARY KEY may have an entry. + if unique_constraints: + return unique_constraints + else: + return ReflectionDefaults.unique_constraints() + + @reflection.cache + def get_check_constraints(self, connection, table_name, schema=None, **kw): + table_data = self._get_table_sql( + connection, table_name, schema=schema, **kw + ) + + # NOTE NOTE NOTE + # DO NOT CHANGE THIS REGULAR EXPRESSION. There is no known way + # to parse CHECK constraints that contain newlines themselves using + # regular expressions, and the approach here relies upon each + # individual + # CHECK constraint being on a single line by itself. This + # necessarily makes assumptions as to how the CREATE TABLE + # was emitted. A more comprehensive DDL parsing solution would be + # needed to improve upon the current situation. See #11840 for + # background + CHECK_PATTERN = r"(?:CONSTRAINT (.+) +)?CHECK *\( *(.+) *\),? *" + cks = [] + + for match in re.finditer(CHECK_PATTERN, table_data or "", re.I): + + name = match.group(1) + + if name: + name = re.sub(r'^"|"$', "", name) + + cks.append({"sqltext": match.group(2), "name": name}) + cks.sort(key=lambda d: d["name"] or "~") # sort None as last + if cks: + return cks + else: + return ReflectionDefaults.check_constraints() + + @reflection.cache + def get_indexes(self, connection, table_name, schema=None, **kw): + pragma_indexes = self._get_table_pragma( + connection, "index_list", table_name, schema=schema + ) + indexes = [] + + # regular expression to extract the filter predicate of a partial + # index. this could fail to extract the predicate correctly on + # indexes created like + # CREATE INDEX i ON t (col || ') where') WHERE col <> '' + # but as this function does not support expression-based indexes + # this case does not occur. + partial_pred_re = re.compile(r"\)\s+where\s+(.+)", re.IGNORECASE) + + if schema: + schema_expr = "%s." % self.identifier_preparer.quote_identifier( + schema + ) + else: + schema_expr = "" + + include_auto_indexes = kw.pop("include_auto_indexes", False) + for row in pragma_indexes: + # ignore implicit primary key index. + # https://www.mail-archive.com/sqlite-users@sqlite.org/msg30517.html + if not include_auto_indexes and row[1].startswith( + "sqlite_autoindex" + ): + continue + indexes.append( + dict( + name=row[1], + column_names=[], + unique=row[2], + dialect_options={}, + ) + ) + + # check partial indexes + if len(row) >= 5 and row[4]: + s = ( + "SELECT sql FROM %(schema)ssqlite_master " + "WHERE name = ? 
" + "AND type = 'index'" % {"schema": schema_expr} + ) + rs = connection.exec_driver_sql(s, (row[1],)) + index_sql = rs.scalar() + predicate_match = partial_pred_re.search(index_sql) + if predicate_match is None: + # unless the regex is broken this case shouldn't happen + # because we know this is a partial index, so the + # definition sql should match the regex + util.warn( + "Failed to look up filter predicate of " + "partial index %s" % row[1] + ) + else: + predicate = predicate_match.group(1) + indexes[-1]["dialect_options"]["sqlite_where"] = text( + predicate + ) + + # loop thru unique indexes to get the column names. + for idx in list(indexes): + pragma_index = self._get_table_pragma( + connection, "index_info", idx["name"], schema=schema + ) + + for row in pragma_index: + if row[2] is None: + util.warn( + "Skipped unsupported reflection of " + "expression-based index %s" % idx["name"] + ) + indexes.remove(idx) + break + else: + idx["column_names"].append(row[2]) + + indexes.sort(key=lambda d: d["name"] or "~") # sort None as last + if indexes: + return indexes + elif not self.has_table(connection, table_name, schema): + raise exc.NoSuchTableError( + f"{schema}.{table_name}" if schema else table_name + ) + else: + return ReflectionDefaults.indexes() + + def _is_sys_table(self, table_name): + return table_name in { + "sqlite_schema", + "sqlite_master", + "sqlite_temp_schema", + "sqlite_temp_master", + } + + @reflection.cache + def _get_table_sql(self, connection, table_name, schema=None, **kw): + if schema: + schema_expr = "%s." % ( + self.identifier_preparer.quote_identifier(schema) + ) + else: + schema_expr = "" + try: + s = ( + "SELECT sql FROM " + " (SELECT * FROM %(schema)ssqlite_master UNION ALL " + " SELECT * FROM %(schema)ssqlite_temp_master) " + "WHERE name = ? " + "AND type in ('table', 'view')" % {"schema": schema_expr} + ) + rs = connection.exec_driver_sql(s, (table_name,)) + except exc.DBAPIError: + s = ( + "SELECT sql FROM %(schema)ssqlite_master " + "WHERE name = ? 
" + "AND type in ('table', 'view')" % {"schema": schema_expr} + ) + rs = connection.exec_driver_sql(s, (table_name,)) + value = rs.scalar() + if value is None and not self._is_sys_table(table_name): + raise exc.NoSuchTableError(f"{schema_expr}{table_name}") + return value + + def _get_table_pragma(self, connection, pragma, table_name, schema=None): + quote = self.identifier_preparer.quote_identifier + if schema is not None: + statements = [f"PRAGMA {quote(schema)}."] + else: + # because PRAGMA looks in all attached databases if no schema + # given, need to specify "main" schema, however since we want + # 'temp' tables in the same namespace as 'main', need to run + # the PRAGMA twice + statements = ["PRAGMA main.", "PRAGMA temp."] + + qtable = quote(table_name) + for statement in statements: + statement = f"{statement}{pragma}({qtable})" + cursor = connection.exec_driver_sql(statement) + if not cursor._soft_closed: + # work around SQLite issue whereby cursor.description + # is blank when PRAGMA returns no rows: + # https://www.sqlite.org/cvstrac/tktview?tn=1884 + result = cursor.fetchall() + else: + result = [] + if result: + return result + else: + return [] diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/dml.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/dml.py new file mode 100644 index 00000000..dcf5e448 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/dml.py @@ -0,0 +1,240 @@ +# dialects/sqlite/dml.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from __future__ import annotations + +from typing import Any + +from .._typing import _OnConflictIndexElementsT +from .._typing import _OnConflictIndexWhereT +from .._typing import _OnConflictSetT +from .._typing import _OnConflictWhereT +from ... import util +from ...sql import coercions +from ...sql import roles +from ...sql._typing import _DMLTableArgument +from ...sql.base import _exclusive_against +from ...sql.base import _generative +from ...sql.base import ColumnCollection +from ...sql.base import ReadOnlyColumnCollection +from ...sql.dml import Insert as StandardInsert +from ...sql.elements import ClauseElement +from ...sql.elements import KeyedColumnElement +from ...sql.expression import alias +from ...util.typing import Self + +__all__ = ("Insert", "insert") + + +def insert(table: _DMLTableArgument) -> Insert: + """Construct a sqlite-specific variant :class:`_sqlite.Insert` + construct. + + .. container:: inherited_member + + The :func:`sqlalchemy.dialects.sqlite.insert` function creates + a :class:`sqlalchemy.dialects.sqlite.Insert`. This class is based + on the dialect-agnostic :class:`_sql.Insert` construct which may + be constructed using the :func:`_sql.insert` function in + SQLAlchemy Core. + + The :class:`_sqlite.Insert` construct includes additional methods + :meth:`_sqlite.Insert.on_conflict_do_update`, + :meth:`_sqlite.Insert.on_conflict_do_nothing`. + + """ + return Insert(table) + + +class Insert(StandardInsert): + """SQLite-specific implementation of INSERT. + + Adds methods for SQLite-specific syntaxes such as ON CONFLICT. + + The :class:`_sqlite.Insert` object is created using the + :func:`sqlalchemy.dialects.sqlite.insert` function. + + .. versionadded:: 1.4 + + .. 
seealso:: + + :ref:`sqlite_on_conflict_insert` + + """ + + stringify_dialect = "sqlite" + inherit_cache = False + + @util.memoized_property + def excluded( + self, + ) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]: + """Provide the ``excluded`` namespace for an ON CONFLICT statement + + SQLite's ON CONFLICT clause allows reference to the row that would + be inserted, known as ``excluded``. This attribute provides + all columns in this row to be referenceable. + + .. tip:: The :attr:`_sqlite.Insert.excluded` attribute is an instance + of :class:`_expression.ColumnCollection`, which provides an + interface the same as that of the :attr:`_schema.Table.c` + collection described at :ref:`metadata_tables_and_columns`. + With this collection, ordinary names are accessible like attributes + (e.g. ``stmt.excluded.some_column``), but special names and + dictionary method names should be accessed using indexed access, + such as ``stmt.excluded["column name"]`` or + ``stmt.excluded["values"]``. See the docstring for + :class:`_expression.ColumnCollection` for further examples. + + """ + return alias(self.table, name="excluded").columns + + _on_conflict_exclusive = _exclusive_against( + "_post_values_clause", + msgs={ + "_post_values_clause": "This Insert construct already has " + "an ON CONFLICT clause established" + }, + ) + + @_generative + @_on_conflict_exclusive + def on_conflict_do_update( + self, + index_elements: _OnConflictIndexElementsT = None, + index_where: _OnConflictIndexWhereT = None, + set_: _OnConflictSetT = None, + where: _OnConflictWhereT = None, + ) -> Self: + r""" + Specifies a DO UPDATE SET action for ON CONFLICT clause. + + :param index_elements: + A sequence consisting of string column names, :class:`_schema.Column` + objects, or other column expression objects that will be used + to infer a target index or unique constraint. + + :param index_where: + Additional WHERE criterion that can be used to infer a + conditional target index. + + :param set\_: + A dictionary or other mapping object + where the keys are either names of columns in the target table, + or :class:`_schema.Column` objects or other ORM-mapped columns + matching that of the target table, and expressions or literals + as values, specifying the ``SET`` actions to take. + + .. versionadded:: 1.4 The + :paramref:`_sqlite.Insert.on_conflict_do_update.set_` + parameter supports :class:`_schema.Column` objects from the target + :class:`_schema.Table` as keys. + + .. warning:: This dictionary does **not** take into account + Python-specified default UPDATE values or generation functions, + e.g. those specified using :paramref:`_schema.Column.onupdate`. + These values will not be exercised for an ON CONFLICT style of + UPDATE, unless they are manually specified in the + :paramref:`.Insert.on_conflict_do_update.set_` dictionary. + + :param where: + Optional argument. If present, can be a literal SQL + string or an acceptable expression for a ``WHERE`` clause + that restricts the rows affected by ``DO UPDATE SET``. Rows + not meeting the ``WHERE`` condition will not be updated + (effectively a ``DO NOTHING`` for those rows). + + """ + + self._post_values_clause = OnConflictDoUpdate( + index_elements, index_where, set_, where + ) + return self + + @_generative + @_on_conflict_exclusive + def on_conflict_do_nothing( + self, + index_elements: _OnConflictIndexElementsT = None, + index_where: _OnConflictIndexWhereT = None, + ) -> Self: + """ + Specifies a DO NOTHING action for ON CONFLICT clause. 
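+
+        E.g., a minimal sketch (``my_table`` here stands for any
+        :class:`_schema.Table` with a unique column ``id``)::
+
+            from sqlalchemy.dialects.sqlite import insert
+
+            stmt = insert(my_table).values(id=1, data="x")
+            stmt = stmt.on_conflict_do_nothing(index_elements=["id"])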
+ + :param index_elements: + A sequence consisting of string column names, :class:`_schema.Column` + objects, or other column expression objects that will be used + to infer a target index or unique constraint. + + :param index_where: + Additional WHERE criterion that can be used to infer a + conditional target index. + + """ + + self._post_values_clause = OnConflictDoNothing( + index_elements, index_where + ) + return self + + +class OnConflictClause(ClauseElement): + stringify_dialect = "sqlite" + + constraint_target: None + inferred_target_elements: _OnConflictIndexElementsT + inferred_target_whereclause: _OnConflictIndexWhereT + + def __init__( + self, + index_elements: _OnConflictIndexElementsT = None, + index_where: _OnConflictIndexWhereT = None, + ): + if index_elements is not None: + self.constraint_target = None + self.inferred_target_elements = index_elements + self.inferred_target_whereclause = index_where + else: + self.constraint_target = self.inferred_target_elements = ( + self.inferred_target_whereclause + ) = None + + +class OnConflictDoNothing(OnConflictClause): + __visit_name__ = "on_conflict_do_nothing" + + +class OnConflictDoUpdate(OnConflictClause): + __visit_name__ = "on_conflict_do_update" + + def __init__( + self, + index_elements: _OnConflictIndexElementsT = None, + index_where: _OnConflictIndexWhereT = None, + set_: _OnConflictSetT = None, + where: _OnConflictWhereT = None, + ): + super().__init__( + index_elements=index_elements, + index_where=index_where, + ) + + if isinstance(set_, dict): + if not set_: + raise ValueError("set parameter dictionary must not be empty") + elif isinstance(set_, ColumnCollection): + set_ = dict(set_) + else: + raise ValueError( + "set parameter must be a non-empty dictionary " + "or a ColumnCollection such as the `.c.` collection " + "of a Table object" + ) + self.update_values_to_set = [ + (coercions.expect(roles.DMLColumnRole, key), value) + for key, value in set_.items() + ] + self.update_whereclause = where diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/json.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/json.py new file mode 100644 index 00000000..ec298029 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/json.py @@ -0,0 +1,92 @@ +# dialects/sqlite/json.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +from ... import types as sqltypes + + +class JSON(sqltypes.JSON): + """SQLite JSON type. + + SQLite supports JSON as of version 3.9 through its JSON1_ extension. Note + that JSON1_ is a + `loadable extension `_ and as such + may not be available, or may require run-time loading. + + :class:`_sqlite.JSON` is used automatically whenever the base + :class:`_types.JSON` datatype is used against a SQLite backend. + + .. seealso:: + + :class:`_types.JSON` - main documentation for the generic + cross-platform JSON datatype. + + The :class:`_sqlite.JSON` type supports persistence of JSON values + as well as the core index operations provided by :class:`_types.JSON` + datatype, by adapting the operations to render the ``JSON_EXTRACT`` + function wrapped in the ``JSON_QUOTE`` function at the database level. + Extracted values are quoted in order to ensure that the results are + always JSON string values. + + + .. versionadded:: 1.3 + + + .. 
_JSON1: https://www.sqlite.org/json1.html + + """ + + +# Note: these objects currently match exactly those of MySQL, however since +# these are not generalizable to all JSON implementations, remain separately +# implemented for each dialect. +class _FormatTypeMixin: + def _format_value(self, value): + raise NotImplementedError() + + def bind_processor(self, dialect): + super_proc = self.string_bind_processor(dialect) + + def process(value): + value = self._format_value(value) + if super_proc: + value = super_proc(value) + return value + + return process + + def literal_processor(self, dialect): + super_proc = self.string_literal_processor(dialect) + + def process(value): + value = self._format_value(value) + if super_proc: + value = super_proc(value) + return value + + return process + + +class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType): + def _format_value(self, value): + if isinstance(value, int): + value = "$[%s]" % value + else: + value = '$."%s"' % value + return value + + +class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType): + def _format_value(self, value): + return "$%s" % ( + "".join( + [ + "[%s]" % elem if isinstance(elem, int) else '."%s"' % elem + for elem in value + ] + ) + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/provision.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/provision.py new file mode 100644 index 00000000..f18568b0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/provision.py @@ -0,0 +1,198 @@ +# dialects/sqlite/provision.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +import os +import re + +from ... 
import exc +from ...engine import url as sa_url +from ...testing.provision import create_db +from ...testing.provision import drop_db +from ...testing.provision import follower_url_from_main +from ...testing.provision import generate_driver_url +from ...testing.provision import log +from ...testing.provision import post_configure_engine +from ...testing.provision import run_reap_dbs +from ...testing.provision import stop_test_class_outside_fixtures +from ...testing.provision import temp_table_keyword_args +from ...testing.provision import upsert + + +# TODO: I can't get this to build dynamically with pytest-xdist procs +_drivernames = { + "pysqlite", + "aiosqlite", + "pysqlcipher", + "pysqlite_numeric", + "pysqlite_dollar", +} + + +def _format_url(url, driver, ident): + """given a sqlite url + desired driver + ident, make a canonical + URL out of it + + """ + url = sa_url.make_url(url) + + if driver is None: + driver = url.get_driver_name() + + filename = url.database + + needs_enc = driver == "pysqlcipher" + name_token = None + + if filename and filename != ":memory:": + assert "test_schema" not in filename + tokens = re.split(r"[_\.]", filename) + + new_filename = f"{driver}" + + for token in tokens: + if token in _drivernames: + if driver is None: + driver = token + continue + elif token in ("db", "enc"): + continue + elif name_token is None: + name_token = token.strip("_") + + assert name_token, f"sqlite filename has no name token: {url.database}" + + new_filename = f"{name_token}_{driver}" + if ident: + new_filename += f"_{ident}" + new_filename += ".db" + if needs_enc: + new_filename += ".enc" + url = url.set(database=new_filename) + + if needs_enc: + url = url.set(password="test") + + url = url.set(drivername="sqlite+%s" % (driver,)) + + return url + + +@generate_driver_url.for_db("sqlite") +def generate_driver_url(url, driver, query_str): + url = _format_url(url, driver, None) + + try: + url.get_dialect() + except exc.NoSuchModuleError: + return None + else: + return url + + +@follower_url_from_main.for_db("sqlite") +def _sqlite_follower_url_from_main(url, ident): + return _format_url(url, None, ident) + + +@post_configure_engine.for_db("sqlite") +def _sqlite_post_configure_engine(url, engine, follower_ident): + from sqlalchemy import event + + if follower_ident: + attach_path = f"{follower_ident}_{engine.driver}_test_schema.db" + else: + attach_path = f"{engine.driver}_test_schema.db" + + @event.listens_for(engine, "connect") + def connect(dbapi_connection, connection_record): + # use file DBs in all cases, memory acts kind of strangely + # as an attached + + # NOTE! this has to be done *per connection*. New sqlite connection, + # as we get with say, QueuePool, the attaches are gone. 
+ # so schemes to delete those attached files have to be done at the + # filesystem level and not rely upon what attachments are in a + # particular SQLite connection + dbapi_connection.execute( + f'ATTACH DATABASE "{attach_path}" AS test_schema' + ) + + @event.listens_for(engine, "engine_disposed") + def dispose(engine): + """most databases should be dropped using + stop_test_class_outside_fixtures + + however a few tests like AttachedDBTest might not get triggered on + that main hook + + """ + + if os.path.exists(attach_path): + os.remove(attach_path) + + filename = engine.url.database + + if filename and filename != ":memory:" and os.path.exists(filename): + os.remove(filename) + + +@create_db.for_db("sqlite") +def _sqlite_create_db(cfg, eng, ident): + pass + + +@drop_db.for_db("sqlite") +def _sqlite_drop_db(cfg, eng, ident): + _drop_dbs_w_ident(eng.url.database, eng.driver, ident) + + +def _drop_dbs_w_ident(databasename, driver, ident): + for path in os.listdir("."): + fname, ext = os.path.split(path) + if ident in fname and ext in [".db", ".db.enc"]: + log.info("deleting SQLite database file: %s", path) + os.remove(path) + + +@stop_test_class_outside_fixtures.for_db("sqlite") +def stop_test_class_outside_fixtures(config, db, cls): + db.dispose() + + +@temp_table_keyword_args.for_db("sqlite") +def _sqlite_temp_table_keyword_args(cfg, eng): + return {"prefixes": ["TEMPORARY"]} + + +@run_reap_dbs.for_db("sqlite") +def _reap_sqlite_dbs(url, idents): + log.info("db reaper connecting to %r", url) + log.info("identifiers in file: %s", ", ".join(idents)) + url = sa_url.make_url(url) + for ident in idents: + for drivername in _drivernames: + _drop_dbs_w_ident(url.database, drivername, ident) + + +@upsert.for_db("sqlite") +def _upsert( + cfg, table, returning, *, set_lambda=None, sort_by_parameter_order=False +): + from sqlalchemy.dialects.sqlite import insert + + stmt = insert(table) + + if set_lambda: + stmt = stmt.on_conflict_do_update(set_=set_lambda(stmt.excluded)) + else: + stmt = stmt.on_conflict_do_nothing() + + stmt = stmt.returning( + *returning, sort_by_parameter_order=sort_by_parameter_order + ) + return stmt diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/pysqlcipher.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/pysqlcipher.py new file mode 100644 index 00000000..388a4dff --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/pysqlcipher.py @@ -0,0 +1,155 @@ +# dialects/sqlite/pysqlcipher.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +""" +.. dialect:: sqlite+pysqlcipher + :name: pysqlcipher + :dbapi: sqlcipher 3 or pysqlcipher + :connectstring: sqlite+pysqlcipher://:passphrase@/file_path[?kdf_iter=] + + Dialect for support of DBAPIs that make use of the + `SQLCipher `_ backend. + + +Driver +------ + +Current dialect selection logic is: + +* If the :paramref:`_sa.create_engine.module` parameter supplies a DBAPI module, + that module is used. +* Otherwise for Python 3, choose https://pypi.org/project/sqlcipher3/ +* If not available, fall back to https://pypi.org/project/pysqlcipher3/ +* For Python 2, https://pypi.org/project/pysqlcipher/ is used. + +.. warning:: The ``pysqlcipher3`` and ``pysqlcipher`` DBAPI drivers are no + longer maintained; the ``sqlcipher3`` driver as of this writing appears + to be current. 
For future compatibility, any pysqlcipher-compatible DBAPI
+   may be used as follows::
+
+        import sqlcipher_compatible_driver
+
+        from sqlalchemy import create_engine
+
+        e = create_engine(
+            "sqlite+pysqlcipher://:password@/dbname.db",
+            module=sqlcipher_compatible_driver
+        )
+
+These drivers make use of the SQLCipher engine. This system essentially
+introduces new PRAGMA commands to SQLite which allow the setting of a
+passphrase and other encryption parameters, allowing the database file to be
+encrypted.
+
+
+Connect Strings
+---------------
+
+The format of the connect string is in every way the same as that
+of the :mod:`~sqlalchemy.dialects.sqlite.pysqlite` driver, except that the
+"password" field is now accepted, which should contain a passphrase::
+
+    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db')
+
+For an absolute file path, two leading slashes should be used for the
+database name::
+
+    e = create_engine('sqlite+pysqlcipher://:testing@//path/to/foo.db')
+
+A selection of additional encryption-related pragmas supported by SQLCipher
+as documented at https://www.zetetic.net/sqlcipher/sqlcipher-api/ can be passed
+in the query string, and will result in that PRAGMA being called for each
+new connection. Currently, ``cipher``, ``kdf_iter``,
+``cipher_page_size`` and ``cipher_use_hmac`` are supported::
+
+    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000')
+
+.. warning:: Previous versions of sqlalchemy did not take into consideration
+   the encryption-related pragmas passed in the url string; they were silently
+   ignored. This may cause errors when opening files saved by a
+   previous sqlalchemy version if the encryption options do not match.
+
+
+Pooling Behavior
+----------------
+
+The driver makes a change to the default pool behavior of pysqlite
+as described in :ref:`pysqlite_threading_pooling`. The pysqlcipher driver
+has been observed to be significantly slower on connection than the
+pysqlite driver, most likely due to the encryption overhead, so the
+dialect here defaults to using the :class:`.SingletonThreadPool`
+implementation, instead of the :class:`.NullPool` pool used by pysqlite.
+As always, the pool implementation is entirely configurable using the
+:paramref:`_sa.create_engine.poolclass` parameter; the :class:`.StaticPool`
+may be more feasible for single-threaded use, or :class:`.NullPool` may be
+used to prevent unencrypted connections from being held open for long
+periods of time, at the expense of slower startup time for new connections.
+
+
+""" # noqa
+
+from .pysqlite import SQLiteDialect_pysqlite
+from ... import pool
+
+
+class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite):
+    driver = "pysqlcipher"
+    supports_statement_cache = True
+
+    pragmas = ("kdf_iter", "cipher", "cipher_page_size", "cipher_use_hmac")
+
+    @classmethod
+    def import_dbapi(cls):
+        try:
+            import sqlcipher3 as sqlcipher
+        except ImportError:
+            pass
+        else:
+            return sqlcipher
+
+        from pysqlcipher3 import dbapi2 as sqlcipher
+
+        return sqlcipher
+
+    @classmethod
+    def get_pool_class(cls, url):
+        return pool.SingletonThreadPool
+
+    def on_connect_url(self, url):
+        super_on_connect = super().on_connect_url(url)
+
+        # pull the info we need from the URL early.
Even though URL + # is immutable, we don't want any in-place changes to the URL + # to affect things + passphrase = url.password or "" + url_query = dict(url.query) + + def on_connect(conn): + cursor = conn.cursor() + cursor.execute('pragma key="%s"' % passphrase) + for prag in self.pragmas: + value = url_query.get(prag, None) + if value is not None: + cursor.execute('pragma %s="%s"' % (prag, value)) + cursor.close() + + if super_on_connect: + super_on_connect(conn) + + return on_connect + + def create_connect_args(self, url): + plain_url = url._replace(password=None) + plain_url = plain_url.difference_update_query(self.pragmas) + return super().create_connect_args(plain_url) + + +dialect = SQLiteDialect_pysqlcipher diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/pysqlite.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/pysqlite.py new file mode 100644 index 00000000..69a902c3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/sqlite/pysqlite.py @@ -0,0 +1,756 @@ +# dialects/sqlite/pysqlite.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +r""" +.. dialect:: sqlite+pysqlite + :name: pysqlite + :dbapi: sqlite3 + :connectstring: sqlite+pysqlite:///file_path + :url: https://docs.python.org/library/sqlite3.html + + Note that ``pysqlite`` is the same driver as the ``sqlite3`` + module included with the Python distribution. + +Driver +------ + +The ``sqlite3`` Python DBAPI is standard on all modern Python versions; +for cPython and Pypy, no additional installation is necessary. + + +Connect Strings +--------------- + +The file specification for the SQLite database is taken as the "database" +portion of the URL. Note that the format of a SQLAlchemy url is:: + + driver://user:pass@host/database + +This means that the actual filename to be used starts with the characters to +the **right** of the third slash. So connecting to a relative filepath +looks like:: + + # relative path + e = create_engine('sqlite:///path/to/database.db') + +An absolute path, which is denoted by starting with a slash, means you +need **four** slashes:: + + # absolute path + e = create_engine('sqlite:////path/to/database.db') + +To use a Windows path, regular drive specifications and backslashes can be +used. Double backslashes are probably needed:: + + # absolute path on Windows + e = create_engine('sqlite:///C:\\path\\to\\database.db') + +To use sqlite ``:memory:`` database specify it as the filename using +``sqlite:///:memory:``. It's also the default if no filepath is +present, specifying only ``sqlite://`` and nothing else:: + + # in-memory database (note three slashes) + e = create_engine('sqlite:///:memory:') + # also in-memory database + e2 = create_engine('sqlite://') + +.. _pysqlite_uri_connections: + +URI Connections +^^^^^^^^^^^^^^^ + +Modern versions of SQLite support an alternative system of connecting using a +`driver level URI `_, which has the advantage +that additional driver-level arguments can be passed including options such as +"read only". The Python sqlite3 driver supports this mode under modern Python +3 versions. The SQLAlchemy pysqlite driver supports this mode of use by +specifying "uri=true" in the URL query string. 
The SQLite-level "URI" is kept as the "database"
+portion of the SQLAlchemy url (that is, following a slash)::
+
+    e = create_engine("sqlite:///file:path/to/database?mode=ro&uri=true")
+
+.. note::  The "uri=true" parameter must appear in the **query string**
+   of the URL.  It will not currently work as expected if it is only
+   present in the :paramref:`_sa.create_engine.connect_args`
+   parameter dictionary.
+
+The logic reconciles the simultaneous presence of SQLAlchemy's query string and
+SQLite's query string by separating out the parameters that belong to the
+Python sqlite3 driver vs. those that belong to the SQLite URI.  This is
+achieved through the use of a fixed list of parameters known to be accepted by
+the Python side of the driver.  For example, to include a URL that indicates
+the Python sqlite3 "timeout" and "check_same_thread" parameters, along with the
+SQLite "mode" and "nolock" parameters, they can all be passed together on the
+query string::
+
+    e = create_engine(
+        "sqlite:///file:path/to/database?"
+        "check_same_thread=true&timeout=10&mode=ro&nolock=1&uri=true"
+    )
+
+Above, the pysqlite / sqlite3 DBAPI would be passed arguments as::
+
+    sqlite3.connect(
+        "file:path/to/database?mode=ro&nolock=1",
+        check_same_thread=True, timeout=10, uri=True
+    )
+
+Regarding future parameters added to either the Python or native drivers, new
+parameter names added to the SQLite URI scheme should be automatically
+accommodated by this scheme.  New parameter names added to the Python driver
+side can be accommodated by specifying them in the
+:paramref:`_sa.create_engine.connect_args` dictionary, until dialect support
+is added by SQLAlchemy.  For the less likely case that the native SQLite
+driver adds a new parameter name that overlaps with one of the existing,
+known Python driver parameters (such as "timeout" perhaps), SQLAlchemy's
+dialect would require adjustment for the URL scheme to continue to support
+this.
+
+As is always the case for all SQLAlchemy dialects, the entire "URL" process
+can be bypassed in :func:`_sa.create_engine` through the use of the
+:paramref:`_sa.create_engine.creator` parameter which allows for a custom
+callable that creates a Python sqlite3 driver level connection directly.
+
+.. versionadded:: 1.3.9
+
+.. seealso::
+
+    `Uniform Resource Identifiers `_ - in
+    the SQLite documentation
+
+.. _pysqlite_regexp:
+
+Regular Expression Support
+---------------------------
+
+.. versionadded:: 1.4
+
+Support for the :meth:`_sql.ColumnOperators.regexp_match` operator is provided
+using Python's re.search_ function.  SQLite itself does not include a working
+regular expression operator; instead, it includes a non-implemented placeholder
+operator ``REGEXP`` that calls a user-defined function that must be provided.
+
+SQLAlchemy's implementation makes use of the pysqlite create_function_ hook
+as follows::
+
+
+    def regexp(a, b):
+        return re.search(a, b) is not None
+
+    sqlite_connection.create_function(
+        "regexp", 2, regexp,
+    )
+
+There is currently no support for regular expression flags as a separate
+argument, as these are not supported by SQLite's REGEXP operator, however
+these may be included inline within the regular expression string.  See
+`Python regular expressions`_ for details.
+
+.. seealso::
+
+    `Python regular expressions`_: Documentation for Python's regular
+    expression syntax.
+
+.. _create_function: https://docs.python.org/3/library/sqlite3.html#sqlite3.Connection.create_function
+
+.. _re.search: https://docs.python.org/3/library/re.html#re.search
+
+.. _Python regular expressions: https://docs.python.org/3/library/re.html#re.search
+
+
+
+Compatibility with sqlite3 "native" date and datetime types
+-----------------------------------------------------------
+
+The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and
+sqlite3.PARSE_COLNAMES options, which have the effect that any column
+or expression explicitly cast as "date" or "timestamp" will be converted
+to a Python date or datetime object.  The date and datetime types provided
+with the pysqlite dialect are not currently compatible with these options,
+since they render the ISO date/datetime including microseconds, which
+pysqlite's driver does not.  Additionally, SQLAlchemy does not at
+this time automatically render the "cast" syntax required for the
+freestanding functions "current_timestamp" and "current_date" to return
+datetime/date types natively.  Unfortunately, pysqlite
+does not provide the standard DBAPI types in ``cursor.description``,
+leaving SQLAlchemy with no way to detect these types on the fly
+without expensive per-row type checks.
+
+Keeping in mind that pysqlite's parsing option is not recommended,
+nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES
+can be forced if one configures "native_datetime=True" on create_engine()::
+
+    engine = create_engine('sqlite://',
+        connect_args={'detect_types':
+            sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES},
+        native_datetime=True
+    )
+
+With this flag enabled, the DATE and TIMESTAMP types (but note - not the
+DATETIME or TIME types...confused yet ?) will not perform any bind parameter
+or result processing. Execution of "func.current_date()" will return a string.
+"func.current_timestamp()" is registered as returning a DATETIME type in
+SQLAlchemy, so this function still receives SQLAlchemy-level result
+processing.
+
+.. _pysqlite_threading_pooling:
+
+Threading/Pooling Behavior
+---------------------------
+
+The ``sqlite3`` DBAPI by default prohibits the use of a particular connection
+in a thread which is not the one in which it was created.  As SQLite has
+matured, its behavior under multiple threads has improved, and even includes
+options for memory only databases to be used in multiple threads.
+
+The thread prohibition is known as "check same thread" and may be controlled
+using the ``sqlite3`` parameter ``check_same_thread``, which will disable or
+enable this check. SQLAlchemy's default behavior here is to set
+``check_same_thread`` to ``False`` automatically whenever a file-based database
+is in use, to establish compatibility with the default pool class
+:class:`.QueuePool`.
+
+The SQLAlchemy ``pysqlite`` DBAPI establishes the connection pool differently
+based on the kind of SQLite database that's requested:
+
+* When a ``:memory:`` SQLite database is specified, the dialect by default
+  will use :class:`.SingletonThreadPool`. This pool maintains a single
+  connection per thread, so that all access to the engine within the current
+  thread uses the same ``:memory:`` database - other threads would access a
+  different ``:memory:`` database.  The ``check_same_thread`` parameter
+  defaults to ``True``.
+* When a file-based database is specified, the dialect will use
+  :class:`.QueuePool` as the source of connections.  At the same time,
+  the ``check_same_thread`` flag is set to False by default unless overridden.
+
+  .. versionchanged:: 2.0
+
+      SQLite file database engines now use :class:`.QueuePool` by default.
+      Previously, :class:`.NullPool` was used.  The :class:`.NullPool` class
+      may be used by specifying it via the
+      :paramref:`_sa.create_engine.poolclass` parameter.
+
+Disabling Connection Pooling for File Databases
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Pooling may be disabled for a file based database by specifying the
+:class:`.NullPool` implementation for the :func:`_sa.create_engine.poolclass`
+parameter::
+
+    from sqlalchemy import NullPool
+    engine = create_engine("sqlite:///myfile.db", poolclass=NullPool)
+
+It's been observed that the :class:`.NullPool` implementation incurs an
+extremely small performance overhead for repeated checkouts due to the lack of
+connection re-use implemented by :class:`.QueuePool`.  However, it still
+may be beneficial to use this class if the application is experiencing
+issues with files being locked.
+
+Using a Memory Database in Multiple Threads
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To use a ``:memory:`` database in a multithreaded scenario, the same
+connection object must be shared among threads, since the database exists
+only within the scope of that connection.  The
+:class:`.StaticPool` implementation will maintain a single connection
+globally, and the ``check_same_thread`` flag can be passed to Pysqlite
+as ``False``::
+
+    from sqlalchemy.pool import StaticPool
+    engine = create_engine('sqlite://',
+                        connect_args={'check_same_thread':False},
+                        poolclass=StaticPool)
+
+Note that using a ``:memory:`` database in multiple threads requires a recent
+version of SQLite.
+
+Using Temporary Tables with SQLite
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Due to the way SQLite deals with temporary tables, if you wish to use a
+temporary table in a file-based SQLite database across multiple checkouts
+from the connection pool, such as when using an ORM :class:`.Session` where
+the temporary table should continue to remain after :meth:`.Session.commit` or
+:meth:`.Session.rollback` is called, a pool which maintains a single
+connection must be used.  Use :class:`.SingletonThreadPool` if the scope is
+only needed within the current thread, or :class:`.StaticPool` if scope is
+needed within multiple threads for this case::
+
+    # maintain the same connection per thread
+    from sqlalchemy.pool import SingletonThreadPool
+    engine = create_engine('sqlite:///mydb.db',
+                        poolclass=SingletonThreadPool)
+
+
+    # maintain the same connection across all threads
+    from sqlalchemy.pool import StaticPool
+    engine = create_engine('sqlite:///mydb.db',
+                        poolclass=StaticPool)
+
+Note that :class:`.SingletonThreadPool` should be configured for the number
+of threads that are to be used; beyond that number, connections will be
+closed out in a non-deterministic way.
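+
+A brief sketch of such a temporary table, using the ``prefixes`` option of
+:class:`_schema.Table` to render ``CREATE TEMPORARY TABLE``::
+
+    from sqlalchemy import Column, Integer, MetaData, Table
+
+    metadata = MetaData()
+    scratch = Table(
+        "scratch", metadata,
+        Column("id", Integer, primary_key=True),
+        prefixes=["TEMPORARY"],
+    )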
+
+
+Dealing with Mixed String / Binary Columns
+------------------------------------------------------
+
+The SQLite database is weakly typed, and as such it is possible when using
+binary values, which in Python are represented as ``b'some string'``, that a
+particular SQLite database can have data values within different rows where
+some of them will be returned as a ``b''`` value by the Pysqlite driver, and
+others will be returned as Python strings, e.g. ``''`` values.  This situation
+is not known to occur if the SQLAlchemy :class:`.LargeBinary` datatype is used
+consistently, however if a particular SQLite database has data that was
+inserted using the Pysqlite driver directly, or when using the SQLAlchemy
+:class:`.String` type which was later changed to :class:`.LargeBinary`, the
+table will not be consistently readable because SQLAlchemy's
+:class:`.LargeBinary` datatype does not handle strings so it has no way of
+"encoding" a value that is in string format.
+
+To deal with a SQLite table that has mixed string / binary data in the
+same column, use a custom type that will check each row individually::
+
+    from sqlalchemy import String
+    from sqlalchemy import TypeDecorator
+
+    class MixedBinary(TypeDecorator):
+        impl = String
+        cache_ok = True
+
+        def process_result_value(self, value, dialect):
+            if isinstance(value, str):
+                value = bytes(value, 'utf-8')
+            elif value is not None:
+                value = bytes(value)
+
+            return value
+
+Then use the above ``MixedBinary`` datatype in the place where
+:class:`.LargeBinary` would normally be used.
+
+.. _pysqlite_serializable:
+
+Serializable isolation / Savepoints / Transactional DDL
+-------------------------------------------------------
+
+In the section :ref:`sqlite_concurrency`, we refer to the pysqlite
+driver's assortment of issues that prevent several features of SQLite
+from working correctly.  The pysqlite DBAPI driver has several
+long-standing bugs which impact the correctness of its transactional
+behavior.  In its default mode of operation, SQLite features such as
+SERIALIZABLE isolation, transactional DDL, and SAVEPOINT support are
+non-functional, and in order to use these features, workarounds must
+be taken.
+
+The issue is essentially that the driver attempts to second-guess the user's
+intent, failing to start transactions and sometimes ending them prematurely, in
+an effort to minimize the SQLite database's file locking behavior, even
+though SQLite itself uses "shared" locks for read-only activities.
+
+SQLAlchemy chooses to not alter this behavior by default, as it is the
+long-expected behavior of the pysqlite driver; if and when the pysqlite
+driver attempts to repair these issues, that will be more of a driver towards
+changing SQLAlchemy's defaults.
+
+The good news is that with a few events, we can implement transactional
+support fully, by disabling pysqlite's feature entirely and emitting BEGIN
+ourselves. This is achieved using two event listeners::
+
+    from sqlalchemy import create_engine, event
+
+    engine = create_engine("sqlite:///myfile.db")
+
+    @event.listens_for(engine, "connect")
+    def do_connect(dbapi_connection, connection_record):
+        # disable pysqlite's emitting of the BEGIN statement entirely.
+        # also stops it from emitting COMMIT before any DDL.
+        dbapi_connection.isolation_level = None
+
+    @event.listens_for(engine, "begin")
+    def do_begin(conn):
+        # emit our own BEGIN
+        conn.exec_driver_sql("BEGIN")
+
+.. warning:: When using the above recipe, it is advised to not use the
+   :paramref:`.Connection.execution_options.isolation_level` setting on
+   :class:`_engine.Connection` and :func:`_sa.create_engine`
+   with the SQLite driver,
+   as this function necessarily will also alter the ".isolation_level" setting.
+
+
+Above, we intercept a new pysqlite connection and disable any transactional
+integration.  Then, at the point at which SQLAlchemy knows that transaction
+scope is to begin, we emit ``"BEGIN"`` ourselves.
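+
+With the above recipe in place, features such as SAVEPOINT behave as
+expected; as a brief sketch (``SomeObject`` here stands for any mapped
+class, with the recipe applied to ``engine``)::
+
+    from sqlalchemy.orm import Session
+
+    with Session(engine) as session:
+        session.add(SomeObject(id=1))
+        with session.begin_nested():  # emits SAVEPOINT
+            session.add(SomeObject(id=2))
+        session.commit()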
+ +When we take control of ``"BEGIN"``, we can also control directly SQLite's +locking modes, introduced at +`BEGIN TRANSACTION `_, +by adding the desired locking mode to our ``"BEGIN"``:: + + @event.listens_for(engine, "begin") + def do_begin(conn): + conn.exec_driver_sql("BEGIN EXCLUSIVE") + +.. seealso:: + + `BEGIN TRANSACTION `_ - + on the SQLite site + + `sqlite3 SELECT does not BEGIN a transaction `_ - + on the Python bug tracker + + `sqlite3 module breaks transactions and potentially corrupts data `_ - + on the Python bug tracker + +.. _pysqlite_udfs: + +User-Defined Functions +---------------------- + +pysqlite supports a `create_function() `_ +method that allows us to create our own user-defined functions (UDFs) in Python and use them directly in SQLite queries. +These functions are registered with a specific DBAPI Connection. + +SQLAlchemy uses connection pooling with file-based SQLite databases, so we need to ensure that the UDF is attached to the +connection when it is created. That is accomplished with an event listener:: + + from sqlalchemy import create_engine + from sqlalchemy import event + from sqlalchemy import text + + + def udf(): + return "udf-ok" + + + engine = create_engine("sqlite:///./db_file") + + + @event.listens_for(engine, "connect") + def connect(conn, rec): + conn.create_function("udf", 0, udf) + + + for i in range(5): + with engine.connect() as conn: + print(conn.scalar(text("SELECT UDF()"))) + + +""" # noqa + +import math +import os +import re + +from .base import DATE +from .base import DATETIME +from .base import SQLiteDialect +from ... import exc +from ... import pool +from ... import types as sqltypes +from ... import util + + +class _SQLite_pysqliteTimeStamp(DATETIME): + def bind_processor(self, dialect): + if dialect.native_datetime: + return None + else: + return DATETIME.bind_processor(self, dialect) + + def result_processor(self, dialect, coltype): + if dialect.native_datetime: + return None + else: + return DATETIME.result_processor(self, dialect, coltype) + + +class _SQLite_pysqliteDate(DATE): + def bind_processor(self, dialect): + if dialect.native_datetime: + return None + else: + return DATE.bind_processor(self, dialect) + + def result_processor(self, dialect, coltype): + if dialect.native_datetime: + return None + else: + return DATE.result_processor(self, dialect, coltype) + + +class SQLiteDialect_pysqlite(SQLiteDialect): + default_paramstyle = "qmark" + supports_statement_cache = True + returns_native_bytes = True + + colspecs = util.update_copy( + SQLiteDialect.colspecs, + { + sqltypes.Date: _SQLite_pysqliteDate, + sqltypes.TIMESTAMP: _SQLite_pysqliteTimeStamp, + }, + ) + + description_encoding = None + + driver = "pysqlite" + + @classmethod + def import_dbapi(cls): + from sqlite3 import dbapi2 as sqlite + + return sqlite + + @classmethod + def _is_url_file_db(cls, url): + if (url.database and url.database != ":memory:") and ( + url.query.get("mode", None) != "memory" + ): + return True + else: + return False + + @classmethod + def get_pool_class(cls, url): + if cls._is_url_file_db(url): + return pool.QueuePool + else: + return pool.SingletonThreadPool + + def _get_server_version_info(self, connection): + return self.dbapi.sqlite_version_info + + _isolation_lookup = SQLiteDialect._isolation_lookup.union( + { + "AUTOCOMMIT": None, + } + ) + + def set_isolation_level(self, dbapi_connection, level): + if level == "AUTOCOMMIT": + dbapi_connection.isolation_level = None + else: + dbapi_connection.isolation_level = "" + return 
super().set_isolation_level(dbapi_connection, level) + + def on_connect(self): + def regexp(a, b): + if b is None: + return None + return re.search(a, b) is not None + + if util.py38 and self._get_server_version_info(None) >= (3, 9): + # sqlite must be greater than 3.8.3 for deterministic=True + # https://docs.python.org/3/library/sqlite3.html#sqlite3.Connection.create_function + # the check is more conservative since there were still issues + # with following 3.8 sqlite versions + create_func_kw = {"deterministic": True} + else: + create_func_kw = {} + + def set_regexp(dbapi_connection): + dbapi_connection.create_function( + "regexp", 2, regexp, **create_func_kw + ) + + def floor_func(dbapi_connection): + # NOTE: floor is optionally present in sqlite 3.35+ , however + # as it is normally non-present we deliver floor() unconditionally + # for now. + # https://www.sqlite.org/lang_mathfunc.html + dbapi_connection.create_function( + "floor", 1, math.floor, **create_func_kw + ) + + fns = [set_regexp, floor_func] + + def connect(conn): + for fn in fns: + fn(conn) + + return connect + + def create_connect_args(self, url): + if url.username or url.password or url.host or url.port: + raise exc.ArgumentError( + "Invalid SQLite URL: %s\n" + "Valid SQLite URL forms are:\n" + " sqlite:///:memory: (or, sqlite://)\n" + " sqlite:///relative/path/to/file.db\n" + " sqlite:////absolute/path/to/file.db" % (url,) + ) + + # theoretically, this list can be augmented, at least as far as + # parameter names accepted by sqlite3/pysqlite, using + # inspect.getfullargspec(). for the moment this seems like overkill + # as these parameters don't change very often, and as always, + # parameters passed to connect_args will always go to the + # sqlite3/pysqlite driver. + pysqlite_args = [ + ("uri", bool), + ("timeout", float), + ("isolation_level", str), + ("detect_types", int), + ("check_same_thread", bool), + ("cached_statements", int), + ] + opts = url.query + pysqlite_opts = {} + for key, type_ in pysqlite_args: + util.coerce_kw_type(opts, key, type_, dest=pysqlite_opts) + + if pysqlite_opts.get("uri", False): + uri_opts = dict(opts) + # here, we are actually separating the parameters that go to + # sqlite3/pysqlite vs. those that go the SQLite URI. What if + # two names conflict? again, this seems to be not the case right + # now, and in the case that new names are added to + # either side which overlap, again the sqlite3/pysqlite parameters + # can be passed through connect_args instead of in the URL. + # If SQLite native URIs add a parameter like "timeout" that + # we already have listed here for the python driver, then we need + # to adjust for that here. + for key, type_ in pysqlite_args: + uri_opts.pop(key, None) + filename = url.database + if uri_opts: + # sorting of keys is for unit test support + filename += "?" + ( + "&".join( + "%s=%s" % (key, uri_opts[key]) + for key in sorted(uri_opts) + ) + ) + else: + filename = url.database or ":memory:" + if filename != ":memory:": + filename = os.path.abspath(filename) + + pysqlite_opts.setdefault( + "check_same_thread", not self._is_url_file_db(url) + ) + + return ([filename], pysqlite_opts) + + def is_disconnect(self, e, connection, cursor): + return isinstance( + e, self.dbapi.ProgrammingError + ) and "Cannot operate on a closed database." in str(e) + + +dialect = SQLiteDialect_pysqlite + + +class _SQLiteDialect_pysqlite_numeric(SQLiteDialect_pysqlite): + """numeric dialect for testing only + + internal use only. 
This dialect is **NOT** supported by SQLAlchemy + and may change at any time. + + """ + + supports_statement_cache = True + default_paramstyle = "numeric" + driver = "pysqlite_numeric" + + _first_bind = ":1" + _not_in_statement_regexp = None + + def __init__(self, *arg, **kw): + kw.setdefault("paramstyle", "numeric") + super().__init__(*arg, **kw) + + def create_connect_args(self, url): + arg, opts = super().create_connect_args(url) + opts["factory"] = self._fix_sqlite_issue_99953() + return arg, opts + + def _fix_sqlite_issue_99953(self): + import sqlite3 + + first_bind = self._first_bind + if self._not_in_statement_regexp: + nis = self._not_in_statement_regexp + + def _test_sql(sql): + m = nis.search(sql) + assert not m, f"Found {nis.pattern!r} in {sql!r}" + + else: + + def _test_sql(sql): + pass + + def _numeric_param_as_dict(parameters): + if parameters: + assert isinstance(parameters, tuple) + return { + str(idx): value for idx, value in enumerate(parameters, 1) + } + else: + return () + + class SQLiteFix99953Cursor(sqlite3.Cursor): + def execute(self, sql, parameters=()): + _test_sql(sql) + if first_bind in sql: + parameters = _numeric_param_as_dict(parameters) + return super().execute(sql, parameters) + + def executemany(self, sql, parameters): + _test_sql(sql) + if first_bind in sql: + parameters = [ + _numeric_param_as_dict(p) for p in parameters + ] + return super().executemany(sql, parameters) + + class SQLiteFix99953Connection(sqlite3.Connection): + def cursor(self, factory=None): + if factory is None: + factory = SQLiteFix99953Cursor + return super().cursor(factory=factory) + + def execute(self, sql, parameters=()): + _test_sql(sql) + if first_bind in sql: + parameters = _numeric_param_as_dict(parameters) + return super().execute(sql, parameters) + + def executemany(self, sql, parameters): + _test_sql(sql) + if first_bind in sql: + parameters = [ + _numeric_param_as_dict(p) for p in parameters + ] + return super().executemany(sql, parameters) + + return SQLiteFix99953Connection + + +class _SQLiteDialect_pysqlite_dollar(_SQLiteDialect_pysqlite_numeric): + """numeric dialect that uses $ for testing only + + internal use only. This dialect is **NOT** supported by SQLAlchemy + and may change at any time. + + """ + + supports_statement_cache = True + default_paramstyle = "numeric_dollar" + driver = "pysqlite_dollar" + + _first_bind = "$1" + _not_in_statement_regexp = re.compile(r"[^\d]:\d+") + + def __init__(self, *arg, **kw): + kw.setdefault("paramstyle", "numeric_dollar") + super().__init__(*arg, **kw) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/type_migration_guidelines.txt b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/type_migration_guidelines.txt new file mode 100644 index 00000000..e6be2056 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/type_migration_guidelines.txt @@ -0,0 +1,145 @@ +Rules for Migrating TypeEngine classes to 0.6 +--------------------------------------------- + +1. the TypeEngine classes are used for: + + a. Specifying behavior which needs to occur for bind parameters + or result row columns. + + b. Specifying types that are entirely specific to the database + in use and have no analogue in the sqlalchemy.types package. + + c. Specifying types where there is an analogue in sqlalchemy.types, + but the database in use takes vendor-specific flags for those + types. + + d. If a TypeEngine class doesn't provide any of this, it should be + *removed* from the dialect. + +2. 
the TypeEngine classes are *no longer* used for generating DDL. Dialects
+now have a TypeCompiler subclass which uses the same visit_XXX model as
+other compilers.
+
+3. the "ischema_names" and "colspecs" dictionaries are now required members on
+the Dialect class.
+
+4. The names of types within dialects are now important. If a dialect-specific type
+is a subclass of an existing generic type and is only provided for bind/result behavior,
+the current mixed case naming can remain, i.e. _PGNumeric for Numeric - in this case,
+end users would never need to use _PGNumeric directly. However, if a dialect-specific
+type is specifying a type *or* arguments that are not present generically, it should
+match the real name of the type on that backend, in uppercase. E.g. postgresql.INET,
+mysql.ENUM, postgresql.ARRAY.
+
+Or follow this handy decision procedure:
+
+    a. Is the type meant only to provide bind/result behavior to a generic
+       (i.e. MixedCase) type in types.py? Then name the type using _MixedCase,
+       i.e. _OracleBoolean, subclassing the closest MixedCase type in types.py
+       (i.e. class _DateTime(types.DateTime)). It stays private to the dialect
+       and is invoked *only* via the colspecs dict.
+
+    b. Otherwise, is the type the same name as an UPPERCASE type in types.py,
+       needing no special behavior or arguments? Then don't make a new type;
+       make sure the dialect's base.py imports the types.py UPPERCASE name
+       into its namespace (i.e. BIT, NCHAR, INTERVAL). Users can import it.
+
+    c. Otherwise, name the type identically as within the DB, using UPPERCASE.
+       If the name is identical to an UPPERCASE name in types.py, the type
+       should subclass the UPPERCASE type in types.py (i.e. class
+       BLOB(types.BLOB)); if not, subclass the closest MixedCase type
+       (i.e. class DATETIME2(types.DateTime), class BIT(types.TypeEngine)).
+
+Example 1. pysqlite needs bind/result processing for the DateTime type in types.py,
+which applies to all DateTimes and subclasses. It's named _SLDateTime and
+subclasses types.DateTime.
+
+Example 2. MS-SQL has a TIME type which takes a non-standard "precision" argument
+that is rendered within DDL. So it's named TIME in the MS-SQL dialect's base.py,
+and subclasses types.TIME. Users can then say mssql.TIME(precision=10).
+
+Example 3. MS-SQL dialects also need special bind/result processing for date.
+But its DATE type doesn't render DDL differently than that of a plain
+DATE, i.e. it takes no special arguments. Therefore we are just adding behavior
+to types.Date, so it's named _MSDate in the MS-SQL dialect's base.py, and subclasses
+types.Date.
+
+Example 4. MySQL has a SET type, there's no analogue for this in types.py. So
+MySQL names it SET in the dialect's base.py, and it subclasses types.String, since
+it ultimately deals with strings.
+
+Example 5. PostgreSQL has a DATETIME type. The DBAPIs handle dates correctly,
+and no special arguments are used in PG's DDL beyond what types.py provides.
+PostgreSQL dialect therefore imports types.DATETIME into its base.py.
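+
+As a minimal sketch of branches (a) and (c) above (the type names and method
+bodies here are hypothetical, for illustration only):
+
+    from sqlalchemy import types
+
+    # branch (a): private bind/result-only subclass of a generic type,
+    # wired up through the dialect's colspecs dictionary
+    class _MyDBDateTime(types.DateTime):
+        def bind_processor(self, dialect):
+            def process(value):
+                # None always passes through unchanged
+                return value.isoformat() if value is not None else None
+            return process
+
+    # branch (c): vendor-specific type with no generic analogue, named in
+    # UPPERCASE after the real backend type
+    class INET(types.TypeEngine):
+        __visit_name__ = "INET"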
+
+Ideally one should be able to specify a schema using names imported completely from a
+dialect, all matching the real name on that backend:
+
+   from sqlalchemy.dialects.postgresql import base as pg
+
+   t = Table('mytable', metadata,
+              Column('id', pg.INTEGER, primary_key=True),
+              Column('name', pg.VARCHAR(300)),
+              Column('inetaddr', pg.INET)
+   )
+
+where above, the INTEGER and VARCHAR types are ultimately from sqlalchemy.types,
+but the PG dialect makes them available in its own namespace.
+
+5. "colspecs" now is a dictionary of generic or uppercased types from sqlalchemy.types
+linked to types specified in the dialect. Again, if a type in the dialect does not
+specify any special behavior for bind_processor() or result_processor() and does not
+indicate a special type only available in this database, it must be *removed* from the
+module and from this dictionary.
+
+6. "ischema_names" indicates string descriptions of types as returned from the database
+linked to TypeEngine classes.
+
+    a. The string name should be matched to the most specific type possible within
+    sqlalchemy.types, unless there is no matching type within sqlalchemy.types in which
+    case it points to a dialect type. *It doesn't matter* if the dialect has its
+    own subclass of that type with special bind/result behavior - reflect to the types.py
+    UPPERCASE type as much as possible. With very few exceptions, all types
+    should reflect to an UPPERCASE type.
+
+    b. If the dialect contains a matching dialect-specific type that takes extra arguments
+    which the generic one does not, then point to the dialect-specific type. E.g.
+    mssql.VARCHAR takes a "collation" parameter which should be preserved.
+
+7. DDL, or what was formerly issued by "get_col_spec()", is now handled exclusively by
+a subclass of compiler.GenericTypeCompiler.
+
+    a. your TypeCompiler class will receive generic and uppercase types from
+    sqlalchemy.types. Do not assume the presence of dialect-specific attributes on
+    these types.
+
+    b. the visit_UPPERCASE methods on GenericTypeCompiler should *not* be overridden with
+    methods that produce a different DDL name. Uppercase types don't do any kind of
+    "guessing" - if visit_TIMESTAMP is called, the DDL should render as TIMESTAMP in
+    all cases, regardless of whether or not that type is legal on the backend database.
+
+    c. the visit_UPPERCASE methods *should* be overridden with methods that add additional
+    arguments and flags to those types.
+
+    d. the visit_lowercase methods are overridden to provide an interpretation of a generic
+    type. E.g. visit_large_binary() might be overridden to say "return self.visit_BIT(type_)".
+
+    e. visit_lowercase methods should *never* render strings directly - rendering should
+    always be via calling a visit_UPPERCASE() method.
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__init__.py
new file mode 100644
index 00000000..af0f7ee8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__init__.py
@@ -0,0 +1,62 @@
+# engine/__init__.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""SQL connections, SQL execution and high-level DB-API interface.
+ +The engine package defines the basic components used to interface +DB-API modules with higher-level statement construction, +connection-management, execution and result contexts. The primary +"entry point" class into this package is the Engine and its public +constructor ``create_engine()``. + +""" + +from . import events as events +from . import util as util +from .base import Connection as Connection +from .base import Engine as Engine +from .base import NestedTransaction as NestedTransaction +from .base import RootTransaction as RootTransaction +from .base import Transaction as Transaction +from .base import TwoPhaseTransaction as TwoPhaseTransaction +from .create import create_engine as create_engine +from .create import create_pool_from_url as create_pool_from_url +from .create import engine_from_config as engine_from_config +from .cursor import CursorResult as CursorResult +from .cursor import ResultProxy as ResultProxy +from .interfaces import AdaptedConnection as AdaptedConnection +from .interfaces import BindTyping as BindTyping +from .interfaces import Compiled as Compiled +from .interfaces import Connectable as Connectable +from .interfaces import ConnectArgsType as ConnectArgsType +from .interfaces import ConnectionEventsTarget as ConnectionEventsTarget +from .interfaces import CreateEnginePlugin as CreateEnginePlugin +from .interfaces import Dialect as Dialect +from .interfaces import ExceptionContext as ExceptionContext +from .interfaces import ExecutionContext as ExecutionContext +from .interfaces import TypeCompiler as TypeCompiler +from .mock import create_mock_engine as create_mock_engine +from .reflection import Inspector as Inspector +from .reflection import ObjectKind as ObjectKind +from .reflection import ObjectScope as ObjectScope +from .result import ChunkedIteratorResult as ChunkedIteratorResult +from .result import FilterResult as FilterResult +from .result import FrozenResult as FrozenResult +from .result import IteratorResult as IteratorResult +from .result import MappingResult as MappingResult +from .result import MergedResult as MergedResult +from .result import Result as Result +from .result import result_tuple as result_tuple +from .result import ScalarResult as ScalarResult +from .result import TupleResult as TupleResult +from .row import BaseRow as BaseRow +from .row import Row as Row +from .row import RowMapping as RowMapping +from .url import make_url as make_url +from .url import URL as URL +from .util import connection_memoize as connection_memoize +from ..sql import ddl as ddl diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..e035fc42 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/_py_processors.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/_py_processors.cpython-312.pyc new file mode 100644 index 00000000..18cdde27 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/_py_processors.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/_py_row.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/_py_row.cpython-312.pyc new file mode 100644 index 00000000..32344131 
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/_py_row.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/_py_util.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/_py_util.cpython-312.pyc new file mode 100644 index 00000000..3067f7a1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/_py_util.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/base.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/base.cpython-312.pyc new file mode 100644 index 00000000..9a8bd17a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/base.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/characteristics.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/characteristics.cpython-312.pyc new file mode 100644 index 00000000..9c956b50 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/characteristics.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/create.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/create.cpython-312.pyc new file mode 100644 index 00000000..2cdd360b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/create.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/cursor.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/cursor.cpython-312.pyc new file mode 100644 index 00000000..6857434d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/cursor.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/default.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/default.cpython-312.pyc new file mode 100644 index 00000000..d3519140 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/default.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/events.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/events.cpython-312.pyc new file mode 100644 index 00000000..daf9a97b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/events.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/interfaces.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/interfaces.cpython-312.pyc new file mode 100644 index 00000000..06461b8a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/interfaces.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/mock.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/mock.cpython-312.pyc new file mode 100644 index 00000000..9ad5488b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/mock.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/processors.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/processors.cpython-312.pyc new file mode 100644 index 00000000..4d98f0e0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/processors.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/reflection.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/reflection.cpython-312.pyc new file mode 100644 index 00000000..d9d7ce36 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/reflection.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/result.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/result.cpython-312.pyc new file mode 100644 index 00000000..75339e32 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/result.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/row.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/row.cpython-312.pyc new file mode 100644 index 00000000..e84a9544 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/row.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/strategies.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/strategies.cpython-312.pyc new file mode 100644 index 00000000..ff95aa43 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/strategies.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/url.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/url.cpython-312.pyc new file mode 100644 index 00000000..b7354232 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/url.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/util.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/util.cpython-312.pyc new file mode 100644 index 00000000..00dc0ed3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__pycache__/util.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_processors.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_processors.py new file mode 100644 index 00000000..2cc35b50 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_processors.py @@ -0,0 +1,136 @@ +# engine/_py_processors.py +# Copyright (C) 2010-2024 the SQLAlchemy authors and contributors +# +# Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""defines generic type conversion functions, as used in bind and result +processors. + +They all share one common characteristic: None is passed through unchanged. 
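+
+For example, a minimal sketch of that contract, exercising only functions
+defined in this module::
+
+    from sqlalchemy.engine._py_processors import str_to_date, to_float
+
+    assert to_float(None) is None  # None passes through unchanged
+    assert to_float(3) == 3.0
+    assert str_to_date(None) is None
+    assert str_to_date("2024-01-02").year == 2024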
+ +""" + +from __future__ import annotations + +import datetime +from datetime import date as date_cls +from datetime import datetime as datetime_cls +from datetime import time as time_cls +from decimal import Decimal +import typing +from typing import Any +from typing import Callable +from typing import Optional +from typing import Type +from typing import TypeVar +from typing import Union + + +_DT = TypeVar( + "_DT", bound=Union[datetime.datetime, datetime.time, datetime.date] +) + + +def str_to_datetime_processor_factory( + regexp: typing.Pattern[str], type_: Callable[..., _DT] +) -> Callable[[Optional[str]], Optional[_DT]]: + rmatch = regexp.match + # Even on python2.6 datetime.strptime is both slower than this code + # and it does not support microseconds. + has_named_groups = bool(regexp.groupindex) + + def process(value: Optional[str]) -> Optional[_DT]: + if value is None: + return None + else: + try: + m = rmatch(value) + except TypeError as err: + raise ValueError( + "Couldn't parse %s string '%r' " + "- value is not a string." % (type_.__name__, value) + ) from err + + if m is None: + raise ValueError( + "Couldn't parse %s string: " + "'%s'" % (type_.__name__, value) + ) + if has_named_groups: + groups = m.groupdict(0) + return type_( + **dict( + list( + zip( + iter(groups.keys()), + list(map(int, iter(groups.values()))), + ) + ) + ) + ) + else: + return type_(*list(map(int, m.groups(0)))) + + return process + + +def to_decimal_processor_factory( + target_class: Type[Decimal], scale: int +) -> Callable[[Optional[float]], Optional[Decimal]]: + fstring = "%%.%df" % scale + + def process(value: Optional[float]) -> Optional[Decimal]: + if value is None: + return None + else: + return target_class(fstring % value) + + return process + + +def to_float(value: Optional[Union[int, float]]) -> Optional[float]: + if value is None: + return None + else: + return float(value) + + +def to_str(value: Optional[Any]) -> Optional[str]: + if value is None: + return None + else: + return str(value) + + +def int_to_boolean(value: Optional[int]) -> Optional[bool]: + if value is None: + return None + else: + return bool(value) + + +def str_to_datetime(value: Optional[str]) -> Optional[datetime.datetime]: + if value is not None: + dt_value = datetime_cls.fromisoformat(value) + else: + dt_value = None + return dt_value + + +def str_to_time(value: Optional[str]) -> Optional[datetime.time]: + if value is not None: + dt_value = time_cls.fromisoformat(value) + else: + dt_value = None + return dt_value + + +def str_to_date(value: Optional[str]) -> Optional[datetime.date]: + if value is not None: + dt_value = date_cls.fromisoformat(value) + else: + dt_value = None + return dt_value diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_row.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_row.py new file mode 100644 index 00000000..4e1dd7d4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_row.py @@ -0,0 +1,128 @@ +# engine/_py_row.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from __future__ import annotations + +import operator +import typing +from typing import Any +from typing import Callable +from typing import Dict +from typing import Iterator +from typing import List +from typing import Mapping +from typing import Optional +from typing import Tuple +from typing import Type + +if 
typing.TYPE_CHECKING: + from .result import _KeyType + from .result import _ProcessorsType + from .result import _RawRowType + from .result import _TupleGetterType + from .result import ResultMetaData + +MD_INDEX = 0 # integer index in cursor.description + + +class BaseRow: + __slots__ = ("_parent", "_data", "_key_to_index") + + _parent: ResultMetaData + _key_to_index: Mapping[_KeyType, int] + _data: _RawRowType + + def __init__( + self, + parent: ResultMetaData, + processors: Optional[_ProcessorsType], + key_to_index: Mapping[_KeyType, int], + data: _RawRowType, + ): + """Row objects are constructed by CursorResult objects.""" + object.__setattr__(self, "_parent", parent) + + object.__setattr__(self, "_key_to_index", key_to_index) + + if processors: + object.__setattr__( + self, + "_data", + tuple( + [ + proc(value) if proc else value + for proc, value in zip(processors, data) + ] + ), + ) + else: + object.__setattr__(self, "_data", tuple(data)) + + def __reduce__(self) -> Tuple[Callable[..., BaseRow], Tuple[Any, ...]]: + return ( + rowproxy_reconstructor, + (self.__class__, self.__getstate__()), + ) + + def __getstate__(self) -> Dict[str, Any]: + return {"_parent": self._parent, "_data": self._data} + + def __setstate__(self, state: Dict[str, Any]) -> None: + parent = state["_parent"] + object.__setattr__(self, "_parent", parent) + object.__setattr__(self, "_data", state["_data"]) + object.__setattr__(self, "_key_to_index", parent._key_to_index) + + def _values_impl(self) -> List[Any]: + return list(self) + + def __iter__(self) -> Iterator[Any]: + return iter(self._data) + + def __len__(self) -> int: + return len(self._data) + + def __hash__(self) -> int: + return hash(self._data) + + def __getitem__(self, key: Any) -> Any: + return self._data[key] + + def _get_by_key_impl_mapping(self, key: str) -> Any: + try: + return self._data[self._key_to_index[key]] + except KeyError: + pass + self._parent._key_not_found(key, False) + + def __getattr__(self, name: str) -> Any: + try: + return self._data[self._key_to_index[name]] + except KeyError: + pass + self._parent._key_not_found(name, True) + + def _to_tuple_instance(self) -> Tuple[Any, ...]: + return self._data + + +# This reconstructor is necessary so that pickles with the Cy extension or +# without use the same Binary format. +def rowproxy_reconstructor( + cls: Type[BaseRow], state: Dict[str, Any] +) -> BaseRow: + obj = cls.__new__(cls) + obj.__setstate__(state) + return obj + + +def tuplegetter(*indexes: int) -> _TupleGetterType: + if len(indexes) != 1: + for i in range(1, len(indexes)): + if indexes[i - 1] != indexes[i] - 1: + return operator.itemgetter(*indexes) + # slice form is faster but returns a list if input is list + return operator.itemgetter(slice(indexes[0], indexes[-1] + 1)) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_util.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_util.py new file mode 100644 index 00000000..2be4322a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_util.py @@ -0,0 +1,74 @@ +# engine/_py_util.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from __future__ import annotations + +import typing +from typing import Any +from typing import Mapping +from typing import Optional +from typing import Tuple + +from .. 
import exc + +if typing.TYPE_CHECKING: + from .interfaces import _CoreAnyExecuteParams + from .interfaces import _CoreMultiExecuteParams + from .interfaces import _DBAPIAnyExecuteParams + from .interfaces import _DBAPIMultiExecuteParams + + +_no_tuple: Tuple[Any, ...] = () + + +def _distill_params_20( + params: Optional[_CoreAnyExecuteParams], +) -> _CoreMultiExecuteParams: + if params is None: + return _no_tuple + # Assume list is more likely than tuple + elif isinstance(params, list) or isinstance(params, tuple): + # collections_abc.MutableSequence): # avoid abc.__instancecheck__ + if params and not isinstance(params[0], (tuple, Mapping)): + raise exc.ArgumentError( + "List argument must consist only of tuples or dictionaries" + ) + + return params + elif isinstance(params, dict) or isinstance( + # only do immutabledict or abc.__instancecheck__ for Mapping after + # we've checked for plain dictionaries and would otherwise raise + params, + Mapping, + ): + return [params] + else: + raise exc.ArgumentError("mapping or list expected for parameters") + + +def _distill_raw_params( + params: Optional[_DBAPIAnyExecuteParams], +) -> _DBAPIMultiExecuteParams: + if params is None: + return _no_tuple + elif isinstance(params, list): + # collections_abc.MutableSequence): # avoid abc.__instancecheck__ + if params and not isinstance(params[0], (tuple, Mapping)): + raise exc.ArgumentError( + "List argument must consist only of tuples or dictionaries" + ) + + return params + elif isinstance(params, (tuple, dict)) or isinstance( + # only do abc.__instancecheck__ for Mapping after we've checked + # for plain dictionaries and would otherwise raise + params, + Mapping, + ): + # cast("Union[List[Mapping[str, Any]], Tuple[Any, ...]]", [params]) + return [params] # type: ignore + else: + raise exc.ArgumentError("mapping or sequence expected for parameters") diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py new file mode 100644 index 00000000..983bdae0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py @@ -0,0 +1,3375 @@ +# engine/base.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +"""Defines :class:`_engine.Connection` and :class:`_engine.Engine`. + +""" +from __future__ import annotations + +import contextlib +import sys +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Mapping +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Tuple +from typing import Type +from typing import TypeVar +from typing import Union + +from .interfaces import BindTyping +from .interfaces import ConnectionEventsTarget +from .interfaces import DBAPICursor +from .interfaces import ExceptionContext +from .interfaces import ExecuteStyle +from .interfaces import ExecutionContext +from .interfaces import IsolationLevel +from .util import _distill_params_20 +from .util import _distill_raw_params +from .util import TransactionalContext +from .. import exc +from .. import inspection +from .. import log +from .. import util +from ..sql import compiler +from ..sql import util as sql_util + +if typing.TYPE_CHECKING: + from . import CursorResult + from . 
import ScalarResult
+    from .interfaces import _AnyExecuteParams
+    from .interfaces import _AnyMultiExecuteParams
+    from .interfaces import _CoreAnyExecuteParams
+    from .interfaces import _CoreMultiExecuteParams
+    from .interfaces import _CoreSingleExecuteParams
+    from .interfaces import _DBAPIAnyExecuteParams
+    from .interfaces import _DBAPISingleExecuteParams
+    from .interfaces import _ExecuteOptions
+    from .interfaces import CompiledCacheType
+    from .interfaces import CoreExecuteOptionsParameter
+    from .interfaces import Dialect
+    from .interfaces import SchemaTranslateMapType
+    from .reflection import Inspector  # noqa
+    from .url import URL
+    from ..event import dispatcher
+    from ..log import _EchoFlagType
+    from ..pool import _ConnectionFairy
+    from ..pool import Pool
+    from ..pool import PoolProxiedConnection
+    from ..sql import Executable
+    from ..sql._typing import _InfoType
+    from ..sql.compiler import Compiled
+    from ..sql.ddl import ExecutableDDLElement
+    from ..sql.ddl import SchemaDropper
+    from ..sql.ddl import SchemaGenerator
+    from ..sql.functions import FunctionElement
+    from ..sql.schema import DefaultGenerator
+    from ..sql.schema import HasSchemaAttr
+    from ..sql.schema import SchemaItem
+    from ..sql.selectable import TypedReturnsRows
+
+
+_T = TypeVar("_T", bound=Any)
+_EMPTY_EXECUTION_OPTS: _ExecuteOptions = util.EMPTY_DICT
+NO_OPTIONS: Mapping[str, Any] = util.EMPTY_DICT
+
+
+class Connection(ConnectionEventsTarget, inspection.Inspectable["Inspector"]):
+    """Provides high-level functionality for a wrapped DB-API connection.
+
+    The :class:`_engine.Connection` object is procured by calling the
+    :meth:`_engine.Engine.connect` method of the :class:`_engine.Engine`
+    object, and provides services for execution of SQL statements as well
+    as transaction control.
+
+    The Connection object is **not** thread-safe. While a Connection can be
+    shared among threads using properly synchronized access, it is still
+    possible that the underlying DBAPI connection may not support shared
+    access between threads. Check the DBAPI documentation for details.
+
+    The Connection object represents a single DBAPI connection checked out
+    from the connection pool. In this state, the connection pool has no
+    effect upon the connection, including its expiration or timeout state.
+    For the connection pool to properly manage connections, connections
+    should be returned to the connection pool (i.e. ``connection.close()``)
+    whenever the connection is not in use.
+
+    .. index::
+      single: thread safety; Connection
+
+    """
+
+    dialect: Dialect
+    dispatch: dispatcher[ConnectionEventsTarget]
+
+    _sqla_logger_namespace = "sqlalchemy.engine.Connection"
+
+    # used by sqlalchemy.engine.util.TransactionalContext
+    _trans_context_manager: Optional[TransactionalContext] = None
+
+    # legacy as of 2.0, should be eventually deprecated and
+    # removed.
was used in the "pre_ping" recipe that's been in the docs + # a long time + should_close_with_result = False + + _dbapi_connection: Optional[PoolProxiedConnection] + + _execution_options: _ExecuteOptions + + _transaction: Optional[RootTransaction] + _nested_transaction: Optional[NestedTransaction] + + def __init__( + self, + engine: Engine, + connection: Optional[PoolProxiedConnection] = None, + _has_events: Optional[bool] = None, + _allow_revalidate: bool = True, + _allow_autobegin: bool = True, + ): + """Construct a new Connection.""" + self.engine = engine + self.dialect = dialect = engine.dialect + + if connection is None: + try: + self._dbapi_connection = engine.raw_connection() + except dialect.loaded_dbapi.Error as err: + Connection._handle_dbapi_exception_noconnection( + err, dialect, engine + ) + raise + else: + self._dbapi_connection = connection + + self._transaction = self._nested_transaction = None + self.__savepoint_seq = 0 + self.__in_begin = False + + self.__can_reconnect = _allow_revalidate + self._allow_autobegin = _allow_autobegin + self._echo = self.engine._should_log_info() + + if _has_events is None: + # if _has_events is sent explicitly as False, + # then don't join the dispatch of the engine; we don't + # want to handle any of the engine's events in that case. + self.dispatch = self.dispatch._join(engine.dispatch) + self._has_events = _has_events or ( + _has_events is None and engine._has_events + ) + + self._execution_options = engine._execution_options + + if self._has_events or self.engine._has_events: + self.dispatch.engine_connect(self) + + # this can be assigned differently via + # characteristics.LoggingTokenCharacteristic + _message_formatter: Any = None + + def _log_info(self, message: str, *arg: Any, **kw: Any) -> None: + fmt = self._message_formatter + + if fmt: + message = fmt(message) + + if log.STACKLEVEL: + kw["stacklevel"] = 1 + log.STACKLEVEL_OFFSET + + self.engine.logger.info(message, *arg, **kw) + + def _log_debug(self, message: str, *arg: Any, **kw: Any) -> None: + fmt = self._message_formatter + + if fmt: + message = fmt(message) + + if log.STACKLEVEL: + kw["stacklevel"] = 1 + log.STACKLEVEL_OFFSET + + self.engine.logger.debug(message, *arg, **kw) + + @property + def _schema_translate_map(self) -> Optional[SchemaTranslateMapType]: + schema_translate_map: Optional[SchemaTranslateMapType] = ( + self._execution_options.get("schema_translate_map", None) + ) + + return schema_translate_map + + def schema_for_object(self, obj: HasSchemaAttr) -> Optional[str]: + """Return the schema name for the given schema item taking into + account current schema translate map. 
+ + """ + + name = obj.schema + schema_translate_map: Optional[SchemaTranslateMapType] = ( + self._execution_options.get("schema_translate_map", None) + ) + + if ( + schema_translate_map + and name in schema_translate_map + and obj._use_schema_map + ): + return schema_translate_map[name] + else: + return name + + def __enter__(self) -> Connection: + return self + + def __exit__(self, type_: Any, value: Any, traceback: Any) -> None: + self.close() + + @overload + def execution_options( + self, + *, + compiled_cache: Optional[CompiledCacheType] = ..., + logging_token: str = ..., + isolation_level: IsolationLevel = ..., + no_parameters: bool = False, + stream_results: bool = False, + max_row_buffer: int = ..., + yield_per: int = ..., + insertmanyvalues_page_size: int = ..., + schema_translate_map: Optional[SchemaTranslateMapType] = ..., + preserve_rowcount: bool = False, + **opt: Any, + ) -> Connection: ... + + @overload + def execution_options(self, **opt: Any) -> Connection: ... + + def execution_options(self, **opt: Any) -> Connection: + r"""Set non-SQL options for the connection which take effect + during execution. + + This method modifies this :class:`_engine.Connection` **in-place**; + the return value is the same :class:`_engine.Connection` object + upon which the method is called. Note that this is in contrast + to the behavior of the ``execution_options`` methods on other + objects such as :meth:`_engine.Engine.execution_options` and + :meth:`_sql.Executable.execution_options`. The rationale is that many + such execution options necessarily modify the state of the base + DBAPI connection in any case so there is no feasible means of + keeping the effect of such an option localized to a "sub" connection. + + .. versionchanged:: 2.0 The :meth:`_engine.Connection.execution_options` + method, in contrast to other objects with this method, modifies + the connection in-place without creating copy of it. + + As discussed elsewhere, the :meth:`_engine.Connection.execution_options` + method accepts any arbitrary parameters including user defined names. + All parameters given are consumable in a number of ways including + by using the :meth:`_engine.Connection.get_execution_options` method. + See the examples at :meth:`_sql.Executable.execution_options` + and :meth:`_engine.Engine.execution_options`. + + The keywords that are currently recognized by SQLAlchemy itself + include all those listed under :meth:`.Executable.execution_options`, + as well as others that are specific to :class:`_engine.Connection`. + + :param compiled_cache: Available on: :class:`_engine.Connection`, + :class:`_engine.Engine`. + + A dictionary where :class:`.Compiled` objects + will be cached when the :class:`_engine.Connection` + compiles a clause + expression into a :class:`.Compiled` object. This dictionary will + supersede the statement cache that may be configured on the + :class:`_engine.Engine` itself. If set to None, caching + is disabled, even if the engine has a configured cache size. + + Note that the ORM makes use of its own "compiled" caches for + some operations, including flush operations. The caching + used by the ORM internally supersedes a cache dictionary + specified here. + + :param logging_token: Available on: :class:`_engine.Connection`, + :class:`_engine.Engine`, :class:`_sql.Executable`. + + Adds the specified string token surrounded by brackets in log + messages logged by the connection, i.e. 
the logging that's enabled + either via the :paramref:`_sa.create_engine.echo` flag or via the + ``logging.getLogger("sqlalchemy.engine")`` logger. This allows a + per-connection or per-sub-engine token to be available which is + useful for debugging concurrent connection scenarios. + + .. versionadded:: 1.4.0b2 + + .. seealso:: + + :ref:`dbengine_logging_tokens` - usage example + + :paramref:`_sa.create_engine.logging_name` - adds a name to the + name used by the Python logger object itself. + + :param isolation_level: Available on: :class:`_engine.Connection`, + :class:`_engine.Engine`. + + Set the transaction isolation level for the lifespan of this + :class:`_engine.Connection` object. + Valid values include those string + values accepted by the :paramref:`_sa.create_engine.isolation_level` + parameter passed to :func:`_sa.create_engine`. These levels are + semi-database specific; see individual dialect documentation for + valid levels. + + The isolation level option applies the isolation level by emitting + statements on the DBAPI connection, and **necessarily affects the + original Connection object overall**. The isolation level will remain + at the given setting until explicitly changed, or when the DBAPI + connection itself is :term:`released` to the connection pool, i.e. the + :meth:`_engine.Connection.close` method is called, at which time an + event handler will emit additional statements on the DBAPI connection + in order to revert the isolation level change. + + .. note:: The ``isolation_level`` execution option may only be + established before the :meth:`_engine.Connection.begin` method is + called, as well as before any SQL statements are emitted which + would otherwise trigger "autobegin", or directly after a call to + :meth:`_engine.Connection.commit` or + :meth:`_engine.Connection.rollback`. A database cannot change the + isolation level on a transaction in progress. + + .. note:: The ``isolation_level`` execution option is implicitly + reset if the :class:`_engine.Connection` is invalidated, e.g. via + the :meth:`_engine.Connection.invalidate` method, or if a + disconnection error occurs. The new connection produced after the + invalidation will **not** have the selected isolation level + re-applied to it automatically. + + .. seealso:: + + :ref:`dbapi_autocommit` + + :meth:`_engine.Connection.get_isolation_level` + - view current actual level + + :param no_parameters: Available on: :class:`_engine.Connection`, + :class:`_sql.Executable`. + + When ``True``, if the final parameter + list or dictionary is totally empty, will invoke the + statement on the cursor as ``cursor.execute(statement)``, + not passing the parameter collection at all. + Some DBAPIs such as psycopg2 and mysql-python consider + percent signs as significant only when parameters are + present; this option allows code to generate SQL + containing percent signs (and possibly other characters) + that is neutral regarding whether it's executed by the DBAPI + or piped into a script that's later invoked by + command line tools. + + :param stream_results: Available on: :class:`_engine.Connection`, + :class:`_sql.Executable`. + + Indicate to the dialect that results should be + "streamed" and not pre-buffered, if possible. For backends + such as PostgreSQL, MySQL and MariaDB, this indicates the use of + a "server side cursor" as opposed to a client side cursor. + Other backends such as that of Oracle may already use server + side cursors by default. 
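+
+          For example, a minimal sketch of streaming a large result in
+          batches (``big_table`` and ``process()`` are placeholders here,
+          and ``text`` is assumed imported from ``sqlalchemy``)::
+
+              with engine.connect() as conn:
+                  result = conn.execution_options(
+                      stream_results=True
+                  ).execute(text("SELECT * FROM big_table"))
+                  for partition in result.partitions(100):
+                      process(partition)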
+
+          The usage of
+          :paramref:`_engine.Connection.execution_options.stream_results` is
+          usually combined with setting a fixed number of rows to be fetched
+          in batches, to allow for efficient iteration of database rows while
+          at the same time not loading all result rows into memory at once;
+          this can be configured on a :class:`_engine.Result` object using the
+          :meth:`_engine.Result.yield_per` method, after execution has
+          returned a new :class:`_engine.Result`. If
+          :meth:`_engine.Result.yield_per` is not used,
+          the :paramref:`_engine.Connection.execution_options.stream_results`
+          mode of operation will instead use a dynamically sized buffer
+          which buffers sets of rows at a time, growing on each batch
+          based on a fixed growth size up until a limit which may
+          be configured using the
+          :paramref:`_engine.Connection.execution_options.max_row_buffer`
+          parameter.
+
+          When using the ORM to fetch ORM mapped objects from a result,
+          :meth:`_engine.Result.yield_per` should always be used with
+          :paramref:`_engine.Connection.execution_options.stream_results`,
+          so that the ORM does not fetch all rows into new ORM objects at once.
+
+          For typical use, the
+          :paramref:`_engine.Connection.execution_options.yield_per` execution
+          option should be preferred, which sets up both
+          :paramref:`_engine.Connection.execution_options.stream_results` and
+          :meth:`_engine.Result.yield_per` at once. This option is supported
+          both at a core level by :class:`_engine.Connection` as well as by the
+          ORM :class:`_engine.Session`; the latter is described at
+          :ref:`orm_queryguide_yield_per`.
+
+          .. seealso::
+
+            :ref:`engine_stream_results` - background on
+            :paramref:`_engine.Connection.execution_options.stream_results`
+
+            :paramref:`_engine.Connection.execution_options.max_row_buffer`
+
+            :paramref:`_engine.Connection.execution_options.yield_per`
+
+            :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+            describing the ORM version of ``yield_per``
+
+        :param max_row_buffer: Available on: :class:`_engine.Connection`,
+          :class:`_sql.Executable`. Sets a maximum
+          buffer size to use when the
+          :paramref:`_engine.Connection.execution_options.stream_results`
+          execution option is used on a backend that supports server side
+          cursors. The default value if not specified is 1000.
+
+          .. seealso::
+
+            :paramref:`_engine.Connection.execution_options.stream_results`
+
+            :ref:`engine_stream_results`
+
+
+        :param yield_per: Available on: :class:`_engine.Connection`,
+          :class:`_sql.Executable`. Integer value applied which will
+          set the :paramref:`_engine.Connection.execution_options.stream_results`
+          execution option and invoke :meth:`_engine.Result.yield_per`
+          automatically at once. Allows equivalent functionality as
+          is present when using this parameter with the ORM.
+
+          .. versionadded:: 1.4.40
+
+          .. seealso::
+
+            :ref:`engine_stream_results` - background and examples
+            on using server side cursors with Core.
+
+            :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+            describing the ORM version of ``yield_per``
+
+        :param insertmanyvalues_page_size: Available on: :class:`_engine.Connection`,
+          :class:`_engine.Engine`. Number of rows to format into an
+          INSERT statement when the statement uses "insertmanyvalues" mode,
+          which is a paged form of bulk insert that is used for many backends
+          when using :term:`executemany` execution typically in conjunction
+          with RETURNING. Defaults to 1000.
May also be modified on a + per-engine basis using the + :paramref:`_sa.create_engine.insertmanyvalues_page_size` parameter. + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`engine_insertmanyvalues` + + :param schema_translate_map: Available on: :class:`_engine.Connection`, + :class:`_engine.Engine`, :class:`_sql.Executable`. + + A dictionary mapping schema names to schema names, that will be + applied to the :paramref:`_schema.Table.schema` element of each + :class:`_schema.Table` + encountered when SQL or DDL expression elements + are compiled into strings; the resulting schema name will be + converted based on presence in the map of the original name. + + .. seealso:: + + :ref:`schema_translating` + + :param preserve_rowcount: Boolean; when True, the ``cursor.rowcount`` + attribute will be unconditionally memoized within the result and + made available via the :attr:`.CursorResult.rowcount` attribute. + Normally, this attribute is only preserved for UPDATE and DELETE + statements. Using this option, the DBAPIs rowcount value can + be accessed for other kinds of statements such as INSERT and SELECT, + to the degree that the DBAPI supports these statements. See + :attr:`.CursorResult.rowcount` for notes regarding the behavior + of this attribute. + + .. versionadded:: 2.0.28 + + .. seealso:: + + :meth:`_engine.Engine.execution_options` + + :meth:`.Executable.execution_options` + + :meth:`_engine.Connection.get_execution_options` + + :ref:`orm_queryguide_execution_options` - documentation on all + ORM-specific execution options + + """ # noqa + if self._has_events or self.engine._has_events: + self.dispatch.set_connection_execution_options(self, opt) + self._execution_options = self._execution_options.union(opt) + self.dialect.set_connection_execution_options(self, opt) + return self + + def get_execution_options(self) -> _ExecuteOptions: + """Get the non-SQL options which will take effect during execution. + + .. versionadded:: 1.3 + + .. seealso:: + + :meth:`_engine.Connection.execution_options` + """ + return self._execution_options + + @property + def _still_open_and_dbapi_connection_is_valid(self) -> bool: + pool_proxied_connection = self._dbapi_connection + return ( + pool_proxied_connection is not None + and pool_proxied_connection.is_valid + ) + + @property + def closed(self) -> bool: + """Return True if this connection is closed.""" + + return self._dbapi_connection is None and not self.__can_reconnect + + @property + def invalidated(self) -> bool: + """Return True if this connection was invalidated. + + This does not indicate whether or not the connection was + invalidated at the pool level, however + + """ + + # prior to 1.4, "invalid" was stored as a state independent of + # "closed", meaning an invalidated connection could be "closed", + # the _dbapi_connection would be None and closed=True, yet the + # "invalid" flag would stay True. This meant that there were + # three separate states (open/valid, closed/valid, closed/invalid) + # when there is really no reason for that; a connection that's + # "closed" does not need to be "invalid". So the state is now + # represented by the two facts alone. + + pool_proxied_connection = self._dbapi_connection + return pool_proxied_connection is None and self.__can_reconnect + + @property + def connection(self) -> PoolProxiedConnection: + """The underlying DB-API connection managed by this Connection. 
+ + This is a SQLAlchemy connection-pool proxied connection + which then has the attribute + :attr:`_pool._ConnectionFairy.dbapi_connection` that refers to the + actual driver connection. + + .. seealso:: + + + :ref:`dbapi_connections` + + """ + + if self._dbapi_connection is None: + try: + return self._revalidate_connection() + except (exc.PendingRollbackError, exc.ResourceClosedError): + raise + except BaseException as e: + self._handle_dbapi_exception(e, None, None, None, None) + else: + return self._dbapi_connection + + def get_isolation_level(self) -> IsolationLevel: + """Return the current **actual** isolation level that's present on + the database within the scope of this connection. + + This attribute will perform a live SQL operation against the database + in order to procure the current isolation level, so the value returned + is the actual level on the underlying DBAPI connection regardless of + how this state was set. This will be one of the four actual isolation + modes ``READ UNCOMMITTED``, ``READ COMMITTED``, ``REPEATABLE READ``, + ``SERIALIZABLE``. It will **not** include the ``AUTOCOMMIT`` isolation + level setting. Third party dialects may also feature additional + isolation level settings. + + .. note:: This method **will not report** on the ``AUTOCOMMIT`` + isolation level, which is a separate :term:`dbapi` setting that's + independent of **actual** isolation level. When ``AUTOCOMMIT`` is + in use, the database connection still has a "traditional" isolation + mode in effect, that is typically one of the four values + ``READ UNCOMMITTED``, ``READ COMMITTED``, ``REPEATABLE READ``, + ``SERIALIZABLE``. + + Compare to the :attr:`_engine.Connection.default_isolation_level` + accessor which returns the isolation level that is present on the + database at initial connection time. + + .. seealso:: + + :attr:`_engine.Connection.default_isolation_level` + - view default level + + :paramref:`_sa.create_engine.isolation_level` + - set per :class:`_engine.Engine` isolation level + + :paramref:`.Connection.execution_options.isolation_level` + - set per :class:`_engine.Connection` isolation level + + """ + dbapi_connection = self.connection.dbapi_connection + assert dbapi_connection is not None + try: + return self.dialect.get_isolation_level(dbapi_connection) + except BaseException as e: + self._handle_dbapi_exception(e, None, None, None, None) + + @property + def default_isolation_level(self) -> Optional[IsolationLevel]: + """The initial-connection time isolation level associated with the + :class:`_engine.Dialect` in use. + + This value is independent of the + :paramref:`.Connection.execution_options.isolation_level` and + :paramref:`.Engine.execution_options.isolation_level` execution + options, and is determined by the :class:`_engine.Dialect` when the + first connection is created, by performing a SQL query against the + database for the current isolation level before any additional commands + have been emitted. + + Calling this accessor does not invoke any new SQL queries. + + .. 
seealso:: + + :meth:`_engine.Connection.get_isolation_level` + - view current actual isolation level + + :paramref:`_sa.create_engine.isolation_level` + - set per :class:`_engine.Engine` isolation level + + :paramref:`.Connection.execution_options.isolation_level` + - set per :class:`_engine.Connection` isolation level + + """ + return self.dialect.default_isolation_level + + def _invalid_transaction(self) -> NoReturn: + raise exc.PendingRollbackError( + "Can't reconnect until invalid %stransaction is rolled " + "back. Please rollback() fully before proceeding" + % ("savepoint " if self._nested_transaction is not None else ""), + code="8s2b", + ) + + def _revalidate_connection(self) -> PoolProxiedConnection: + if self.__can_reconnect and self.invalidated: + if self._transaction is not None: + self._invalid_transaction() + self._dbapi_connection = self.engine.raw_connection() + return self._dbapi_connection + raise exc.ResourceClosedError("This Connection is closed") + + @property + def info(self) -> _InfoType: + """Info dictionary associated with the underlying DBAPI connection + referred to by this :class:`_engine.Connection`, allowing user-defined + data to be associated with the connection. + + The data here will follow along with the DBAPI connection including + after it is returned to the connection pool and used again + in subsequent instances of :class:`_engine.Connection`. + + """ + + return self.connection.info + + def invalidate(self, exception: Optional[BaseException] = None) -> None: + """Invalidate the underlying DBAPI connection associated with + this :class:`_engine.Connection`. + + An attempt will be made to close the underlying DBAPI connection + immediately; however if this operation fails, the error is logged + but not raised. The connection is then discarded whether or not + close() succeeded. + + Upon the next use (where "use" typically means using the + :meth:`_engine.Connection.execute` method or similar), + this :class:`_engine.Connection` will attempt to + procure a new DBAPI connection using the services of the + :class:`_pool.Pool` as a source of connectivity (e.g. + a "reconnection"). + + If a transaction was in progress (e.g. the + :meth:`_engine.Connection.begin` method has been called) when + :meth:`_engine.Connection.invalidate` method is called, at the DBAPI + level all state associated with this transaction is lost, as + the DBAPI connection is closed. The :class:`_engine.Connection` + will not allow a reconnection to proceed until the + :class:`.Transaction` object is ended, by calling the + :meth:`.Transaction.rollback` method; until that point, any attempt at + continuing to use the :class:`_engine.Connection` will raise an + :class:`~sqlalchemy.exc.InvalidRequestError`. + This is to prevent applications from accidentally + continuing an ongoing transactional operations despite the + fact that the transaction has been lost due to an + invalidation. + + The :meth:`_engine.Connection.invalidate` method, + just like auto-invalidation, + will at the connection pool level invoke the + :meth:`_events.PoolEvents.invalidate` event. + + :param exception: an optional ``Exception`` instance that's the + reason for the invalidation. is passed along to event handlers + and logging functions. + + .. 
seealso:: + + :ref:`pool_connection_invalidation` + + """ + + if self.invalidated: + return + + if self.closed: + raise exc.ResourceClosedError("This Connection is closed") + + if self._still_open_and_dbapi_connection_is_valid: + pool_proxied_connection = self._dbapi_connection + assert pool_proxied_connection is not None + pool_proxied_connection.invalidate(exception) + + self._dbapi_connection = None + + def detach(self) -> None: + """Detach the underlying DB-API connection from its connection pool. + + E.g.:: + + with engine.connect() as conn: + conn.detach() + conn.execute(text("SET search_path TO schema1, schema2")) + + # work with connection + + # connection is fully closed (since we used "with:", can + # also call .close()) + + This :class:`_engine.Connection` instance will remain usable. + When closed + (or exited from a context manager context as above), + the DB-API connection will be literally closed and not + returned to its originating pool. + + This method can be used to insulate the rest of an application + from a modified state on a connection (such as a transaction + isolation level or similar). + + """ + + if self.closed: + raise exc.ResourceClosedError("This Connection is closed") + + pool_proxied_connection = self._dbapi_connection + if pool_proxied_connection is None: + raise exc.InvalidRequestError( + "Can't detach an invalidated Connection" + ) + pool_proxied_connection.detach() + + def _autobegin(self) -> None: + if self._allow_autobegin and not self.__in_begin: + self.begin() + + def begin(self) -> RootTransaction: + """Begin a transaction prior to autobegin occurring. + + E.g.:: + + with engine.connect() as conn: + with conn.begin() as trans: + conn.execute(table.insert(), {"username": "sandy"}) + + + The returned object is an instance of :class:`_engine.RootTransaction`. + This object represents the "scope" of the transaction, + which completes when either the :meth:`_engine.Transaction.rollback` + or :meth:`_engine.Transaction.commit` method is called; the object + also works as a context manager as illustrated above. + + The :meth:`_engine.Connection.begin` method begins a + transaction that normally will be begun in any case when the connection + is first used to execute a statement. The reason this method might be + used would be to invoke the :meth:`_events.ConnectionEvents.begin` + event at a specific time, or to organize code within the scope of a + connection checkout in terms of context managed blocks, such as:: + + with engine.connect() as conn: + with conn.begin(): + conn.execute(...) + conn.execute(...) + + with conn.begin(): + conn.execute(...) + conn.execute(...) + + The above code is not fundamentally any different in its behavior than + the following code which does not use + :meth:`_engine.Connection.begin`; the below style is known + as "commit as you go" style:: + + with engine.connect() as conn: + conn.execute(...) + conn.execute(...) + conn.commit() + + conn.execute(...) + conn.execute(...) + conn.commit() + + From a database point of view, the :meth:`_engine.Connection.begin` + method does not emit any SQL or change the state of the underlying + DBAPI connection in any way; the Python DBAPI does not have any + concept of explicit transaction begin. + + .. 
seealso::
+
+            :ref:`tutorial_working_with_transactions` - in the
+            :ref:`unified_tutorial`
+
+            :meth:`_engine.Connection.begin_nested` - use a SAVEPOINT
+
+            :meth:`_engine.Connection.begin_twophase` -
+            use a two phase /XID transaction
+
+            :meth:`_engine.Engine.begin` - context manager available from
+            :class:`_engine.Engine`
+
+        """
+        if self._transaction is None:
+            self._transaction = RootTransaction(self)
+            return self._transaction
+        else:
+            raise exc.InvalidRequestError(
+                "This connection has already initialized a SQLAlchemy "
+                "Transaction() object via begin() or autobegin; can't "
+                "call begin() here unless rollback() or commit() "
+                "is called first."
+            )
+
+    def begin_nested(self) -> NestedTransaction:
+        """Begin a nested transaction (i.e. SAVEPOINT) and return a transaction
+        handle that controls the scope of the SAVEPOINT.
+
+        E.g.::
+
+            with engine.begin() as connection:
+                with connection.begin_nested():
+                    connection.execute(table.insert(), {"username": "sandy"})
+
+        The returned object is an instance of
+        :class:`_engine.NestedTransaction`, which includes transactional
+        methods :meth:`_engine.NestedTransaction.commit` and
+        :meth:`_engine.NestedTransaction.rollback`; for a nested transaction,
+        these methods correspond to the operations "RELEASE SAVEPOINT <name>"
+        and "ROLLBACK TO SAVEPOINT <name>". The name of the savepoint is local
+        to the :class:`_engine.NestedTransaction` object and is generated
+        automatically. Like any other :class:`_engine.Transaction`, the
+        :class:`_engine.NestedTransaction` may be used as a context manager as
+        illustrated above which will "release" or "rollback" corresponding to
+        if the operation within the block were successful or raised an
+        exception.
+
+        Nested transactions require SAVEPOINT support in the underlying
+        database, else the behavior is undefined. SAVEPOINT is commonly used to
+        run operations within a transaction that may fail, while continuing the
+        outer transaction. E.g.::
+
+            from sqlalchemy import exc
+
+            with engine.begin() as connection:
+                trans = connection.begin_nested()
+                try:
+                    connection.execute(table.insert(), {"username": "sandy"})
+                    trans.commit()
+                except exc.IntegrityError:  # catch for duplicate username
+                    trans.rollback()  # rollback to savepoint
+
+                # outer transaction continues
+                connection.execute( ... )
+
+        If :meth:`_engine.Connection.begin_nested` is called without first
+        calling :meth:`_engine.Connection.begin` or
+        :meth:`_engine.Engine.begin`, the :class:`_engine.Connection` object
+        will "autobegin" the outer transaction first. This outer transaction
+        may be committed using "commit-as-you-go" style, e.g.::
+
+            with engine.connect() as connection:  # begin() wasn't called
+
+                with connection.begin_nested():  # will auto-"begin()" first
+                    connection.execute( ... )
+                # savepoint is released
+
+                connection.execute( ... )
+
+                # explicitly commit outer transaction
+                connection.commit()
+
+                # can continue working with connection here
+
+        .. versionchanged:: 2.0
+
+            :meth:`_engine.Connection.begin_nested` will now participate
+            in the connection "autobegin" behavior that is new as of
+            2.0 / "future" style connections in 1.4.
+
+        .. seealso::
+
+            :meth:`_engine.Connection.begin`
+
+            :ref:`session_begin_nested` - ORM support for SAVEPOINT
+
+        """
+        if self._transaction is None:
+            self._autobegin()
+
+        return NestedTransaction(self)
+
+    def begin_twophase(self, xid: Optional[Any] = None) -> TwoPhaseTransaction:
+        """Begin a two-phase or XA transaction and return a transaction
+        handle.
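+
+        E.g. (a sketch; ``table`` is a placeholder)::
+
+            with engine.connect() as conn:
+                xa = conn.begin_twophase()
+                conn.execute(table.insert(), {"username": "sandy"})
+                xa.prepare()
+                xa.commit()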
+
+        The returned object is an instance of :class:`.TwoPhaseTransaction`,
+        which in addition to the methods provided by
+        :class:`.Transaction`, also provides a
+        :meth:`~.TwoPhaseTransaction.prepare` method.
+
+        :param xid: the two-phase transaction id. If not supplied, a
+          random id will be generated.
+
+        .. seealso::
+
+            :meth:`_engine.Connection.begin`
+
+            :meth:`_engine.Connection.begin_nested`
+
+        """
+
+        if self._transaction is not None:
+            raise exc.InvalidRequestError(
+                "Cannot start a two phase transaction when a transaction "
+                "is already in progress."
+            )
+        if xid is None:
+            xid = self.engine.dialect.create_xid()
+        return TwoPhaseTransaction(self, xid)
+
+    def commit(self) -> None:
+        """Commit the transaction that is currently in progress.
+
+        This method commits the current transaction if one has been started.
+        If no transaction was started, the method has no effect, assuming
+        the connection is in a non-invalidated state.
+
+        A transaction is begun on a :class:`_engine.Connection` automatically
+        whenever a statement is first executed, or when the
+        :meth:`_engine.Connection.begin` method is called.
+
+        .. note:: The :meth:`_engine.Connection.commit` method only acts upon
+            the primary database transaction that is linked to the
+            :class:`_engine.Connection` object. It does not operate upon a
+            SAVEPOINT that would have been invoked from the
+            :meth:`_engine.Connection.begin_nested` method; for control of a
+            SAVEPOINT, call :meth:`_engine.NestedTransaction.commit` on the
+            :class:`_engine.NestedTransaction` that is returned by the
+            :meth:`_engine.Connection.begin_nested` method itself.
+
+
+        """
+        if self._transaction:
+            self._transaction.commit()
+
+    def rollback(self) -> None:
+        """Roll back the transaction that is currently in progress.
+
+        This method rolls back the current transaction if one has been
+        started. If no transaction was started, the method has no effect.
+        If a transaction was started and the connection is in an invalidated
+        state, the transaction is cleared using this method.
+
+        A transaction is begun on a :class:`_engine.Connection` automatically
+        whenever a statement is first executed, or when the
+        :meth:`_engine.Connection.begin` method is called.
+
+        .. note:: The :meth:`_engine.Connection.rollback` method only acts
+            upon the primary database transaction that is linked to the
+            :class:`_engine.Connection` object. It does not operate upon a
+            SAVEPOINT that would have been invoked from the
+            :meth:`_engine.Connection.begin_nested` method; for control of a
+            SAVEPOINT, call :meth:`_engine.NestedTransaction.rollback` on the
+            :class:`_engine.NestedTransaction` that is returned by the
+            :meth:`_engine.Connection.begin_nested` method itself.
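+
+        E.g., a "commit as you go" sketch with rollback on error (assuming
+        ``table`` is an existing :class:`_schema.Table`)::
+
+            with engine.connect() as conn:
+                try:
+                    conn.execute(table.insert(), {"username": "sandy"})
+                    conn.commit()
+                except Exception:
+                    conn.rollback()
+                    raise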
+
+
+        """
+        if self._transaction:
+            self._transaction.rollback()
+
+    def recover_twophase(self) -> List[Any]:
+        return self.engine.dialect.do_recover_twophase(self)
+
+    def rollback_prepared(self, xid: Any, recover: bool = False) -> None:
+        self.engine.dialect.do_rollback_twophase(self, xid, recover=recover)
+
+    def commit_prepared(self, xid: Any, recover: bool = False) -> None:
+        self.engine.dialect.do_commit_twophase(self, xid, recover=recover)
+
+    def in_transaction(self) -> bool:
+        """Return True if a transaction is in progress."""
+        return self._transaction is not None and self._transaction.is_active
+
+    def in_nested_transaction(self) -> bool:
+        """Return True if a nested transaction is in progress."""
+        return (
+            self._nested_transaction is not None
+            and self._nested_transaction.is_active
+        )
+
+    def _is_autocommit_isolation(self) -> bool:
+        opt_iso = self._execution_options.get("isolation_level", None)
+        return bool(
+            opt_iso == "AUTOCOMMIT"
+            or (
+                opt_iso is None
+                and self.engine.dialect._on_connect_isolation_level
+                == "AUTOCOMMIT"
+            )
+        )
+
+    def _get_required_transaction(self) -> RootTransaction:
+        trans = self._transaction
+        if trans is None:
+            raise exc.InvalidRequestError("connection is not in a transaction")
+        return trans
+
+    def _get_required_nested_transaction(self) -> NestedTransaction:
+        trans = self._nested_transaction
+        if trans is None:
+            raise exc.InvalidRequestError(
+                "connection is not in a nested transaction"
+            )
+        return trans
+
+    def get_transaction(self) -> Optional[RootTransaction]:
+        """Return the current root transaction in progress, if any.
+
+        .. versionadded:: 1.4
+
+        """
+
+        return self._transaction
+
+    def get_nested_transaction(self) -> Optional[NestedTransaction]:
+        """Return the current nested transaction in progress, if any.
+
+        .. 
versionadded:: 1.4 + + """ + return self._nested_transaction + + def _begin_impl(self, transaction: RootTransaction) -> None: + if self._echo: + if self._is_autocommit_isolation(): + self._log_info( + "BEGIN (implicit; DBAPI should not BEGIN due to " + "autocommit mode)" + ) + else: + self._log_info("BEGIN (implicit)") + + self.__in_begin = True + + if self._has_events or self.engine._has_events: + self.dispatch.begin(self) + + try: + self.engine.dialect.do_begin(self.connection) + except BaseException as e: + self._handle_dbapi_exception(e, None, None, None, None) + finally: + self.__in_begin = False + + def _rollback_impl(self) -> None: + if self._has_events or self.engine._has_events: + self.dispatch.rollback(self) + + if self._still_open_and_dbapi_connection_is_valid: + if self._echo: + if self._is_autocommit_isolation(): + self._log_info( + "ROLLBACK using DBAPI connection.rollback(), " + "DBAPI should ignore due to autocommit mode" + ) + else: + self._log_info("ROLLBACK") + try: + self.engine.dialect.do_rollback(self.connection) + except BaseException as e: + self._handle_dbapi_exception(e, None, None, None, None) + + def _commit_impl(self) -> None: + if self._has_events or self.engine._has_events: + self.dispatch.commit(self) + + if self._echo: + if self._is_autocommit_isolation(): + self._log_info( + "COMMIT using DBAPI connection.commit(), " + "DBAPI should ignore due to autocommit mode" + ) + else: + self._log_info("COMMIT") + try: + self.engine.dialect.do_commit(self.connection) + except BaseException as e: + self._handle_dbapi_exception(e, None, None, None, None) + + def _savepoint_impl(self, name: Optional[str] = None) -> str: + if self._has_events or self.engine._has_events: + self.dispatch.savepoint(self, name) + + if name is None: + self.__savepoint_seq += 1 + name = "sa_savepoint_%s" % self.__savepoint_seq + self.engine.dialect.do_savepoint(self, name) + return name + + def _rollback_to_savepoint_impl(self, name: str) -> None: + if self._has_events or self.engine._has_events: + self.dispatch.rollback_savepoint(self, name, None) + + if self._still_open_and_dbapi_connection_is_valid: + self.engine.dialect.do_rollback_to_savepoint(self, name) + + def _release_savepoint_impl(self, name: str) -> None: + if self._has_events or self.engine._has_events: + self.dispatch.release_savepoint(self, name, None) + + self.engine.dialect.do_release_savepoint(self, name) + + def _begin_twophase_impl(self, transaction: TwoPhaseTransaction) -> None: + if self._echo: + self._log_info("BEGIN TWOPHASE (implicit)") + if self._has_events or self.engine._has_events: + self.dispatch.begin_twophase(self, transaction.xid) + + self.__in_begin = True + try: + self.engine.dialect.do_begin_twophase(self, transaction.xid) + except BaseException as e: + self._handle_dbapi_exception(e, None, None, None, None) + finally: + self.__in_begin = False + + def _prepare_twophase_impl(self, xid: Any) -> None: + if self._has_events or self.engine._has_events: + self.dispatch.prepare_twophase(self, xid) + + assert isinstance(self._transaction, TwoPhaseTransaction) + try: + self.engine.dialect.do_prepare_twophase(self, xid) + except BaseException as e: + self._handle_dbapi_exception(e, None, None, None, None) + + def _rollback_twophase_impl(self, xid: Any, is_prepared: bool) -> None: + if self._has_events or self.engine._has_events: + self.dispatch.rollback_twophase(self, xid, is_prepared) + + if self._still_open_and_dbapi_connection_is_valid: + assert isinstance(self._transaction, TwoPhaseTransaction) + try: + 
self.engine.dialect.do_rollback_twophase( + self, xid, is_prepared + ) + except BaseException as e: + self._handle_dbapi_exception(e, None, None, None, None) + + def _commit_twophase_impl(self, xid: Any, is_prepared: bool) -> None: + if self._has_events or self.engine._has_events: + self.dispatch.commit_twophase(self, xid, is_prepared) + + assert isinstance(self._transaction, TwoPhaseTransaction) + try: + self.engine.dialect.do_commit_twophase(self, xid, is_prepared) + except BaseException as e: + self._handle_dbapi_exception(e, None, None, None, None) + + def close(self) -> None: + """Close this :class:`_engine.Connection`. + + This results in a release of the underlying database + resources, that is, the DBAPI connection referenced + internally. The DBAPI connection is typically restored + back to the connection-holding :class:`_pool.Pool` referenced + by the :class:`_engine.Engine` that produced this + :class:`_engine.Connection`. Any transactional state present on + the DBAPI connection is also unconditionally released via + the DBAPI connection's ``rollback()`` method, regardless + of any :class:`.Transaction` object that may be + outstanding with regards to this :class:`_engine.Connection`. + + This has the effect of also calling :meth:`_engine.Connection.rollback` + if any transaction is in place. + + After :meth:`_engine.Connection.close` is called, the + :class:`_engine.Connection` is permanently in a closed state, + and will allow no further operations. + + """ + + if self._transaction: + self._transaction.close() + skip_reset = True + else: + skip_reset = False + + if self._dbapi_connection is not None: + conn = self._dbapi_connection + + # as we just closed the transaction, close the connection + # pool connection without doing an additional reset + if skip_reset: + cast("_ConnectionFairy", conn)._close_special( + transaction_reset=True + ) + else: + conn.close() + + # There is a slight chance that conn.close() may have + # triggered an invalidation here in which case + # _dbapi_connection would already be None, however usually + # it will be non-None here and in a "closed" state. + self._dbapi_connection = None + self.__can_reconnect = False + + @overload + def scalar( + self, + statement: TypedReturnsRows[Tuple[_T]], + parameters: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> Optional[_T]: ... + + @overload + def scalar( + self, + statement: Executable, + parameters: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> Any: ... + + def scalar( + self, + statement: Executable, + parameters: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> Any: + r"""Executes a SQL statement construct and returns a scalar object. + + This method is shorthand for invoking the + :meth:`_engine.Result.scalar` method after invoking the + :meth:`_engine.Connection.execute` method. Parameters are equivalent. + + :return: a scalar Python value representing the first column of the + first row returned. 
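+
+        E.g., a sketch (assuming ``user_table`` is an existing
+        :class:`_schema.Table`)::
+
+            from sqlalchemy import func, select
+
+            count = conn.scalar(
+                select(func.count()).select_from(user_table)
+            )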
+ + """ + distilled_parameters = _distill_params_20(parameters) + try: + meth = statement._execute_on_scalar + except AttributeError as err: + raise exc.ObjectNotExecutableError(statement) from err + else: + return meth( + self, + distilled_parameters, + execution_options or NO_OPTIONS, + ) + + @overload + def scalars( + self, + statement: TypedReturnsRows[Tuple[_T]], + parameters: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> ScalarResult[_T]: ... + + @overload + def scalars( + self, + statement: Executable, + parameters: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> ScalarResult[Any]: ... + + def scalars( + self, + statement: Executable, + parameters: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> ScalarResult[Any]: + """Executes and returns a scalar result set, which yields scalar values + from the first column of each row. + + This method is equivalent to calling :meth:`_engine.Connection.execute` + to receive a :class:`_result.Result` object, then invoking the + :meth:`_result.Result.scalars` method to produce a + :class:`_result.ScalarResult` instance. + + :return: a :class:`_result.ScalarResult` + + .. versionadded:: 1.4.24 + + """ + + return self.execute( + statement, parameters, execution_options=execution_options + ).scalars() + + @overload + def execute( + self, + statement: TypedReturnsRows[_T], + parameters: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> CursorResult[_T]: ... + + @overload + def execute( + self, + statement: Executable, + parameters: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> CursorResult[Any]: ... + + def execute( + self, + statement: Executable, + parameters: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> CursorResult[Any]: + r"""Executes a SQL statement construct and returns a + :class:`_engine.CursorResult`. + + :param statement: The statement to be executed. This is always + an object that is in both the :class:`_expression.ClauseElement` and + :class:`_expression.Executable` hierarchies, including: + + * :class:`_expression.Select` + * :class:`_expression.Insert`, :class:`_expression.Update`, + :class:`_expression.Delete` + * :class:`_expression.TextClause` and + :class:`_expression.TextualSelect` + * :class:`_schema.DDL` and objects which inherit from + :class:`_schema.ExecutableDDLElement` + + :param parameters: parameters which will be bound into the statement. + This may be either a dictionary of parameter names to values, + or a mutable sequence (e.g. a list) of dictionaries. When a + list of dictionaries is passed, the underlying statement execution + will make use of the DBAPI ``cursor.executemany()`` method. + When a single dictionary is passed, the DBAPI ``cursor.execute()`` + method will be used. + + :param execution_options: optional dictionary of execution options, + which will be associated with the statement execution. This + dictionary can provide a subset of the options that are accepted + by :meth:`_engine.Connection.execution_options`. + + :return: a :class:`_engine.Result` object. 
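+
+        E.g., an "executemany" sketch (assuming ``table`` is an existing
+        :class:`_schema.Table`)::
+
+            result = conn.execute(
+                table.insert(),
+                [{"id": 1, "value": "v1"}, {"id": 2, "value": "v2"}],
+            )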
+ + """ + distilled_parameters = _distill_params_20(parameters) + try: + meth = statement._execute_on_connection + except AttributeError as err: + raise exc.ObjectNotExecutableError(statement) from err + else: + return meth( + self, + distilled_parameters, + execution_options or NO_OPTIONS, + ) + + def _execute_function( + self, + func: FunctionElement[Any], + distilled_parameters: _CoreMultiExecuteParams, + execution_options: CoreExecuteOptionsParameter, + ) -> CursorResult[Any]: + """Execute a sql.FunctionElement object.""" + + return self._execute_clauseelement( + func.select(), distilled_parameters, execution_options + ) + + def _execute_default( + self, + default: DefaultGenerator, + distilled_parameters: _CoreMultiExecuteParams, + execution_options: CoreExecuteOptionsParameter, + ) -> Any: + """Execute a schema.ColumnDefault object.""" + + execution_options = self._execution_options.merge_with( + execution_options + ) + + event_multiparams: Optional[_CoreMultiExecuteParams] + event_params: Optional[_CoreAnyExecuteParams] + + # note for event handlers, the "distilled parameters" which is always + # a list of dicts is broken out into separate "multiparams" and + # "params" collections, which allows the handler to distinguish + # between an executemany and execute style set of parameters. + if self._has_events or self.engine._has_events: + ( + default, + distilled_parameters, + event_multiparams, + event_params, + ) = self._invoke_before_exec_event( + default, distilled_parameters, execution_options + ) + else: + event_multiparams = event_params = None + + try: + conn = self._dbapi_connection + if conn is None: + conn = self._revalidate_connection() + + dialect = self.dialect + ctx = dialect.execution_ctx_cls._init_default( + dialect, self, conn, execution_options + ) + except (exc.PendingRollbackError, exc.ResourceClosedError): + raise + except BaseException as e: + self._handle_dbapi_exception(e, None, None, None, None) + + ret = ctx._exec_default(None, default, None) + + if self._has_events or self.engine._has_events: + self.dispatch.after_execute( + self, + default, + event_multiparams, + event_params, + execution_options, + ret, + ) + + return ret + + def _execute_ddl( + self, + ddl: ExecutableDDLElement, + distilled_parameters: _CoreMultiExecuteParams, + execution_options: CoreExecuteOptionsParameter, + ) -> CursorResult[Any]: + """Execute a schema.DDL object.""" + + exec_opts = ddl._execution_options.merge_with( + self._execution_options, execution_options + ) + + event_multiparams: Optional[_CoreMultiExecuteParams] + event_params: Optional[_CoreSingleExecuteParams] + + if self._has_events or self.engine._has_events: + ( + ddl, + distilled_parameters, + event_multiparams, + event_params, + ) = self._invoke_before_exec_event( + ddl, distilled_parameters, exec_opts + ) + else: + event_multiparams = event_params = None + + schema_translate_map = exec_opts.get("schema_translate_map", None) + + dialect = self.dialect + + compiled = ddl.compile( + dialect=dialect, schema_translate_map=schema_translate_map + ) + ret = self._execute_context( + dialect, + dialect.execution_ctx_cls._init_ddl, + compiled, + None, + exec_opts, + compiled, + ) + if self._has_events or self.engine._has_events: + self.dispatch.after_execute( + self, + ddl, + event_multiparams, + event_params, + exec_opts, + ret, + ) + return ret + + def _invoke_before_exec_event( + self, + elem: Any, + distilled_params: _CoreMultiExecuteParams, + execution_options: _ExecuteOptions, + ) -> Tuple[ + Any, + 
_CoreMultiExecuteParams, + _CoreMultiExecuteParams, + _CoreSingleExecuteParams, + ]: + event_multiparams: _CoreMultiExecuteParams + event_params: _CoreSingleExecuteParams + + if len(distilled_params) == 1: + event_multiparams, event_params = [], distilled_params[0] + else: + event_multiparams, event_params = distilled_params, {} + + for fn in self.dispatch.before_execute: + elem, event_multiparams, event_params = fn( + self, + elem, + event_multiparams, + event_params, + execution_options, + ) + + if event_multiparams: + distilled_params = list(event_multiparams) + if event_params: + raise exc.InvalidRequestError( + "Event handler can't return non-empty multiparams " + "and params at the same time" + ) + elif event_params: + distilled_params = [event_params] + else: + distilled_params = [] + + return elem, distilled_params, event_multiparams, event_params + + def _execute_clauseelement( + self, + elem: Executable, + distilled_parameters: _CoreMultiExecuteParams, + execution_options: CoreExecuteOptionsParameter, + ) -> CursorResult[Any]: + """Execute a sql.ClauseElement object.""" + + execution_options = elem._execution_options.merge_with( + self._execution_options, execution_options + ) + + has_events = self._has_events or self.engine._has_events + if has_events: + ( + elem, + distilled_parameters, + event_multiparams, + event_params, + ) = self._invoke_before_exec_event( + elem, distilled_parameters, execution_options + ) + + if distilled_parameters: + # ensure we don't retain a link to the view object for keys() + # which links to the values, which we don't want to cache + keys = sorted(distilled_parameters[0]) + for_executemany = len(distilled_parameters) > 1 + else: + keys = [] + for_executemany = False + + dialect = self.dialect + + schema_translate_map = execution_options.get( + "schema_translate_map", None + ) + + compiled_cache: Optional[CompiledCacheType] = execution_options.get( + "compiled_cache", self.engine._compiled_cache + ) + + compiled_sql, extracted_params, cache_hit = elem._compile_w_cache( + dialect=dialect, + compiled_cache=compiled_cache, + column_keys=keys, + for_executemany=for_executemany, + schema_translate_map=schema_translate_map, + linting=self.dialect.compiler_linting | compiler.WARN_LINTING, + ) + ret = self._execute_context( + dialect, + dialect.execution_ctx_cls._init_compiled, + compiled_sql, + distilled_parameters, + execution_options, + compiled_sql, + distilled_parameters, + elem, + extracted_params, + cache_hit=cache_hit, + ) + if has_events: + self.dispatch.after_execute( + self, + elem, + event_multiparams, + event_params, + execution_options, + ret, + ) + return ret + + def _execute_compiled( + self, + compiled: Compiled, + distilled_parameters: _CoreMultiExecuteParams, + execution_options: CoreExecuteOptionsParameter = _EMPTY_EXECUTION_OPTS, + ) -> CursorResult[Any]: + """Execute a sql.Compiled object. + + TODO: why do we have this? 
likely deprecate or remove + + """ + + execution_options = compiled.execution_options.merge_with( + self._execution_options, execution_options + ) + + if self._has_events or self.engine._has_events: + ( + compiled, + distilled_parameters, + event_multiparams, + event_params, + ) = self._invoke_before_exec_event( + compiled, distilled_parameters, execution_options + ) + + dialect = self.dialect + + ret = self._execute_context( + dialect, + dialect.execution_ctx_cls._init_compiled, + compiled, + distilled_parameters, + execution_options, + compiled, + distilled_parameters, + None, + None, + ) + if self._has_events or self.engine._has_events: + self.dispatch.after_execute( + self, + compiled, + event_multiparams, + event_params, + execution_options, + ret, + ) + return ret + + def exec_driver_sql( + self, + statement: str, + parameters: Optional[_DBAPIAnyExecuteParams] = None, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> CursorResult[Any]: + r"""Executes a string SQL statement on the DBAPI cursor directly, + without any SQL compilation steps. + + This can be used to pass any string directly to the + ``cursor.execute()`` method of the DBAPI in use. + + :param statement: The statement str to be executed. Bound parameters + must use the underlying DBAPI's paramstyle, such as "qmark", + "pyformat", "format", etc. + + :param parameters: represent bound parameter values to be used in the + execution. The format is one of: a dictionary of named parameters, + a tuple of positional parameters, or a list containing either + dictionaries or tuples for multiple-execute support. + + :return: a :class:`_engine.CursorResult`. + + E.g. multiple dictionaries:: + + + conn.exec_driver_sql( + "INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)", + [{"id":1, "value":"v1"}, {"id":2, "value":"v2"}] + ) + + Single dictionary:: + + conn.exec_driver_sql( + "INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)", + dict(id=1, value="v1") + ) + + Single tuple:: + + conn.exec_driver_sql( + "INSERT INTO table (id, value) VALUES (?, ?)", + (1, 'v1') + ) + + .. note:: The :meth:`_engine.Connection.exec_driver_sql` method does + not participate in the + :meth:`_events.ConnectionEvents.before_execute` and + :meth:`_events.ConnectionEvents.after_execute` events. To + intercept calls to :meth:`_engine.Connection.exec_driver_sql`, use + :meth:`_events.ConnectionEvents.before_cursor_execute` and + :meth:`_events.ConnectionEvents.after_cursor_execute`. + + .. 
seealso:: + + :pep:`249` + + """ + + distilled_parameters = _distill_raw_params(parameters) + + execution_options = self._execution_options.merge_with( + execution_options + ) + + dialect = self.dialect + ret = self._execute_context( + dialect, + dialect.execution_ctx_cls._init_statement, + statement, + None, + execution_options, + statement, + distilled_parameters, + ) + + return ret + + def _execute_context( + self, + dialect: Dialect, + constructor: Callable[..., ExecutionContext], + statement: Union[str, Compiled], + parameters: Optional[_AnyMultiExecuteParams], + execution_options: _ExecuteOptions, + *args: Any, + **kw: Any, + ) -> CursorResult[Any]: + """Create an :class:`.ExecutionContext` and execute, returning + a :class:`_engine.CursorResult`.""" + + if execution_options: + yp = execution_options.get("yield_per", None) + if yp: + execution_options = execution_options.union( + {"stream_results": True, "max_row_buffer": yp} + ) + try: + conn = self._dbapi_connection + if conn is None: + conn = self._revalidate_connection() + + context = constructor( + dialect, self, conn, execution_options, *args, **kw + ) + except (exc.PendingRollbackError, exc.ResourceClosedError): + raise + except BaseException as e: + self._handle_dbapi_exception( + e, str(statement), parameters, None, None + ) + + if ( + self._transaction + and not self._transaction.is_active + or ( + self._nested_transaction + and not self._nested_transaction.is_active + ) + ): + self._invalid_transaction() + + elif self._trans_context_manager: + TransactionalContext._trans_ctx_check(self) + + if self._transaction is None: + self._autobegin() + + context.pre_exec() + + if context.execute_style is ExecuteStyle.INSERTMANYVALUES: + return self._exec_insertmany_context(dialect, context) + else: + return self._exec_single_context( + dialect, context, statement, parameters + ) + + def _exec_single_context( + self, + dialect: Dialect, + context: ExecutionContext, + statement: Union[str, Compiled], + parameters: Optional[_AnyMultiExecuteParams], + ) -> CursorResult[Any]: + """continue the _execute_context() method for a single DBAPI + cursor.execute() or cursor.executemany() call. 
+ + """ + if dialect.bind_typing is BindTyping.SETINPUTSIZES: + generic_setinputsizes = context._prepare_set_input_sizes() + + if generic_setinputsizes: + try: + dialect.do_set_input_sizes( + context.cursor, generic_setinputsizes, context + ) + except BaseException as e: + self._handle_dbapi_exception( + e, str(statement), parameters, None, context + ) + + cursor, str_statement, parameters = ( + context.cursor, + context.statement, + context.parameters, + ) + + effective_parameters: Optional[_AnyExecuteParams] + + if not context.executemany: + effective_parameters = parameters[0] + else: + effective_parameters = parameters + + if self._has_events or self.engine._has_events: + for fn in self.dispatch.before_cursor_execute: + str_statement, effective_parameters = fn( + self, + cursor, + str_statement, + effective_parameters, + context, + context.executemany, + ) + + if self._echo: + self._log_info(str_statement) + + stats = context._get_cache_stats() + + if not self.engine.hide_parameters: + self._log_info( + "[%s] %r", + stats, + sql_util._repr_params( + effective_parameters, + batches=10, + ismulti=context.executemany, + ), + ) + else: + self._log_info( + "[%s] [SQL parameters hidden due to hide_parameters=True]", + stats, + ) + + evt_handled: bool = False + try: + if context.execute_style is ExecuteStyle.EXECUTEMANY: + effective_parameters = cast( + "_CoreMultiExecuteParams", effective_parameters + ) + if self.dialect._has_events: + for fn in self.dialect.dispatch.do_executemany: + if fn( + cursor, + str_statement, + effective_parameters, + context, + ): + evt_handled = True + break + if not evt_handled: + self.dialect.do_executemany( + cursor, + str_statement, + effective_parameters, + context, + ) + elif not effective_parameters and context.no_parameters: + if self.dialect._has_events: + for fn in self.dialect.dispatch.do_execute_no_params: + if fn(cursor, str_statement, context): + evt_handled = True + break + if not evt_handled: + self.dialect.do_execute_no_params( + cursor, str_statement, context + ) + else: + effective_parameters = cast( + "_CoreSingleExecuteParams", effective_parameters + ) + if self.dialect._has_events: + for fn in self.dialect.dispatch.do_execute: + if fn( + cursor, + str_statement, + effective_parameters, + context, + ): + evt_handled = True + break + if not evt_handled: + self.dialect.do_execute( + cursor, str_statement, effective_parameters, context + ) + + if self._has_events or self.engine._has_events: + self.dispatch.after_cursor_execute( + self, + cursor, + str_statement, + effective_parameters, + context, + context.executemany, + ) + + context.post_exec() + + result = context._setup_result_proxy() + + except BaseException as e: + self._handle_dbapi_exception( + e, str_statement, effective_parameters, cursor, context + ) + + return result + + def _exec_insertmany_context( + self, + dialect: Dialect, + context: ExecutionContext, + ) -> CursorResult[Any]: + """continue the _execute_context() method for an "insertmanyvalues" + operation, which will invoke DBAPI + cursor.execute() one or more times with individual log and + event hook calls. 
+ + """ + + if dialect.bind_typing is BindTyping.SETINPUTSIZES: + generic_setinputsizes = context._prepare_set_input_sizes() + else: + generic_setinputsizes = None + + cursor, str_statement, parameters = ( + context.cursor, + context.statement, + context.parameters, + ) + + effective_parameters = parameters + + engine_events = self._has_events or self.engine._has_events + if self.dialect._has_events: + do_execute_dispatch: Iterable[Any] = ( + self.dialect.dispatch.do_execute + ) + else: + do_execute_dispatch = () + + if self._echo: + stats = context._get_cache_stats() + " (insertmanyvalues)" + + preserve_rowcount = context.execution_options.get( + "preserve_rowcount", False + ) + rowcount = 0 + + for imv_batch in dialect._deliver_insertmanyvalues_batches( + self, + cursor, + str_statement, + effective_parameters, + generic_setinputsizes, + context, + ): + if imv_batch.processed_setinputsizes: + try: + dialect.do_set_input_sizes( + context.cursor, + imv_batch.processed_setinputsizes, + context, + ) + except BaseException as e: + self._handle_dbapi_exception( + e, + sql_util._long_statement(imv_batch.replaced_statement), + imv_batch.replaced_parameters, + None, + context, + is_sub_exec=True, + ) + + sub_stmt = imv_batch.replaced_statement + sub_params = imv_batch.replaced_parameters + + if engine_events: + for fn in self.dispatch.before_cursor_execute: + sub_stmt, sub_params = fn( + self, + cursor, + sub_stmt, + sub_params, + context, + True, + ) + + if self._echo: + self._log_info(sql_util._long_statement(sub_stmt)) + + imv_stats = f""" {imv_batch.batchnum}/{ + imv_batch.total_batches + } ({ + 'ordered' + if imv_batch.rows_sorted else 'unordered' + }{ + '; batch not supported' + if imv_batch.is_downgraded + else '' + })""" + + if imv_batch.batchnum == 1: + stats += imv_stats + else: + stats = f"insertmanyvalues{imv_stats}" + + if not self.engine.hide_parameters: + self._log_info( + "[%s] %r", + stats, + sql_util._repr_params( + sub_params, + batches=10, + ismulti=False, + ), + ) + else: + self._log_info( + "[%s] [SQL parameters hidden due to " + "hide_parameters=True]", + stats, + ) + + try: + for fn in do_execute_dispatch: + if fn( + cursor, + sub_stmt, + sub_params, + context, + ): + break + else: + dialect.do_execute( + cursor, + sub_stmt, + sub_params, + context, + ) + + except BaseException as e: + self._handle_dbapi_exception( + e, + sql_util._long_statement(sub_stmt), + sub_params, + cursor, + context, + is_sub_exec=True, + ) + + if engine_events: + self.dispatch.after_cursor_execute( + self, + cursor, + str_statement, + effective_parameters, + context, + context.executemany, + ) + + if preserve_rowcount: + rowcount += imv_batch.current_batch_size + + try: + context.post_exec() + + if preserve_rowcount: + context._rowcount = rowcount # type: ignore[attr-defined] + + result = context._setup_result_proxy() + + except BaseException as e: + self._handle_dbapi_exception( + e, str_statement, effective_parameters, cursor, context + ) + + return result + + def _cursor_execute( + self, + cursor: DBAPICursor, + statement: str, + parameters: _DBAPISingleExecuteParams, + context: Optional[ExecutionContext] = None, + ) -> None: + """Execute a statement + params on the given cursor. + + Adds appropriate logging and exception handling. + + This method is used by DefaultDialect for special-case + executions, such as for sequences and column defaults. + The path of statement execution in the majority of cases + terminates at _execute_context(). 
+ + """ + if self._has_events or self.engine._has_events: + for fn in self.dispatch.before_cursor_execute: + statement, parameters = fn( + self, cursor, statement, parameters, context, False + ) + + if self._echo: + self._log_info(statement) + self._log_info("[raw sql] %r", parameters) + try: + for fn in ( + () + if not self.dialect._has_events + else self.dialect.dispatch.do_execute + ): + if fn(cursor, statement, parameters, context): + break + else: + self.dialect.do_execute(cursor, statement, parameters, context) + except BaseException as e: + self._handle_dbapi_exception( + e, statement, parameters, cursor, context + ) + + if self._has_events or self.engine._has_events: + self.dispatch.after_cursor_execute( + self, cursor, statement, parameters, context, False + ) + + def _safe_close_cursor(self, cursor: DBAPICursor) -> None: + """Close the given cursor, catching exceptions + and turning into log warnings. + + """ + try: + cursor.close() + except Exception: + # log the error through the connection pool's logger. + self.engine.pool.logger.error( + "Error closing cursor", exc_info=True + ) + + _reentrant_error = False + _is_disconnect = False + + def _handle_dbapi_exception( + self, + e: BaseException, + statement: Optional[str], + parameters: Optional[_AnyExecuteParams], + cursor: Optional[DBAPICursor], + context: Optional[ExecutionContext], + is_sub_exec: bool = False, + ) -> NoReturn: + exc_info = sys.exc_info() + + is_exit_exception = util.is_exit_exception(e) + + if not self._is_disconnect: + self._is_disconnect = ( + isinstance(e, self.dialect.loaded_dbapi.Error) + and not self.closed + and self.dialect.is_disconnect( + e, + self._dbapi_connection if not self.invalidated else None, + cursor, + ) + ) or (is_exit_exception and not self.closed) + + invalidate_pool_on_disconnect = not is_exit_exception + + ismulti: bool = ( + not is_sub_exec and context.executemany + if context is not None + else False + ) + if self._reentrant_error: + raise exc.DBAPIError.instance( + statement, + parameters, + e, + self.dialect.loaded_dbapi.Error, + hide_parameters=self.engine.hide_parameters, + dialect=self.dialect, + ismulti=ismulti, + ).with_traceback(exc_info[2]) from e + self._reentrant_error = True + try: + # non-DBAPI error - if we already got a context, + # or there's no string statement, don't wrap it + should_wrap = isinstance(e, self.dialect.loaded_dbapi.Error) or ( + statement is not None + and context is None + and not is_exit_exception + ) + + if should_wrap: + sqlalchemy_exception = exc.DBAPIError.instance( + statement, + parameters, + cast(Exception, e), + self.dialect.loaded_dbapi.Error, + hide_parameters=self.engine.hide_parameters, + connection_invalidated=self._is_disconnect, + dialect=self.dialect, + ismulti=ismulti, + ) + else: + sqlalchemy_exception = None + + newraise = None + + if (self.dialect._has_events) and not self._execution_options.get( + "skip_user_error_events", False + ): + ctx = ExceptionContextImpl( + e, + sqlalchemy_exception, + self.engine, + self.dialect, + self, + cursor, + statement, + parameters, + context, + self._is_disconnect, + invalidate_pool_on_disconnect, + False, + ) + + for fn in self.dialect.dispatch.handle_error: + try: + # handler returns an exception; + # call next handler in a chain + per_fn = fn(ctx) + if per_fn is not None: + ctx.chained_exception = newraise = per_fn + except Exception as _raised: + # handler raises an exception - stop processing + newraise = _raised + break + + if self._is_disconnect != ctx.is_disconnect: + 
self._is_disconnect = ctx.is_disconnect + if sqlalchemy_exception: + sqlalchemy_exception.connection_invalidated = ( + ctx.is_disconnect + ) + + # set up potentially user-defined value for + # invalidate pool. + invalidate_pool_on_disconnect = ( + ctx.invalidate_pool_on_disconnect + ) + + if should_wrap and context: + context.handle_dbapi_exception(e) + + if not self._is_disconnect: + if cursor: + self._safe_close_cursor(cursor) + # "autorollback" was mostly relevant in 1.x series. + # It's very unlikely to reach here, as the connection + # does autobegin so when we are here, we are usually + # in an explicit / semi-explicit transaction. + # however we have a test which manufactures this + # scenario in any case using an event handler. + # test/engine/test_execute.py-> test_actual_autorollback + if not self.in_transaction(): + self._rollback_impl() + + if newraise: + raise newraise.with_traceback(exc_info[2]) from e + elif should_wrap: + assert sqlalchemy_exception is not None + raise sqlalchemy_exception.with_traceback(exc_info[2]) from e + else: + assert exc_info[1] is not None + raise exc_info[1].with_traceback(exc_info[2]) + finally: + del self._reentrant_error + if self._is_disconnect: + del self._is_disconnect + if not self.invalidated: + dbapi_conn_wrapper = self._dbapi_connection + assert dbapi_conn_wrapper is not None + if invalidate_pool_on_disconnect: + self.engine.pool._invalidate(dbapi_conn_wrapper, e) + self.invalidate(e) + + @classmethod + def _handle_dbapi_exception_noconnection( + cls, + e: BaseException, + dialect: Dialect, + engine: Optional[Engine] = None, + is_disconnect: Optional[bool] = None, + invalidate_pool_on_disconnect: bool = True, + is_pre_ping: bool = False, + ) -> NoReturn: + exc_info = sys.exc_info() + + if is_disconnect is None: + is_disconnect = isinstance( + e, dialect.loaded_dbapi.Error + ) and dialect.is_disconnect(e, None, None) + + should_wrap = isinstance(e, dialect.loaded_dbapi.Error) + + if should_wrap: + sqlalchemy_exception = exc.DBAPIError.instance( + None, + None, + cast(Exception, e), + dialect.loaded_dbapi.Error, + hide_parameters=( + engine.hide_parameters if engine is not None else False + ), + connection_invalidated=is_disconnect, + dialect=dialect, + ) + else: + sqlalchemy_exception = None + + newraise = None + + if dialect._has_events: + ctx = ExceptionContextImpl( + e, + sqlalchemy_exception, + engine, + dialect, + None, + None, + None, + None, + None, + is_disconnect, + invalidate_pool_on_disconnect, + is_pre_ping, + ) + for fn in dialect.dispatch.handle_error: + try: + # handler returns an exception; + # call next handler in a chain + per_fn = fn(ctx) + if per_fn is not None: + ctx.chained_exception = newraise = per_fn + except Exception as _raised: + # handler raises an exception - stop processing + newraise = _raised + break + + if sqlalchemy_exception and is_disconnect != ctx.is_disconnect: + sqlalchemy_exception.connection_invalidated = is_disconnect = ( + ctx.is_disconnect + ) + + if newraise: + raise newraise.with_traceback(exc_info[2]) from e + elif should_wrap: + assert sqlalchemy_exception is not None + raise sqlalchemy_exception.with_traceback(exc_info[2]) from e + else: + assert exc_info[1] is not None + raise exc_info[1].with_traceback(exc_info[2]) + + def _run_ddl_visitor( + self, + visitorcallable: Type[Union[SchemaGenerator, SchemaDropper]], + element: SchemaItem, + **kwargs: Any, + ) -> None: + """run a DDL visitor. 
+ + This method is only here so that the MockConnection can change the + options given to the visitor so that "checkfirst" is skipped. + + """ + visitorcallable(self.dialect, self, **kwargs).traverse_single(element) + + +class ExceptionContextImpl(ExceptionContext): + """Implement the :class:`.ExceptionContext` interface.""" + + __slots__ = ( + "connection", + "engine", + "dialect", + "cursor", + "statement", + "parameters", + "original_exception", + "sqlalchemy_exception", + "chained_exception", + "execution_context", + "is_disconnect", + "invalidate_pool_on_disconnect", + "is_pre_ping", + ) + + def __init__( + self, + exception: BaseException, + sqlalchemy_exception: Optional[exc.StatementError], + engine: Optional[Engine], + dialect: Dialect, + connection: Optional[Connection], + cursor: Optional[DBAPICursor], + statement: Optional[str], + parameters: Optional[_DBAPIAnyExecuteParams], + context: Optional[ExecutionContext], + is_disconnect: bool, + invalidate_pool_on_disconnect: bool, + is_pre_ping: bool, + ): + self.engine = engine + self.dialect = dialect + self.connection = connection + self.sqlalchemy_exception = sqlalchemy_exception + self.original_exception = exception + self.execution_context = context + self.statement = statement + self.parameters = parameters + self.is_disconnect = is_disconnect + self.invalidate_pool_on_disconnect = invalidate_pool_on_disconnect + self.is_pre_ping = is_pre_ping + + +class Transaction(TransactionalContext): + """Represent a database transaction in progress. + + The :class:`.Transaction` object is procured by + calling the :meth:`_engine.Connection.begin` method of + :class:`_engine.Connection`:: + + from sqlalchemy import create_engine + engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test") + connection = engine.connect() + trans = connection.begin() + connection.execute(text("insert into x (a, b) values (1, 2)")) + trans.commit() + + The object provides :meth:`.rollback` and :meth:`.commit` + methods in order to control transaction boundaries. It + also implements a context manager interface so that + the Python ``with`` statement can be used with the + :meth:`_engine.Connection.begin` method:: + + with connection.begin(): + connection.execute(text("insert into x (a, b) values (1, 2)")) + + The Transaction object is **not** threadsafe. + + .. seealso:: + + :meth:`_engine.Connection.begin` + + :meth:`_engine.Connection.begin_twophase` + + :meth:`_engine.Connection.begin_nested` + + .. index:: + single: thread safety; Transaction + """ # noqa + + __slots__ = () + + _is_root: bool = False + is_active: bool + connection: Connection + + def __init__(self, connection: Connection): + raise NotImplementedError() + + @property + def _deactivated_from_connection(self) -> bool: + """True if this transaction is totally deactivated from the connection + and therefore can no longer affect its state. + + """ + raise NotImplementedError() + + def _do_close(self) -> None: + raise NotImplementedError() + + def _do_rollback(self) -> None: + raise NotImplementedError() + + def _do_commit(self) -> None: + raise NotImplementedError() + + @property + def is_valid(self) -> bool: + return self.is_active and not self.connection.invalidated + + def close(self) -> None: + """Close this :class:`.Transaction`. + + If this transaction is the base transaction in a begin/commit + nesting, the transaction will rollback(). Otherwise, the + method returns. + + This is used to cancel a Transaction without affecting the scope of + an enclosing transaction. 
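+
+        E.g., a sketch (assuming ``connection`` is an open
+        :class:`_engine.Connection` and ``text`` is imported from
+        ``sqlalchemy``)::
+
+            trans = connection.begin()
+            try:
+                connection.execute(text("insert into x (a, b) values (1, 2)"))
+                trans.commit()
+            finally:
+                trans.close()  # rolls back if commit() was never reached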
+ + """ + try: + self._do_close() + finally: + assert not self.is_active + + def rollback(self) -> None: + """Roll back this :class:`.Transaction`. + + The implementation of this may vary based on the type of transaction in + use: + + * For a simple database transaction (e.g. :class:`.RootTransaction`), + it corresponds to a ROLLBACK. + + * For a :class:`.NestedTransaction`, it corresponds to a + "ROLLBACK TO SAVEPOINT" operation. + + * For a :class:`.TwoPhaseTransaction`, DBAPI-specific methods for two + phase transactions may be used. + + + """ + try: + self._do_rollback() + finally: + assert not self.is_active + + def commit(self) -> None: + """Commit this :class:`.Transaction`. + + The implementation of this may vary based on the type of transaction in + use: + + * For a simple database transaction (e.g. :class:`.RootTransaction`), + it corresponds to a COMMIT. + + * For a :class:`.NestedTransaction`, it corresponds to a + "RELEASE SAVEPOINT" operation. + + * For a :class:`.TwoPhaseTransaction`, DBAPI-specific methods for two + phase transactions may be used. + + """ + try: + self._do_commit() + finally: + assert not self.is_active + + def _get_subject(self) -> Connection: + return self.connection + + def _transaction_is_active(self) -> bool: + return self.is_active + + def _transaction_is_closed(self) -> bool: + return not self._deactivated_from_connection + + def _rollback_can_be_called(self) -> bool: + # for RootTransaction / NestedTransaction, it's safe to call + # rollback() even if the transaction is deactive and no warnings + # will be emitted. tested in + # test_transaction.py -> test_no_rollback_in_deactive(?:_savepoint)? + return True + + +class RootTransaction(Transaction): + """Represent the "root" transaction on a :class:`_engine.Connection`. + + This corresponds to the current "BEGIN/COMMIT/ROLLBACK" that's occurring + for the :class:`_engine.Connection`. The :class:`_engine.RootTransaction` + is created by calling upon the :meth:`_engine.Connection.begin` method, and + remains associated with the :class:`_engine.Connection` throughout its + active span. The current :class:`_engine.RootTransaction` in use is + accessible via the :attr:`_engine.Connection.get_transaction` method of + :class:`_engine.Connection`. + + In :term:`2.0 style` use, the :class:`_engine.Connection` also employs + "autobegin" behavior that will create a new + :class:`_engine.RootTransaction` whenever a connection in a + non-transactional state is used to emit commands on the DBAPI connection. + The scope of the :class:`_engine.RootTransaction` in 2.0 style + use can be controlled using the :meth:`_engine.Connection.commit` and + :meth:`_engine.Connection.rollback` methods. 
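+
+    E.g., a sketch of 2.0-style "autobegin" (assuming ``table`` is an
+    existing :class:`_schema.Table`)::
+
+        with engine.connect() as conn:
+            conn.execute(table.insert(), {"username": "sandy"})  # autobegins
+            conn.commit()  # commits the current RootTransaction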
+
+
+    """
+
+    _is_root = True
+
+    __slots__ = ("connection", "is_active")
+
+    def __init__(self, connection: Connection):
+        assert connection._transaction is None
+        if connection._trans_context_manager:
+            TransactionalContext._trans_ctx_check(connection)
+        self.connection = connection
+        self._connection_begin_impl()
+        connection._transaction = self
+
+        self.is_active = True
+
+    def _deactivate_from_connection(self) -> None:
+        if self.is_active:
+            assert self.connection._transaction is self
+            self.is_active = False
+
+        elif self.connection._transaction is not self:
+            util.warn("transaction already deassociated from connection")
+
+    @property
+    def _deactivated_from_connection(self) -> bool:
+        return self.connection._transaction is not self
+
+    def _connection_begin_impl(self) -> None:
+        self.connection._begin_impl(self)
+
+    def _connection_rollback_impl(self) -> None:
+        self.connection._rollback_impl()
+
+    def _connection_commit_impl(self) -> None:
+        self.connection._commit_impl()
+
+    def _close_impl(self, try_deactivate: bool = False) -> None:
+        try:
+            if self.is_active:
+                self._connection_rollback_impl()
+
+            if self.connection._nested_transaction:
+                self.connection._nested_transaction._cancel()
+        finally:
+            if self.is_active or try_deactivate:
+                self._deactivate_from_connection()
+            if self.connection._transaction is self:
+                self.connection._transaction = None
+
+        assert not self.is_active
+        assert self.connection._transaction is not self
+
+    def _do_close(self) -> None:
+        self._close_impl()
+
+    def _do_rollback(self) -> None:
+        self._close_impl(try_deactivate=True)
+
+    def _do_commit(self) -> None:
+        if self.is_active:
+            assert self.connection._transaction is self
+
+            try:
+                self._connection_commit_impl()
+            finally:
+                # whether or not commit succeeds, cancel any
+                # nested transactions, make this transaction "inactive"
+                # and remove it as a reset agent
+                if self.connection._nested_transaction:
+                    self.connection._nested_transaction._cancel()
+
+                self._deactivate_from_connection()
+
+            # ...however only remove as the connection's current transaction
+            # if commit succeeded. otherwise it stays on so that a rollback
+            # needs to occur.
+            self.connection._transaction = None
+        else:
+            if self.connection._transaction is self:
+                self.connection._invalid_transaction()
+            else:
+                raise exc.InvalidRequestError("This transaction is inactive")
+
+        assert not self.is_active
+        assert self.connection._transaction is not self
+
+
+class NestedTransaction(Transaction):
+    """Represent a 'nested', or SAVEPOINT transaction.
+
+    The :class:`.NestedTransaction` object is created by calling the
+    :meth:`_engine.Connection.begin_nested` method of
+    :class:`_engine.Connection`.
+
+    When using :class:`.NestedTransaction`, the semantics of "begin" /
+    "commit" / "rollback" are as follows:
+
+    * the "begin" operation corresponds to the "BEGIN SAVEPOINT" command,
+      where the savepoint is given an explicit name that is part of the state
+      of this object.
+
+    * The :meth:`.NestedTransaction.commit` method corresponds to a
+      "RELEASE SAVEPOINT" operation, using the savepoint identifier associated
+      with this :class:`.NestedTransaction`.
+
+    * The :meth:`.NestedTransaction.rollback` method corresponds to a
+      "ROLLBACK TO SAVEPOINT" operation, using the savepoint identifier
+      associated with this :class:`.NestedTransaction`.
+
+    The rationale for mimicking the semantics of an outer transaction in
+    terms of savepoints is so that code may deal with a "savepoint"
+    transaction and an "outer" transaction in an agnostic way.
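+
+    E.g., a sketch (assuming ``table`` is an existing
+    :class:`_schema.Table`)::
+
+        with engine.begin() as connection:
+            savepoint = connection.begin_nested()  # emits SAVEPOINT
+            connection.execute(table.insert(), {"username": "sandy"})
+            savepoint.rollback()  # emits ROLLBACK TO SAVEPOINT
+            # the enclosing transaction remains active and commits on exit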
+ + .. seealso:: + + :ref:`session_begin_nested` - ORM version of the SAVEPOINT API. + + """ + + __slots__ = ("connection", "is_active", "_savepoint", "_previous_nested") + + _savepoint: str + + def __init__(self, connection: Connection): + assert connection._transaction is not None + if connection._trans_context_manager: + TransactionalContext._trans_ctx_check(connection) + self.connection = connection + self._savepoint = self.connection._savepoint_impl() + self.is_active = True + self._previous_nested = connection._nested_transaction + connection._nested_transaction = self + + def _deactivate_from_connection(self, warn: bool = True) -> None: + if self.connection._nested_transaction is self: + self.connection._nested_transaction = self._previous_nested + elif warn: + util.warn( + "nested transaction already deassociated from connection" + ) + + @property + def _deactivated_from_connection(self) -> bool: + return self.connection._nested_transaction is not self + + def _cancel(self) -> None: + # called by RootTransaction when the outer transaction is + # committed, rolled back, or closed to cancel all savepoints + # without any action being taken + self.is_active = False + self._deactivate_from_connection() + if self._previous_nested: + self._previous_nested._cancel() + + def _close_impl( + self, deactivate_from_connection: bool, warn_already_deactive: bool + ) -> None: + try: + if ( + self.is_active + and self.connection._transaction + and self.connection._transaction.is_active + ): + self.connection._rollback_to_savepoint_impl(self._savepoint) + finally: + self.is_active = False + + if deactivate_from_connection: + self._deactivate_from_connection(warn=warn_already_deactive) + + assert not self.is_active + if deactivate_from_connection: + assert self.connection._nested_transaction is not self + + def _do_close(self) -> None: + self._close_impl(True, False) + + def _do_rollback(self) -> None: + self._close_impl(True, True) + + def _do_commit(self) -> None: + if self.is_active: + try: + self.connection._release_savepoint_impl(self._savepoint) + finally: + # nested trans becomes inactive on failed release + # unconditionally. this prevents it from trying to + # emit SQL when it rolls back. + self.is_active = False + + # but only de-associate from connection if it succeeded + self._deactivate_from_connection() + else: + if self.connection._nested_transaction is self: + self.connection._invalid_transaction() + else: + raise exc.InvalidRequestError( + "This nested transaction is inactive" + ) + + +class TwoPhaseTransaction(RootTransaction): + """Represent a two-phase transaction. + + A new :class:`.TwoPhaseTransaction` object may be procured + using the :meth:`_engine.Connection.begin_twophase` method. + + The interface is the same as that of :class:`.Transaction` + with the addition of the :meth:`prepare` method. + + """ + + __slots__ = ("xid", "_is_prepared") + + xid: Any + + def __init__(self, connection: Connection, xid: Any): + self._is_prepared = False + self.xid = xid + super().__init__(connection) + + def prepare(self) -> None: + """Prepare this :class:`.TwoPhaseTransaction`. + + After a PREPARE, the transaction can be committed. 
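+
+        E.g., a sketch (assuming the dialect in use supports two-phase
+        transactions and ``text`` is imported from ``sqlalchemy``)::
+
+            trans = connection.begin_twophase()
+            connection.execute(text("insert into x (a, b) values (1, 2)"))
+            trans.prepare()  # phase one
+            trans.commit()  # phase two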
+ + """ + if not self.is_active: + raise exc.InvalidRequestError("This transaction is inactive") + self.connection._prepare_twophase_impl(self.xid) + self._is_prepared = True + + def _connection_begin_impl(self) -> None: + self.connection._begin_twophase_impl(self) + + def _connection_rollback_impl(self) -> None: + self.connection._rollback_twophase_impl(self.xid, self._is_prepared) + + def _connection_commit_impl(self) -> None: + self.connection._commit_twophase_impl(self.xid, self._is_prepared) + + +class Engine( + ConnectionEventsTarget, log.Identified, inspection.Inspectable["Inspector"] +): + """ + Connects a :class:`~sqlalchemy.pool.Pool` and + :class:`~sqlalchemy.engine.interfaces.Dialect` together to provide a + source of database connectivity and behavior. + + An :class:`_engine.Engine` object is instantiated publicly using the + :func:`~sqlalchemy.create_engine` function. + + .. seealso:: + + :doc:`/core/engines` + + :ref:`connections_toplevel` + + """ + + dispatch: dispatcher[ConnectionEventsTarget] + + _compiled_cache: Optional[CompiledCacheType] + + _execution_options: _ExecuteOptions = _EMPTY_EXECUTION_OPTS + _has_events: bool = False + _connection_cls: Type[Connection] = Connection + _sqla_logger_namespace: str = "sqlalchemy.engine.Engine" + _is_future: bool = False + + _schema_translate_map: Optional[SchemaTranslateMapType] = None + _option_cls: Type[OptionEngine] + + dialect: Dialect + pool: Pool + url: URL + hide_parameters: bool + + def __init__( + self, + pool: Pool, + dialect: Dialect, + url: URL, + logging_name: Optional[str] = None, + echo: Optional[_EchoFlagType] = None, + query_cache_size: int = 500, + execution_options: Optional[Mapping[str, Any]] = None, + hide_parameters: bool = False, + ): + self.pool = pool + self.url = url + self.dialect = dialect + if logging_name: + self.logging_name = logging_name + self.echo = echo + self.hide_parameters = hide_parameters + if query_cache_size != 0: + self._compiled_cache = util.LRUCache( + query_cache_size, size_alert=self._lru_size_alert + ) + else: + self._compiled_cache = None + log.instance_logger(self, echoflag=echo) + if execution_options: + self.update_execution_options(**execution_options) + + def _lru_size_alert(self, cache: util.LRUCache[Any, Any]) -> None: + if self._should_log_info(): + self.logger.info( + "Compiled cache size pruning from %d items to %d. " + "Increase cache size to reduce the frequency of pruning.", + len(cache), + cache.capacity, + ) + + @property + def engine(self) -> Engine: + """Returns this :class:`.Engine`. + + Used for legacy schemes that accept :class:`.Connection` / + :class:`.Engine` objects within the same variable. + + """ + return self + + def clear_compiled_cache(self) -> None: + """Clear the compiled cache associated with the dialect. + + This applies **only** to the built-in cache that is established + via the :paramref:`_engine.create_engine.query_cache_size` parameter. + It will not impact any dictionary caches that were passed via the + :paramref:`.Connection.execution_options.compiled_cache` parameter. + + .. versionadded:: 1.4 + + """ + if self._compiled_cache: + self._compiled_cache.clear() + + def update_execution_options(self, **opt: Any) -> None: + r"""Update the default execution_options dictionary + of this :class:`_engine.Engine`. + + The given keys/values in \**opt are added to the + default execution options that will be used for + all connections. 
The initial contents of this dictionary + can be sent via the ``execution_options`` parameter + to :func:`_sa.create_engine`. + + .. seealso:: + + :meth:`_engine.Connection.execution_options` + + :meth:`_engine.Engine.execution_options` + + """ + self.dispatch.set_engine_execution_options(self, opt) + self._execution_options = self._execution_options.union(opt) + self.dialect.set_engine_execution_options(self, opt) + + @overload + def execution_options( + self, + *, + compiled_cache: Optional[CompiledCacheType] = ..., + logging_token: str = ..., + isolation_level: IsolationLevel = ..., + insertmanyvalues_page_size: int = ..., + schema_translate_map: Optional[SchemaTranslateMapType] = ..., + **opt: Any, + ) -> OptionEngine: ... + + @overload + def execution_options(self, **opt: Any) -> OptionEngine: ... + + def execution_options(self, **opt: Any) -> OptionEngine: + """Return a new :class:`_engine.Engine` that will provide + :class:`_engine.Connection` objects with the given execution options. + + The returned :class:`_engine.Engine` remains related to the original + :class:`_engine.Engine` in that it shares the same connection pool and + other state: + + * The :class:`_pool.Pool` used by the new :class:`_engine.Engine` + is the + same instance. The :meth:`_engine.Engine.dispose` + method will replace + the connection pool instance for the parent engine as well + as this one. + * Event listeners are "cascaded" - meaning, the new + :class:`_engine.Engine` + inherits the events of the parent, and new events can be associated + with the new :class:`_engine.Engine` individually. + * The logging configuration and logging_name is copied from the parent + :class:`_engine.Engine`. + + The intent of the :meth:`_engine.Engine.execution_options` method is + to implement schemes where multiple :class:`_engine.Engine` + objects refer to the same connection pool, but are differentiated + by options that affect some execution-level behavior for each + engine. One such example is breaking into separate "reader" and + "writer" :class:`_engine.Engine` instances, where one + :class:`_engine.Engine` + has a lower :term:`isolation level` setting configured or is even + transaction-disabled using "autocommit". An example of this + configuration is at :ref:`dbapi_autocommit_multiple`. + + Another example is one that + uses a custom option ``shard_id`` which is consumed by an event + to change the current schema on a database connection:: + + from sqlalchemy import event + from sqlalchemy.engine import Engine + + primary_engine = create_engine("mysql+mysqldb://") + shard1 = primary_engine.execution_options(shard_id="shard1") + shard2 = primary_engine.execution_options(shard_id="shard2") + + shards = {"default": "base", "shard_1": "db1", "shard_2": "db2"} + + @event.listens_for(Engine, "before_cursor_execute") + def _switch_shard(conn, cursor, stmt, + params, context, executemany): + shard_id = conn.get_execution_options().get('shard_id', "default") + current_shard = conn.info.get("current_shard", None) + + if current_shard != shard_id: + cursor.execute("use %s" % shards[shard_id]) + conn.info["current_shard"] = shard_id + + The above recipe illustrates two :class:`_engine.Engine` objects that + will each serve as factories for :class:`_engine.Connection` objects + that have pre-established "shard_id" execution options present. 
+        A :meth:`_events.ConnectionEvents.before_cursor_execute` event
+        handler then interprets this execution option to emit a MySQL
+        ``use`` statement to switch databases before a statement execution,
+        while at the same time keeping track of which database we've
+        established using the :attr:`_engine.Connection.info` dictionary.
+
+        .. seealso::
+
+            :meth:`_engine.Connection.execution_options`
+            - update execution options on a :class:`_engine.Connection`
+            object.
+
+            :meth:`_engine.Engine.update_execution_options`
+            - update the execution options for a given
+            :class:`_engine.Engine` in place.
+
+            :meth:`_engine.Engine.get_execution_options`
+
+
+        """  # noqa: E501
+        return self._option_cls(self, opt)
+
+    def get_execution_options(self) -> _ExecuteOptions:
+        """Get the non-SQL options which will take effect during execution.
+
+        .. versionadded:: 1.3
+
+        .. seealso::
+
+            :meth:`_engine.Engine.execution_options`
+        """
+        return self._execution_options
+
+    @property
+    def name(self) -> str:
+        """String name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
+        in use by this :class:`Engine`.
+
+        """
+
+        return self.dialect.name
+
+    @property
+    def driver(self) -> str:
+        """Driver name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
+        in use by this :class:`Engine`.
+
+        """
+
+        return self.dialect.driver
+
+    echo = log.echo_property()
+
+    def __repr__(self) -> str:
+        return "Engine(%r)" % (self.url,)
+
+    def dispose(self, close: bool = True) -> None:
+        """Dispose of the connection pool used by this
+        :class:`_engine.Engine`.
+
+        A new connection pool is created immediately after the old one has
+        been disposed.  The previous connection pool is disposed either
+        actively, by closing out all currently checked-in connections in
+        that pool, or passively, by losing references to it but otherwise
+        not closing any connections.  The latter strategy is more
+        appropriate for an initializer in a forked Python process.
+
+        :param close: if left at its default of ``True``, has the
+         effect of fully closing all **currently checked in**
+         database connections.  Connections that are still checked out
+         will **not** be closed; however, they will no longer be associated
+         with this :class:`_engine.Engine`,
+         so when they are closed individually, eventually the
+         :class:`_pool.Pool` which they are associated with will
+         be garbage collected and they will be closed out fully, if
+         not already closed on checkin.
+
+         If set to ``False``, the previous connection pool is
+         de-referenced, and otherwise not touched in any way.
+
+        .. versionadded:: 1.4.33 Added the :paramref:`.Engine.dispose.close`
+           parameter to allow the replacement of a connection pool in a
+           child process without interfering with the connections used by
+           the parent process.
+
+
+        .. seealso::
+
+            :ref:`engine_disposal`
+
+            :ref:`pooling_multiprocessing`
+
+        """
+        if close:
+            self.pool.dispose()
+        self.pool = self.pool.recreate()
+        self.dispatch.engine_disposed(self)
+
+    @contextlib.contextmanager
+    def _optional_conn_ctx_manager(
+        self, connection: Optional[Connection] = None
+    ) -> Iterator[Connection]:
+        if connection is None:
+            with self.connect() as conn:
+                yield conn
+        else:
+            yield connection
+
+    @contextlib.contextmanager
+    def begin(self) -> Iterator[Connection]:
+        """Return a context manager delivering a :class:`_engine.Connection`
+        with a :class:`.Transaction` established.
+ + E.g.:: + + with engine.begin() as conn: + conn.execute( + text("insert into table (x, y, z) values (1, 2, 3)") + ) + conn.execute(text("my_special_procedure(5)")) + + Upon successful operation, the :class:`.Transaction` + is committed. If an error is raised, the :class:`.Transaction` + is rolled back. + + .. seealso:: + + :meth:`_engine.Engine.connect` - procure a + :class:`_engine.Connection` from + an :class:`_engine.Engine`. + + :meth:`_engine.Connection.begin` - start a :class:`.Transaction` + for a particular :class:`_engine.Connection`. + + """ + with self.connect() as conn: + with conn.begin(): + yield conn + + def _run_ddl_visitor( + self, + visitorcallable: Type[Union[SchemaGenerator, SchemaDropper]], + element: SchemaItem, + **kwargs: Any, + ) -> None: + with self.begin() as conn: + conn._run_ddl_visitor(visitorcallable, element, **kwargs) + + def connect(self) -> Connection: + """Return a new :class:`_engine.Connection` object. + + The :class:`_engine.Connection` acts as a Python context manager, so + the typical use of this method looks like:: + + with engine.connect() as connection: + connection.execute(text("insert into table values ('foo')")) + connection.commit() + + Where above, after the block is completed, the connection is "closed" + and its underlying DBAPI resources are returned to the connection pool. + This also has the effect of rolling back any transaction that + was explicitly begun or was begun via autobegin, and will + emit the :meth:`_events.ConnectionEvents.rollback` event if one was + started and is still in progress. + + .. seealso:: + + :meth:`_engine.Engine.begin` + + """ + + return self._connection_cls(self) + + def raw_connection(self) -> PoolProxiedConnection: + """Return a "raw" DBAPI connection from the connection pool. + + The returned object is a proxied version of the DBAPI + connection object used by the underlying driver in use. + The object will have all the same behavior as the real DBAPI + connection, except that its ``close()`` method will result in the + connection being returned to the pool, rather than being closed + for real. + + This method provides direct DBAPI connection access for + special situations when the API provided by + :class:`_engine.Connection` + is not needed. When a :class:`_engine.Connection` object is already + present, the DBAPI connection is available using + the :attr:`_engine.Connection.connection` accessor. + + .. seealso:: + + :ref:`dbapi_connections` + + """ + return self.pool.connect() + + +class OptionEngineMixin(log.Identified): + _sa_propagate_class_events = False + + dispatch: dispatcher[ConnectionEventsTarget] + _compiled_cache: Optional[CompiledCacheType] + dialect: Dialect + pool: Pool + url: URL + hide_parameters: bool + echo: log.echo_property + + def __init__( + self, proxied: Engine, execution_options: CoreExecuteOptionsParameter + ): + self._proxied = proxied + self.url = proxied.url + self.dialect = proxied.dialect + self.logging_name = proxied.logging_name + self.echo = proxied.echo + self._compiled_cache = proxied._compiled_cache + self.hide_parameters = proxied.hide_parameters + log.instance_logger(self, echoflag=self.echo) + + # note: this will propagate events that are assigned to the parent + # engine after this OptionEngine is created. Since we share + # the events of the parent we also disallow class-level events + # to apply to the OptionEngine class directly. 
+ # + # the other way this can work would be to transfer existing + # events only, using: + # self.dispatch._update(proxied.dispatch) + # + # that might be more appropriate however it would be a behavioral + # change for logic that assigns events to the parent engine and + # would like it to take effect for the already-created sub-engine. + self.dispatch = self.dispatch._join(proxied.dispatch) + + self._execution_options = proxied._execution_options + self.update_execution_options(**execution_options) + + def update_execution_options(self, **opt: Any) -> None: + raise NotImplementedError() + + if not typing.TYPE_CHECKING: + # https://github.com/python/typing/discussions/1095 + + @property + def pool(self) -> Pool: + return self._proxied.pool + + @pool.setter + def pool(self, pool: Pool) -> None: + self._proxied.pool = pool + + @property + def _has_events(self) -> bool: + return self._proxied._has_events or self.__dict__.get( + "_has_events", False + ) + + @_has_events.setter + def _has_events(self, value: bool) -> None: + self.__dict__["_has_events"] = value + + +class OptionEngine(OptionEngineMixin, Engine): + def update_execution_options(self, **opt: Any) -> None: + Engine.update_execution_options(self, **opt) + + +Engine._option_cls = OptionEngine diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/characteristics.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/characteristics.py new file mode 100644 index 00000000..97b17fbd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/characteristics.py @@ -0,0 +1,155 @@ +# engine/characteristics.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from __future__ import annotations + +import abc +import typing +from typing import Any +from typing import ClassVar + +if typing.TYPE_CHECKING: + from .base import Connection + from .interfaces import DBAPIConnection + from .interfaces import Dialect + + +class ConnectionCharacteristic(abc.ABC): + """An abstract base for an object that can set, get and reset a + per-connection characteristic, typically one that gets reset when the + connection is returned to the connection pool. + + transaction isolation is the canonical example, and the + ``IsolationLevelCharacteristic`` implementation provides this for the + ``DefaultDialect``. + + The ``ConnectionCharacteristic`` class should call upon the ``Dialect`` for + the implementation of each method. The object exists strictly to serve as + a dialect visitor that can be placed into the + ``DefaultDialect.connection_characteristics`` dictionary where it will take + effect for calls to :meth:`_engine.Connection.execution_options` and + related APIs. + + .. versionadded:: 1.4 + + """ + + __slots__ = () + + transactional: ClassVar[bool] = False + + @abc.abstractmethod + def reset_characteristic( + self, dialect: Dialect, dbapi_conn: DBAPIConnection + ) -> None: + """Reset the characteristic on the DBAPI connection to its default + value.""" + + @abc.abstractmethod + def set_characteristic( + self, dialect: Dialect, dbapi_conn: DBAPIConnection, value: Any + ) -> None: + """set characteristic on the DBAPI connection to a given value.""" + + def set_connection_characteristic( + self, + dialect: Dialect, + conn: Connection, + dbapi_conn: DBAPIConnection, + value: Any, + ) -> None: + """set characteristic on the :class:`_engine.Connection` to a given + value. + + .. 
versionadded:: 2.0.30 - added to support elements that are local + to the :class:`_engine.Connection` itself. + + """ + self.set_characteristic(dialect, dbapi_conn, value) + + @abc.abstractmethod + def get_characteristic( + self, dialect: Dialect, dbapi_conn: DBAPIConnection + ) -> Any: + """Given a DBAPI connection, get the current value of the + characteristic. + + """ + + def get_connection_characteristic( + self, dialect: Dialect, conn: Connection, dbapi_conn: DBAPIConnection + ) -> Any: + """Given a :class:`_engine.Connection`, get the current value of the + characteristic. + + .. versionadded:: 2.0.30 - added to support elements that are local + to the :class:`_engine.Connection` itself. + + """ + return self.get_characteristic(dialect, dbapi_conn) + + +class IsolationLevelCharacteristic(ConnectionCharacteristic): + """Manage the isolation level on a DBAPI connection""" + + transactional: ClassVar[bool] = True + + def reset_characteristic( + self, dialect: Dialect, dbapi_conn: DBAPIConnection + ) -> None: + dialect.reset_isolation_level(dbapi_conn) + + def set_characteristic( + self, dialect: Dialect, dbapi_conn: DBAPIConnection, value: Any + ) -> None: + dialect._assert_and_set_isolation_level(dbapi_conn, value) + + def get_characteristic( + self, dialect: Dialect, dbapi_conn: DBAPIConnection + ) -> Any: + return dialect.get_isolation_level(dbapi_conn) + + +class LoggingTokenCharacteristic(ConnectionCharacteristic): + """Manage the 'logging_token' option of a :class:`_engine.Connection`. + + .. versionadded:: 2.0.30 + + """ + + transactional: ClassVar[bool] = False + + def reset_characteristic( + self, dialect: Dialect, dbapi_conn: DBAPIConnection + ) -> None: + pass + + def set_characteristic( + self, dialect: Dialect, dbapi_conn: DBAPIConnection, value: Any + ) -> None: + raise NotImplementedError() + + def set_connection_characteristic( + self, + dialect: Dialect, + conn: Connection, + dbapi_conn: DBAPIConnection, + value: Any, + ) -> None: + if value: + conn._message_formatter = lambda msg: "[%s] %s" % (value, msg) + else: + del conn._message_formatter + + def get_characteristic( + self, dialect: Dialect, dbapi_conn: DBAPIConnection + ) -> Any: + raise NotImplementedError() + + def get_connection_characteristic( + self, dialect: Dialect, conn: Connection, dbapi_conn: DBAPIConnection + ) -> Any: + return conn._execution_options.get("logging_token", None) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/create.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/create.py new file mode 100644 index 00000000..74a3cf80 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/create.py @@ -0,0 +1,875 @@ +# engine/create.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import inspect +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import List +from typing import Optional +from typing import overload +from typing import Type +from typing import Union + +from . import base +from . import url as _url +from .interfaces import DBAPIConnection +from .mock import create_mock_engine +from .. import event +from .. import exc +from .. 
import util
+from ..pool import _AdhocProxiedConnection
+from ..pool import ConnectionPoolEntry
+from ..sql import compiler
+from ..util import immutabledict
+
+if typing.TYPE_CHECKING:
+    from .base import Engine
+    from .interfaces import _ExecuteOptions
+    from .interfaces import _ParamStyle
+    from .interfaces import IsolationLevel
+    from .url import URL
+    from ..log import _EchoFlagType
+    from ..pool import _CreatorFnType
+    from ..pool import _CreatorWRecFnType
+    from ..pool import _ResetStyleArgType
+    from ..pool import Pool
+    from ..util.typing import Literal
+
+
+@overload
+def create_engine(
+    url: Union[str, URL],
+    *,
+    connect_args: Dict[Any, Any] = ...,
+    convert_unicode: bool = ...,
+    creator: Union[_CreatorFnType, _CreatorWRecFnType] = ...,
+    echo: _EchoFlagType = ...,
+    echo_pool: _EchoFlagType = ...,
+    enable_from_linting: bool = ...,
+    execution_options: _ExecuteOptions = ...,
+    future: Literal[True],
+    hide_parameters: bool = ...,
+    implicit_returning: Literal[True] = ...,
+    insertmanyvalues_page_size: int = ...,
+    isolation_level: IsolationLevel = ...,
+    json_deserializer: Callable[..., Any] = ...,
+    json_serializer: Callable[..., Any] = ...,
+    label_length: Optional[int] = ...,
+    logging_name: str = ...,
+    max_identifier_length: Optional[int] = ...,
+    max_overflow: int = ...,
+    module: Optional[Any] = ...,
+    paramstyle: Optional[_ParamStyle] = ...,
+    pool: Optional[Pool] = ...,
+    poolclass: Optional[Type[Pool]] = ...,
+    pool_logging_name: str = ...,
+    pool_pre_ping: bool = ...,
+    pool_size: int = ...,
+    pool_recycle: int = ...,
+    pool_reset_on_return: Optional[_ResetStyleArgType] = ...,
+    pool_timeout: float = ...,
+    pool_use_lifo: bool = ...,
+    plugins: List[str] = ...,
+    query_cache_size: int = ...,
+    use_insertmanyvalues: bool = ...,
+    **kwargs: Any,
+) -> Engine: ...
+
+
+@overload
+def create_engine(url: Union[str, URL], **kwargs: Any) -> Engine: ...
+
+
+@util.deprecated_params(
+    strategy=(
+        "1.4",
+        "The :paramref:`_sa.create_engine.strategy` keyword is deprecated, "
+        "and the only argument accepted is 'mock'; please use "
+        ":func:`.create_mock_engine` going forward.  For general "
+        "customization of create_engine which may have been accomplished "
+        "using strategies, see :class:`.CreateEnginePlugin`.",
+    ),
+    empty_in_strategy=(
+        "1.4",
+        "The :paramref:`_sa.create_engine.empty_in_strategy` keyword is "
+        "deprecated, and no longer has any effect.  All IN expressions "
+        "are now rendered using "
+        'the "expanding parameter" strategy which renders a set of bound '
+        'expressions, or an "empty set" SELECT, at statement execution '
+        "time.",
+    ),
+    implicit_returning=(
+        "2.0",
+        "The :paramref:`_sa.create_engine.implicit_returning` parameter "
+        "is deprecated and will be removed in a future release. ",
+    ),
+)
+def create_engine(url: Union[str, _url.URL], **kwargs: Any) -> Engine:
+    """Create a new :class:`_engine.Engine` instance.
+
+    The standard calling form is to send the :ref:`URL <database_urls>` as
+    the first positional argument, usually a string
+    that indicates database dialect and connection arguments::
+
+        engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")
+
+    .. note::
+
+        Please review :ref:`database_urls` for general guidelines in
+        composing URL strings.  In particular, special characters, such
+        as those often part of passwords, must be URL encoded to be
+        properly parsed.
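+
+    For instance, a password containing special characters can be escaped
+    before the URL string is composed; a brief sketch (the credentials
+    shown are illustrative, and :func:`urllib.parse.quote_plus` is one
+    common way to perform the escaping)::
+
+        from urllib.parse import quote_plus
+
+        password = quote_plus("p@ss/word")
+        engine = create_engine(
+            "postgresql+psycopg2://scott:%s@localhost/test" % password
+        )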
+ + Additional keyword arguments may then follow it which + establish various options on the resulting :class:`_engine.Engine` + and its underlying :class:`.Dialect` and :class:`_pool.Pool` + constructs:: + + engine = create_engine("mysql+mysqldb://scott:tiger@hostname/dbname", + pool_recycle=3600, echo=True) + + The string form of the URL is + ``dialect[+driver]://user:password@host/dbname[?key=value..]``, where + ``dialect`` is a database name such as ``mysql``, ``oracle``, + ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as + ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively, + the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`. + + ``**kwargs`` takes a wide variety of options which are routed + towards their appropriate components. Arguments may be specific to + the :class:`_engine.Engine`, the underlying :class:`.Dialect`, + as well as the + :class:`_pool.Pool`. Specific dialects also accept keyword arguments that + are unique to that dialect. Here, we describe the parameters + that are common to most :func:`_sa.create_engine()` usage. + + Once established, the newly resulting :class:`_engine.Engine` will + request a connection from the underlying :class:`_pool.Pool` once + :meth:`_engine.Engine.connect` is called, or a method which depends on it + such as :meth:`_engine.Engine.execute` is invoked. The + :class:`_pool.Pool` in turn + will establish the first actual DBAPI connection when this request + is received. The :func:`_sa.create_engine` call itself does **not** + establish any actual DBAPI connections directly. + + .. seealso:: + + :doc:`/core/engines` + + :doc:`/dialects/index` + + :ref:`connections_toplevel` + + :param connect_args: a dictionary of options which will be + passed directly to the DBAPI's ``connect()`` method as + additional keyword arguments. See the example + at :ref:`custom_dbapi_args`. + + :param creator: a callable which returns a DBAPI connection. + This creation function will be passed to the underlying + connection pool and will be used to create all new database + connections. Usage of this function causes connection + parameters specified in the URL argument to be bypassed. + + This hook is not as flexible as the newer + :meth:`_events.DialectEvents.do_connect` hook which allows complete + control over how a connection is made to the database, given the full + set of URL arguments and state beforehand. + + .. seealso:: + + :meth:`_events.DialectEvents.do_connect` - event hook that allows + full control over DBAPI connection mechanics. + + :ref:`custom_dbapi_args` + + :param echo=False: if True, the Engine will log all statements + as well as a ``repr()`` of their parameter lists to the default log + handler, which defaults to ``sys.stdout`` for output. If set to the + string ``"debug"``, result rows will be printed to the standard output + as well. The ``echo`` attribute of ``Engine`` can be modified at any + time to turn logging on and off; direct control of logging is also + available using the standard Python ``logging`` module. + + .. seealso:: + + :ref:`dbengine_logging` - further detail on how to configure + logging. + + + :param echo_pool=False: if True, the connection pool will log + informational output such as when connections are invalidated + as well as when connections are recycled to the default log handler, + which defaults to ``sys.stdout`` for output. If set to the string + ``"debug"``, the logging will include pool checkouts and checkins. 
+ Direct control of logging is also available using the standard Python + ``logging`` module. + + .. seealso:: + + :ref:`dbengine_logging` - further detail on how to configure + logging. + + + :param empty_in_strategy: No longer used; SQLAlchemy now uses + "empty set" behavior for IN in all cases. + + :param enable_from_linting: defaults to True. Will emit a warning + if a given SELECT statement is found to have un-linked FROM elements + which would cause a cartesian product. + + .. versionadded:: 1.4 + + .. seealso:: + + :ref:`change_4737` + + :param execution_options: Dictionary execution options which will + be applied to all connections. See + :meth:`~sqlalchemy.engine.Connection.execution_options` + + :param future: Use the 2.0 style :class:`_engine.Engine` and + :class:`_engine.Connection` API. + + As of SQLAlchemy 2.0, this parameter is present for backwards + compatibility only and must remain at its default value of ``True``. + + The :paramref:`_sa.create_engine.future` parameter will be + deprecated in a subsequent 2.x release and eventually removed. + + .. versionadded:: 1.4 + + .. versionchanged:: 2.0 All :class:`_engine.Engine` objects are + "future" style engines and there is no longer a ``future=False`` + mode of operation. + + .. seealso:: + + :ref:`migration_20_toplevel` + + :param hide_parameters: Boolean, when set to True, SQL statement parameters + will not be displayed in INFO logging nor will they be formatted into + the string representation of :class:`.StatementError` objects. + + .. versionadded:: 1.3.8 + + .. seealso:: + + :ref:`dbengine_logging` - further detail on how to configure + logging. + + :param implicit_returning=True: Legacy parameter that may only be set + to True. In SQLAlchemy 2.0, this parameter does nothing. In order to + disable "implicit returning" for statements invoked by the ORM, + configure this on a per-table basis using the + :paramref:`.Table.implicit_returning` parameter. + + + :param insertmanyvalues_page_size: number of rows to format into an + INSERT statement when the statement uses "insertmanyvalues" mode, which is + a paged form of bulk insert that is used for many backends when using + :term:`executemany` execution typically in conjunction with RETURNING. + Defaults to 1000, but may also be subject to dialect-specific limiting + factors which may override this value on a per-statement basis. + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`engine_insertmanyvalues` + + :ref:`engine_insertmanyvalues_page_size` + + :paramref:`_engine.Connection.execution_options.insertmanyvalues_page_size` + + :param isolation_level: optional string name of an isolation level + which will be set on all new connections unconditionally. + Isolation levels are typically some subset of the string names + ``"SERIALIZABLE"``, ``"REPEATABLE READ"``, + ``"READ COMMITTED"``, ``"READ UNCOMMITTED"`` and ``"AUTOCOMMIT"`` + based on backend. + + The :paramref:`_sa.create_engine.isolation_level` parameter is + in contrast to the + :paramref:`.Connection.execution_options.isolation_level` + execution option, which may be set on an individual + :class:`.Connection`, as well as the same parameter passed to + :meth:`.Engine.execution_options`, where it may be used to create + multiple engines with different isolation levels that share a common + connection pool and dialect. + + .. 
versionchanged:: 2.0 The
+           :paramref:`_sa.create_engine.isolation_level`
+           parameter has been generalized to work on all dialects which
+           support the concept of isolation level, and is provided as a
+           more succinct, up front configuration switch in contrast to the
+           execution option which is more of an ad-hoc programmatic option.
+
+        .. seealso::
+
+            :ref:`dbapi_autocommit`
+
+    :param json_deserializer: for dialects that support the
+        :class:`_types.JSON`
+        datatype, this is a Python callable that will convert a JSON string
+        to a Python object.  By default, the Python ``json.loads`` function
+        is used.
+
+        .. versionchanged:: 1.3.7  The SQLite dialect renamed this from
+           ``_json_deserializer``.
+
+    :param json_serializer: for dialects that support the
+        :class:`_types.JSON`
+        datatype, this is a Python callable that will render a given object
+        as JSON.  By default, the Python ``json.dumps`` function is used.
+
+        .. versionchanged:: 1.3.7  The SQLite dialect renamed this from
+           ``_json_serializer``.
+
+
+    :param label_length=None: optional integer value which limits
+        the size of dynamically generated column labels to that many
+        characters.  If less than 6, labels are generated as
+        "_(counter)".  If ``None``, the value of
+        ``dialect.max_identifier_length``, which may be affected via the
+        :paramref:`_sa.create_engine.max_identifier_length` parameter,
+        is used instead.  The value of
+        :paramref:`_sa.create_engine.label_length`
+        may not be larger than that of
+        :paramref:`_sa.create_engine.max_identifier_length`.
+
+        .. seealso::
+
+            :paramref:`_sa.create_engine.max_identifier_length`
+
+    :param logging_name: String identifier which will be used within
+        the "name" field of logging records generated within the
+        "sqlalchemy.engine" logger.  Defaults to a hexstring of the
+        object's id.
+
+        .. seealso::
+
+            :ref:`dbengine_logging` - further detail on how to configure
+            logging.
+
+            :paramref:`_engine.Connection.execution_options.logging_token`
+
+    :param max_identifier_length: integer; override the
+        max_identifier_length determined by the dialect.  If ``None`` or
+        zero, has no effect.  This is the database's configured maximum
+        number of characters that may be used in a SQL identifier such as
+        a table name, column name, or label name.  All dialects determine
+        this value automatically, however in the case of a new database
+        version for which this value has changed but SQLAlchemy's dialect
+        has not been adjusted, the value may be passed here.
+
+        .. versionadded:: 1.3.9
+
+        .. seealso::
+
+            :paramref:`_sa.create_engine.label_length`
+
+    :param max_overflow=10: the number of connections to allow in
+        connection pool "overflow", that is connections that can be
+        opened above and beyond the pool_size setting, which defaults
+        to five.  This is only used with
+        :class:`~sqlalchemy.pool.QueuePool`.
+
+    :param module=None: reference to a Python module object (the module
+        itself, not its string name).  Specifies an alternate DBAPI module
+        to be used by the engine's dialect.  Each sub-dialect references a
+        specific DBAPI which will be imported before first connect.  This
+        parameter causes the import to be bypassed, and the given module
+        to be used instead.  Can be used for testing of DBAPIs as well as
+        to inject "mock" DBAPI implementations into the
+        :class:`_engine.Engine`.
+
+    :param paramstyle=None: The `paramstyle <https://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
+        to use when rendering bound parameters.  This style defaults to
+        the one recommended by the DBAPI itself, which is retrieved from
+        the ``.paramstyle`` attribute of the DBAPI.
+        However, most DBAPIs accept
+        more than one paramstyle, and in particular it may be desirable
+        to change a "named" paramstyle into a "positional" one, or vice
+        versa.  When this attribute is passed, it should be one of the
+        values ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
+        ``"pyformat"``, and should correspond to a parameter style known
+        to be supported by the DBAPI in use.
+
+    :param pool=None: an already-constructed instance of
+        :class:`~sqlalchemy.pool.Pool`, such as a
+        :class:`~sqlalchemy.pool.QueuePool` instance.  If non-None, this
+        pool will be used directly as the underlying connection pool
+        for the engine, bypassing whatever connection parameters are
+        present in the URL argument.  For information on constructing
+        connection pools manually, see :ref:`pooling_toplevel`.
+
+    :param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
+        subclass, which will be used to create a connection pool
+        instance using the connection parameters given in the URL.  Note
+        this differs from ``pool`` in that you don't actually
+        instantiate the pool in this case, you just indicate what type
+        of pool to be used.
+
+    :param pool_logging_name: String identifier which will be used within
+        the "name" field of logging records generated within the
+        "sqlalchemy.pool" logger.  Defaults to a hexstring of the object's
+        id.
+
+        .. seealso::
+
+            :ref:`dbengine_logging` - further detail on how to configure
+            logging.
+
+    :param pool_pre_ping: boolean, if True will enable the connection pool
+        "pre-ping" feature that tests connections for liveness upon
+        each checkout.
+
+        .. versionadded:: 1.2
+
+        .. seealso::
+
+            :ref:`pool_disconnects_pessimistic`
+
+    :param pool_size=5: the number of connections to keep open
+        inside the connection pool.  This is used with
+        :class:`~sqlalchemy.pool.QueuePool` as
+        well as :class:`~sqlalchemy.pool.SingletonThreadPool`.  With
+        :class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
+        of 0 indicates no limit; to disable pooling, set ``poolclass`` to
+        :class:`~sqlalchemy.pool.NullPool` instead.
+
+    :param pool_recycle=-1: this setting causes the pool to recycle
+        connections after the given number of seconds has passed.  It
+        defaults to -1, or no timeout.  For example, setting to 3600
+        means connections will be recycled after one hour.  Note that
+        MySQL in particular will disconnect automatically if no
+        activity is detected on a connection for eight hours (although
+        this is configurable with the MySQLDB connection itself and the
+        server configuration as well).
+
+        .. seealso::
+
+            :ref:`pool_setting_recycle`
+
+    :param pool_reset_on_return='rollback': set the
+        :paramref:`_pool.Pool.reset_on_return` parameter of the underlying
+        :class:`_pool.Pool` object, which can be set to the values
+        ``"rollback"``, ``"commit"``, or ``None``.
+
+        .. seealso::
+
+            :ref:`pool_reset_on_return`
+
+    :param pool_timeout=30: number of seconds to wait before giving
+        up on getting a connection from the pool.  This is only used
+        with :class:`~sqlalchemy.pool.QueuePool`.  This can be a float
+        but is subject to the limitations of Python time functions which
+        may not be reliable in the tens of milliseconds.
+
+        .. note: don't use 30.0 above, it seems to break with the :param
+           tag
+
+    :param pool_use_lifo=False: use LIFO (last-in-first-out) when
+        retrieving connections from :class:`.QueuePool` instead of FIFO
+        (first-in-first-out).  Using LIFO, a server-side timeout scheme
+        can reduce the number of connections used during non-peak periods
+        of use.  When planning for server-side timeouts, ensure that a
+        recycle or pre-ping strategy is in use to gracefully handle stale
+        connections.
+
+        .. versionadded:: 1.3
+
+        .. seealso::
+
+            :ref:`pool_use_lifo`
+
+            :ref:`pool_disconnects`
+
+    :param plugins: string list of plugin names to load.  See
+        :class:`.CreateEnginePlugin` for background.
+
+        .. versionadded:: 1.2.3
+
+    :param query_cache_size: size of the cache used to cache the SQL
+        string form of queries.  Set to zero to disable caching.
+
+        The cache is pruned of its least recently used items when its size
+        reaches N * 1.5.  Defaults to 500, meaning the cache will always
+        store at least 500 SQL statements when filled, and will grow up to
+        750 items at which point it is pruned back down to 500 by removing
+        the 250 least recently used items.
+
+        Caching is accomplished on a per-statement basis by generating a
+        cache key that represents the statement's structure, then
+        generating string SQL for the current dialect only if that key is
+        not present in the cache.  All statements support caching, however
+        some features such as an INSERT with a large set of parameters
+        will intentionally bypass the cache.  SQL logging will indicate
+        statistics for each statement whether or not it was pulled from
+        the cache.
+
+        .. note:: some ORM functions related to unit-of-work persistence
+           as well as some attribute loading strategies will make use of
+           individual per-mapper caches outside of the main cache.
+
+
+        .. seealso::
+
+            :ref:`sql_caching`
+
+        .. versionadded:: 1.4
+
+    :param use_insertmanyvalues: True by default, use the
+        "insertmanyvalues" execution style for INSERT..RETURNING
+        statements by default.
+
+        .. versionadded:: 2.0
+
+        .. seealso::
+
+            :ref:`engine_insertmanyvalues`
+
+    """  # noqa
+
+    if "strategy" in kwargs:
+        strat = kwargs.pop("strategy")
+        if strat == "mock":
+            # this case is deprecated
+            return create_mock_engine(url, **kwargs)  # type: ignore
+        else:
+            raise exc.ArgumentError("unknown strategy: %r" % strat)
+
+    kwargs.pop("empty_in_strategy", None)
+
+    # create url.URL object
+    u = _url.make_url(url)
+
+    u, plugins, kwargs = u._instantiate_plugins(kwargs)
+
+    entrypoint = u._get_entrypoint()
+    _is_async = kwargs.pop("_is_async", False)
+    if _is_async:
+        dialect_cls = entrypoint.get_async_dialect_cls(u)
+    else:
+        dialect_cls = entrypoint.get_dialect_cls(u)
+
+    if kwargs.pop("_coerce_config", False):
+
+        def pop_kwarg(key: str, default: Optional[Any] = None) -> Any:
+            value = kwargs.pop(key, default)
+            if key in dialect_cls.engine_config_types:
+                value = dialect_cls.engine_config_types[key](value)
+            return value
+
+    else:
+        pop_kwarg = kwargs.pop  # type: ignore
+
+    dialect_args = {}
+    # consume dialect arguments from kwargs
+    for k in util.get_cls_kwargs(dialect_cls):
+        if k in kwargs:
+            dialect_args[k] = pop_kwarg(k)
+
+    dbapi = kwargs.pop("module", None)
+    if dbapi is None:
+        dbapi_args = {}
+
+        if "import_dbapi" in dialect_cls.__dict__:
+            dbapi_meth = dialect_cls.import_dbapi
+
+        elif hasattr(dialect_cls, "dbapi") and inspect.ismethod(
+            dialect_cls.dbapi
+        ):
+            util.warn_deprecated(
+                "The dbapi() classmethod on dialect classes has been "
+                "renamed to import_dbapi().  Implement an import_dbapi() "
+                f"classmethod directly on class {dialect_cls} to remove this "
+                "warning; the old .dbapi() classmethod may be maintained for "
+                "backwards compatibility.",
+                "2.0",
+            )
+            dbapi_meth = dialect_cls.dbapi
+        else:
+            dbapi_meth = dialect_cls.import_dbapi
+
+        for k in util.get_func_kwargs(dbapi_meth):
+            if k in kwargs:
+                dbapi_args[k] = pop_kwarg(k)
+        dbapi = dbapi_meth(**dbapi_args)
+
+    dialect_args["dbapi"] = dbapi
+
+    dialect_args.setdefault("compiler_linting", compiler.NO_LINTING)
+    enable_from_linting = kwargs.pop("enable_from_linting", True)
+    if enable_from_linting:
+        dialect_args["compiler_linting"] |= compiler.COLLECT_CARTESIAN_PRODUCTS
+
+    for plugin in plugins:
+        plugin.handle_dialect_kwargs(dialect_cls, dialect_args)
+
+    # create dialect
+    dialect = dialect_cls(**dialect_args)
+
+    # assemble connection arguments
+    (cargs_tup, cparams) = dialect.create_connect_args(u)
+    cparams.update(pop_kwarg("connect_args", {}))
+
+    if "async_fallback" in cparams and util.asbool(cparams["async_fallback"]):
+        util.warn_deprecated(
+            "The async_fallback dialect argument is deprecated and will be "
+            "removed in SQLAlchemy 2.1.",
+            "2.0",
+        )
+
+    cargs = list(cargs_tup)  # allow mutability
+
+    # look for existing pool or create
+    pool = pop_kwarg("pool", None)
+    if pool is None:
+
+        def connect(
+            connection_record: Optional[ConnectionPoolEntry] = None,
+        ) -> DBAPIConnection:
+            if dialect._has_events:
+                for fn in dialect.dispatch.do_connect:
+                    connection = cast(
+                        DBAPIConnection,
+                        fn(dialect, connection_record, cargs, cparams),
+                    )
+                    if connection is not None:
+                        return connection
+
+            return dialect.connect(*cargs, **cparams)
+
+        creator = pop_kwarg("creator", connect)
+
+        poolclass = pop_kwarg("poolclass", None)
+        if poolclass is None:
+            poolclass = dialect.get_dialect_pool_class(u)
+        pool_args = {"dialect": dialect}
+
+        # consume pool arguments from kwargs, translating a few of
+        # the arguments
+        for k in util.get_cls_kwargs(poolclass):
+            tk = _pool_translate_kwargs.get(k, k)
+            if tk in kwargs:
+                pool_args[k] = pop_kwarg(tk)
+
+        for plugin in plugins:
+            plugin.handle_pool_kwargs(poolclass, pool_args)
+
+        pool = poolclass(creator, **pool_args)
+    else:
+        pool._dialect = dialect
+
+    if (
+        hasattr(pool, "_is_asyncio")
+        and pool._is_asyncio is not dialect.is_async
+    ):
+        raise exc.ArgumentError(
+            f"Pool class {pool.__class__.__name__} cannot be "
+            f"used with {'non-' if not dialect.is_async else ''}"
+            "asyncio engine",
+            code="pcls",
+        )
+
+    # create engine.
+    if not pop_kwarg("future", True):
+        raise exc.ArgumentError(
+            "The 'future' parameter passed to "
+            "create_engine() may only be set to True."
+        )
+
+    engineclass = base.Engine
+
+    engine_args = {}
+    for k in util.get_cls_kwargs(engineclass):
+        if k in kwargs:
+            engine_args[k] = pop_kwarg(k)
+
+    # internal flags used by the test suite for instrumenting / proxying
+    # engines with mocks etc.
+    _initialize = kwargs.pop("_initialize", True)
+
+    # all kwargs should be consumed
+    if kwargs:
+        raise TypeError(
+            "Invalid argument(s) %s sent to create_engine(), "
+            "using configuration %s/%s/%s.  Please check that the "
+            "keyword arguments are appropriate for this combination "
+            "of components."
+ % ( + ",".join("'%s'" % k for k in kwargs), + dialect.__class__.__name__, + pool.__class__.__name__, + engineclass.__name__, + ) + ) + + engine = engineclass(pool, dialect, u, **engine_args) + + if _initialize: + do_on_connect = dialect.on_connect_url(u) + if do_on_connect: + + def on_connect( + dbapi_connection: DBAPIConnection, + connection_record: ConnectionPoolEntry, + ) -> None: + assert do_on_connect is not None + do_on_connect(dbapi_connection) + + event.listen(pool, "connect", on_connect) + + builtin_on_connect = dialect._builtin_onconnect() + if builtin_on_connect: + event.listen(pool, "connect", builtin_on_connect) + + def first_connect( + dbapi_connection: DBAPIConnection, + connection_record: ConnectionPoolEntry, + ) -> None: + c = base.Connection( + engine, + connection=_AdhocProxiedConnection( + dbapi_connection, connection_record + ), + _has_events=False, + # reconnecting will be a reentrant condition, so if the + # connection goes away, Connection is then closed + _allow_revalidate=False, + # dont trigger the autobegin sequence + # within the up front dialect checks + _allow_autobegin=False, + ) + c._execution_options = util.EMPTY_DICT + + try: + dialect.initialize(c) + finally: + # note that "invalidated" and "closed" are mutually + # exclusive in 1.4 Connection. + if not c.invalidated and not c.closed: + # transaction is rolled back otherwise, tested by + # test/dialect/postgresql/test_dialect.py + # ::MiscBackendTest::test_initial_transaction_state + dialect.do_rollback(c.connection) + + # previously, the "first_connect" event was used here, which was then + # scaled back if the "on_connect" handler were present. now, + # since "on_connect" is virtually always present, just use + # "connect" event with once_unless_exception in all cases so that + # the connection event flow is consistent in all cases. + event.listen( + pool, "connect", first_connect, _once_unless_exception=True + ) + + dialect_cls.engine_created(engine) + if entrypoint is not dialect_cls: + entrypoint.engine_created(engine) + + for plugin in plugins: + plugin.engine_created(engine) + + return engine + + +def engine_from_config( + configuration: Dict[str, Any], prefix: str = "sqlalchemy.", **kwargs: Any +) -> Engine: + """Create a new Engine instance using a configuration dictionary. + + The dictionary is typically produced from a config file. + + The keys of interest to ``engine_from_config()`` should be prefixed, e.g. + ``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument + indicates the prefix to be searched for. Each matching key (after the + prefix is stripped) is treated as though it were the corresponding keyword + argument to a :func:`_sa.create_engine` call. + + The only required key is (assuming the default prefix) ``sqlalchemy.url``, + which provides the :ref:`database URL `. + + A select set of keyword arguments will be "coerced" to their + expected type based on string values. The set of arguments + is extensible per-dialect using the ``engine_config_types`` accessor. + + :param configuration: A dictionary (typically produced from a config file, + but this is not a requirement). Items whose keys start with the value + of 'prefix' will have that prefix stripped, and will then be passed to + :func:`_sa.create_engine`. + + :param prefix: Prefix to match and then strip from keys + in 'configuration'. + + :param kwargs: Each keyword argument to ``engine_from_config()`` itself + overrides the corresponding item taken from the 'configuration' + dictionary. 
Keyword arguments should *not* be prefixed. + + """ + + options = { + key[len(prefix) :]: configuration[key] + for key in configuration + if key.startswith(prefix) + } + options["_coerce_config"] = True + options.update(kwargs) + url = options.pop("url") + return create_engine(url, **options) + + +@overload +def create_pool_from_url( + url: Union[str, URL], + *, + poolclass: Optional[Type[Pool]] = ..., + logging_name: str = ..., + pre_ping: bool = ..., + size: int = ..., + recycle: int = ..., + reset_on_return: Optional[_ResetStyleArgType] = ..., + timeout: float = ..., + use_lifo: bool = ..., + **kwargs: Any, +) -> Pool: ... + + +@overload +def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool: ... + + +def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool: + """Create a pool instance from the given url. + + If ``poolclass`` is not provided the pool class used + is selected using the dialect specified in the URL. + + The arguments passed to :func:`_sa.create_pool_from_url` are + identical to the pool argument passed to the :func:`_sa.create_engine` + function. + + .. versionadded:: 2.0.10 + """ + + for key in _pool_translate_kwargs: + if key in kwargs: + kwargs[_pool_translate_kwargs[key]] = kwargs.pop(key) + + engine = create_engine(url, **kwargs, _initialize=False) + return engine.pool + + +_pool_translate_kwargs = immutabledict( + { + "logging_name": "pool_logging_name", + "echo": "echo_pool", + "timeout": "pool_timeout", + "recycle": "pool_recycle", + "events": "pool_events", # deprecated + "reset_on_return": "pool_reset_on_return", + "pre_ping": "pool_pre_ping", + "use_lifo": "pool_use_lifo", + } +) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/cursor.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/cursor.py new file mode 100644 index 00000000..b83cb451 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/cursor.py @@ -0,0 +1,2181 @@ +# engine/cursor.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +"""Define cursor-specific result set constructs including +:class:`.CursorResult`.""" + + +from __future__ import annotations + +import collections +import functools +import operator +import typing +from typing import Any +from typing import cast +from typing import ClassVar +from typing import Dict +from typing import Iterator +from typing import List +from typing import Mapping +from typing import NoReturn +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from .result import IteratorResult +from .result import MergedResult +from .result import Result +from .result import ResultMetaData +from .result import SimpleResultMetaData +from .result import tuplegetter +from .row import Row +from .. import exc +from .. 
import util +from ..sql import elements +from ..sql import sqltypes +from ..sql import util as sql_util +from ..sql.base import _generative +from ..sql.compiler import ResultColumnsEntry +from ..sql.compiler import RM_NAME +from ..sql.compiler import RM_OBJECTS +from ..sql.compiler import RM_RENDERED_NAME +from ..sql.compiler import RM_TYPE +from ..sql.type_api import TypeEngine +from ..util import compat +from ..util.typing import Literal +from ..util.typing import Self + + +if typing.TYPE_CHECKING: + from .base import Connection + from .default import DefaultExecutionContext + from .interfaces import _DBAPICursorDescription + from .interfaces import DBAPICursor + from .interfaces import Dialect + from .interfaces import ExecutionContext + from .result import _KeyIndexType + from .result import _KeyMapRecType + from .result import _KeyMapType + from .result import _KeyType + from .result import _ProcessorsType + from .result import _TupleGetterType + from ..sql.type_api import _ResultProcessorType + + +_T = TypeVar("_T", bound=Any) + + +# metadata entry tuple indexes. +# using raw tuple is faster than namedtuple. +# these match up to the positions in +# _CursorKeyMapRecType +MD_INDEX: Literal[0] = 0 +"""integer index in cursor.description + +""" + +MD_RESULT_MAP_INDEX: Literal[1] = 1 +"""integer index in compiled._result_columns""" + +MD_OBJECTS: Literal[2] = 2 +"""other string keys and ColumnElement obj that can match. + +This comes from compiler.RM_OBJECTS / compiler.ResultColumnsEntry.objects + +""" + +MD_LOOKUP_KEY: Literal[3] = 3 +"""string key we usually expect for key-based lookup + +this comes from compiler.RM_NAME / compiler.ResultColumnsEntry.name +""" + + +MD_RENDERED_NAME: Literal[4] = 4 +"""name that is usually in cursor.description + +this comes from compiler.RENDERED_NAME / compiler.ResultColumnsEntry.keyname +""" + + +MD_PROCESSOR: Literal[5] = 5 +"""callable to process a result value into a row""" + +MD_UNTRANSLATED: Literal[6] = 6 +"""raw name from cursor.description""" + + +_CursorKeyMapRecType = Tuple[ + Optional[int], # MD_INDEX, None means the record is ambiguously named + int, # MD_RESULT_MAP_INDEX + List[Any], # MD_OBJECTS + str, # MD_LOOKUP_KEY + str, # MD_RENDERED_NAME + Optional["_ResultProcessorType[Any]"], # MD_PROCESSOR + Optional[str], # MD_UNTRANSLATED +] + +_CursorKeyMapType = Mapping["_KeyType", _CursorKeyMapRecType] + +# same as _CursorKeyMapRecType except the MD_INDEX value is definitely +# not None +_NonAmbigCursorKeyMapRecType = Tuple[ + int, + int, + List[Any], + str, + str, + Optional["_ResultProcessorType[Any]"], + str, +] + + +class CursorResultMetaData(ResultMetaData): + """Result metadata for DBAPI cursors.""" + + __slots__ = ( + "_keymap", + "_processors", + "_keys", + "_keymap_by_result_column_idx", + "_tuplefilter", + "_translated_indexes", + "_safe_for_cache", + "_unpickled", + "_key_to_index", + # don't need _unique_filters support here for now. Can be added + # if a need arises. 
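+        #
+        # note: _key_to_index is a plain key -> column-index mapping,
+        # derived from _keymap via _make_key_to_index() at the end of
+        # __init__ and in _make_new_metadata().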
+ ) + + _keymap: _CursorKeyMapType + _processors: _ProcessorsType + _keymap_by_result_column_idx: Optional[Dict[int, _KeyMapRecType]] + _unpickled: bool + _safe_for_cache: bool + _translated_indexes: Optional[List[int]] + + returns_rows: ClassVar[bool] = True + + def _has_key(self, key: Any) -> bool: + return key in self._keymap + + def _for_freeze(self) -> ResultMetaData: + return SimpleResultMetaData( + self._keys, + extra=[self._keymap[key][MD_OBJECTS] for key in self._keys], + ) + + def _make_new_metadata( + self, + *, + unpickled: bool, + processors: _ProcessorsType, + keys: Sequence[str], + keymap: _KeyMapType, + tuplefilter: Optional[_TupleGetterType], + translated_indexes: Optional[List[int]], + safe_for_cache: bool, + keymap_by_result_column_idx: Any, + ) -> CursorResultMetaData: + new_obj = self.__class__.__new__(self.__class__) + new_obj._unpickled = unpickled + new_obj._processors = processors + new_obj._keys = keys + new_obj._keymap = keymap + new_obj._tuplefilter = tuplefilter + new_obj._translated_indexes = translated_indexes + new_obj._safe_for_cache = safe_for_cache + new_obj._keymap_by_result_column_idx = keymap_by_result_column_idx + new_obj._key_to_index = self._make_key_to_index(keymap, MD_INDEX) + return new_obj + + def _remove_processors(self) -> CursorResultMetaData: + assert not self._tuplefilter + return self._make_new_metadata( + unpickled=self._unpickled, + processors=[None] * len(self._processors), + tuplefilter=None, + translated_indexes=None, + keymap={ + key: value[0:5] + (None,) + value[6:] + for key, value in self._keymap.items() + }, + keys=self._keys, + safe_for_cache=self._safe_for_cache, + keymap_by_result_column_idx=self._keymap_by_result_column_idx, + ) + + def _splice_horizontally( + self, other: CursorResultMetaData + ) -> CursorResultMetaData: + assert not self._tuplefilter + + keymap = dict(self._keymap) + offset = len(self._keys) + keymap.update( + { + key: ( + # int index should be None for ambiguous key + ( + value[0] + offset + if value[0] is not None and key not in keymap + else None + ), + value[1] + offset, + *value[2:], + ) + for key, value in other._keymap.items() + } + ) + return self._make_new_metadata( + unpickled=self._unpickled, + processors=self._processors + other._processors, # type: ignore + tuplefilter=None, + translated_indexes=None, + keys=self._keys + other._keys, # type: ignore + keymap=keymap, + safe_for_cache=self._safe_for_cache, + keymap_by_result_column_idx={ + metadata_entry[MD_RESULT_MAP_INDEX]: metadata_entry + for metadata_entry in keymap.values() + }, + ) + + def _reduce(self, keys: Sequence[_KeyIndexType]) -> ResultMetaData: + recs = list(self._metadata_for_keys(keys)) + + indexes = [rec[MD_INDEX] for rec in recs] + new_keys: List[str] = [rec[MD_LOOKUP_KEY] for rec in recs] + + if self._translated_indexes: + indexes = [self._translated_indexes[idx] for idx in indexes] + tup = tuplegetter(*indexes) + new_recs = [(index,) + rec[1:] for index, rec in enumerate(recs)] + + keymap = {rec[MD_LOOKUP_KEY]: rec for rec in new_recs} + # TODO: need unit test for: + # result = connection.execute("raw sql, no columns").scalars() + # without the "or ()" it's failing because MD_OBJECTS is None + keymap.update( + (e, new_rec) + for new_rec in new_recs + for e in new_rec[MD_OBJECTS] or () + ) + + return self._make_new_metadata( + unpickled=self._unpickled, + processors=self._processors, + keys=new_keys, + tuplefilter=tup, + translated_indexes=indexes, + keymap=keymap, # type: ignore[arg-type] + safe_for_cache=self._safe_for_cache, 
+            keymap_by_result_column_idx=self._keymap_by_result_column_idx,
+        )
+
+    def _adapt_to_context(self, context: ExecutionContext) -> ResultMetaData:
+        """When using a cached Compiled construct that has a _result_map,
+        for a new statement that used the cached Compiled, we need to ensure
+        the keymap has the Column objects from our new statement as keys.
+        So here we rewrite keymap with new entries for the new columns
+        as matched to those of the cached statement.
+
+        """
+
+        if not context.compiled or not context.compiled._result_columns:
+            return self
+
+        compiled_statement = context.compiled.statement
+        invoked_statement = context.invoked_statement
+
+        if TYPE_CHECKING:
+            assert isinstance(invoked_statement, elements.ClauseElement)
+
+        if compiled_statement is invoked_statement:
+            return self
+
+        assert invoked_statement is not None
+
+        # this is the most common path for Core statements when
+        # caching is used.  In ORM use, this codepath is not really used
+        # as the _result_disable_adapt_to_context execution option is
+        # set by the ORM.
+
+        # make a copy and add the columns from the invoked statement
+        # to the result map.
+
+        keymap_by_position = self._keymap_by_result_column_idx
+
+        if keymap_by_position is None:
+            # first retrieval from cache, this map will not be set up yet,
+            # initialize lazily
+            keymap_by_position = self._keymap_by_result_column_idx = {
+                metadata_entry[MD_RESULT_MAP_INDEX]: metadata_entry
+                for metadata_entry in self._keymap.values()
+            }
+
+        assert not self._tuplefilter
+        return self._make_new_metadata(
+            keymap=compat.dict_union(
+                self._keymap,
+                {
+                    new: keymap_by_position[idx]
+                    for idx, new in enumerate(
+                        invoked_statement._all_selected_columns
+                    )
+                    if idx in keymap_by_position
+                },
+            ),
+            unpickled=self._unpickled,
+            processors=self._processors,
+            tuplefilter=None,
+            translated_indexes=None,
+            keys=self._keys,
+            safe_for_cache=self._safe_for_cache,
+            keymap_by_result_column_idx=self._keymap_by_result_column_idx,
+        )
+
+    def __init__(
+        self,
+        parent: CursorResult[Any],
+        cursor_description: _DBAPICursorDescription,
+    ):
+        context = parent.context
+        self._tuplefilter = None
+        self._translated_indexes = None
+        self._safe_for_cache = self._unpickled = False
+
+        if context.result_column_struct:
+            (
+                result_columns,
+                cols_are_ordered,
+                textual_ordered,
+                ad_hoc_textual,
+                loose_column_name_matching,
+            ) = context.result_column_struct
+            num_ctx_cols = len(result_columns)
+        else:
+            result_columns = cols_are_ordered = (  # type: ignore
+                num_ctx_cols
+            ) = ad_hoc_textual = loose_column_name_matching = (
+                textual_ordered
+            ) = False
+
+        # merge cursor.description with the column info
+        # present in the compiled structure, if any
+        raw = self._merge_cursor_description(
+            context,
+            cursor_description,
+            result_columns,
+            num_ctx_cols,
+            cols_are_ordered,
+            textual_ordered,
+            ad_hoc_textual,
+            loose_column_name_matching,
+        )
+
+        # processors in key order which are used when building up
+        # a row
+        self._processors = [
+            metadata_entry[MD_PROCESSOR] for metadata_entry in raw
+        ]
+
+        # this is used when using this ResultMetaData in a Core-only cache
+        # retrieval context.  it's initialized on first cache retrieval
+        # when the _result_disable_adapt_to_context execution option
+        # (which the ORM generally sets) is not set.
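+        # (the lazy initialization itself takes place in
+        # _adapt_to_context(), above.)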
+ self._keymap_by_result_column_idx = None + + # for compiled SQL constructs, copy additional lookup keys into + # the key lookup map, such as Column objects, labels, + # column keys and other names + if num_ctx_cols: + # keymap by primary string... + by_key = { + metadata_entry[MD_LOOKUP_KEY]: metadata_entry + for metadata_entry in raw + } + + if len(by_key) != num_ctx_cols: + # if by-primary-string dictionary smaller than + # number of columns, assume we have dupes; (this check + # is also in place if string dictionary is bigger, as + # can occur when '*' was used as one of the compiled columns, + # which may or may not be suggestive of dupes), rewrite + # dupe records with "None" for index which results in + # ambiguous column exception when accessed. + # + # this is considered to be the less common case as it is not + # common to have dupe column keys in a SELECT statement. + # + # new in 1.4: get the complete set of all possible keys, + # strings, objects, whatever, that are dupes across two + # different records, first. + index_by_key: Dict[Any, Any] = {} + dupes = set() + for metadata_entry in raw: + for key in (metadata_entry[MD_RENDERED_NAME],) + ( + metadata_entry[MD_OBJECTS] or () + ): + idx = metadata_entry[MD_INDEX] + # if this key has been associated with more than one + # positional index, it's a dupe + if index_by_key.setdefault(key, idx) != idx: + dupes.add(key) + + # then put everything we have into the keymap excluding only + # those keys that are dupes. + self._keymap = { + obj_elem: metadata_entry + for metadata_entry in raw + if metadata_entry[MD_OBJECTS] + for obj_elem in metadata_entry[MD_OBJECTS] + if obj_elem not in dupes + } + + # then for the dupe keys, put the "ambiguous column" + # record into by_key. + by_key.update( + { + key: (None, None, [], key, key, None, None) + for key in dupes + } + ) + + else: + # no dupes - copy secondary elements from compiled + # columns into self._keymap. this is the most common + # codepath for Core / ORM statement executions before the + # result metadata is cached + self._keymap = { + obj_elem: metadata_entry + for metadata_entry in raw + if metadata_entry[MD_OBJECTS] + for obj_elem in metadata_entry[MD_OBJECTS] + } + # update keymap with primary string names taking + # precedence + self._keymap.update(by_key) + else: + # no compiled objects to map, just create keymap by primary string + self._keymap = { + metadata_entry[MD_LOOKUP_KEY]: metadata_entry + for metadata_entry in raw + } + + # update keymap with "translated" names. In SQLAlchemy this is a + # sqlite only thing, and in fact impacting only extremely old SQLite + # versions unlikely to be present in modern Python versions. + # however, the pyhive third party dialect is + # also using this hook, which means others still might use it as well. + # I dislike having this awkward hook here but as long as we need + # to use names in cursor.description in some cases we need to have + # some hook to accomplish this. 
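+        #
+        # as an illustrative example (the exact mapping is
+        # dialect-specific): a dialect's _translate_colname may turn a raw
+        # cursor.description name such as "some_table.some_column" into
+        # "some_column"; the raw, untranslated name is indexed below as an
+        # extra lookup key.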
+ if not num_ctx_cols and context._translate_colname: + self._keymap.update( + { + metadata_entry[MD_UNTRANSLATED]: self._keymap[ + metadata_entry[MD_LOOKUP_KEY] + ] + for metadata_entry in raw + if metadata_entry[MD_UNTRANSLATED] + } + ) + + self._key_to_index = self._make_key_to_index(self._keymap, MD_INDEX) + + def _merge_cursor_description( + self, + context, + cursor_description, + result_columns, + num_ctx_cols, + cols_are_ordered, + textual_ordered, + ad_hoc_textual, + loose_column_name_matching, + ): + """Merge a cursor.description with compiled result column information. + + There are at least four separate strategies used here, selected + depending on the type of SQL construct used to start with. + + The most common case is that of the compiled SQL expression construct, + which generated the column names present in the raw SQL string and + which has the identical number of columns as were reported by + cursor.description. In this case, we assume a 1-1 positional mapping + between the entries in cursor.description and the compiled object. + This is also the most performant case as we disregard extracting / + decoding the column names present in cursor.description since we + already have the desired name we generated in the compiled SQL + construct. + + The next common case is that of the completely raw string SQL, + such as passed to connection.execute(). In this case we have no + compiled construct to work with, so we extract and decode the + names from cursor.description and index those as the primary + result row target keys. + + The remaining fairly common case is that of the textual SQL + that includes at least partial column information; this is when + we use a :class:`_expression.TextualSelect` construct. + This construct may have + unordered or ordered column information. In the ordered case, we + merge the cursor.description and the compiled construct's information + positionally, and warn if there are additional description names + present, however we still decode the names in cursor.description + as we don't have a guarantee that the names in the columns match + on these. In the unordered case, we match names in cursor.description + to that of the compiled construct based on name matching. + In both of these cases, the cursor.description names and the column + expression objects and names are indexed as result row target keys. + + The final case is much less common, where we have a compiled + non-textual SQL expression construct, but the number of columns + in cursor.description doesn't match what's in the compiled + construct. We make the guess here that there might be textual + column expressions in the compiled construct that themselves include + a comma in them causing them to split. We do the same name-matching + as with textual non-ordered columns. + + The name-matched system of merging is the same as that used by + SQLAlchemy for all cases up through the 0.9 series. Positional + matching for compiled SQL expressions was introduced in 1.0 as a + major performance feature, and positional matching for textual + :class:`_expression.TextualSelect` objects in 1.1. + As name matching is no longer + a common case, it was acceptable to factor it into smaller generator- + oriented methods that are easier to understand, but incur slightly + more performance overhead. 
+ + """ + + if ( + num_ctx_cols + and cols_are_ordered + and not textual_ordered + and num_ctx_cols == len(cursor_description) + ): + self._keys = [elem[0] for elem in result_columns] + # pure positional 1-1 case; doesn't need to read + # the names from cursor.description + + # most common case for Core and ORM + + # this metadata is safe to cache because we are guaranteed + # to have the columns in the same order for new executions + self._safe_for_cache = True + return [ + ( + idx, + idx, + rmap_entry[RM_OBJECTS], + rmap_entry[RM_NAME], + rmap_entry[RM_RENDERED_NAME], + context.get_result_processor( + rmap_entry[RM_TYPE], + rmap_entry[RM_RENDERED_NAME], + cursor_description[idx][1], + ), + None, + ) + for idx, rmap_entry in enumerate(result_columns) + ] + else: + # name-based or text-positional cases, where we need + # to read cursor.description names + + if textual_ordered or ( + ad_hoc_textual and len(cursor_description) == num_ctx_cols + ): + self._safe_for_cache = True + # textual positional case + raw_iterator = self._merge_textual_cols_by_position( + context, cursor_description, result_columns + ) + elif num_ctx_cols: + # compiled SQL with a mismatch of description cols + # vs. compiled cols, or textual w/ unordered columns + # the order of columns can change if the query is + # against a "select *", so not safe to cache + self._safe_for_cache = False + raw_iterator = self._merge_cols_by_name( + context, + cursor_description, + result_columns, + loose_column_name_matching, + ) + else: + # no compiled SQL, just a raw string, order of columns + # can change for "select *" + self._safe_for_cache = False + raw_iterator = self._merge_cols_by_none( + context, cursor_description + ) + + return [ + ( + idx, + ridx, + obj, + cursor_colname, + cursor_colname, + context.get_result_processor( + mapped_type, cursor_colname, coltype + ), + untranslated, + ) + for ( + idx, + ridx, + cursor_colname, + mapped_type, + coltype, + obj, + untranslated, + ) in raw_iterator + ] + + def _colnames_from_description(self, context, cursor_description): + """Extract column names and data types from a cursor.description. + + Applies unicode decoding, column translation, "normalization", + and case sensitivity rules to the names based on the dialect. 
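
For dialects that set ``requires_name_normalize`` (Oracle is the canonical case), the "normalization" this docstring refers to folds names that arrive in the backend's case-insensitive upper case down to SQLAlchemy's lowercase convention. A rough, illustrative approximation of that rule, not the actual dialect implementation::

    def normalize_name(name: str) -> str:
        # names entirely in the backend's default (upper) case are
        # case-insensitive and fold to SQLAlchemy's lowercase convention;
        # mixed-case names were quoted and are preserved as-is
        if name.upper() == name:
            return name.lower()
        return name

    assert normalize_name("EMPLOYEES") == "employees"   # folded
    assert normalize_name("Employees") == "Employees"   # preserved
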
+ + """ + + dialect = context.dialect + translate_colname = context._translate_colname + normalize_name = ( + dialect.normalize_name if dialect.requires_name_normalize else None + ) + untranslated = None + + self._keys = [] + + for idx, rec in enumerate(cursor_description): + colname = rec[0] + coltype = rec[1] + + if translate_colname: + colname, untranslated = translate_colname(colname) + + if normalize_name: + colname = normalize_name(colname) + + self._keys.append(colname) + + yield idx, colname, untranslated, coltype + + def _merge_textual_cols_by_position( + self, context, cursor_description, result_columns + ): + num_ctx_cols = len(result_columns) + + if num_ctx_cols > len(cursor_description): + util.warn( + "Number of columns in textual SQL (%d) is " + "smaller than number of columns requested (%d)" + % (num_ctx_cols, len(cursor_description)) + ) + seen = set() + + for ( + idx, + colname, + untranslated, + coltype, + ) in self._colnames_from_description(context, cursor_description): + if idx < num_ctx_cols: + ctx_rec = result_columns[idx] + obj = ctx_rec[RM_OBJECTS] + ridx = idx + mapped_type = ctx_rec[RM_TYPE] + if obj[0] in seen: + raise exc.InvalidRequestError( + "Duplicate column expression requested " + "in textual SQL: %r" % obj[0] + ) + seen.add(obj[0]) + else: + mapped_type = sqltypes.NULLTYPE + obj = None + ridx = None + yield idx, ridx, colname, mapped_type, coltype, obj, untranslated + + def _merge_cols_by_name( + self, + context, + cursor_description, + result_columns, + loose_column_name_matching, + ): + match_map = self._create_description_match_map( + result_columns, loose_column_name_matching + ) + mapped_type: TypeEngine[Any] + + for ( + idx, + colname, + untranslated, + coltype, + ) in self._colnames_from_description(context, cursor_description): + try: + ctx_rec = match_map[colname] + except KeyError: + mapped_type = sqltypes.NULLTYPE + obj = None + result_columns_idx = None + else: + obj = ctx_rec[1] + mapped_type = ctx_rec[2] + result_columns_idx = ctx_rec[3] + yield ( + idx, + result_columns_idx, + colname, + mapped_type, + coltype, + obj, + untranslated, + ) + + @classmethod + def _create_description_match_map( + cls, + result_columns: List[ResultColumnsEntry], + loose_column_name_matching: bool = False, + ) -> Dict[ + Union[str, object], Tuple[str, Tuple[Any, ...], TypeEngine[Any], int] + ]: + """when matching cursor.description to a set of names that are present + in a Compiled object, as is the case with TextualSelect, get all the + names we expect might match those in cursor.description. + """ + + d: Dict[ + Union[str, object], + Tuple[str, Tuple[Any, ...], TypeEngine[Any], int], + ] = {} + for ridx, elem in enumerate(result_columns): + key = elem[RM_RENDERED_NAME] + if key in d: + # conflicting keyname - just add the column-linked objects + # to the existing record. if there is a duplicate column + # name in the cursor description, this will allow all of those + # objects to raise an ambiguous column error + e_name, e_obj, e_type, e_ridx = d[key] + d[key] = e_name, e_obj + elem[RM_OBJECTS], e_type, ridx + else: + d[key] = (elem[RM_NAME], elem[RM_OBJECTS], elem[RM_TYPE], ridx) + + if loose_column_name_matching: + # when using a textual statement with an unordered set + # of columns that line up, we are expecting the user + # to be using label names in the SQL that match to the column + # expressions. Enable more liberal matching for this case; + # duplicate keys that are ambiguous will be fixed later. 
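
A runnable sketch of the TextualSelect merging that the match map above serves, assuming an in-memory SQLite engine: ``.columns()`` supplies column objects and types for a textual statement, and those objects then work as result-row keys::

    from sqlalchemy import Integer, String, column, create_engine, text

    engine = create_engine("sqlite://")
    with engine.begin() as conn:
        conn.execute(text("CREATE TABLE u (id INTEGER, name TEXT)"))
        conn.execute(text("INSERT INTO u VALUES (1, 'spongebob')"))

        id_col, name_col = column("id", Integer), column("name", String)
        stmt = text("SELECT id, name FROM u").columns(id_col, name_col)
        row = conn.execute(stmt).one()

        # the column objects passed to .columns() are usable as row keys
        assert row._mapping[id_col] == 1
        assert row._mapping[name_col] == "spongebob"
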
+                for r_key in elem[RM_OBJECTS]:
+                    d.setdefault(
+                        r_key,
+                        (elem[RM_NAME], elem[RM_OBJECTS], elem[RM_TYPE], ridx),
+                    )
+        return d
+
+    def _merge_cols_by_none(self, context, cursor_description):
+        for (
+            idx,
+            colname,
+            untranslated,
+            coltype,
+        ) in self._colnames_from_description(context, cursor_description):
+            yield (
+                idx,
+                None,
+                colname,
+                sqltypes.NULLTYPE,
+                coltype,
+                None,
+                untranslated,
+            )
+
+    if not TYPE_CHECKING:
+
+        def _key_fallback(
+            self, key: Any, err: Optional[Exception], raiseerr: bool = True
+        ) -> Optional[NoReturn]:
+            if raiseerr:
+                if self._unpickled and isinstance(key, elements.ColumnElement):
+                    raise exc.NoSuchColumnError(
+                        "Row was unpickled; lookup by ColumnElement "
+                        "is unsupported"
+                    ) from err
+                else:
+                    raise exc.NoSuchColumnError(
+                        "Could not locate column in row for column '%s'"
+                        % util.string_or_unprintable(key)
+                    ) from err
+            else:
+                return None
+
+    def _raise_for_ambiguous_column_name(self, rec):
+        raise exc.InvalidRequestError(
+            "Ambiguous column name '%s' in "
+            "result set column descriptions" % rec[MD_LOOKUP_KEY]
+        )
+
+    def _index_for_key(self, key: Any, raiseerr: bool = True) -> Optional[int]:
+        # TODO: can consider pre-loading ints and negative ints
+        # into _keymap - also no coverage here
+        if isinstance(key, int):
+            key = self._keys[key]
+
+        try:
+            rec = self._keymap[key]
+        except KeyError as ke:
+            x = self._key_fallback(key, ke, raiseerr)
+            assert x is None
+            return None
+
+        index = rec[0]
+
+        if index is None:
+            self._raise_for_ambiguous_column_name(rec)
+        return index
+
+    def _indexes_for_keys(self, keys):
+        try:
+            return [self._keymap[key][0] for key in keys]
+        except KeyError as ke:
+            # ensure it raises
+            CursorResultMetaData._key_fallback(self, ke.args[0], ke)
+
+    def _metadata_for_keys(
+        self, keys: Sequence[Any]
+    ) -> Iterator[_NonAmbigCursorKeyMapRecType]:
+        for key in keys:
+            if int in key.__class__.__mro__:
+                key = self._keys[key]
+
+            try:
+                rec = self._keymap[key]
+            except KeyError as ke:
+                # ensure it raises
+                CursorResultMetaData._key_fallback(self, ke.args[0], ke)
+
+            index = rec[MD_INDEX]
+
+            if index is None:
+                self._raise_for_ambiguous_column_name(rec)
+
+            yield cast(_NonAmbigCursorKeyMapRecType, rec)
+
+    def __getstate__(self):
+        # TODO: consider serializing this as SimpleResultMetaData
+        return {
+            "_keymap": {
+                key: (
+                    rec[MD_INDEX],
+                    rec[MD_RESULT_MAP_INDEX],
+                    [],
+                    key,
+                    rec[MD_RENDERED_NAME],
+                    None,
+                    None,
+                )
+                for key, rec in self._keymap.items()
+                if isinstance(key, (str, int))
+            },
+            "_keys": self._keys,
+            "_translated_indexes": self._translated_indexes,
+        }
+
+    def __setstate__(self, state):
+        self._processors = [None for _ in range(len(state["_keys"]))]
+        self._keymap = state["_keymap"]
+        self._keymap_by_result_column_idx = None
+        self._key_to_index = self._make_key_to_index(self._keymap, MD_INDEX)
+        self._keys = state["_keys"]
+        self._unpickled = True
+        if state["_translated_indexes"]:
+            self._translated_indexes = cast(
+                "List[int]", state["_translated_indexes"]
+            )
+            self._tuplefilter = tuplegetter(*self._translated_indexes)
+        else:
+            self._translated_indexes = self._tuplefilter = None
+
+
+class ResultFetchStrategy:
+    """Define a fetching strategy for a result object.
+
+
+    .. versionadded:: 1.4
+
+    """
+
+    __slots__ = ()
+
+    alternate_cursor_description: Optional[_DBAPICursorDescription] = None
+
+    def soft_close(
+        self, result: CursorResult[Any], dbapi_cursor: Optional[DBAPICursor]
+    ) -> None:
+        raise NotImplementedError()
+
+    def hard_close(
+        self, result: CursorResult[Any], dbapi_cursor: Optional[DBAPICursor]
+    ) -> None:
+        raise NotImplementedError()
+
+    def yield_per(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: Optional[DBAPICursor],
+        num: int,
+    ) -> None:
+        return
+
+    def fetchone(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        hard_close: bool = False,
+    ) -> Any:
+        raise NotImplementedError()
+
+    def fetchmany(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        size: Optional[int] = None,
+    ) -> Any:
+        raise NotImplementedError()
+
+    def fetchall(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+    ) -> Any:
+        raise NotImplementedError()
+
+    def handle_exception(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: Optional[DBAPICursor],
+        err: BaseException,
+    ) -> NoReturn:
+        raise err
+
+
+class NoCursorFetchStrategy(ResultFetchStrategy):
+    """Cursor strategy for a result that has no open cursor.
+
+    There are two varieties of this strategy, one for DQL and one for
+    DML (and also DDL), each of which represents a result that had a cursor
+    but no longer has one.
+
+    """
+
+    __slots__ = ()
+
+    def soft_close(self, result, dbapi_cursor):
+        pass
+
+    def hard_close(self, result, dbapi_cursor):
+        pass
+
+    def fetchone(self, result, dbapi_cursor, hard_close=False):
+        return self._non_result(result, None)
+
+    def fetchmany(self, result, dbapi_cursor, size=None):
+        return self._non_result(result, [])
+
+    def fetchall(self, result, dbapi_cursor):
+        return self._non_result(result, [])
+
+    def _non_result(self, result, default, err=None):
+        raise NotImplementedError()
+
+
+class NoCursorDQLFetchStrategy(NoCursorFetchStrategy):
+    """Cursor strategy for a DQL result that has no open cursor.
+
+    This is a result set that can return rows, i.e. for a SELECT, or for an
+    INSERT, UPDATE, DELETE that includes RETURNING.  However it is in the state
+    where the cursor is closed and no rows remain available.  The owning result
+    object may or may not be "hard closed", which determines if the fetch
+    methods send empty results or raise for a closed result.
+
+    """
+
+    __slots__ = ()
+
+    def _non_result(self, result, default, err=None):
+        if result.closed:
+            raise exc.ResourceClosedError(
+                "This result object is closed."
+            ) from err
+        else:
+            return default
+
+
+_NO_CURSOR_DQL = NoCursorDQLFetchStrategy()
+
+
+class NoCursorDMLFetchStrategy(NoCursorFetchStrategy):
+    """Cursor strategy for a DML result that has no open cursor.
+
+    This is a result set that does not return rows, i.e. for an INSERT,
+    UPDATE, DELETE that does not include RETURNING.
+
+    """
+
+    __slots__ = ()
+
+    def _non_result(self, result, default, err=None):
+        # we only expect to have a _NoResultMetaData() here right now.
+        assert not result._metadata.returns_rows
+        result._metadata._we_dont_return_rows(err)
+
+
+_NO_CURSOR_DML = NoCursorDMLFetchStrategy()
+
+
+class CursorFetchStrategy(ResultFetchStrategy):
+    """Call fetch methods from a DBAPI cursor.
+
+    Alternate versions of this class may instead buffer the rows from
+    cursors or not use cursors at all.
+
+    """
+
+    __slots__ = ()
+
+    def soft_close(
+        self, result: CursorResult[Any], dbapi_cursor: Optional[DBAPICursor]
+    ) -> None:
+        result.cursor_strategy = _NO_CURSOR_DQL
+
+    def hard_close(
+        self, result: CursorResult[Any], dbapi_cursor: Optional[DBAPICursor]
+    ) -> None:
+        result.cursor_strategy = _NO_CURSOR_DQL
+
+    def handle_exception(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: Optional[DBAPICursor],
+        err: BaseException,
+    ) -> NoReturn:
+        result.connection._handle_dbapi_exception(
+            err, None, None, dbapi_cursor, result.context
+        )
+
+    def yield_per(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: Optional[DBAPICursor],
+        num: int,
+    ) -> None:
+        result.cursor_strategy = BufferedRowCursorFetchStrategy(
+            dbapi_cursor,
+            {"max_row_buffer": num},
+            initial_buffer=collections.deque(),
+            growth_factor=0,
+        )
+
+    def fetchone(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        hard_close: bool = False,
+    ) -> Any:
+        try:
+            row = dbapi_cursor.fetchone()
+            if row is None:
+                result._soft_close(hard=hard_close)
+            return row
+        except BaseException as e:
+            self.handle_exception(result, dbapi_cursor, e)
+
+    def fetchmany(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        size: Optional[int] = None,
+    ) -> Any:
+        try:
+            if size is None:
+                l = dbapi_cursor.fetchmany()
+            else:
+                l = dbapi_cursor.fetchmany(size)
+
+            if not l:
+                result._soft_close()
+            return l
+        except BaseException as e:
+            self.handle_exception(result, dbapi_cursor, e)
+
+    def fetchall(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+    ) -> Any:
+        try:
+            rows = dbapi_cursor.fetchall()
+            result._soft_close()
+            return rows
+        except BaseException as e:
+            self.handle_exception(result, dbapi_cursor, e)
+
+
+_DEFAULT_FETCH = CursorFetchStrategy()
+
+
+class BufferedRowCursorFetchStrategy(CursorFetchStrategy):
+    """A cursor fetch strategy with row buffering behavior.
+
+    This strategy buffers the contents of a selection of rows
+    before ``fetchone()`` is called.  This is to allow the results of
+    ``cursor.description`` to be available immediately, when
+    interfacing with a DB-API that requires rows to be consumed before
+    this information is available (currently psycopg2, when used with
+    server-side cursors).
+
+    The pre-fetching behavior fetches only one row initially, and then
+    grows its buffer size by a fixed amount with each successive need
+    for additional rows up to the ``max_row_buffer`` size, which defaults
+    to 1000::
+
+        with psycopg2_engine.connect() as conn:
+
+            result = conn.execution_options(
+                stream_results=True, max_row_buffer=50
+            ).execute(text("select * from table"))
+
+    .. versionadded:: 1.4 ``max_row_buffer`` may now exceed 1000 rows.
+
+    .. seealso::
+
+        :ref:`psycopg2_execution_options`
+    """
+
+    __slots__ = ("_max_row_buffer", "_rowbuffer", "_bufsize", "_growth_factor")
+
+    def __init__(
+        self,
+        dbapi_cursor,
+        execution_options,
+        growth_factor=5,
+        initial_buffer=None,
+    ):
+        self._max_row_buffer = execution_options.get("max_row_buffer", 1000)
+
+        if initial_buffer is not None:
+            self._rowbuffer = initial_buffer
+        else:
+            self._rowbuffer = collections.deque(dbapi_cursor.fetchmany(1))
+        self._growth_factor = growth_factor
+
+        if growth_factor:
+            self._bufsize = min(self._max_row_buffer, self._growth_factor)
+        else:
+            self._bufsize = self._max_row_buffer
+
+    @classmethod
+    def create(cls, result):
+        return BufferedRowCursorFetchStrategy(
+            result.cursor,
+            result.context.execution_options,
+        )
+
+    def _buffer_rows(self, result, dbapi_cursor):
+        """this is currently used only by fetchone()."""
+
+        size = self._bufsize
+        try:
+            if size < 1:
+                new_rows = dbapi_cursor.fetchall()
+            else:
+                new_rows = dbapi_cursor.fetchmany(size)
+        except BaseException as e:
+            self.handle_exception(result, dbapi_cursor, e)
+
+        if not new_rows:
+            return
+        self._rowbuffer = collections.deque(new_rows)
+        if self._growth_factor and size < self._max_row_buffer:
+            self._bufsize = min(
+                self._max_row_buffer, size * self._growth_factor
+            )
+
+    def yield_per(self, result, dbapi_cursor, num):
+        self._growth_factor = 0
+        self._max_row_buffer = self._bufsize = num
+
+    def soft_close(self, result, dbapi_cursor):
+        self._rowbuffer.clear()
+        super().soft_close(result, dbapi_cursor)
+
+    def hard_close(self, result, dbapi_cursor):
+        self._rowbuffer.clear()
+        super().hard_close(result, dbapi_cursor)
+
+    def fetchone(self, result, dbapi_cursor, hard_close=False):
+        if not self._rowbuffer:
+            self._buffer_rows(result, dbapi_cursor)
+            if not self._rowbuffer:
+                try:
+                    result._soft_close(hard=hard_close)
+                except BaseException as e:
+                    self.handle_exception(result, dbapi_cursor, e)
+                return None
+        return self._rowbuffer.popleft()
+
+    def fetchmany(self, result, dbapi_cursor, size=None):
+        if size is None:
+            return self.fetchall(result, dbapi_cursor)
+
+        rb = self._rowbuffer
+        lb = len(rb)
+        close = False
+        if size > lb:
+            try:
+                new = dbapi_cursor.fetchmany(size - lb)
+            except BaseException as e:
+                self.handle_exception(result, dbapi_cursor, e)
+            else:
+                if not new:
+                    # defer closing since it may clear the row buffer
+                    close = True
+                else:
+                    rb.extend(new)
+
+        res = [rb.popleft() for _ in range(min(size, len(rb)))]
+        if close:
+            result._soft_close()
+        return res
+
+    def fetchall(self, result, dbapi_cursor):
+        try:
+            ret = list(self._rowbuffer) + list(dbapi_cursor.fetchall())
+            self._rowbuffer.clear()
+            result._soft_close()
+            return ret
+        except BaseException as e:
+            self.handle_exception(result, dbapi_cursor, e)
+
+
+class FullyBufferedCursorFetchStrategy(CursorFetchStrategy):
+    """A cursor strategy that buffers rows fully upon creation.
+
+    Used for operations where a result is to be delivered
+    after the database conversation cannot be continued,
+    such as MSSQL INSERT...OUTPUT after an autocommit.
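
A back-of-envelope trace of the buffer growth implemented by ``BufferedRowCursorFetchStrategy`` above: after an initial ``fetchmany(1)``, refills with the default ``growth_factor`` of 5 and ``max_row_buffer`` of 1000 request 5, 25, 125, 625 and then 1000 rows, capped thereafter::

    max_row_buffer, growth_factor = 1000, 5

    size = min(max_row_buffer, growth_factor)  # initial _bufsize
    sizes = []
    for _ in range(6):
        sizes.append(size)                     # rows requested per refill
        if size < max_row_buffer:
            size = min(max_row_buffer, size * growth_factor)

    print(sizes)  # [5, 25, 125, 625, 1000, 1000]
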
+
+    """
+
+    __slots__ = ("_rowbuffer", "alternate_cursor_description")
+
+    def __init__(
+        self, dbapi_cursor, alternate_description=None, initial_buffer=None
+    ):
+        self.alternate_cursor_description = alternate_description
+        if initial_buffer is not None:
+            self._rowbuffer = collections.deque(initial_buffer)
+        else:
+            self._rowbuffer = collections.deque(dbapi_cursor.fetchall())
+
+    def yield_per(self, result, dbapi_cursor, num):
+        pass
+
+    def soft_close(self, result, dbapi_cursor):
+        self._rowbuffer.clear()
+        super().soft_close(result, dbapi_cursor)
+
+    def hard_close(self, result, dbapi_cursor):
+        self._rowbuffer.clear()
+        super().hard_close(result, dbapi_cursor)
+
+    def fetchone(self, result, dbapi_cursor, hard_close=False):
+        if self._rowbuffer:
+            return self._rowbuffer.popleft()
+        else:
+            result._soft_close(hard=hard_close)
+            return None
+
+    def fetchmany(self, result, dbapi_cursor, size=None):
+        if size is None:
+            return self.fetchall(result, dbapi_cursor)
+
+        rb = self._rowbuffer
+        rows = [rb.popleft() for _ in range(min(size, len(rb)))]
+        if not rows:
+            result._soft_close()
+        return rows
+
+    def fetchall(self, result, dbapi_cursor):
+        ret = self._rowbuffer
+        self._rowbuffer = collections.deque()
+        result._soft_close()
+        return ret
+
+
+class _NoResultMetaData(ResultMetaData):
+    __slots__ = ()
+
+    returns_rows = False
+
+    def _we_dont_return_rows(self, err=None):
+        raise exc.ResourceClosedError(
+            "This result object does not return rows. "
+            "It has been closed automatically."
+        ) from err
+
+    def _index_for_key(self, keys, raiseerr):
+        self._we_dont_return_rows()
+
+    def _metadata_for_keys(self, key):
+        self._we_dont_return_rows()
+
+    def _reduce(self, keys):
+        self._we_dont_return_rows()
+
+    @property
+    def _keymap(self):
+        self._we_dont_return_rows()
+
+    @property
+    def _key_to_index(self):
+        self._we_dont_return_rows()
+
+    @property
+    def _processors(self):
+        self._we_dont_return_rows()
+
+    @property
+    def keys(self):
+        self._we_dont_return_rows()
+
+
+_NO_RESULT_METADATA = _NoResultMetaData()
+
+
+def null_dml_result() -> IteratorResult[Any]:
+    it: IteratorResult[Any] = IteratorResult(_NoResultMetaData(), iter([]))
+    it._soft_close()
+    return it
+
+
+class CursorResult(Result[_T]):
+    """A Result that is representing state from a DBAPI cursor.
+
+    .. versionchanged:: 1.4  The :class:`.CursorResult`
+       class replaces the previous :class:`.ResultProxy` interface.
+       These classes are based on the :class:`.Result` calling API
+       which provides an updated usage model and calling facade for
+       SQLAlchemy Core and SQLAlchemy ORM.
+
+    Returns database rows via the :class:`.Row` class, which provides
+    additional API features and behaviors on top of the raw data returned by
+    the DBAPI.  Through the use of filters such as the :meth:`.Result.scalars`
+    method, other kinds of objects may also be returned.
+
+    .. seealso::
+
+        :ref:`tutorial_selecting_data` - introductory material for accessing
+        :class:`_engine.CursorResult` and :class:`.Row` objects.
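
A brief usage sketch of the API this docstring describes, assuming an in-memory SQLite engine (table and data invented for the example)::

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite://")
    with engine.begin() as conn:
        conn.execute(text("CREATE TABLE user_account (id INTEGER, name TEXT)"))
        conn.execute(text("INSERT INTO user_account VALUES (1, 'sandy')"))

        result = conn.execute(text("SELECT id, name FROM user_account"))
        for row in result:
            print(row.id, row.name)  # named-tuple style access

        # filters return new result facades over the same source
        names = conn.execute(
            text("SELECT name FROM user_account")
        ).scalars().all()
        assert names == ["sandy"]
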
+ + """ + + __slots__ = ( + "context", + "dialect", + "cursor", + "cursor_strategy", + "_echo", + "connection", + ) + + _metadata: Union[CursorResultMetaData, _NoResultMetaData] + _no_result_metadata = _NO_RESULT_METADATA + _soft_closed: bool = False + closed: bool = False + _is_cursor = True + + context: DefaultExecutionContext + dialect: Dialect + cursor_strategy: ResultFetchStrategy + connection: Connection + + def __init__( + self, + context: DefaultExecutionContext, + cursor_strategy: ResultFetchStrategy, + cursor_description: Optional[_DBAPICursorDescription], + ): + self.context = context + self.dialect = context.dialect + self.cursor = context.cursor + self.cursor_strategy = cursor_strategy + self.connection = context.root_connection + self._echo = echo = ( + self.connection._echo and context.engine._should_log_debug() + ) + + if cursor_description is not None: + # inline of Result._row_getter(), set up an initial row + # getter assuming no transformations will be called as this + # is the most common case + + metadata = self._init_metadata(context, cursor_description) + + _make_row: Any + _make_row = functools.partial( + Row, + metadata, + metadata._effective_processors, + metadata._key_to_index, + ) + + if context._num_sentinel_cols: + sentinel_filter = operator.itemgetter( + slice(-context._num_sentinel_cols) + ) + + def _sliced_row(raw_data): + return _make_row(sentinel_filter(raw_data)) + + sliced_row = _sliced_row + else: + sliced_row = _make_row + + if echo: + log = self.context.connection._log_debug + + def _log_row(row): + log("Row %r", sql_util._repr_row(row)) + return row + + self._row_logging_fn = _log_row + + def _make_row_2(row): + return _log_row(sliced_row(row)) + + make_row = _make_row_2 + else: + make_row = sliced_row + self._set_memoized_attribute("_row_getter", make_row) + + else: + assert context._num_sentinel_cols == 0 + self._metadata = self._no_result_metadata + + def _init_metadata(self, context, cursor_description): + if context.compiled: + compiled = context.compiled + + if compiled._cached_metadata: + metadata = compiled._cached_metadata + else: + metadata = CursorResultMetaData(self, cursor_description) + if metadata._safe_for_cache: + compiled._cached_metadata = metadata + + # result rewrite/ adapt step. this is to suit the case + # when we are invoked against a cached Compiled object, we want + # to rewrite the ResultMetaData to reflect the Column objects + # that are in our current SQL statement object, not the one + # that is associated with the cached Compiled object. + # the Compiled object may also tell us to not + # actually do this step; this is to support the ORM where + # it is to produce a new Result object in any case, and will + # be using the cached Column objects against this database result + # so we don't want to rewrite them. + # + # Basically this step suits the use case where the end user + # is using Core SQL expressions and is accessing columns in the + # result row using row._mapping[table.c.column]. 
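
The sentinel-trimming getter assembled above relies on ``operator.itemgetter`` over a ``slice``; a minimal standalone demonstration, with the row contents invented for the example::

    import operator

    num_sentinel_cols = 1
    sentinel_filter = operator.itemgetter(slice(-num_sentinel_cols))

    # drops the trailing sentinel value from each raw DBAPI row before
    # the Row object is constructed; slice(-1) is equivalent to [:-1]
    raw_row = ("sandy", "sandy@example.com", 7)
    assert sentinel_filter(raw_row) == ("sandy", "sandy@example.com")
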
+            if (
+                not context.execution_options.get(
+                    "_result_disable_adapt_to_context", False
+                )
+                and compiled._result_columns
+                and context.cache_hit is context.dialect.CACHE_HIT
+                and compiled.statement is not context.invoked_statement
+            ):
+                metadata = metadata._adapt_to_context(context)
+
+            self._metadata = metadata
+
+        else:
+            self._metadata = metadata = CursorResultMetaData(
+                self, cursor_description
+            )
+            if self._echo:
+                context.connection._log_debug(
+                    "Col %r", tuple(x[0] for x in cursor_description)
+                )
+        return metadata
+
+    def _soft_close(self, hard=False):
+        """Soft close this :class:`_engine.CursorResult`.
+
+        This releases all DBAPI cursor resources, but leaves the
+        CursorResult "open" from a semantic perspective, meaning the
+        fetchXXX() methods will continue to return empty results.
+
+        This method is called automatically when:
+
+        * all result rows are exhausted using the fetchXXX() methods.
+        * cursor.description is None.
+
+        This method is **not public**, but is documented in order to clarify
+        the "autoclose" process used.
+
+        .. seealso::
+
+            :meth:`_engine.CursorResult.close`
+
+
+        """
+
+        if (not hard and self._soft_closed) or (hard and self.closed):
+            return
+
+        if hard:
+            self.closed = True
+            self.cursor_strategy.hard_close(self, self.cursor)
+        else:
+            self.cursor_strategy.soft_close(self, self.cursor)
+
+        if not self._soft_closed:
+            cursor = self.cursor
+            self.cursor = None  # type: ignore
+            self.connection._safe_close_cursor(cursor)
+            self._soft_closed = True
+
+    @property
+    def inserted_primary_key_rows(self):
+        """Return the value of
+        :attr:`_engine.CursorResult.inserted_primary_key`
+        as a row contained within a list; some dialects may support a
+        multiple row form as well.
+
+        .. note:: As indicated below, in current SQLAlchemy versions this
+           accessor is only useful beyond what's already supplied by
+           :attr:`_engine.CursorResult.inserted_primary_key` when using the
+           :ref:`postgresql_psycopg2` dialect.  Future versions hope to
+           generalize this feature to more dialects.
+
+        This accessor is added to support dialects that offer the feature
+        that is currently implemented by the :ref:`psycopg2_executemany_mode`
+        feature, currently **only the psycopg2 dialect**, which provides
+        for many rows to be INSERTed at once while still retaining the
+        behavior of being able to return server-generated primary key values.
+
+        * **When using the psycopg2 dialect, or other dialects that may support
+          "fast executemany" style inserts in upcoming releases** : When
+          invoking an INSERT statement while passing a list of rows as the
+          second argument to :meth:`_engine.Connection.execute`, this accessor
+          will then provide a list of rows, where each row contains the primary
+          key value for each row that was INSERTed.
+
+        * **When using all other dialects / backends that don't yet support
+          this feature**: This accessor is only useful for **single row INSERT
+          statements**, and returns the same information as that of the
+          :attr:`_engine.CursorResult.inserted_primary_key` within a
+          single-element list.  When an INSERT statement is executed in
+          conjunction with a list of rows to be INSERTed, the list will contain
+          one row per row inserted in the statement, however it will contain
+          ``None`` for any server-generated values.
+
+        Future releases of SQLAlchemy will further generalize the
+        "fast execution helper" feature of psycopg2 to suit other dialects,
+        thus allowing this accessor to be of more general use.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`_engine.CursorResult.inserted_primary_key`
+
+        """
+        if not self.context.compiled:
+            raise exc.InvalidRequestError(
+                "Statement is not a compiled expression construct."
+            )
+        elif not self.context.isinsert:
+            raise exc.InvalidRequestError(
+                "Statement is not an insert() expression construct."
+            )
+        elif self.context._is_explicit_returning:
+            raise exc.InvalidRequestError(
+                "Can't call inserted_primary_key "
+                "when returning() "
+                "is used."
+            )
+        return self.context.inserted_primary_key_rows
+
+    @property
+    def inserted_primary_key(self):
+        """Return the primary key for the row just inserted.
+
+        The return value is a :class:`_result.Row` object representing
+        a named tuple of primary key values in the order in which the
+        primary key columns are configured in the source
+        :class:`_schema.Table`.
+
+        .. versionchanged:: 1.4.8 - the
+           :attr:`_engine.CursorResult.inserted_primary_key`
+           value is now a named tuple via the :class:`_result.Row` class,
+           rather than a plain tuple.
+
+        This accessor only applies to single row :func:`_expression.insert`
+        constructs which did not explicitly specify
+        :meth:`_expression.Insert.returning`.  Support for multirow inserts,
+        while not yet available for most backends, would be accessed using
+        the :attr:`_engine.CursorResult.inserted_primary_key_rows` accessor.
+
+        Note that primary key columns which specify a server_default clause, or
+        otherwise do not qualify as "autoincrement" columns (see the notes at
+        :class:`_schema.Column`), and were generated using the database-side
+        default, will appear in this list as ``None`` unless the backend
+        supports "returning" and the insert statement was executed with
+        "implicit returning" enabled.
+
+        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
+        statement is not a compiled expression construct
+        or is not an insert() construct.
+
+        """
+
+        if self.context.executemany:
+            raise exc.InvalidRequestError(
+                "This statement was an executemany call; if primary key "
+                "returning is supported, please "
+                "use .inserted_primary_key_rows."
+            )
+
+        ikp = self.inserted_primary_key_rows
+        if ikp:
+            return ikp[0]
+        else:
+            return None
+
+    def last_updated_params(self):
+        """Return the collection of updated parameters from this
+        execution.
+
+        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
+        statement is not a compiled expression construct
+        or is not an update() construct.
+
+        """
+        if not self.context.compiled:
+            raise exc.InvalidRequestError(
+                "Statement is not a compiled expression construct."
+            )
+        elif not self.context.isupdate:
+            raise exc.InvalidRequestError(
+                "Statement is not an update() expression construct."
+            )
+        elif self.context.executemany:
+            return self.context.compiled_parameters
+        else:
+            return self.context.compiled_parameters[0]
+
+    def last_inserted_params(self):
+        """Return the collection of inserted parameters from this
+        execution.
+
+        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
+        statement is not a compiled expression construct
+        or is not an insert() construct.
+
+        """
+        if not self.context.compiled:
+            raise exc.InvalidRequestError(
+                "Statement is not a compiled expression construct."
+            )
+        elif not self.context.isinsert:
+            raise exc.InvalidRequestError(
+                "Statement is not an insert() expression construct."
+ ) + elif self.context.executemany: + return self.context.compiled_parameters + else: + return self.context.compiled_parameters[0] + + @property + def returned_defaults_rows(self): + """Return a list of rows each containing the values of default + columns that were fetched using + the :meth:`.ValuesBase.return_defaults` feature. + + The return value is a list of :class:`.Row` objects. + + .. versionadded:: 1.4 + + """ + return self.context.returned_default_rows + + def splice_horizontally(self, other): + """Return a new :class:`.CursorResult` that "horizontally splices" + together the rows of this :class:`.CursorResult` with that of another + :class:`.CursorResult`. + + .. tip:: This method is for the benefit of the SQLAlchemy ORM and is + not intended for general use. + + "horizontally splices" means that for each row in the first and second + result sets, a new row that concatenates the two rows together is + produced, which then becomes the new row. The incoming + :class:`.CursorResult` must have the identical number of rows. It is + typically expected that the two result sets come from the same sort + order as well, as the result rows are spliced together based on their + position in the result. + + The expected use case here is so that multiple INSERT..RETURNING + statements (which definitely need to be sorted) against different + tables can produce a single result that looks like a JOIN of those two + tables. + + E.g.:: + + r1 = connection.execute( + users.insert().returning( + users.c.user_name, + users.c.user_id, + sort_by_parameter_order=True + ), + user_values + ) + + r2 = connection.execute( + addresses.insert().returning( + addresses.c.address_id, + addresses.c.address, + addresses.c.user_id, + sort_by_parameter_order=True + ), + address_values + ) + + rows = r1.splice_horizontally(r2).all() + assert ( + rows == + [ + ("john", 1, 1, "foo@bar.com", 1), + ("jack", 2, 2, "bar@bat.com", 2), + ] + ) + + .. versionadded:: 2.0 + + .. seealso:: + + :meth:`.CursorResult.splice_vertically` + + + """ + + clone = self._generate() + total_rows = [ + tuple(r1) + tuple(r2) + for r1, r2 in zip( + list(self._raw_row_iterator()), + list(other._raw_row_iterator()), + ) + ] + + clone._metadata = clone._metadata._splice_horizontally(other._metadata) + + clone.cursor_strategy = FullyBufferedCursorFetchStrategy( + None, + initial_buffer=total_rows, + ) + clone._reset_memoizations() + return clone + + def splice_vertically(self, other): + """Return a new :class:`.CursorResult` that "vertically splices", + i.e. "extends", the rows of this :class:`.CursorResult` with that of + another :class:`.CursorResult`. + + .. tip:: This method is for the benefit of the SQLAlchemy ORM and is + not intended for general use. + + "vertically splices" means the rows of the given result are appended to + the rows of this cursor result. The incoming :class:`.CursorResult` + must have rows that represent the identical list of columns in the + identical order as they are in this :class:`.CursorResult`. + + .. versionadded:: 2.0 + + .. seealso:: + + :meth:`.CursorResult.splice_horizontally` + + """ + clone = self._generate() + total_rows = list(self._raw_row_iterator()) + list( + other._raw_row_iterator() + ) + + clone.cursor_strategy = FullyBufferedCursorFetchStrategy( + None, + initial_buffer=total_rows, + ) + clone._reset_memoizations() + return clone + + def _rewind(self, rows): + """rewind this result back to the given rowset. 
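
A hedged sketch of ``splice_vertically()`` above, which the docstring flags as an ORM-internal helper; it assumes an in-memory SQLite engine and two results with identical column layout::

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite://")
    with engine.begin() as conn:
        conn.execute(text("CREATE TABLE t (x INTEGER)"))
        conn.execute(text("INSERT INTO t VALUES (1), (2)"))

        r1 = conn.execute(text("SELECT x FROM t WHERE x = 1"))
        r2 = conn.execute(text("SELECT x FROM t WHERE x = 2"))

        # the rows of r2 are appended to the rows of r1
        rows = r1.splice_vertically(r2).all()
        assert [r.x for r in rows] == [1, 2]
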
+ + this is used internally for the case where an :class:`.Insert` + construct combines the use of + :meth:`.Insert.return_defaults` along with the + "supplemental columns" feature. + + """ + + if self._echo: + self.context.connection._log_debug( + "CursorResult rewound %d row(s)", len(rows) + ) + + # the rows given are expected to be Row objects, so we + # have to clear out processors which have already run on these + # rows + self._metadata = cast( + CursorResultMetaData, self._metadata + )._remove_processors() + + self.cursor_strategy = FullyBufferedCursorFetchStrategy( + None, + # TODO: if these are Row objects, can we save on not having to + # re-make new Row objects out of them a second time? is that + # what's actually happening right now? maybe look into this + initial_buffer=rows, + ) + self._reset_memoizations() + return self + + @property + def returned_defaults(self): + """Return the values of default columns that were fetched using + the :meth:`.ValuesBase.return_defaults` feature. + + The value is an instance of :class:`.Row`, or ``None`` + if :meth:`.ValuesBase.return_defaults` was not used or if the + backend does not support RETURNING. + + .. seealso:: + + :meth:`.ValuesBase.return_defaults` + + """ + + if self.context.executemany: + raise exc.InvalidRequestError( + "This statement was an executemany call; if return defaults " + "is supported, please use .returned_defaults_rows." + ) + + rows = self.context.returned_default_rows + if rows: + return rows[0] + else: + return None + + def lastrow_has_defaults(self): + """Return ``lastrow_has_defaults()`` from the underlying + :class:`.ExecutionContext`. + + See :class:`.ExecutionContext` for details. + + """ + + return self.context.lastrow_has_defaults() + + def postfetch_cols(self): + """Return ``postfetch_cols()`` from the underlying + :class:`.ExecutionContext`. + + See :class:`.ExecutionContext` for details. + + Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed + statement is not a compiled expression construct + or is not an insert() or update() construct. + + """ + + if not self.context.compiled: + raise exc.InvalidRequestError( + "Statement is not a compiled expression construct." + ) + elif not self.context.isinsert and not self.context.isupdate: + raise exc.InvalidRequestError( + "Statement is not an insert() or update() " + "expression construct." + ) + return self.context.postfetch_cols + + def prefetch_cols(self): + """Return ``prefetch_cols()`` from the underlying + :class:`.ExecutionContext`. + + See :class:`.ExecutionContext` for details. + + Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed + statement is not a compiled expression construct + or is not an insert() or update() construct. + + """ + + if not self.context.compiled: + raise exc.InvalidRequestError( + "Statement is not a compiled expression construct." + ) + elif not self.context.isinsert and not self.context.isupdate: + raise exc.InvalidRequestError( + "Statement is not an insert() or update() " + "expression construct." + ) + return self.context.prefetch_cols + + def supports_sane_rowcount(self): + """Return ``supports_sane_rowcount`` from the dialect. + + See :attr:`_engine.CursorResult.rowcount` for background. + + """ + + return self.dialect.supports_sane_rowcount + + def supports_sane_multi_rowcount(self): + """Return ``supports_sane_multi_rowcount`` from the dialect. + + See :attr:`_engine.CursorResult.rowcount` for background. 
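
The ``rowcount`` accessor documented next is typically used as the data-integrity check it describes; a short sketch, assuming an in-memory SQLite engine::

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite://")
    with engine.begin() as conn:
        conn.execute(text("CREATE TABLE account (id INTEGER, balance INTEGER)"))
        conn.execute(text("INSERT INTO account VALUES (1, 100)"))

        result = conn.execute(
            text("UPDATE account SET balance = balance - 10 WHERE id = :id"),
            {"id": 1},
        )
        # exactly one row should have matched; anything else is a bug
        assert result.rowcount == 1
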
+
+        """
+
+        return self.dialect.supports_sane_multi_rowcount
+
+    @util.memoized_property
+    def rowcount(self) -> int:
+        """Return the 'rowcount' for this result.
+
+        The primary purpose of 'rowcount' is to report the number of rows
+        matched by the WHERE criterion of an UPDATE or DELETE statement
+        executed once (i.e. for a single parameter set), which may then be
+        compared to the number of rows expected to be updated or deleted as a
+        means of asserting data integrity.
+
+        This attribute is transferred from the ``cursor.rowcount`` attribute
+        of the DBAPI before the cursor is closed, to support DBAPIs that
+        don't make this value available after cursor close.  Some DBAPIs may
+        offer meaningful values for other kinds of statements, such as INSERT
+        and SELECT statements as well.  In order to retrieve ``cursor.rowcount``
+        for these statements, set the
+        :paramref:`.Connection.execution_options.preserve_rowcount`
+        execution option to True, which will cause the ``cursor.rowcount``
+        value to be unconditionally memoized before any results are returned
+        or the cursor is closed, regardless of statement type.
+
+        For cases where the DBAPI does not support rowcount for a particular
+        kind of statement and/or execution, the returned value will be ``-1``,
+        which is delivered directly from the DBAPI and is part of :pep:`249`.
+        All DBAPIs should support rowcount for single-parameter-set
+        UPDATE and DELETE statements, however.
+
+        .. note::
+
+           Notes regarding :attr:`_engine.CursorResult.rowcount`:
+
+
+           * This attribute returns the number of rows *matched*,
+             which is not necessarily the same as the number of rows
+             that were actually *modified*.  For example, an UPDATE statement
+             may have no net change on a given row if the SET values
+             given are the same as those present in the row already.
+             Such a row would be matched but not modified.
+             On backends that feature both styles, such as MySQL,
+             rowcount is configured to return the match
+             count in all cases.
+
+           * :attr:`_engine.CursorResult.rowcount` in the default case is
+             *only* useful in conjunction with an UPDATE or DELETE statement,
+             and only with a single set of parameters.  For other kinds of
+             statements, SQLAlchemy will not attempt to pre-memoize the value
+             unless the
+             :paramref:`.Connection.execution_options.preserve_rowcount`
+             execution option is used.  Note that contrary to :pep:`249`, many
+             DBAPIs do not support rowcount values for statements that are not
+             UPDATE or DELETE, particularly when rows are being returned which
+             are not fully pre-buffered.  DBAPIs that don't support rowcount
+             for a particular kind of statement should return the value ``-1``
+             for such statements.
+
+           * :attr:`_engine.CursorResult.rowcount` may not be meaningful
+             when executing a single statement with multiple parameter sets
+             (i.e. an :term:`executemany`).  Most DBAPIs do not sum "rowcount"
+             values across multiple parameter sets and will return ``-1``
+             when accessed.
+
+           * SQLAlchemy's :ref:`engine_insertmanyvalues` feature does support
+             a correct population of :attr:`_engine.CursorResult.rowcount`
+             when the :paramref:`.Connection.execution_options.preserve_rowcount`
+             execution option is set to True.
+
+           * Statements that use RETURNING may not support rowcount, returning
+             a ``-1`` value instead.
+
+        .. seealso::
+
+            :ref:`tutorial_update_delete_rowcount` - in the :ref:`unified_tutorial`
+
+            :paramref:`.Connection.execution_options.preserve_rowcount`
+
+        """  # noqa: E501
+        try:
+            return self.context.rowcount
+        except BaseException as e:
+            self.cursor_strategy.handle_exception(self, self.cursor, e)
+            raise  # not called
+
+    @property
+    def lastrowid(self):
+        """Return the 'lastrowid' accessor on the DBAPI cursor.
+
+        This is a DBAPI specific method and is only functional
+        for those backends which support it, for statements
+        where it is appropriate.  Its behavior is not
+        consistent across backends.
+
+        Usage of this method is normally unnecessary when
+        using insert() expression constructs; the
+        :attr:`~CursorResult.inserted_primary_key` attribute provides a
+        tuple of primary key values for a newly inserted row,
+        regardless of database backend.
+
+        """
+        try:
+            return self.context.get_lastrowid()
+        except BaseException as e:
+            self.cursor_strategy.handle_exception(self, self.cursor, e)
+
+    @property
+    def returns_rows(self):
+        """True if this :class:`_engine.CursorResult` returns zero or more
+        rows.
+
+        I.e. if it is legal to call the methods
+        :meth:`_engine.CursorResult.fetchone`,
+        :meth:`_engine.CursorResult.fetchmany`, and
+        :meth:`_engine.CursorResult.fetchall`.
+
+        Overall, the value of :attr:`_engine.CursorResult.returns_rows` should
+        always be synonymous with whether or not the DBAPI cursor had a
+        ``.description`` attribute, indicating the presence of result columns,
+        noting that a cursor that returns zero rows still has a
+        ``.description`` if a row-returning statement was emitted.
+
+        This attribute should be True for all results that are against
+        SELECT statements, as well as for DML statements INSERT/UPDATE/DELETE
+        that use RETURNING.  For INSERT/UPDATE/DELETE statements that were
+        not using RETURNING, the value will usually be False, however
+        there are some dialect-specific exceptions to this, such as when
+        using the MSSQL / pyodbc dialect a SELECT is emitted inline in
+        order to retrieve an inserted primary key value.
+
+
+        """
+        return self._metadata.returns_rows
+
+    @property
+    def is_insert(self):
+        """True if this :class:`_engine.CursorResult` is the result
+        of executing an expression language compiled
+        :func:`_expression.insert` construct.
+
+        When True, this implies that the
+        :attr:`inserted_primary_key` attribute is accessible,
+        assuming the statement did not include
+        a user defined "returning" construct.
+
+        """
+        return self.context.isinsert
+
+    def _fetchiter_impl(self):
+        fetchone = self.cursor_strategy.fetchone
+
+        while True:
+            row = fetchone(self, self.cursor)
+            if row is None:
+                break
+            yield row
+
+    def _fetchone_impl(self, hard_close=False):
+        return self.cursor_strategy.fetchone(self, self.cursor, hard_close)
+
+    def _fetchall_impl(self):
+        return self.cursor_strategy.fetchall(self, self.cursor)
+
+    def _fetchmany_impl(self, size=None):
+        return self.cursor_strategy.fetchmany(self, self.cursor, size)
+
+    def _raw_row_iterator(self):
+        return self._fetchiter_impl()
+
+    def merge(self, *others: Result[Any]) -> MergedResult[Any]:
+        merged_result = super().merge(*others)
+        if self.context._has_rowcount:
+            merged_result.rowcount = sum(
+                cast("CursorResult[Any]", result).rowcount
+                for result in (self,) + others
+            )
+        return merged_result
+
+    def close(self) -> Any:
+        """Close this :class:`_engine.CursorResult`.
+
+        This closes out the underlying DBAPI cursor corresponding to the
+        statement execution, if one is still present.  Note that the DBAPI
+        cursor is automatically released when the :class:`_engine.CursorResult`
+        exhausts all available rows.  :meth:`_engine.CursorResult.close` is
+        generally an optional method except in the case when discarding a
+        :class:`_engine.CursorResult` that still has additional rows pending
+        for fetch.
+
+        After this method is called, it is no longer valid to call upon
+        the fetch methods, which will raise a :class:`.ResourceClosedError`
+        on subsequent use.
+
+        .. seealso::
+
+            :ref:`connections_toplevel`
+
+        """
+        self._soft_close(hard=True)
+
+    @_generative
+    def yield_per(self, num: int) -> Self:
+        self._yield_per = num
+        self.cursor_strategy.yield_per(self, self.cursor, num)
+        return self
+
+
+ResultProxy = CursorResult
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/default.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/default.py
new file mode 100644
index 00000000..d42a3138
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/default.py
@@ -0,0 +1,2365 @@
+# engine/default.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""Default implementations of per-dialect sqlalchemy.engine classes.
+
+These are semi-private implementation classes which are only of importance
+to database dialect authors; dialects will usually use the classes here
+as the base class for their own corresponding classes.
+
+"""
+
+from __future__ import annotations
+
+import functools
+import operator
+import random
+import re
+from time import perf_counter
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Mapping
+from typing import MutableMapping
+from typing import MutableSequence
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+import weakref
+
+from . import characteristics
+from . import cursor as _cursor
+from . import interfaces
+from .base import Connection
+from .interfaces import CacheStats
+from .interfaces import DBAPICursor
+from .interfaces import Dialect
+from .interfaces import ExecuteStyle
+from .interfaces import ExecutionContext
+from .reflection import ObjectKind
+from .reflection import ObjectScope
+from .. import event
+from .. import exc
+from .. import pool
+from .. import util
+from ..sql import compiler
+from ..sql import dml
+from ..sql import expression
+from ..sql import type_api
+from ..sql import util as sql_util
+from ..sql._typing import is_tuple_type
+from ..sql.base import _NoArg
+from ..sql.compiler import DDLCompiler
+from ..sql.compiler import InsertmanyvaluesSentinelOpts
+from ..sql.compiler import SQLCompiler
+from ..sql.elements import quoted_name
+from ..util.typing import Final
+from ..util.typing import Literal
+
+if typing.TYPE_CHECKING:
+    from types import ModuleType
+
+    from .base import Engine
+    from .cursor import ResultFetchStrategy
+    from .interfaces import _CoreMultiExecuteParams
+    from .interfaces import _CoreSingleExecuteParams
+    from .interfaces import _DBAPICursorDescription
+    from .interfaces import _DBAPIMultiExecuteParams
+    from .interfaces import _ExecuteOptions
+    from .interfaces import _MutableCoreSingleExecuteParams
+    from .interfaces import _ParamStyle
+    from .interfaces import DBAPIConnection
+    from .interfaces import IsolationLevel
+    from .row import Row
+    from .url import URL
+    from ..event import _ListenerFnType
+    from ..pool import Pool
+    from ..pool import PoolProxiedConnection
+    from ..sql import Executable
+    from ..sql.compiler import Compiled
+    from ..sql.compiler import Linting
+    from ..sql.compiler import ResultColumnsEntry
+    from ..sql.dml import DMLState
+    from ..sql.dml import UpdateBase
+    from ..sql.elements import BindParameter
+    from ..sql.schema import Column
+    from ..sql.type_api import _BindProcessorType
+    from ..sql.type_api import _ResultProcessorType
+    from ..sql.type_api import TypeEngine
+
+# When we're handed literal SQL, ensure it's a SELECT query
+SERVER_SIDE_CURSOR_RE = re.compile(r"\s*SELECT", re.I | re.UNICODE)
+
+
+(
+    CACHE_HIT,
+    CACHE_MISS,
+    CACHING_DISABLED,
+    NO_CACHE_KEY,
+    NO_DIALECT_SUPPORT,
+) = list(CacheStats)
+
+
+class DefaultDialect(Dialect):
+    """Default implementation of Dialect"""
+
+    statement_compiler = compiler.SQLCompiler
+    ddl_compiler = compiler.DDLCompiler
+    type_compiler_cls = compiler.GenericTypeCompiler
+
+    preparer = compiler.IdentifierPreparer
+    supports_alter = True
+    supports_comments = False
+    supports_constraint_comments = False
+    inline_comments = False
+    supports_statement_cache = True
+
+    div_is_floordiv = True
+
+    bind_typing = interfaces.BindTyping.NONE
+
+    include_set_input_sizes: Optional[Set[Any]] = None
+    exclude_set_input_sizes: Optional[Set[Any]] = None
+
+    # the first value we'd get for an autoincrement column.
+    default_sequence_base = 1
+
+    # most DBAPIs happy with this for execute().
+    # not cx_oracle.
+
+    execute_sequence_format = tuple
+
+    supports_schemas = True
+    supports_views = True
+    supports_sequences = False
+    sequences_optional = False
+    preexecute_autoincrement_sequences = False
+    supports_identity_columns = False
+    postfetch_lastrowid = True
+    favor_returning_over_lastrowid = False
+    insert_null_pk_still_autoincrements = False
+    update_returning = False
+    delete_returning = False
+    update_returning_multifrom = False
+    delete_returning_multifrom = False
+    insert_returning = False
+
+    cte_follows_insert = False
+
+    supports_native_enum = False
+    supports_native_boolean = False
+    supports_native_uuid = False
+    returns_native_bytes = False
+
+    non_native_boolean_check_constraint = True
+
+    supports_simple_order_by_label = True
+
+    tuple_in_values = False
+
+    connection_characteristics = util.immutabledict(
+        {
+            "isolation_level": characteristics.IsolationLevelCharacteristic(),
+            "logging_token": characteristics.LoggingTokenCharacteristic(),
+        }
+    )
+
+    engine_config_types: Mapping[str, Any] = util.immutabledict(
+        {
+            "pool_timeout": util.asint,
+            "echo": util.bool_or_str("debug"),
+            "echo_pool": util.bool_or_str("debug"),
+            "pool_recycle": util.asint,
+            "pool_size": util.asint,
+            "max_overflow": util.asint,
+            "future": util.asbool,
+        }
+    )
+
+    # if the NUMERIC type
+    # returns decimal.Decimal.
+    # *not* the FLOAT type however.
+    supports_native_decimal = False
+
+    name = "default"
+
+    # length at which to truncate
+    # any identifier.
+    max_identifier_length = 9999
+    _user_defined_max_identifier_length: Optional[int] = None
+
+    isolation_level: Optional[str] = None
+
+    # sub-categories of max_identifier_length.
+    # currently these accommodate for MySQL which allows alias names
+    # of 255 but DDL names only of 64.
+    max_index_name_length: Optional[int] = None
+    max_constraint_name_length: Optional[int] = None
+
+    supports_sane_rowcount = True
+    supports_sane_multi_rowcount = True
+    colspecs: MutableMapping[Type[TypeEngine[Any]], Type[TypeEngine[Any]]] = {}
+    default_paramstyle = "named"
+
+    supports_default_values = False
+    """dialect supports INSERT... DEFAULT VALUES syntax"""
+
+    supports_default_metavalue = False
+    """dialect supports INSERT... VALUES (DEFAULT) syntax"""
+
+    default_metavalue_token = "DEFAULT"
+    """for INSERT... VALUES (DEFAULT) syntax, the token to put in the
+    parenthesis."""
+
+    # not sure if this is a real thing but the compiler will deliver it
+    # if this is the only flag enabled.
+    supports_empty_insert = True
+    """dialect supports INSERT () VALUES ()"""
+
+    supports_multivalues_insert = False
+
+    use_insertmanyvalues: bool = False
+
+    use_insertmanyvalues_wo_returning: bool = False
+
+    insertmanyvalues_implicit_sentinel: InsertmanyvaluesSentinelOpts = (
+        InsertmanyvaluesSentinelOpts.NOT_SUPPORTED
+    )
+
+    insertmanyvalues_page_size: int = 1000
+    insertmanyvalues_max_parameters = 32700
+
+    supports_is_distinct_from = True
+
+    supports_server_side_cursors = False
+
+    server_side_cursors = False
+
+    # extra record-level locking features (#4860)
+    supports_for_update_of = False
+
+    server_version_info = None
+
+    default_schema_name: Optional[str] = None
+
+    # indicates symbol names are
+    # UPPERCASEd if they are case insensitive
+    # within the database.
+    # if this is True, the methods normalize_name()
+    # and denormalize_name() must be provided.
+    requires_name_normalize = False
+
+    is_async = False
+
+    has_terminate = False
+
+    # TODO: this is not to be part of 2.0.  implement rudimentary binary
+    # literals for SQLite, PostgreSQL, MySQL only within
+    # _Binary.literal_processor
+    _legacy_binary_type_literal_encoding = "utf-8"
+
+    @util.deprecated_params(
+        empty_in_strategy=(
+            "1.4",
+            "The :paramref:`_sa.create_engine.empty_in_strategy` keyword is "
+            "deprecated, and no longer has any effect.  All IN expressions "
+            "are now rendered using "
+            'the "expanding parameter" strategy which renders a set of bound '
+            'expressions, or an "empty set" SELECT, at statement execution '
+            "time.",
+        ),
+        server_side_cursors=(
+            "1.4",
+            "The :paramref:`_sa.create_engine.server_side_cursors` parameter "
+            "is deprecated and will be removed in a future release.  Please "
+            "use the "
+            ":paramref:`_engine.Connection.execution_options.stream_results` "
+            "parameter.",
+        ),
+    )
+    def __init__(
+        self,
+        paramstyle: Optional[_ParamStyle] = None,
+        isolation_level: Optional[IsolationLevel] = None,
+        dbapi: Optional[ModuleType] = None,
+        implicit_returning: Literal[True] = True,
+        supports_native_boolean: Optional[bool] = None,
+        max_identifier_length: Optional[int] = None,
+        label_length: Optional[int] = None,
+        insertmanyvalues_page_size: Union[_NoArg, int] = _NoArg.NO_ARG,
+        use_insertmanyvalues: Optional[bool] = None,
+        # util.deprecated_params decorator cannot render the
+        # Linting.NO_LINTING constant
+        compiler_linting: Linting = int(compiler.NO_LINTING),  # type: ignore
+        server_side_cursors: bool = False,
+        **kwargs: Any,
+    ):
+        if server_side_cursors:
+            if not self.supports_server_side_cursors:
+                raise exc.ArgumentError(
+                    "Dialect %s does not support server side cursors" % self
+                )
+            else:
+                self.server_side_cursors = True
+
+        if getattr(self, "use_setinputsizes", False):
+            util.warn_deprecated(
+                "The dialect-level use_setinputsizes attribute is "
+                "deprecated.  Please use "
+                "bind_typing = BindTyping.SETINPUTSIZES",
+                "2.0",
+            )
+            self.bind_typing = interfaces.BindTyping.SETINPUTSIZES
+
+        self.positional = False
+        self._ischema = None
+
+        self.dbapi = dbapi
+
+        if paramstyle is not None:
+            self.paramstyle = paramstyle
+        elif self.dbapi is not None:
+            self.paramstyle = self.dbapi.paramstyle
+        else:
+            self.paramstyle = self.default_paramstyle
+        self.positional = self.paramstyle in (
+            "qmark",
+            "format",
+            "numeric",
+            "numeric_dollar",
+        )
+        self.identifier_preparer = self.preparer(self)
+        self._on_connect_isolation_level = isolation_level
+
+        legacy_tt_callable = getattr(self, "type_compiler", None)
+        if legacy_tt_callable is not None:
+            tt_callable = cast(
+                Type[compiler.GenericTypeCompiler],
+                self.type_compiler,
+            )
+        else:
+            tt_callable = self.type_compiler_cls
+
+        self.type_compiler_instance = self.type_compiler = tt_callable(self)
+
+        if supports_native_boolean is not None:
+            self.supports_native_boolean = supports_native_boolean
+
+        self._user_defined_max_identifier_length = max_identifier_length
+        if self._user_defined_max_identifier_length:
+            self.max_identifier_length = (
+                self._user_defined_max_identifier_length
+            )
+        self.label_length = label_length
+        self.compiler_linting = compiler_linting
+
+        if use_insertmanyvalues is not None:
+            self.use_insertmanyvalues = use_insertmanyvalues
+
+        if insertmanyvalues_page_size is not _NoArg.NO_ARG:
+            self.insertmanyvalues_page_size = insertmanyvalues_page_size
+
+    @property
+    @util.deprecated(
+        "2.0",
+        "full_returning is deprecated, please use insert_returning, "
+        "update_returning, delete_returning",
+    )
+    def full_returning(self):
+        return (
+            self.insert_returning
+            and self.update_returning
+            and self.delete_returning
+        )
+
+    @util.memoized_property
+    def insert_executemany_returning(self):
+        """Default implementation for insert_executemany_returning, if not
+        otherwise overridden by the specific dialect.
+
+        The default dialect determines "insert_executemany_returning" is
+        available if the dialect in use has opted into using the
+        "use_insertmanyvalues" feature.  If they haven't opted into that, then
+        this attribute is False, unless the dialect in question overrides this
+        and provides some other implementation (such as the Oracle dialect).
+
+        """
+        return self.insert_returning and self.use_insertmanyvalues
+
+    @util.memoized_property
+    def insert_executemany_returning_sort_by_parameter_order(self):
+        """Default implementation for
+        insert_executemany_returning_deterministic_order, if not otherwise
+        overridden by the specific dialect.
+
+        The default dialect determines "insert_executemany_returning" can have
+        deterministic order only if the dialect in use has opted into using the
+        "use_insertmanyvalues" feature, which implements deterministic ordering
+        using client side sentinel columns only by default.  The
+        "insertmanyvalues" feature also features alternate forms that can
+        use server-generated PK values as "sentinels", but those are only
+        used if the :attr:`.Dialect.insertmanyvalues_implicit_sentinel`
+        bitflag enables those alternate SQL forms, which are disabled
+        by default.
+
+        If the dialect in use hasn't opted into that, then this attribute is
+        False, unless the dialect in question overrides this and provides some
+        other implementation (such as the Oracle dialect).
+ + """ + return self.insert_returning and self.use_insertmanyvalues + + update_executemany_returning = False + delete_executemany_returning = False + + @util.memoized_property + def loaded_dbapi(self) -> ModuleType: + if self.dbapi is None: + raise exc.InvalidRequestError( + f"Dialect {self} does not have a Python DBAPI established " + "and cannot be used for actual database interaction" + ) + return self.dbapi + + @util.memoized_property + def _bind_typing_render_casts(self): + return self.bind_typing is interfaces.BindTyping.RENDER_CASTS + + def _ensure_has_table_connection(self, arg): + if not isinstance(arg, Connection): + raise exc.ArgumentError( + "The argument passed to Dialect.has_table() should be a " + "%s, got %s. " + "Additionally, the Dialect.has_table() method is for " + "internal dialect " + "use only; please use " + "``inspect(some_engine).has_table(>)`` " + "for public API use." % (Connection, type(arg)) + ) + + @util.memoized_property + def _supports_statement_cache(self): + ssc = self.__class__.__dict__.get("supports_statement_cache", None) + if ssc is None: + util.warn( + "Dialect %s:%s will not make use of SQL compilation caching " + "as it does not set the 'supports_statement_cache' attribute " + "to ``True``. This can have " + "significant performance implications including some " + "performance degradations in comparison to prior SQLAlchemy " + "versions. Dialect maintainers should seek to set this " + "attribute to True after appropriate development and testing " + "for SQLAlchemy 1.4 caching support. Alternatively, this " + "attribute may be set to False which will disable this " + "warning." % (self.name, self.driver), + code="cprf", + ) + + return bool(ssc) + + @util.memoized_property + def _type_memos(self): + return weakref.WeakKeyDictionary() + + @property + def dialect_description(self): + return self.name + "+" + self.driver + + @property + def supports_sane_rowcount_returning(self): + """True if this dialect supports sane rowcount even if RETURNING is + in use. + + For dialects that don't support RETURNING, this is synonymous with + ``supports_sane_rowcount``. 
+ + """ + return self.supports_sane_rowcount + + @classmethod + def get_pool_class(cls, url: URL) -> Type[Pool]: + return getattr(cls, "poolclass", pool.QueuePool) + + def get_dialect_pool_class(self, url: URL) -> Type[Pool]: + return self.get_pool_class(url) + + @classmethod + def load_provisioning(cls): + package = ".".join(cls.__module__.split(".")[0:-1]) + try: + __import__(package + ".provision") + except ImportError: + pass + + def _builtin_onconnect(self) -> Optional[_ListenerFnType]: + if self._on_connect_isolation_level is not None: + + def builtin_connect(dbapi_conn, conn_rec): + self._assert_and_set_isolation_level( + dbapi_conn, self._on_connect_isolation_level + ) + + return builtin_connect + else: + return None + + def initialize(self, connection): + try: + self.server_version_info = self._get_server_version_info( + connection + ) + except NotImplementedError: + self.server_version_info = None + try: + self.default_schema_name = self._get_default_schema_name( + connection + ) + except NotImplementedError: + self.default_schema_name = None + + try: + self.default_isolation_level = self.get_default_isolation_level( + connection.connection.dbapi_connection + ) + except NotImplementedError: + self.default_isolation_level = None + + if not self._user_defined_max_identifier_length: + max_ident_length = self._check_max_identifier_length(connection) + if max_ident_length: + self.max_identifier_length = max_ident_length + + if ( + self.label_length + and self.label_length > self.max_identifier_length + ): + raise exc.ArgumentError( + "Label length of %d is greater than this dialect's" + " maximum identifier length of %d" + % (self.label_length, self.max_identifier_length) + ) + + def on_connect(self): + # inherits the docstring from interfaces.Dialect.on_connect + return None + + def _check_max_identifier_length(self, connection): + """Perform a connection / server version specific check to determine + the max_identifier_length. + + If the dialect's class level max_identifier_length should be used, + can return None. + + .. versionadded:: 1.3.9 + + """ + return None + + def get_default_isolation_level(self, dbapi_conn): + """Given a DBAPI connection, return its isolation level, or + a default isolation level if one cannot be retrieved. + + May be overridden by subclasses in order to provide a + "fallback" isolation level for databases that cannot reliably + retrieve the actual isolation level. + + By default, calls the :meth:`_engine.Interfaces.get_isolation_level` + method, propagating any exceptions raised. + + .. versionadded:: 1.3.22 + + """ + return self.get_isolation_level(dbapi_conn) + + def type_descriptor(self, typeobj): + """Provide a database-specific :class:`.TypeEngine` object, given + the generic object which comes from the types module. + + This method looks for a dictionary called + ``colspecs`` as a class or instance-level variable, + and passes on to :func:`_types.adapt_type`. 
+ + """ + return type_api.adapt_type(typeobj, self.colspecs) + + def has_index(self, connection, table_name, index_name, schema=None, **kw): + if not self.has_table(connection, table_name, schema=schema, **kw): + return False + for idx in self.get_indexes( + connection, table_name, schema=schema, **kw + ): + if idx["name"] == index_name: + return True + else: + return False + + def has_schema( + self, connection: Connection, schema_name: str, **kw: Any + ) -> bool: + return schema_name in self.get_schema_names(connection, **kw) + + def validate_identifier(self, ident): + if len(ident) > self.max_identifier_length: + raise exc.IdentifierError( + "Identifier '%s' exceeds maximum length of %d characters" + % (ident, self.max_identifier_length) + ) + + def connect(self, *cargs, **cparams): + # inherits the docstring from interfaces.Dialect.connect + return self.loaded_dbapi.connect(*cargs, **cparams) + + def create_connect_args(self, url): + # inherits the docstring from interfaces.Dialect.create_connect_args + opts = url.translate_connect_args() + opts.update(url.query) + return ([], opts) + + def set_engine_execution_options( + self, engine: Engine, opts: Mapping[str, Any] + ) -> None: + supported_names = set(self.connection_characteristics).intersection( + opts + ) + if supported_names: + characteristics: Mapping[str, Any] = util.immutabledict( + (name, opts[name]) for name in supported_names + ) + + @event.listens_for(engine, "engine_connect") + def set_connection_characteristics(connection): + self._set_connection_characteristics( + connection, characteristics + ) + + def set_connection_execution_options( + self, connection: Connection, opts: Mapping[str, Any] + ) -> None: + supported_names = set(self.connection_characteristics).intersection( + opts + ) + if supported_names: + characteristics: Mapping[str, Any] = util.immutabledict( + (name, opts[name]) for name in supported_names + ) + self._set_connection_characteristics(connection, characteristics) + + def _set_connection_characteristics(self, connection, characteristics): + characteristic_values = [ + (name, self.connection_characteristics[name], value) + for name, value in characteristics.items() + ] + + if connection.in_transaction(): + trans_objs = [ + (name, obj) + for name, obj, _ in characteristic_values + if obj.transactional + ] + if trans_objs: + raise exc.InvalidRequestError( + "This connection has already initialized a SQLAlchemy " + "Transaction() object via begin() or autobegin; " + "%s may not be altered unless rollback() or commit() " + "is called first." 
+ % (", ".join(name for name, obj in trans_objs)) + ) + + dbapi_connection = connection.connection.dbapi_connection + for _, characteristic, value in characteristic_values: + characteristic.set_connection_characteristic( + self, connection, dbapi_connection, value + ) + connection.connection._connection_record.finalize_callback.append( + functools.partial(self._reset_characteristics, characteristics) + ) + + def _reset_characteristics(self, characteristics, dbapi_connection): + for characteristic_name in characteristics: + characteristic = self.connection_characteristics[ + characteristic_name + ] + characteristic.reset_characteristic(self, dbapi_connection) + + def do_begin(self, dbapi_connection): + pass + + def do_rollback(self, dbapi_connection): + dbapi_connection.rollback() + + def do_commit(self, dbapi_connection): + dbapi_connection.commit() + + def do_terminate(self, dbapi_connection): + self.do_close(dbapi_connection) + + def do_close(self, dbapi_connection): + dbapi_connection.close() + + @util.memoized_property + def _dialect_specific_select_one(self): + return str(expression.select(1).compile(dialect=self)) + + def _do_ping_w_event(self, dbapi_connection: DBAPIConnection) -> bool: + try: + return self.do_ping(dbapi_connection) + except self.loaded_dbapi.Error as err: + is_disconnect = self.is_disconnect(err, dbapi_connection, None) + + if self._has_events: + try: + Connection._handle_dbapi_exception_noconnection( + err, + self, + is_disconnect=is_disconnect, + invalidate_pool_on_disconnect=False, + is_pre_ping=True, + ) + except exc.StatementError as new_err: + is_disconnect = new_err.connection_invalidated + + if is_disconnect: + return False + else: + raise + + def do_ping(self, dbapi_connection: DBAPIConnection) -> bool: + cursor = None + + cursor = dbapi_connection.cursor() + try: + cursor.execute(self._dialect_specific_select_one) + finally: + cursor.close() + return True + + def create_xid(self): + """Create a random two-phase transaction ID. + + This id will be passed to do_begin_twophase(), do_rollback_twophase(), + do_commit_twophase(). Its format is unspecified. 
+ """ + + return "_sa_%032x" % random.randint(0, 2**128) + + def do_savepoint(self, connection, name): + connection.execute(expression.SavepointClause(name)) + + def do_rollback_to_savepoint(self, connection, name): + connection.execute(expression.RollbackToSavepointClause(name)) + + def do_release_savepoint(self, connection, name): + connection.execute(expression.ReleaseSavepointClause(name)) + + def _deliver_insertmanyvalues_batches( + self, + connection, + cursor, + statement, + parameters, + generic_setinputsizes, + context, + ): + context = cast(DefaultExecutionContext, context) + compiled = cast(SQLCompiler, context.compiled) + + _composite_sentinel_proc: Sequence[ + Optional[_ResultProcessorType[Any]] + ] = () + _scalar_sentinel_proc: Optional[_ResultProcessorType[Any]] = None + _sentinel_proc_initialized: bool = False + + compiled_parameters = context.compiled_parameters + + imv = compiled._insertmanyvalues + assert imv is not None + + is_returning: Final[bool] = bool(compiled.effective_returning) + batch_size = context.execution_options.get( + "insertmanyvalues_page_size", self.insertmanyvalues_page_size + ) + + if compiled.schema_translate_map: + schema_translate_map = context.execution_options.get( + "schema_translate_map", {} + ) + else: + schema_translate_map = None + + if is_returning: + result: Optional[List[Any]] = [] + context._insertmanyvalues_rows = result + + sort_by_parameter_order = imv.sort_by_parameter_order + + else: + sort_by_parameter_order = False + result = None + + for imv_batch in compiled._deliver_insertmanyvalues_batches( + statement, + parameters, + compiled_parameters, + generic_setinputsizes, + batch_size, + sort_by_parameter_order, + schema_translate_map, + ): + yield imv_batch + + if is_returning: + + try: + rows = context.fetchall_for_returning(cursor) + except BaseException as be: + connection._handle_dbapi_exception( + be, + sql_util._long_statement(imv_batch.replaced_statement), + imv_batch.replaced_parameters, + None, + context, + is_sub_exec=True, + ) + + # I would have thought "is_returning: Final[bool]" + # would have assured this but pylance thinks not + assert result is not None + + if imv.num_sentinel_columns and not imv_batch.is_downgraded: + composite_sentinel = imv.num_sentinel_columns > 1 + if imv.implicit_sentinel: + # for implicit sentinel, which is currently single-col + # integer autoincrement, do a simple sort. 
+ assert not composite_sentinel + result.extend( + sorted(rows, key=operator.itemgetter(-1)) + ) + continue + + # otherwise, create dictionaries to match up batches + # with parameters + assert imv.sentinel_param_keys + assert imv.sentinel_columns + + _nsc = imv.num_sentinel_columns + + if not _sentinel_proc_initialized: + if composite_sentinel: + _composite_sentinel_proc = [ + col.type._cached_result_processor( + self, cursor_desc[1] + ) + for col, cursor_desc in zip( + imv.sentinel_columns, + cursor.description[-_nsc:], + ) + ] + else: + _scalar_sentinel_proc = ( + imv.sentinel_columns[0] + ).type._cached_result_processor( + self, cursor.description[-1][1] + ) + _sentinel_proc_initialized = True + + rows_by_sentinel: Union[ + Dict[Tuple[Any, ...], Any], + Dict[Any, Any], + ] + if composite_sentinel: + rows_by_sentinel = { + tuple( + (proc(val) if proc else val) + for val, proc in zip( + row[-_nsc:], _composite_sentinel_proc + ) + ): row + for row in rows + } + elif _scalar_sentinel_proc: + rows_by_sentinel = { + _scalar_sentinel_proc(row[-1]): row for row in rows + } + else: + rows_by_sentinel = {row[-1]: row for row in rows} + + if len(rows_by_sentinel) != len(imv_batch.batch): + # see test_insert_exec.py:: + # IMVSentinelTest::test_sentinel_incorrect_rowcount + # for coverage / demonstration + raise exc.InvalidRequestError( + f"Sentinel-keyed result set did not produce " + f"correct number of rows {len(imv_batch.batch)}; " + "produced " + f"{len(rows_by_sentinel)}. Please ensure the " + "sentinel column is fully unique and populated in " + "all cases." + ) + + try: + ordered_rows = [ + rows_by_sentinel[sentinel_keys] + for sentinel_keys in imv_batch.sentinel_values + ] + except KeyError as ke: + # see test_insert_exec.py:: + # IMVSentinelTest::test_sentinel_cant_match_keys + # for coverage / demonstration + raise exc.InvalidRequestError( + f"Can't match sentinel values in result set to " + f"parameter sets; key {ke.args[0]!r} was not " + "found. " + "There may be a mismatch between the datatype " + "passed to the DBAPI driver vs. that which it " + "returns in a result row. Ensure the given " + "Python value matches the expected result type " + "*exactly*, taking care to not rely upon implicit " + "conversions which may occur such as when using " + "strings in place of UUID or integer values, etc. 
" + ) from ke + + result.extend(ordered_rows) + + else: + result.extend(rows) + + def do_executemany(self, cursor, statement, parameters, context=None): + cursor.executemany(statement, parameters) + + def do_execute(self, cursor, statement, parameters, context=None): + cursor.execute(statement, parameters) + + def do_execute_no_params(self, cursor, statement, context=None): + cursor.execute(statement) + + def is_disconnect(self, e, connection, cursor): + return False + + @util.memoized_instancemethod + def _gen_allowed_isolation_levels(self, dbapi_conn): + try: + raw_levels = list(self.get_isolation_level_values(dbapi_conn)) + except NotImplementedError: + return None + else: + normalized_levels = [ + level.replace("_", " ").upper() for level in raw_levels + ] + if raw_levels != normalized_levels: + raise ValueError( + f"Dialect {self.name!r} get_isolation_level_values() " + f"method should return names as UPPERCASE using spaces, " + f"not underscores; got " + f"{sorted(set(raw_levels).difference(normalized_levels))}" + ) + return tuple(normalized_levels) + + def _assert_and_set_isolation_level(self, dbapi_conn, level): + level = level.replace("_", " ").upper() + + _allowed_isolation_levels = self._gen_allowed_isolation_levels( + dbapi_conn + ) + if ( + _allowed_isolation_levels + and level not in _allowed_isolation_levels + ): + raise exc.ArgumentError( + f"Invalid value {level!r} for isolation_level. " + f"Valid isolation levels for {self.name!r} are " + f"{', '.join(_allowed_isolation_levels)}" + ) + + self.set_isolation_level(dbapi_conn, level) + + def reset_isolation_level(self, dbapi_conn): + if self._on_connect_isolation_level is not None: + assert ( + self._on_connect_isolation_level == "AUTOCOMMIT" + or self._on_connect_isolation_level + == self.default_isolation_level + ) + self._assert_and_set_isolation_level( + dbapi_conn, self._on_connect_isolation_level + ) + else: + assert self.default_isolation_level is not None + self._assert_and_set_isolation_level( + dbapi_conn, + self.default_isolation_level, + ) + + def normalize_name(self, name): + if name is None: + return None + + name_lower = name.lower() + name_upper = name.upper() + + if name_upper == name_lower: + # name has no upper/lower conversion, e.g. non-european characters. + # return unchanged + return name + elif name_upper == name and not ( + self.identifier_preparer._requires_quotes + )(name_lower): + # name is all uppercase and doesn't require quoting; normalize + # to all lower case + return name_lower + elif name_lower == name: + # name is all lower case, which if denormalized means we need to + # force quoting on it + return quoted_name(name, quote=True) + else: + # name is mixed case, means it will be quoted in SQL when used + # later, no normalizes + return name + + def denormalize_name(self, name): + if name is None: + return None + + name_lower = name.lower() + name_upper = name.upper() + + if name_upper == name_lower: + # name has no upper/lower conversion, e.g. non-european characters. 
+ # return unchanged + return name + elif name_lower == name and not ( + self.identifier_preparer._requires_quotes + )(name_lower): + name = name_upper + return name + + def get_driver_connection(self, connection): + return connection + + def _overrides_default(self, method): + return ( + getattr(type(self), method).__code__ + is not getattr(DefaultDialect, method).__code__ + ) + + def _default_multi_reflect( + self, + single_tbl_method, + connection, + kind, + schema, + filter_names, + scope, + **kw, + ): + names_fns = [] + temp_names_fns = [] + if ObjectKind.TABLE in kind: + names_fns.append(self.get_table_names) + temp_names_fns.append(self.get_temp_table_names) + if ObjectKind.VIEW in kind: + names_fns.append(self.get_view_names) + temp_names_fns.append(self.get_temp_view_names) + if ObjectKind.MATERIALIZED_VIEW in kind: + names_fns.append(self.get_materialized_view_names) + # no temp materialized view at the moment + # temp_names_fns.append(self.get_temp_materialized_view_names) + + unreflectable = kw.pop("unreflectable", {}) + + if ( + filter_names + and scope is ObjectScope.ANY + and kind is ObjectKind.ANY + ): + # if names are given and no qualification on type of table + # (i.e. the Table(..., autoload) case), take the names as given, + # don't run names queries. If a table does not exit + # NoSuchTableError is raised and it's skipped + + # this also suits the case for mssql where we can reflect + # individual temp tables but there's no temp_names_fn + names = filter_names + else: + names = [] + name_kw = {"schema": schema, **kw} + fns = [] + if ObjectScope.DEFAULT in scope: + fns.extend(names_fns) + if ObjectScope.TEMPORARY in scope: + fns.extend(temp_names_fns) + + for fn in fns: + try: + names.extend(fn(connection, **name_kw)) + except NotImplementedError: + pass + + if filter_names: + filter_names = set(filter_names) + + # iterate over all the tables/views and call the single table method + for table in names: + if not filter_names or table in filter_names: + key = (schema, table) + try: + yield ( + key, + single_tbl_method( + connection, table, schema=schema, **kw + ), + ) + except exc.UnreflectableTableError as err: + if key not in unreflectable: + unreflectable[key] = err + except exc.NoSuchTableError: + pass + + def get_multi_table_options(self, connection, **kw): + return self._default_multi_reflect( + self.get_table_options, connection, **kw + ) + + def get_multi_columns(self, connection, **kw): + return self._default_multi_reflect(self.get_columns, connection, **kw) + + def get_multi_pk_constraint(self, connection, **kw): + return self._default_multi_reflect( + self.get_pk_constraint, connection, **kw + ) + + def get_multi_foreign_keys(self, connection, **kw): + return self._default_multi_reflect( + self.get_foreign_keys, connection, **kw + ) + + def get_multi_indexes(self, connection, **kw): + return self._default_multi_reflect(self.get_indexes, connection, **kw) + + def get_multi_unique_constraints(self, connection, **kw): + return self._default_multi_reflect( + self.get_unique_constraints, connection, **kw + ) + + def get_multi_check_constraints(self, connection, **kw): + return self._default_multi_reflect( + self.get_check_constraints, connection, **kw + ) + + def get_multi_table_comment(self, connection, **kw): + return self._default_multi_reflect( + self.get_table_comment, connection, **kw + ) + + +class StrCompileDialect(DefaultDialect): + statement_compiler = compiler.StrSQLCompiler + ddl_compiler = compiler.DDLCompiler + type_compiler_cls = 
compiler.StrSQLTypeCompiler + preparer = compiler.IdentifierPreparer + + insert_returning = True + update_returning = True + delete_returning = True + + supports_statement_cache = True + + supports_identity_columns = True + + supports_sequences = True + sequences_optional = True + preexecute_autoincrement_sequences = False + + supports_native_boolean = True + + supports_multivalues_insert = True + supports_simple_order_by_label = True + + +class DefaultExecutionContext(ExecutionContext): + isinsert = False + isupdate = False + isdelete = False + is_crud = False + is_text = False + isddl = False + + execute_style: ExecuteStyle = ExecuteStyle.EXECUTE + + compiled: Optional[Compiled] = None + result_column_struct: Optional[ + Tuple[List[ResultColumnsEntry], bool, bool, bool, bool] + ] = None + returned_default_rows: Optional[Sequence[Row[Any]]] = None + + execution_options: _ExecuteOptions = util.EMPTY_DICT + + cursor_fetch_strategy = _cursor._DEFAULT_FETCH + + invoked_statement: Optional[Executable] = None + + _is_implicit_returning = False + _is_explicit_returning = False + _is_supplemental_returning = False + _is_server_side = False + + _soft_closed = False + + _rowcount: Optional[int] = None + + # a hook for SQLite's translation of + # result column names + # NOTE: pyhive is using this hook, can't remove it :( + _translate_colname: Optional[Callable[[str], str]] = None + + _expanded_parameters: Mapping[str, List[str]] = util.immutabledict() + """used by set_input_sizes(). + + This collection comes from ``ExpandedState.parameter_expansion``. + + """ + + cache_hit = NO_CACHE_KEY + + root_connection: Connection + _dbapi_connection: PoolProxiedConnection + dialect: Dialect + unicode_statement: str + cursor: DBAPICursor + compiled_parameters: List[_MutableCoreSingleExecuteParams] + parameters: _DBAPIMultiExecuteParams + extracted_parameters: Optional[Sequence[BindParameter[Any]]] + + _empty_dict_params = cast("Mapping[str, Any]", util.EMPTY_DICT) + + _insertmanyvalues_rows: Optional[List[Tuple[Any, ...]]] = None + _num_sentinel_cols: int = 0 + + @classmethod + def _init_ddl( + cls, + dialect: Dialect, + connection: Connection, + dbapi_connection: PoolProxiedConnection, + execution_options: _ExecuteOptions, + compiled_ddl: DDLCompiler, + ) -> ExecutionContext: + """Initialize execution context for an ExecutableDDLElement + construct.""" + + self = cls.__new__(cls) + self.root_connection = connection + self._dbapi_connection = dbapi_connection + self.dialect = connection.dialect + + self.compiled = compiled = compiled_ddl + self.isddl = True + + self.execution_options = execution_options + + self.unicode_statement = str(compiled) + if compiled.schema_translate_map: + schema_translate_map = self.execution_options.get( + "schema_translate_map", {} + ) + + rst = compiled.preparer._render_schema_translates + self.unicode_statement = rst( + self.unicode_statement, schema_translate_map + ) + + self.statement = self.unicode_statement + + self.cursor = self.create_cursor() + self.compiled_parameters = [] + + if dialect.positional: + self.parameters = [dialect.execute_sequence_format()] + else: + self.parameters = [self._empty_dict_params] + + return self + + @classmethod + def _init_compiled( + cls, + dialect: Dialect, + connection: Connection, + dbapi_connection: PoolProxiedConnection, + execution_options: _ExecuteOptions, + compiled: SQLCompiler, + parameters: _CoreMultiExecuteParams, + invoked_statement: Executable, + extracted_parameters: Optional[Sequence[BindParameter[Any]]], + cache_hit: 
CacheStats = CacheStats.CACHING_DISABLED, + ) -> ExecutionContext: + """Initialize execution context for a Compiled construct.""" + + self = cls.__new__(cls) + self.root_connection = connection + self._dbapi_connection = dbapi_connection + self.dialect = connection.dialect + self.extracted_parameters = extracted_parameters + self.invoked_statement = invoked_statement + self.compiled = compiled + self.cache_hit = cache_hit + + self.execution_options = execution_options + + self.result_column_struct = ( + compiled._result_columns, + compiled._ordered_columns, + compiled._textual_ordered_columns, + compiled._ad_hoc_textual, + compiled._loose_column_name_matching, + ) + + self.isinsert = ii = compiled.isinsert + self.isupdate = iu = compiled.isupdate + self.isdelete = id_ = compiled.isdelete + self.is_text = compiled.isplaintext + + if ii or iu or id_: + dml_statement = compiled.compile_state.statement # type: ignore + if TYPE_CHECKING: + assert isinstance(dml_statement, UpdateBase) + self.is_crud = True + self._is_explicit_returning = ier = bool(dml_statement._returning) + self._is_implicit_returning = iir = bool( + compiled.implicit_returning + ) + if iir and dml_statement._supplemental_returning: + self._is_supplemental_returning = True + + # dont mix implicit and explicit returning + assert not (iir and ier) + + if (ier or iir) and compiled.for_executemany: + if ii and not self.dialect.insert_executemany_returning: + raise exc.InvalidRequestError( + f"Dialect {self.dialect.dialect_description} with " + f"current server capabilities does not support " + "INSERT..RETURNING when executemany is used" + ) + elif ( + ii + and dml_statement._sort_by_parameter_order + and not self.dialect.insert_executemany_returning_sort_by_parameter_order # noqa: E501 + ): + raise exc.InvalidRequestError( + f"Dialect {self.dialect.dialect_description} with " + f"current server capabilities does not support " + "INSERT..RETURNING with deterministic row ordering " + "when executemany is used" + ) + elif ( + ii + and self.dialect.use_insertmanyvalues + and not compiled._insertmanyvalues + ): + raise exc.InvalidRequestError( + 'Statement does not have "insertmanyvalues" ' + "enabled, can't use INSERT..RETURNING with " + "executemany in this case." 
+ ) + elif iu and not self.dialect.update_executemany_returning: + raise exc.InvalidRequestError( + f"Dialect {self.dialect.dialect_description} with " + f"current server capabilities does not support " + "UPDATE..RETURNING when executemany is used" + ) + elif id_ and not self.dialect.delete_executemany_returning: + raise exc.InvalidRequestError( + f"Dialect {self.dialect.dialect_description} with " + f"current server capabilities does not support " + "DELETE..RETURNING when executemany is used" + ) + + if not parameters: + self.compiled_parameters = [ + compiled.construct_params( + extracted_parameters=extracted_parameters, + escape_names=False, + ) + ] + else: + self.compiled_parameters = [ + compiled.construct_params( + m, + escape_names=False, + _group_number=grp, + extracted_parameters=extracted_parameters, + ) + for grp, m in enumerate(parameters) + ] + + if len(parameters) > 1: + if self.isinsert and compiled._insertmanyvalues: + self.execute_style = ExecuteStyle.INSERTMANYVALUES + + imv = compiled._insertmanyvalues + if imv.sentinel_columns is not None: + self._num_sentinel_cols = imv.num_sentinel_columns + else: + self.execute_style = ExecuteStyle.EXECUTEMANY + + self.unicode_statement = compiled.string + + self.cursor = self.create_cursor() + + if self.compiled.insert_prefetch or self.compiled.update_prefetch: + self._process_execute_defaults() + + processors = compiled._bind_processors + + flattened_processors: Mapping[ + str, _BindProcessorType[Any] + ] = processors # type: ignore[assignment] + + if compiled.literal_execute_params or compiled.post_compile_params: + if self.executemany: + raise exc.InvalidRequestError( + "'literal_execute' or 'expanding' parameters can't be " + "used with executemany()" + ) + + expanded_state = compiled._process_parameters_for_postcompile( + self.compiled_parameters[0] + ) + + # re-assign self.unicode_statement + self.unicode_statement = expanded_state.statement + + self._expanded_parameters = expanded_state.parameter_expansion + + flattened_processors = dict(processors) # type: ignore + flattened_processors.update(expanded_state.processors) + positiontup = expanded_state.positiontup + elif compiled.positional: + positiontup = self.compiled.positiontup + else: + positiontup = None + + if compiled.schema_translate_map: + schema_translate_map = self.execution_options.get( + "schema_translate_map", {} + ) + rst = compiled.preparer._render_schema_translates + self.unicode_statement = rst( + self.unicode_statement, schema_translate_map + ) + + # final self.unicode_statement is now assigned, encode if needed + # by dialect + self.statement = self.unicode_statement + + # Convert the dictionary of bind parameter values + # into a dict or list to be sent to the DBAPI's + # execute() or executemany() method. + + if compiled.positional: + core_positional_parameters: MutableSequence[Sequence[Any]] = [] + assert positiontup is not None + for compiled_params in self.compiled_parameters: + l_param: List[Any] = [ + ( + flattened_processors[key](compiled_params[key]) + if key in flattened_processors + else compiled_params[key] + ) + for key in positiontup + ] + core_positional_parameters.append( + dialect.execute_sequence_format(l_param) + ) + + self.parameters = core_positional_parameters + else: + core_dict_parameters: MutableSequence[Dict[str, Any]] = [] + escaped_names = compiled.escaped_bind_names + + # note that currently, "expanded" parameters will be present + # in self.compiled_parameters in their quoted form. 
This is + # slightly inconsistent with the approach taken as of + # #8056 where self.compiled_parameters is meant to contain unquoted + # param names. + d_param: Dict[str, Any] + for compiled_params in self.compiled_parameters: + if escaped_names: + d_param = { + escaped_names.get(key, key): ( + flattened_processors[key](compiled_params[key]) + if key in flattened_processors + else compiled_params[key] + ) + for key in compiled_params + } + else: + d_param = { + key: ( + flattened_processors[key](compiled_params[key]) + if key in flattened_processors + else compiled_params[key] + ) + for key in compiled_params + } + + core_dict_parameters.append(d_param) + + self.parameters = core_dict_parameters + + return self + + @classmethod + def _init_statement( + cls, + dialect: Dialect, + connection: Connection, + dbapi_connection: PoolProxiedConnection, + execution_options: _ExecuteOptions, + statement: str, + parameters: _DBAPIMultiExecuteParams, + ) -> ExecutionContext: + """Initialize execution context for a string SQL statement.""" + + self = cls.__new__(cls) + self.root_connection = connection + self._dbapi_connection = dbapi_connection + self.dialect = connection.dialect + self.is_text = True + + self.execution_options = execution_options + + if not parameters: + if self.dialect.positional: + self.parameters = [dialect.execute_sequence_format()] + else: + self.parameters = [self._empty_dict_params] + elif isinstance(parameters[0], dialect.execute_sequence_format): + self.parameters = parameters + elif isinstance(parameters[0], dict): + self.parameters = parameters + else: + self.parameters = [ + dialect.execute_sequence_format(p) for p in parameters + ] + + if len(parameters) > 1: + self.execute_style = ExecuteStyle.EXECUTEMANY + + self.statement = self.unicode_statement = statement + + self.cursor = self.create_cursor() + return self + + @classmethod + def _init_default( + cls, + dialect: Dialect, + connection: Connection, + dbapi_connection: PoolProxiedConnection, + execution_options: _ExecuteOptions, + ) -> ExecutionContext: + """Initialize execution context for a ColumnDefault construct.""" + + self = cls.__new__(cls) + self.root_connection = connection + self._dbapi_connection = dbapi_connection + self.dialect = connection.dialect + + self.execution_options = execution_options + + self.cursor = self.create_cursor() + return self + + def _get_cache_stats(self) -> str: + if self.compiled is None: + return "raw sql" + + now = perf_counter() + + ch = self.cache_hit + + gen_time = self.compiled._gen_time + assert gen_time is not None + + if ch is NO_CACHE_KEY: + return "no key %.5fs" % (now - gen_time,) + elif ch is CACHE_HIT: + return "cached since %.4gs ago" % (now - gen_time,) + elif ch is CACHE_MISS: + return "generated in %.5fs" % (now - gen_time,) + elif ch is CACHING_DISABLED: + if "_cache_disable_reason" in self.execution_options: + return "caching disabled (%s) %.5fs " % ( + self.execution_options["_cache_disable_reason"], + now - gen_time, + ) + else: + return "caching disabled %.5fs" % (now - gen_time,) + elif ch is NO_DIALECT_SUPPORT: + return "dialect %s+%s does not support caching %.5fs" % ( + self.dialect.name, + self.dialect.driver, + now - gen_time, + ) + else: + return "unknown" + + @property + def executemany(self): + return self.execute_style in ( + ExecuteStyle.EXECUTEMANY, + ExecuteStyle.INSERTMANYVALUES, + ) + + @util.memoized_property + def identifier_preparer(self): + if self.compiled: + return self.compiled.preparer + elif "schema_translate_map" in 
self.execution_options: + return self.dialect.identifier_preparer._with_schema_translate( + self.execution_options["schema_translate_map"] + ) + else: + return self.dialect.identifier_preparer + + @util.memoized_property + def engine(self): + return self.root_connection.engine + + @util.memoized_property + def postfetch_cols(self) -> Optional[Sequence[Column[Any]]]: + if TYPE_CHECKING: + assert isinstance(self.compiled, SQLCompiler) + return self.compiled.postfetch + + @util.memoized_property + def prefetch_cols(self) -> Optional[Sequence[Column[Any]]]: + if TYPE_CHECKING: + assert isinstance(self.compiled, SQLCompiler) + if self.isinsert: + return self.compiled.insert_prefetch + elif self.isupdate: + return self.compiled.update_prefetch + else: + return () + + @util.memoized_property + def no_parameters(self): + return self.execution_options.get("no_parameters", False) + + def _execute_scalar(self, stmt, type_, parameters=None): + """Execute a string statement on the current cursor, returning a + scalar result. + + Used to fire off sequences, default phrases, and "select lastrowid" + types of statements individually or in the context of a parent INSERT + or UPDATE statement. + + """ + + conn = self.root_connection + + if "schema_translate_map" in self.execution_options: + schema_translate_map = self.execution_options.get( + "schema_translate_map", {} + ) + + rst = self.identifier_preparer._render_schema_translates + stmt = rst(stmt, schema_translate_map) + + if not parameters: + if self.dialect.positional: + parameters = self.dialect.execute_sequence_format() + else: + parameters = {} + + conn._cursor_execute(self.cursor, stmt, parameters, context=self) + row = self.cursor.fetchone() + if row is not None: + r = row[0] + else: + r = None + if type_ is not None: + # apply type post processors to the result + proc = type_._cached_result_processor( + self.dialect, self.cursor.description[0][1] + ) + if proc: + return proc(r) + return r + + @util.memoized_property + def connection(self): + return self.root_connection + + def _use_server_side_cursor(self): + if not self.dialect.supports_server_side_cursors: + return False + + if self.dialect.server_side_cursors: + # this is deprecated + use_server_side = self.execution_options.get( + "stream_results", True + ) and ( + self.compiled + and isinstance(self.compiled.statement, expression.Selectable) + or ( + ( + not self.compiled + or isinstance( + self.compiled.statement, expression.TextClause + ) + ) + and self.unicode_statement + and SERVER_SIDE_CURSOR_RE.match(self.unicode_statement) + ) + ) + else: + use_server_side = self.execution_options.get( + "stream_results", False + ) + + return use_server_side + + def create_cursor(self): + if ( + # inlining initial preference checks for SS cursors + self.dialect.supports_server_side_cursors + and ( + self.execution_options.get("stream_results", False) + or ( + self.dialect.server_side_cursors + and self._use_server_side_cursor() + ) + ) + ): + self._is_server_side = True + return self.create_server_side_cursor() + else: + self._is_server_side = False + return self.create_default_cursor() + + def fetchall_for_returning(self, cursor): + return cursor.fetchall() + + def create_default_cursor(self): + return self._dbapi_connection.cursor() + + def create_server_side_cursor(self): + raise NotImplementedError() + + def pre_exec(self): + pass + + def get_out_parameter_values(self, names): + raise NotImplementedError( + "This dialect does not support OUT parameters" + ) + + def post_exec(self): + pass + + def 
get_result_processor(self, type_, colname, coltype): + """Return a 'result processor' for a given type as present in + cursor.description. + + This has a default implementation that dialects can override + for context-sensitive result type handling. + + """ + return type_._cached_result_processor(self.dialect, coltype) + + def get_lastrowid(self): + """return self.cursor.lastrowid, or equivalent, after an INSERT. + + This may involve calling special cursor functions, issuing a new SELECT + on the cursor (or a new one), or returning a stored value that was + calculated within post_exec(). + + This function will only be called for dialects which support "implicit" + primary key generation, keep preexecute_autoincrement_sequences set to + False, and when no explicit id value was bound to the statement. + + The function is called once for an INSERT statement that would need to + return the last inserted primary key for those dialects that make use + of the lastrowid concept. In these cases, it is called directly after + :meth:`.ExecutionContext.post_exec`. + + """ + return self.cursor.lastrowid + + def handle_dbapi_exception(self, e): + pass + + @util.non_memoized_property + def rowcount(self) -> int: + if self._rowcount is not None: + return self._rowcount + else: + return self.cursor.rowcount + + @property + def _has_rowcount(self): + return self._rowcount is not None + + def supports_sane_rowcount(self): + return self.dialect.supports_sane_rowcount + + def supports_sane_multi_rowcount(self): + return self.dialect.supports_sane_multi_rowcount + + def _setup_result_proxy(self): + exec_opt = self.execution_options + + if self._rowcount is None and exec_opt.get("preserve_rowcount", False): + self._rowcount = self.cursor.rowcount + + if self.is_crud or self.is_text: + result = self._setup_dml_or_text_result() + yp = sr = False + else: + yp = exec_opt.get("yield_per", None) + sr = self._is_server_side or exec_opt.get("stream_results", False) + strategy = self.cursor_fetch_strategy + if sr and strategy is _cursor._DEFAULT_FETCH: + strategy = _cursor.BufferedRowCursorFetchStrategy( + self.cursor, self.execution_options + ) + cursor_description: _DBAPICursorDescription = ( + strategy.alternate_cursor_description + or self.cursor.description + ) + if cursor_description is None: + strategy = _cursor._NO_CURSOR_DQL + + result = _cursor.CursorResult(self, strategy, cursor_description) + + compiled = self.compiled + + if ( + compiled + and not self.isddl + and cast(SQLCompiler, compiled).has_out_parameters + ): + self._setup_out_parameters(result) + + self._soft_closed = result._soft_closed + + if yp: + result = result.yield_per(yp) + + return result + + def _setup_out_parameters(self, result): + compiled = cast(SQLCompiler, self.compiled) + + out_bindparams = [ + (param, name) + for param, name in compiled.bind_names.items() + if param.isoutparam + ] + out_parameters = {} + + for bindparam, raw_value in zip( + [param for param, name in out_bindparams], + self.get_out_parameter_values( + [name for param, name in out_bindparams] + ), + ): + type_ = bindparam.type + impl_type = type_.dialect_impl(self.dialect) + dbapi_type = impl_type.get_dbapi_type(self.dialect.loaded_dbapi) + result_processor = impl_type.result_processor( + self.dialect, dbapi_type + ) + if result_processor is not None: + raw_value = result_processor(raw_value) + out_parameters[bindparam.key] = raw_value + + result.out_parameters = out_parameters + + def _setup_dml_or_text_result(self): + compiled = cast(SQLCompiler, self.compiled) + + 
strategy: ResultFetchStrategy = self.cursor_fetch_strategy + + if self.isinsert: + if ( + self.execute_style is ExecuteStyle.INSERTMANYVALUES + and compiled.effective_returning + ): + strategy = _cursor.FullyBufferedCursorFetchStrategy( + self.cursor, + initial_buffer=self._insertmanyvalues_rows, + # maintain alt cursor description if set by the + # dialect, e.g. mssql preserves it + alternate_description=( + strategy.alternate_cursor_description + ), + ) + + if compiled.postfetch_lastrowid: + self.inserted_primary_key_rows = ( + self._setup_ins_pk_from_lastrowid() + ) + # else if not self._is_implicit_returning, + # the default inserted_primary_key_rows accessor will + # return an "empty" primary key collection when accessed. + + if self._is_server_side and strategy is _cursor._DEFAULT_FETCH: + strategy = _cursor.BufferedRowCursorFetchStrategy( + self.cursor, self.execution_options + ) + + if strategy is _cursor._NO_CURSOR_DML: + cursor_description = None + else: + cursor_description = ( + strategy.alternate_cursor_description + or self.cursor.description + ) + + if cursor_description is None: + strategy = _cursor._NO_CURSOR_DML + elif self._num_sentinel_cols: + assert self.execute_style is ExecuteStyle.INSERTMANYVALUES + # strip out the sentinel columns from cursor description + # a similar logic is done to the rows only in CursorResult + cursor_description = cursor_description[ + 0 : -self._num_sentinel_cols + ] + + result: _cursor.CursorResult[Any] = _cursor.CursorResult( + self, strategy, cursor_description + ) + + if self.isinsert: + if self._is_implicit_returning: + rows = result.all() + + self.returned_default_rows = rows + + self.inserted_primary_key_rows = ( + self._setup_ins_pk_from_implicit_returning(result, rows) + ) + + # test that it has a cursor metadata that is accurate. the + # first row will have been fetched and current assumptions + # are that the result has only one row, until executemany() + # support is added here. + assert result._metadata.returns_rows + + # Insert statement has both return_defaults() and + # returning(). rewind the result on the list of rows + # we just used. + if self._is_supplemental_returning: + result._rewind(rows) + else: + result._soft_close() + elif not self._is_explicit_returning: + result._soft_close() + + # we assume here the result does not return any rows. + # *usually*, this will be true. However, some dialects + # such as that of MSSQL/pyodbc need to SELECT a post fetch + # function so this is not necessarily true. + # assert not result.returns_rows + + elif self._is_implicit_returning: + rows = result.all() + + if rows: + self.returned_default_rows = rows + self._rowcount = len(rows) + + if self._is_supplemental_returning: + result._rewind(rows) + else: + result._soft_close() + + # test that it has a cursor metadata that is accurate. + # the rows have all been fetched however. + assert result._metadata.returns_rows + + elif not result._metadata.returns_rows: + # no results, get rowcount + # (which requires open cursor on some drivers) + if self._rowcount is None: + self._rowcount = self.cursor.rowcount + result._soft_close() + elif self.isupdate or self.isdelete: + if self._rowcount is None: + self._rowcount = self.cursor.rowcount + return result + + @util.memoized_property + def inserted_primary_key_rows(self): + # if no specific "get primary key" strategy was set up + # during execution, return a "default" primary key based + # on what's in the compiled_parameters and nothing else. 
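+        # Editorial illustration, not an upstream comment: for an INSERT
+        # that supplied its primary key explicitly, e.g.
+        #     conn.execute(t.insert(), {"id": 5, "data": "x"})
+        # this fallback recovers [(5,)] directly from compiled_parameters.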
+ return self._setup_ins_pk_from_empty() + + def _setup_ins_pk_from_lastrowid(self): + getter = cast( + SQLCompiler, self.compiled + )._inserted_primary_key_from_lastrowid_getter + lastrowid = self.get_lastrowid() + return [getter(lastrowid, self.compiled_parameters[0])] + + def _setup_ins_pk_from_empty(self): + getter = cast( + SQLCompiler, self.compiled + )._inserted_primary_key_from_lastrowid_getter + return [getter(None, param) for param in self.compiled_parameters] + + def _setup_ins_pk_from_implicit_returning(self, result, rows): + if not rows: + return [] + + getter = cast( + SQLCompiler, self.compiled + )._inserted_primary_key_from_returning_getter + compiled_params = self.compiled_parameters + + return [ + getter(row, param) for row, param in zip(rows, compiled_params) + ] + + def lastrow_has_defaults(self): + return (self.isinsert or self.isupdate) and bool( + cast(SQLCompiler, self.compiled).postfetch + ) + + def _prepare_set_input_sizes( + self, + ) -> Optional[List[Tuple[str, Any, TypeEngine[Any]]]]: + """Given a cursor and ClauseParameters, prepare arguments + in order to call the appropriate + style of ``setinputsizes()`` on the cursor, using DB-API types + from the bind parameter's ``TypeEngine`` objects. + + This method only called by those dialects which set + the :attr:`.Dialect.bind_typing` attribute to + :attr:`.BindTyping.SETINPUTSIZES`. cx_Oracle is the only DBAPI + that requires setinputsizes(), pyodbc offers it as an option. + + Prior to SQLAlchemy 2.0, the setinputsizes() approach was also used + for pg8000 and asyncpg, which has been changed to inline rendering + of casts. + + """ + if self.isddl or self.is_text: + return None + + compiled = cast(SQLCompiler, self.compiled) + + inputsizes = compiled._get_set_input_sizes_lookup() + + if inputsizes is None: + return None + + dialect = self.dialect + + # all of the rest of this... cython? 
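+        # Editorial note, not an upstream comment: the triples assembled
+        # below feed Dialect.do_set_input_sizes(); a cx_Oracle-style DBAPI
+        # would translate them roughly as
+        #     cursor.setinputsizes(
+        #         **{name: dbtype for name, dbtype, _ in items if dbtype})
+        # where "items" is a hypothetical name for generic_inputsizes.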
+ + if dialect._has_events: + inputsizes = dict(inputsizes) + dialect.dispatch.do_setinputsizes( + inputsizes, self.cursor, self.statement, self.parameters, self + ) + + if compiled.escaped_bind_names: + escaped_bind_names = compiled.escaped_bind_names + else: + escaped_bind_names = None + + if dialect.positional: + items = [ + (key, compiled.binds[key]) + for key in compiled.positiontup or () + ] + else: + items = [ + (key, bindparam) + for bindparam, key in compiled.bind_names.items() + ] + + generic_inputsizes: List[Tuple[str, Any, TypeEngine[Any]]] = [] + for key, bindparam in items: + if bindparam in compiled.literal_execute_params: + continue + + if key in self._expanded_parameters: + if is_tuple_type(bindparam.type): + num = len(bindparam.type.types) + dbtypes = inputsizes[bindparam] + generic_inputsizes.extend( + ( + ( + escaped_bind_names.get(paramname, paramname) + if escaped_bind_names is not None + else paramname + ), + dbtypes[idx % num], + bindparam.type.types[idx % num], + ) + for idx, paramname in enumerate( + self._expanded_parameters[key] + ) + ) + else: + dbtype = inputsizes.get(bindparam, None) + generic_inputsizes.extend( + ( + ( + escaped_bind_names.get(paramname, paramname) + if escaped_bind_names is not None + else paramname + ), + dbtype, + bindparam.type, + ) + for paramname in self._expanded_parameters[key] + ) + else: + dbtype = inputsizes.get(bindparam, None) + + escaped_name = ( + escaped_bind_names.get(key, key) + if escaped_bind_names is not None + else key + ) + + generic_inputsizes.append( + (escaped_name, dbtype, bindparam.type) + ) + + return generic_inputsizes + + def _exec_default(self, column, default, type_): + if default.is_sequence: + return self.fire_sequence(default, type_) + elif default.is_callable: + # this codepath is not normally used as it's inlined + # into _process_execute_defaults + self.current_column = column + return default.arg(self) + elif default.is_clause_element: + return self._exec_default_clause_element(column, default, type_) + else: + # this codepath is not normally used as it's inlined + # into _process_execute_defaults + return default.arg + + def _exec_default_clause_element(self, column, default, type_): + # execute a default that's a complete clause element. Here, we have + # to re-implement a miniature version of the compile->parameters-> + # cursor.execute() sequence, since we don't want to modify the state + # of the connection / result in progress or create new connection/ + # result objects etc. + # .. versionchanged:: 1.4 + + if not default._arg_is_typed: + default_arg = expression.type_coerce(default.arg, type_) + else: + default_arg = default.arg + compiled = expression.select(default_arg).compile(dialect=self.dialect) + compiled_params = compiled.construct_params() + processors = compiled._bind_processors + if compiled.positional: + parameters = self.dialect.execute_sequence_format( + [ + ( + processors[key](compiled_params[key]) # type: ignore + if key in processors + else compiled_params[key] + ) + for key in compiled.positiontup or () + ] + ) + else: + parameters = { + key: ( + processors[key](compiled_params[key]) # type: ignore + if key in processors + else compiled_params[key] + ) + for key in compiled_params + } + return self._execute_scalar( + str(compiled), type_, parameters=parameters + ) + + current_parameters: Optional[_CoreSingleExecuteParams] = None + """A dictionary of parameters applied to the current row. 
+ + This attribute is only available in the context of a user-defined default + generation function, e.g. as described at :ref:`context_default_functions`. + It consists of a dictionary which includes entries for each column/value + pair that is to be part of the INSERT or UPDATE statement. The keys of the + dictionary will be the key value of each :class:`_schema.Column`, + which is usually + synonymous with the name. + + Note that the :attr:`.DefaultExecutionContext.current_parameters` attribute + does not accommodate for the "multi-values" feature of the + :meth:`_expression.Insert.values` method. The + :meth:`.DefaultExecutionContext.get_current_parameters` method should be + preferred. + + .. seealso:: + + :meth:`.DefaultExecutionContext.get_current_parameters` + + :ref:`context_default_functions` + + """ + + def get_current_parameters(self, isolate_multiinsert_groups=True): + """Return a dictionary of parameters applied to the current row. + + This method can only be used in the context of a user-defined default + generation function, e.g. as described at + :ref:`context_default_functions`. When invoked, a dictionary is + returned which includes entries for each column/value pair that is part + of the INSERT or UPDATE statement. The keys of the dictionary will be + the key value of each :class:`_schema.Column`, + which is usually synonymous + with the name. + + :param isolate_multiinsert_groups=True: indicates that multi-valued + INSERT constructs created using :meth:`_expression.Insert.values` + should be + handled by returning only the subset of parameters that are local + to the current column default invocation. When ``False``, the + raw parameters of the statement are returned including the + naming convention used in the case of multi-valued INSERT. + + .. versionadded:: 1.2 added + :meth:`.DefaultExecutionContext.get_current_parameters` + which provides more functionality over the existing + :attr:`.DefaultExecutionContext.current_parameters` + attribute. + + .. 
seealso:: + + :attr:`.DefaultExecutionContext.current_parameters` + + :ref:`context_default_functions` + + """ + try: + parameters = self.current_parameters + column = self.current_column + except AttributeError: + raise exc.InvalidRequestError( + "get_current_parameters() can only be invoked in the " + "context of a Python side column default function" + ) + else: + assert column is not None + assert parameters is not None + compile_state = cast( + "DMLState", cast(SQLCompiler, self.compiled).compile_state + ) + assert compile_state is not None + if ( + isolate_multiinsert_groups + and dml.isinsert(compile_state) + and compile_state._has_multi_parameters + ): + if column._is_multiparam_column: + index = column.index + 1 + d = {column.original.key: parameters[column.key]} + else: + d = {column.key: parameters[column.key]} + index = 0 + assert compile_state._dict_parameters is not None + keys = compile_state._dict_parameters.keys() + d.update( + (key, parameters["%s_m%d" % (key, index)]) for key in keys + ) + return d + else: + return parameters + + def get_insert_default(self, column): + if column.default is None: + return None + else: + return self._exec_default(column, column.default, column.type) + + def get_update_default(self, column): + if column.onupdate is None: + return None + else: + return self._exec_default(column, column.onupdate, column.type) + + def _process_execute_defaults(self): + compiled = cast(SQLCompiler, self.compiled) + + key_getter = compiled._within_exec_param_key_getter + + sentinel_counter = 0 + + if compiled.insert_prefetch: + prefetch_recs = [ + ( + c, + key_getter(c), + c._default_description_tuple, + self.get_insert_default, + ) + for c in compiled.insert_prefetch + ] + elif compiled.update_prefetch: + prefetch_recs = [ + ( + c, + key_getter(c), + c._onupdate_description_tuple, + self.get_update_default, + ) + for c in compiled.update_prefetch + ] + else: + prefetch_recs = [] + + for param in self.compiled_parameters: + self.current_parameters = param + + for ( + c, + param_key, + (arg, is_scalar, is_callable, is_sentinel), + fallback, + ) in prefetch_recs: + if is_sentinel: + param[param_key] = sentinel_counter + sentinel_counter += 1 + elif is_scalar: + param[param_key] = arg + elif is_callable: + self.current_column = c + param[param_key] = arg(self) + else: + val = fallback(c) + if val is not None: + param[param_key] = val + + del self.current_parameters + + +DefaultDialect.execution_ctx_cls = DefaultExecutionContext diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/events.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/events.py new file mode 100644 index 00000000..b8e8936b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/events.py @@ -0,0 +1,951 @@ +# engine/events.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + + +from __future__ import annotations + +import typing +from typing import Any +from typing import Dict +from typing import Optional +from typing import Tuple +from typing import Type +from typing import Union + +from .base import Connection +from .base import Engine +from .interfaces import ConnectionEventsTarget +from .interfaces import DBAPIConnection +from .interfaces import DBAPICursor +from .interfaces import Dialect +from .. import event +from .. 
import exc +from ..util.typing import Literal + +if typing.TYPE_CHECKING: + from .interfaces import _CoreMultiExecuteParams + from .interfaces import _CoreSingleExecuteParams + from .interfaces import _DBAPIAnyExecuteParams + from .interfaces import _DBAPIMultiExecuteParams + from .interfaces import _DBAPISingleExecuteParams + from .interfaces import _ExecuteOptions + from .interfaces import ExceptionContext + from .interfaces import ExecutionContext + from .result import Result + from ..pool import ConnectionPoolEntry + from ..sql import Executable + from ..sql.elements import BindParameter + + +class ConnectionEvents(event.Events[ConnectionEventsTarget]): + """Available events for + :class:`_engine.Connection` and :class:`_engine.Engine`. + + The methods here define the name of an event as well as the names of + members that are passed to listener functions. + + An event listener can be associated with any + :class:`_engine.Connection` or :class:`_engine.Engine` + class or instance, such as an :class:`_engine.Engine`, e.g.:: + + from sqlalchemy import event, create_engine + + def before_cursor_execute(conn, cursor, statement, parameters, context, + executemany): + log.info("Received statement: %s", statement) + + engine = create_engine('postgresql+psycopg2://scott:tiger@localhost/test') + event.listen(engine, "before_cursor_execute", before_cursor_execute) + + or with a specific :class:`_engine.Connection`:: + + with engine.begin() as conn: + @event.listens_for(conn, 'before_cursor_execute') + def before_cursor_execute(conn, cursor, statement, parameters, + context, executemany): + log.info("Received statement: %s", statement) + + When the methods are called with a `statement` parameter, such as in + :meth:`.after_cursor_execute` or :meth:`.before_cursor_execute`, + the statement is the exact SQL string that was prepared for transmission + to the DBAPI ``cursor`` in the connection's :class:`.Dialect`. + + The :meth:`.before_execute` and :meth:`.before_cursor_execute` + events can also be established with the ``retval=True`` flag, which + allows modification of the statement and parameters to be sent + to the database. The :meth:`.before_cursor_execute` event is + particularly useful here to add ad-hoc string transformations, such + as comments, to all executions:: + + from sqlalchemy.engine import Engine + from sqlalchemy import event + + @event.listens_for(Engine, "before_cursor_execute", retval=True) + def comment_sql_calls(conn, cursor, statement, parameters, + context, executemany): + statement = statement + " -- some comment" + return statement, parameters + + .. note:: :class:`_events.ConnectionEvents` can be established on any + combination of :class:`_engine.Engine`, :class:`_engine.Connection`, + as well + as instances of each of those classes. Events across all + four scopes will fire off for a given instance of + :class:`_engine.Connection`. However, for performance reasons, the + :class:`_engine.Connection` object determines at instantiation time + whether or not its parent :class:`_engine.Engine` has event listeners + established. Event listeners added to the :class:`_engine.Engine` + class or to an instance of :class:`_engine.Engine` + *after* the instantiation + of a dependent :class:`_engine.Connection` instance will usually + *not* be available on that :class:`_engine.Connection` instance. 
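+
+    For illustration, a rough sketch of this timing caveat, assuming a
+    throwaway in-memory SQLite engine (all names here are hypothetical)::
+
+        from sqlalchemy import create_engine, event
+
+        engine = create_engine("sqlite://")
+        existing_conn = engine.connect()  # dispatch decided at instantiation
+
+        @event.listens_for(engine, "before_cursor_execute")
+        def late_listener(conn, cursor, statement, parameters,
+                          context, executemany):
+            print("SQL:", statement)
+
+        # late_listener will usually *not* fire for existing_conn;
+        # connections created afterwards will see it:
+        with engine.connect() as conn2:
+            conn2.exec_driver_sql("SELECT 1")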
+ The newly + added listeners will instead take effect for + :class:`_engine.Connection` + instances created subsequent to those event listeners being + established on the parent :class:`_engine.Engine` class or instance. + + :param retval=False: Applies to the :meth:`.before_execute` and + :meth:`.before_cursor_execute` events only. When True, the + user-defined event function must have a return value, which + is a tuple of parameters that replace the given statement + and parameters. See those methods for a description of + specific return arguments. + + """ # noqa + + _target_class_doc = "SomeEngine" + _dispatch_target = ConnectionEventsTarget + + @classmethod + def _accept_with( + cls, + target: Union[ConnectionEventsTarget, Type[ConnectionEventsTarget]], + identifier: str, + ) -> Optional[Union[ConnectionEventsTarget, Type[ConnectionEventsTarget]]]: + default_dispatch = super()._accept_with(target, identifier) + if default_dispatch is None and hasattr( + target, "_no_async_engine_events" + ): + target._no_async_engine_events() + + return default_dispatch + + @classmethod + def _listen( + cls, + event_key: event._EventKey[ConnectionEventsTarget], + *, + retval: bool = False, + **kw: Any, + ) -> None: + target, identifier, fn = ( + event_key.dispatch_target, + event_key.identifier, + event_key._listen_fn, + ) + target._has_events = True + + if not retval: + if identifier == "before_execute": + orig_fn = fn + + def wrap_before_execute( # type: ignore + conn, clauseelement, multiparams, params, execution_options + ): + orig_fn( + conn, + clauseelement, + multiparams, + params, + execution_options, + ) + return clauseelement, multiparams, params + + fn = wrap_before_execute + elif identifier == "before_cursor_execute": + orig_fn = fn + + def wrap_before_cursor_execute( # type: ignore + conn, cursor, statement, parameters, context, executemany + ): + orig_fn( + conn, + cursor, + statement, + parameters, + context, + executemany, + ) + return statement, parameters + + fn = wrap_before_cursor_execute + elif retval and identifier not in ( + "before_execute", + "before_cursor_execute", + ): + raise exc.ArgumentError( + "Only the 'before_execute', " + "'before_cursor_execute' and 'handle_error' engine " + "event listeners accept the 'retval=True' " + "argument." + ) + event_key.with_wrapper(fn).base_listen() + + @event._legacy_signature( + "1.4", + ["conn", "clauseelement", "multiparams", "params"], + lambda conn, clauseelement, multiparams, params, execution_options: ( + conn, + clauseelement, + multiparams, + params, + ), + ) + def before_execute( + self, + conn: Connection, + clauseelement: Executable, + multiparams: _CoreMultiExecuteParams, + params: _CoreSingleExecuteParams, + execution_options: _ExecuteOptions, + ) -> Optional[ + Tuple[Executable, _CoreMultiExecuteParams, _CoreSingleExecuteParams] + ]: + """Intercept high level execute() events, receiving uncompiled + SQL constructs and other objects prior to rendering into SQL. + + This event is good for debugging SQL compilation issues as well + as early manipulation of the parameters being sent to the database, + as the parameter lists will be in a consistent format here. + + This event can be optionally established with the ``retval=True`` + flag. 
The ``clauseelement``, ``multiparams``, and ``params`` + arguments should be returned as a three-tuple in this case:: + + @event.listens_for(Engine, "before_execute", retval=True) + def before_execute(conn, clauseelement, multiparams, params): + # do something with clauseelement, multiparams, params + return clauseelement, multiparams, params + + :param conn: :class:`_engine.Connection` object + :param clauseelement: SQL expression construct, :class:`.Compiled` + instance, or string statement passed to + :meth:`_engine.Connection.execute`. + :param multiparams: Multiple parameter sets, a list of dictionaries. + :param params: Single parameter set, a single dictionary. + :param execution_options: dictionary of execution + options passed along with the statement, if any. This is a merge + of all options that will be used, including those of the statement, + the connection, and those passed in to the method itself for + the 2.0 style of execution. + + .. versionadded: 1.4 + + .. seealso:: + + :meth:`.before_cursor_execute` + + """ + + @event._legacy_signature( + "1.4", + ["conn", "clauseelement", "multiparams", "params", "result"], + lambda conn, clauseelement, multiparams, params, execution_options, result: ( # noqa + conn, + clauseelement, + multiparams, + params, + result, + ), + ) + def after_execute( + self, + conn: Connection, + clauseelement: Executable, + multiparams: _CoreMultiExecuteParams, + params: _CoreSingleExecuteParams, + execution_options: _ExecuteOptions, + result: Result[Any], + ) -> None: + """Intercept high level execute() events after execute. + + + :param conn: :class:`_engine.Connection` object + :param clauseelement: SQL expression construct, :class:`.Compiled` + instance, or string statement passed to + :meth:`_engine.Connection.execute`. + :param multiparams: Multiple parameter sets, a list of dictionaries. + :param params: Single parameter set, a single dictionary. + :param execution_options: dictionary of execution + options passed along with the statement, if any. This is a merge + of all options that will be used, including those of the statement, + the connection, and those passed in to the method itself for + the 2.0 style of execution. + + .. versionadded: 1.4 + + :param result: :class:`_engine.CursorResult` generated by the + execution. + + """ + + def before_cursor_execute( + self, + conn: Connection, + cursor: DBAPICursor, + statement: str, + parameters: _DBAPIAnyExecuteParams, + context: Optional[ExecutionContext], + executemany: bool, + ) -> Optional[Tuple[str, _DBAPIAnyExecuteParams]]: + """Intercept low-level cursor execute() events before execution, + receiving the string SQL statement and DBAPI-specific parameter list to + be invoked against a cursor. + + This event is a good choice for logging as well as late modifications + to the SQL string. It's less ideal for parameter modifications except + for those which are specific to a target backend. + + This event can be optionally established with the ``retval=True`` + flag. The ``statement`` and ``parameters`` arguments should be + returned as a two-tuple in this case:: + + @event.listens_for(Engine, "before_cursor_execute", retval=True) + def before_cursor_execute(conn, cursor, statement, + parameters, context, executemany): + # do something with statement, parameters + return statement, parameters + + See the example at :class:`_events.ConnectionEvents`. 
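+
+        As a further sketch, this hook pairs naturally with
+        :meth:`.after_cursor_execute` for per-statement timing, using the
+        ``conn.info`` dictionary as scratch space (an illustrative recipe,
+        not a fixed API)::
+
+            import time
+
+            from sqlalchemy import event
+            from sqlalchemy.engine import Engine
+
+            @event.listens_for(Engine, "before_cursor_execute")
+            def _start_timer(conn, cursor, statement, parameters,
+                             context, executemany):
+                conn.info.setdefault("query_start_time", []).append(time.time())
+
+            @event.listens_for(Engine, "after_cursor_execute")
+            def _stop_timer(conn, cursor, statement, parameters,
+                            context, executemany):
+                total = time.time() - conn.info["query_start_time"].pop(-1)
+                print("query took %f sec" % total)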
+ + :param conn: :class:`_engine.Connection` object + :param cursor: DBAPI cursor object + :param statement: string SQL statement, as to be passed to the DBAPI + :param parameters: Dictionary, tuple, or list of parameters being + passed to the ``execute()`` or ``executemany()`` method of the + DBAPI ``cursor``. In some cases may be ``None``. + :param context: :class:`.ExecutionContext` object in use. May + be ``None``. + :param executemany: boolean, if ``True``, this is an ``executemany()`` + call, if ``False``, this is an ``execute()`` call. + + .. seealso:: + + :meth:`.before_execute` + + :meth:`.after_cursor_execute` + + """ + + def after_cursor_execute( + self, + conn: Connection, + cursor: DBAPICursor, + statement: str, + parameters: _DBAPIAnyExecuteParams, + context: Optional[ExecutionContext], + executemany: bool, + ) -> None: + """Intercept low-level cursor execute() events after execution. + + :param conn: :class:`_engine.Connection` object + :param cursor: DBAPI cursor object. Will have results pending + if the statement was a SELECT, but these should not be consumed + as they will be needed by the :class:`_engine.CursorResult`. + :param statement: string SQL statement, as passed to the DBAPI + :param parameters: Dictionary, tuple, or list of parameters being + passed to the ``execute()`` or ``executemany()`` method of the + DBAPI ``cursor``. In some cases may be ``None``. + :param context: :class:`.ExecutionContext` object in use. May + be ``None``. + :param executemany: boolean, if ``True``, this is an ``executemany()`` + call, if ``False``, this is an ``execute()`` call. + + """ + + @event._legacy_signature( + "2.0", ["conn", "branch"], converter=lambda conn: (conn, False) + ) + def engine_connect(self, conn: Connection) -> None: + """Intercept the creation of a new :class:`_engine.Connection`. + + This event is called typically as the direct result of calling + the :meth:`_engine.Engine.connect` method. + + It differs from the :meth:`_events.PoolEvents.connect` method, which + refers to the actual connection to a database at the DBAPI level; + a DBAPI connection may be pooled and reused for many operations. + In contrast, this event refers only to the production of a higher level + :class:`_engine.Connection` wrapper around such a DBAPI connection. + + It also differs from the :meth:`_events.PoolEvents.checkout` event + in that it is specific to the :class:`_engine.Connection` object, + not the + DBAPI connection that :meth:`_events.PoolEvents.checkout` deals with, + although + this DBAPI connection is available here via the + :attr:`_engine.Connection.connection` attribute. + But note there can in fact + be multiple :meth:`_events.PoolEvents.checkout` + events within the lifespan + of a single :class:`_engine.Connection` object, if that + :class:`_engine.Connection` + is invalidated and re-established. + + :param conn: :class:`_engine.Connection` object. + + .. seealso:: + + :meth:`_events.PoolEvents.checkout` + the lower-level pool checkout event + for an individual DBAPI connection + + """ + + def set_connection_execution_options( + self, conn: Connection, opts: Dict[str, Any] + ) -> None: + """Intercept when the :meth:`_engine.Connection.execution_options` + method is called. + + This method is called after the new :class:`_engine.Connection` + has been + produced, with the newly updated execution options collection, but + before the :class:`.Dialect` has acted upon any of those new options. 
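+
+        For illustration, a minimal sketch of listening for this event,
+        assuming an ``engine`` created elsewhere::
+
+            from sqlalchemy import event
+
+            @event.listens_for(engine, "set_connection_execution_options")
+            def _on_connection_opts(conn, opts):
+                # opts may be mutated in place before the dialect sees it
+                opts.setdefault("stream_results", True)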
+
+        Note that this method is not called when a new
+        :class:`_engine.Connection`
+        is produced which is inheriting execution options from its parent
+        :class:`_engine.Engine`; to intercept this condition, use the
+        :meth:`_events.ConnectionEvents.engine_connect` event.
+
+        :param conn: The newly copied :class:`_engine.Connection` object
+
+        :param opts: dictionary of options that were passed to the
+         :meth:`_engine.Connection.execution_options` method.
+         This dictionary may be modified in place to affect the ultimate
+         options which take effect.
+
+         .. versionadded:: 2.0 the ``opts`` dictionary may be modified
+            in place.
+
+
+        .. seealso::
+
+            :meth:`_events.ConnectionEvents.set_engine_execution_options`
+            - event
+            which is called when :meth:`_engine.Engine.execution_options`
+            is called.
+
+
+        """
+
+    def set_engine_execution_options(
+        self, engine: Engine, opts: Dict[str, Any]
+    ) -> None:
+        """Intercept when the :meth:`_engine.Engine.execution_options`
+        method is called.
+
+        The :meth:`_engine.Engine.execution_options` method produces a shallow
+        copy of the :class:`_engine.Engine` which stores the new options.
+        That new
+        :class:`_engine.Engine` is passed here.
+        A particular application of this
+        method is to add a :meth:`_events.ConnectionEvents.engine_connect`
+        event
+        handler to the given :class:`_engine.Engine`
+        which will perform some per-
+        :class:`_engine.Connection` task specific to these execution options.
+
+        :param engine: The newly copied :class:`_engine.Engine` object
+
+        :param opts: dictionary of options that were passed to the
+         :meth:`_engine.Connection.execution_options` method.
+         This dictionary may be modified in place to affect the ultimate
+         options which take effect.
+
+         .. versionadded:: 2.0 the ``opts`` dictionary may be modified
+            in place.
+
+        .. seealso::
+
+            :meth:`_events.ConnectionEvents.set_connection_execution_options`
+            - event
+            which is called when :meth:`_engine.Connection.execution_options`
+            is
+            called.
+
+        """
+
+    def engine_disposed(self, engine: Engine) -> None:
+        """Intercept when the :meth:`_engine.Engine.dispose` method is called.
+
+        The :meth:`_engine.Engine.dispose` method instructs the engine to
+        "dispose" of its connection pool (e.g. :class:`_pool.Pool`), and
+        replaces it with a new one.  Disposing of the old pool has the
+        effect that existing checked-in connections are closed.  The new
+        pool does not establish any new connections until it is first used.
+
+        This event can be used to indicate that resources related to the
+        :class:`_engine.Engine` should also be cleaned up,
+        keeping in mind that the
+        :class:`_engine.Engine`
+        can still be used for new requests in which case
+        it re-acquires connection resources.
+
+        """
+
+    def begin(self, conn: Connection) -> None:
+        """Intercept begin() events.
+
+        :param conn: :class:`_engine.Connection` object
+
+        """
+
+    def rollback(self, conn: Connection) -> None:
+        """Intercept rollback() events, as initiated by a
+        :class:`.Transaction`.
+
+        Note that the :class:`_pool.Pool` also "auto-rolls back"
+        a DBAPI connection upon checkin, if the ``reset_on_return``
+        flag is set to its default value of ``'rollback'``.
+        To intercept this
+        rollback, use the :meth:`_events.PoolEvents.reset` hook.
+
+        :param conn: :class:`_engine.Connection` object
+
+        .. seealso::
+
+            :meth:`_events.PoolEvents.reset`
+
+        """
+
+    def commit(self, conn: Connection) -> None:
+        """Intercept commit() events, as initiated by a
+        :class:`.Transaction`.
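+
+        For illustration, the transactional hooks in this class can be used
+        together for lightweight logging; a rough sketch, assuming an
+        ``engine`` created elsewhere::
+
+            import logging
+
+            from sqlalchemy import event
+
+            log = logging.getLogger("txn")
+
+            @event.listens_for(engine, "begin")
+            def _on_begin(conn):
+                log.info("BEGIN")
+
+            @event.listens_for(engine, "rollback")
+            def _on_rollback(conn):
+                log.info("ROLLBACK")
+
+            @event.listens_for(engine, "commit")
+            def _on_commit(conn):
+                log.info("COMMIT")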
+ + Note that the :class:`_pool.Pool` may also "auto-commit" + a DBAPI connection upon checkin, if the ``reset_on_return`` + flag is set to the value ``'commit'``. To intercept this + commit, use the :meth:`_events.PoolEvents.reset` hook. + + :param conn: :class:`_engine.Connection` object + """ + + def savepoint(self, conn: Connection, name: str) -> None: + """Intercept savepoint() events. + + :param conn: :class:`_engine.Connection` object + :param name: specified name used for the savepoint. + + """ + + def rollback_savepoint( + self, conn: Connection, name: str, context: None + ) -> None: + """Intercept rollback_savepoint() events. + + :param conn: :class:`_engine.Connection` object + :param name: specified name used for the savepoint. + :param context: not used + + """ + # TODO: deprecate "context" + + def release_savepoint( + self, conn: Connection, name: str, context: None + ) -> None: + """Intercept release_savepoint() events. + + :param conn: :class:`_engine.Connection` object + :param name: specified name used for the savepoint. + :param context: not used + + """ + # TODO: deprecate "context" + + def begin_twophase(self, conn: Connection, xid: Any) -> None: + """Intercept begin_twophase() events. + + :param conn: :class:`_engine.Connection` object + :param xid: two-phase XID identifier + + """ + + def prepare_twophase(self, conn: Connection, xid: Any) -> None: + """Intercept prepare_twophase() events. + + :param conn: :class:`_engine.Connection` object + :param xid: two-phase XID identifier + """ + + def rollback_twophase( + self, conn: Connection, xid: Any, is_prepared: bool + ) -> None: + """Intercept rollback_twophase() events. + + :param conn: :class:`_engine.Connection` object + :param xid: two-phase XID identifier + :param is_prepared: boolean, indicates if + :meth:`.TwoPhaseTransaction.prepare` was called. + + """ + + def commit_twophase( + self, conn: Connection, xid: Any, is_prepared: bool + ) -> None: + """Intercept commit_twophase() events. + + :param conn: :class:`_engine.Connection` object + :param xid: two-phase XID identifier + :param is_prepared: boolean, indicates if + :meth:`.TwoPhaseTransaction.prepare` was called. + + """ + + +class DialectEvents(event.Events[Dialect]): + """event interface for execution-replacement functions. + + These events allow direct instrumentation and replacement + of key dialect functions which interact with the DBAPI. + + .. note:: + + :class:`.DialectEvents` hooks should be considered **semi-public** + and experimental. + These hooks are not for general use and are only for those situations + where intricate re-statement of DBAPI mechanics must be injected onto + an existing dialect. For general-use statement-interception events, + please use the :class:`_events.ConnectionEvents` interface. + + .. 
seealso:: + + :meth:`_events.ConnectionEvents.before_cursor_execute` + + :meth:`_events.ConnectionEvents.before_execute` + + :meth:`_events.ConnectionEvents.after_cursor_execute` + + :meth:`_events.ConnectionEvents.after_execute` + + """ + + _target_class_doc = "SomeEngine" + _dispatch_target = Dialect + + @classmethod + def _listen( + cls, + event_key: event._EventKey[Dialect], + *, + retval: bool = False, + **kw: Any, + ) -> None: + target = event_key.dispatch_target + + target._has_events = True + event_key.base_listen() + + @classmethod + def _accept_with( + cls, + target: Union[Engine, Type[Engine], Dialect, Type[Dialect]], + identifier: str, + ) -> Optional[Union[Dialect, Type[Dialect]]]: + if isinstance(target, type): + if issubclass(target, Engine): + return Dialect + elif issubclass(target, Dialect): + return target + elif isinstance(target, Engine): + return target.dialect + elif isinstance(target, Dialect): + return target + elif isinstance(target, Connection) and identifier == "handle_error": + raise exc.InvalidRequestError( + "The handle_error() event hook as of SQLAlchemy 2.0 is " + "established on the Dialect, and may only be applied to the " + "Engine as a whole or to a specific Dialect as a whole, " + "not on a per-Connection basis." + ) + elif hasattr(target, "_no_async_engine_events"): + target._no_async_engine_events() + else: + return None + + def handle_error( + self, exception_context: ExceptionContext + ) -> Optional[BaseException]: + r"""Intercept all exceptions processed by the + :class:`_engine.Dialect`, typically but not limited to those + emitted within the scope of a :class:`_engine.Connection`. + + .. versionchanged:: 2.0 the :meth:`.DialectEvents.handle_error` event + is moved to the :class:`.DialectEvents` class, moved from the + :class:`.ConnectionEvents` class, so that it may also participate in + the "pre ping" operation configured with the + :paramref:`_sa.create_engine.pool_pre_ping` parameter. The event + remains registered by using the :class:`_engine.Engine` as the event + target, however note that using the :class:`_engine.Connection` as + an event target for :meth:`.DialectEvents.handle_error` is no longer + supported. + + This includes all exceptions emitted by the DBAPI as well as + within SQLAlchemy's statement invocation process, including + encoding errors and other statement validation errors. Other areas + in which the event is invoked include transaction begin and end, + result row fetching, cursor creation. + + Note that :meth:`.handle_error` may support new kinds of exceptions + and new calling scenarios at *any time*. Code which uses this + event must expect new calling patterns to be present in minor + releases. + + To support the wide variety of members that correspond to an exception, + as well as to allow extensibility of the event without backwards + incompatibility, the sole argument received is an instance of + :class:`.ExceptionContext`. This object contains data members + representing detail about the exception. 
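+
+        For illustration, a minimal read-only listener might simply report
+        details from the context (a sketch, not a complete handler)::
+
+            from sqlalchemy import event
+            from sqlalchemy.engine import Engine
+
+            @event.listens_for(Engine, "handle_error")
+            def _log_error(context):
+                print("dialect:", context.dialect.name)
+                print("statement:", context.statement)
+                print("DBAPI exception:", context.original_exception)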
+
+        Use cases supported by this hook include:
+
+        * read-only, low-level exception handling for logging and
+          debugging purposes
+        * Establishing whether a DBAPI connection error message indicates
+          that the database connection needs to be reconnected, including
+          for the "pre_ping" handler used by **some** dialects
+        * Establishing or disabling whether a connection or the owning
+          connection pool is invalidated or expired in response to a
+          specific exception
+        * exception re-writing
+
+        The hook is called while the cursor from the failed operation
+        (if any) is still open and accessible.  Special cleanup operations
+        can be called on this cursor; SQLAlchemy will attempt to close
+        this cursor subsequent to this hook being invoked.
+
+        As of SQLAlchemy 2.0, the "pre_ping" handler enabled using the
+        :paramref:`_sa.create_engine.pool_pre_ping` parameter will also
+        participate in the :meth:`.handle_error` process, **for those dialects
+        that rely upon disconnect codes to detect database liveness**. Note
+        that some dialects such as psycopg, psycopg2, and most MySQL dialects
+        make use of a native ``ping()`` method supplied by the DBAPI which does
+        not make use of disconnect codes.
+
+        .. versionchanged:: 2.0.0 The :meth:`.DialectEvents.handle_error`
+           event hook participates in connection pool "pre-ping" operations.
+           Within this usage, the :attr:`.ExceptionContext.engine` attribute
+           will be ``None``, however the :class:`.Dialect` in use is always
+           available via the :attr:`.ExceptionContext.dialect` attribute.
+
+        .. versionchanged:: 2.0.5 Added :attr:`.ExceptionContext.is_pre_ping`
+           attribute which will be set to ``True`` when the
+           :meth:`.DialectEvents.handle_error` event hook is triggered within
+           a connection pool pre-ping operation.
+
+        .. versionchanged:: 2.0.5 An issue was repaired that allows for the
+           PostgreSQL ``psycopg`` and ``psycopg2`` drivers, as well as all
+           MySQL drivers, to properly participate in the
+           :meth:`.DialectEvents.handle_error` event hook during
+           connection pool "pre-ping" operations; previously, the
+           implementation was non-working for these drivers.
+
+
+        A handler function has two options for replacing
+        the SQLAlchemy-constructed exception with one that is user
+        defined.  It can either raise this new exception directly, in
+        which case all further event listeners are bypassed and the
+        exception will be raised, after appropriate cleanup has taken
+        place::
+
+            @event.listens_for(Engine, "handle_error")
+            def handle_exception(context):
+                if isinstance(context.original_exception,
+                    psycopg2.OperationalError) and \
+                    "failed" in str(context.original_exception):
+                    raise MySpecialException("failed operation")
+
+        .. warning::  Because the
+           :meth:`_events.DialectEvents.handle_error`
+           event specifically provides for exceptions to be re-thrown as
+           the ultimate exception raised by the failed statement,
+           **stack traces will be misleading** if the user-defined event
+           handler itself fails and throws an unexpected exception;
+           the stack trace may not illustrate the actual code line that
+           failed!  It is advised to code carefully here and use
+           logging and/or inline debugging if unexpected exceptions are
+           occurring.
+
+        Alternatively, a "chained" style of event handling can be
+        used, by configuring the handler with the ``retval=True``
+        modifier and returning the new exception instance from the
+        function.  In this case, event handling will continue onto the
+        next handler.
The "chained" exception is available using + :attr:`.ExceptionContext.chained_exception`:: + + @event.listens_for(Engine, "handle_error", retval=True) + def handle_exception(context): + if context.chained_exception is not None and \ + "special" in context.chained_exception.message: + return MySpecialException("failed", + cause=context.chained_exception) + + Handlers that return ``None`` may be used within the chain; when + a handler returns ``None``, the previous exception instance, + if any, is maintained as the current exception that is passed onto the + next handler. + + When a custom exception is raised or returned, SQLAlchemy raises + this new exception as-is, it is not wrapped by any SQLAlchemy + object. If the exception is not a subclass of + :class:`sqlalchemy.exc.StatementError`, + certain features may not be available; currently this includes + the ORM's feature of adding a detail hint about "autoflush" to + exceptions raised within the autoflush process. + + :param context: an :class:`.ExceptionContext` object. See this + class for details on all available members. + + + .. seealso:: + + :ref:`pool_new_disconnect_codes` + + """ + + def do_connect( + self, + dialect: Dialect, + conn_rec: ConnectionPoolEntry, + cargs: Tuple[Any, ...], + cparams: Dict[str, Any], + ) -> Optional[DBAPIConnection]: + """Receive connection arguments before a connection is made. + + This event is useful in that it allows the handler to manipulate the + cargs and/or cparams collections that control how the DBAPI + ``connect()`` function will be called. ``cargs`` will always be a + Python list that can be mutated in-place, and ``cparams`` a Python + dictionary that may also be mutated:: + + e = create_engine("postgresql+psycopg2://user@host/dbname") + + @event.listens_for(e, 'do_connect') + def receive_do_connect(dialect, conn_rec, cargs, cparams): + cparams["password"] = "some_password" + + The event hook may also be used to override the call to ``connect()`` + entirely, by returning a non-``None`` DBAPI connection object:: + + e = create_engine("postgresql+psycopg2://user@host/dbname") + + @event.listens_for(e, 'do_connect') + def receive_do_connect(dialect, conn_rec, cargs, cparams): + return psycopg2.connect(*cargs, **cparams) + + .. seealso:: + + :ref:`custom_dbapi_args` + + """ + + def do_executemany( + self, + cursor: DBAPICursor, + statement: str, + parameters: _DBAPIMultiExecuteParams, + context: ExecutionContext, + ) -> Optional[Literal[True]]: + """Receive a cursor to have executemany() called. + + Return the value True to halt further events from invoking, + and to indicate that the cursor execution has already taken + place within the event handler. + + """ + + def do_execute_no_params( + self, cursor: DBAPICursor, statement: str, context: ExecutionContext + ) -> Optional[Literal[True]]: + """Receive a cursor to have execute() with no parameters called. + + Return the value True to halt further events from invoking, + and to indicate that the cursor execution has already taken + place within the event handler. + + """ + + def do_execute( + self, + cursor: DBAPICursor, + statement: str, + parameters: _DBAPISingleExecuteParams, + context: ExecutionContext, + ) -> Optional[Literal[True]]: + """Receive a cursor to have execute() called. + + Return the value True to halt further events from invoking, + and to indicate that the cursor execution has already taken + place within the event handler. 
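+
+        For illustration, a sketch of taking over execution entirely,
+        assuming an ``engine`` created elsewhere::
+
+            from sqlalchemy import event
+
+            @event.listens_for(engine, "do_execute")
+            def _do_execute(cursor, statement, parameters, context):
+                cursor.execute(statement, parameters)
+                return True  # True = execution already handled here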
+ + """ + + def do_setinputsizes( + self, + inputsizes: Dict[BindParameter[Any], Any], + cursor: DBAPICursor, + statement: str, + parameters: _DBAPIAnyExecuteParams, + context: ExecutionContext, + ) -> None: + """Receive the setinputsizes dictionary for possible modification. + + This event is emitted in the case where the dialect makes use of the + DBAPI ``cursor.setinputsizes()`` method which passes information about + parameter binding for a particular statement. The given + ``inputsizes`` dictionary will contain :class:`.BindParameter` objects + as keys, linked to DBAPI-specific type objects as values; for + parameters that are not bound, they are added to the dictionary with + ``None`` as the value, which means the parameter will not be included + in the ultimate setinputsizes call. The event may be used to inspect + and/or log the datatypes that are being bound, as well as to modify the + dictionary in place. Parameters can be added, modified, or removed + from this dictionary. Callers will typically want to inspect the + :attr:`.BindParameter.type` attribute of the given bind objects in + order to make decisions about the DBAPI object. + + After the event, the ``inputsizes`` dictionary is converted into + an appropriate datastructure to be passed to ``cursor.setinputsizes``; + either a list for a positional bound parameter execution style, + or a dictionary of string parameter keys to DBAPI type objects for + a named bound parameter execution style. + + The setinputsizes hook overall is only used for dialects which include + the flag ``use_setinputsizes=True``. Dialects which use this + include cx_Oracle, pg8000, asyncpg, and pyodbc dialects. + + .. note:: + + For use with pyodbc, the ``use_setinputsizes`` flag + must be passed to the dialect, e.g.:: + + create_engine("mssql+pyodbc://...", use_setinputsizes=True) + + .. seealso:: + + :ref:`mssql_pyodbc_setinputsizes` + + .. versionadded:: 1.2.9 + + .. seealso:: + + :ref:`cx_oracle_setinputsizes` + + """ + pass diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/interfaces.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/interfaces.py new file mode 100644 index 00000000..17a133f2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/interfaces.py @@ -0,0 +1,3403 @@ +# engine/interfaces.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Define core interfaces used by the engine system.""" + +from __future__ import annotations + +from enum import Enum +from types import ModuleType +from typing import Any +from typing import Awaitable +from typing import Callable +from typing import ClassVar +from typing import Collection +from typing import Dict +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Mapping +from typing import MutableMapping +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from .. 
import util +from ..event import EventTarget +from ..pool import Pool +from ..pool import PoolProxiedConnection +from ..sql.compiler import Compiled as Compiled +from ..sql.compiler import Compiled # noqa +from ..sql.compiler import TypeCompiler as TypeCompiler +from ..sql.compiler import TypeCompiler # noqa +from ..util import immutabledict +from ..util.concurrency import await_only +from ..util.typing import Literal +from ..util.typing import NotRequired +from ..util.typing import Protocol +from ..util.typing import TypedDict + +if TYPE_CHECKING: + from .base import Connection + from .base import Engine + from .cursor import CursorResult + from .url import URL + from ..event import _ListenerFnType + from ..event import dispatcher + from ..exc import StatementError + from ..sql import Executable + from ..sql.compiler import _InsertManyValuesBatch + from ..sql.compiler import DDLCompiler + from ..sql.compiler import IdentifierPreparer + from ..sql.compiler import InsertmanyvaluesSentinelOpts + from ..sql.compiler import Linting + from ..sql.compiler import SQLCompiler + from ..sql.elements import BindParameter + from ..sql.elements import ClauseElement + from ..sql.schema import Column + from ..sql.schema import DefaultGenerator + from ..sql.schema import SchemaItem + from ..sql.schema import Sequence as Sequence_SchemaItem + from ..sql.sqltypes import Integer + from ..sql.type_api import _TypeMemoDict + from ..sql.type_api import TypeEngine + +ConnectArgsType = Tuple[Sequence[str], MutableMapping[str, Any]] + +_T = TypeVar("_T", bound="Any") + + +class CacheStats(Enum): + CACHE_HIT = 0 + CACHE_MISS = 1 + CACHING_DISABLED = 2 + NO_CACHE_KEY = 3 + NO_DIALECT_SUPPORT = 4 + + +class ExecuteStyle(Enum): + """indicates the :term:`DBAPI` cursor method that will be used to invoke + a statement.""" + + EXECUTE = 0 + """indicates cursor.execute() will be used""" + + EXECUTEMANY = 1 + """indicates cursor.executemany() will be used.""" + + INSERTMANYVALUES = 2 + """indicates cursor.execute() will be used with an INSERT where the + VALUES expression will be expanded to accommodate for multiple + parameter sets + + .. seealso:: + + :ref:`engine_insertmanyvalues` + + """ + + +class DBAPIConnection(Protocol): + """protocol representing a :pep:`249` database connection. + + .. versionadded:: 2.0 + + .. seealso:: + + `Connection Objects `_ + - in :pep:`249` + + """ # noqa: E501 + + def close(self) -> None: ... + + def commit(self) -> None: ... + + def cursor(self) -> DBAPICursor: ... + + def rollback(self) -> None: ... + + autocommit: bool + + +class DBAPIType(Protocol): + """protocol representing a :pep:`249` database type. + + .. versionadded:: 2.0 + + .. seealso:: + + `Type Objects `_ + - in :pep:`249` + + """ # noqa: E501 + + +class DBAPICursor(Protocol): + """protocol representing a :pep:`249` database cursor. + + .. versionadded:: 2.0 + + .. seealso:: + + `Cursor Objects `_ + - in :pep:`249` + + """ # noqa: E501 + + @property + def description( + self, + ) -> _DBAPICursorDescription: + """The description attribute of the Cursor. + + .. seealso:: + + `cursor.description `_ + - in :pep:`249` + + + """ # noqa: E501 + ... + + @property + def rowcount(self) -> int: ... + + arraysize: int + + lastrowid: int + + def close(self) -> None: ... + + def execute( + self, + operation: Any, + parameters: Optional[_DBAPISingleExecuteParams] = None, + ) -> Any: ... + + def executemany( + self, + operation: Any, + parameters: _DBAPIMultiExecuteParams, + ) -> Any: ... + + def fetchone(self) -> Optional[Any]: ... 
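+
+    # Illustrative aside: these Protocol classes let application code
+    # type-annotate raw driver objects; a hypothetical sketch:
+    #
+    #     def enable_wal(dbapi_conn: DBAPIConnection) -> None:
+    #         cur = dbapi_conn.cursor()
+    #         cur.execute("PRAGMA journal_mode=WAL")
+    #         cur.close()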
+ + def fetchmany(self, size: int = ...) -> Sequence[Any]: ... + + def fetchall(self) -> Sequence[Any]: ... + + def setinputsizes(self, sizes: Sequence[Any]) -> None: ... + + def setoutputsize(self, size: Any, column: Any) -> None: ... + + def callproc( + self, procname: str, parameters: Sequence[Any] = ... + ) -> Any: ... + + def nextset(self) -> Optional[bool]: ... + + def __getattr__(self, key: str) -> Any: ... + + +_CoreSingleExecuteParams = Mapping[str, Any] +_MutableCoreSingleExecuteParams = MutableMapping[str, Any] +_CoreMultiExecuteParams = Sequence[_CoreSingleExecuteParams] +_CoreAnyExecuteParams = Union[ + _CoreMultiExecuteParams, _CoreSingleExecuteParams +] + +_DBAPISingleExecuteParams = Union[Sequence[Any], _CoreSingleExecuteParams] + +_DBAPIMultiExecuteParams = Union[ + Sequence[Sequence[Any]], _CoreMultiExecuteParams +] +_DBAPIAnyExecuteParams = Union[ + _DBAPIMultiExecuteParams, _DBAPISingleExecuteParams +] +_DBAPICursorDescription = Sequence[ + Tuple[ + str, + "DBAPIType", + Optional[int], + Optional[int], + Optional[int], + Optional[int], + Optional[bool], + ] +] + +_AnySingleExecuteParams = _DBAPISingleExecuteParams +_AnyMultiExecuteParams = _DBAPIMultiExecuteParams +_AnyExecuteParams = _DBAPIAnyExecuteParams + +CompiledCacheType = MutableMapping[Any, "Compiled"] +SchemaTranslateMapType = Mapping[Optional[str], Optional[str]] + +_ImmutableExecuteOptions = immutabledict[str, Any] + +_ParamStyle = Literal[ + "qmark", "numeric", "named", "format", "pyformat", "numeric_dollar" +] + +_GenericSetInputSizesType = List[Tuple[str, Any, "TypeEngine[Any]"]] + +IsolationLevel = Literal[ + "SERIALIZABLE", + "REPEATABLE READ", + "READ COMMITTED", + "READ UNCOMMITTED", + "AUTOCOMMIT", +] + + +class _CoreKnownExecutionOptions(TypedDict, total=False): + compiled_cache: Optional[CompiledCacheType] + logging_token: str + isolation_level: IsolationLevel + no_parameters: bool + stream_results: bool + max_row_buffer: int + yield_per: int + insertmanyvalues_page_size: int + schema_translate_map: Optional[SchemaTranslateMapType] + preserve_rowcount: bool + + +_ExecuteOptions = immutabledict[str, Any] +CoreExecuteOptionsParameter = Union[ + _CoreKnownExecutionOptions, Mapping[str, Any] +] + + +class ReflectedIdentity(TypedDict): + """represent the reflected IDENTITY structure of a column, corresponding + to the :class:`_schema.Identity` construct. + + The :class:`.ReflectedIdentity` structure is part of the + :class:`.ReflectedColumn` structure, which is returned by the + :meth:`.Inspector.get_columns` method. + + """ + + always: bool + """type of identity column""" + + on_null: bool + """indicates ON NULL""" + + start: int + """starting index of the sequence""" + + increment: int + """increment value of the sequence""" + + minvalue: int + """the minimum value of the sequence.""" + + maxvalue: int + """the maximum value of the sequence.""" + + nominvalue: bool + """no minimum value of the sequence.""" + + nomaxvalue: bool + """no maximum value of the sequence.""" + + cycle: bool + """allows the sequence to wrap around when the maxvalue + or minvalue has been reached.""" + + cache: Optional[int] + """number of future values in the + sequence which are calculated in advance.""" + + order: bool + """if true, renders the ORDER keyword.""" + + +class ReflectedComputed(TypedDict): + """Represent the reflected elements of a computed column, corresponding + to the :class:`_schema.Computed` construct. 
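+
+    For illustration, these typed dictionaries describe what
+    :meth:`.Inspector.get_columns` returns, as also noted below; a rough
+    sketch, with a hypothetical database file and table name::
+
+        from sqlalchemy import create_engine, inspect
+
+        engine = create_engine("sqlite:///example.db")
+        for col in inspect(engine).get_columns("some_table"):
+            print(col["name"], col["type"], col["nullable"])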
+ + The :class:`.ReflectedComputed` structure is part of the + :class:`.ReflectedColumn` structure, which is returned by the + :meth:`.Inspector.get_columns` method. + + """ + + sqltext: str + """the expression used to generate this column returned + as a string SQL expression""" + + persisted: NotRequired[bool] + """indicates if the value is stored in the table or computed on demand""" + + +class ReflectedColumn(TypedDict): + """Dictionary representing the reflected elements corresponding to + a :class:`_schema.Column` object. + + The :class:`.ReflectedColumn` structure is returned by the + :class:`.Inspector.get_columns` method. + + """ + + name: str + """column name""" + + type: TypeEngine[Any] + """column type represented as a :class:`.TypeEngine` instance.""" + + nullable: bool + """boolean flag if the column is NULL or NOT NULL""" + + default: Optional[str] + """column default expression as a SQL string""" + + autoincrement: NotRequired[bool] + """database-dependent autoincrement flag. + + This flag indicates if the column has a database-side "autoincrement" + flag of some kind. Within SQLAlchemy, other kinds of columns may + also act as an "autoincrement" column without necessarily having + such a flag on them. + + See :paramref:`_schema.Column.autoincrement` for more background on + "autoincrement". + + """ + + comment: NotRequired[Optional[str]] + """comment for the column, if present. + Only some dialects return this key + """ + + computed: NotRequired[ReflectedComputed] + """indicates that this column is computed by the database. + Only some dialects return this key. + + .. versionadded:: 1.3.16 - added support for computed reflection. + """ + + identity: NotRequired[ReflectedIdentity] + """indicates this column is an IDENTITY column. + Only some dialects return this key. + + .. versionadded:: 1.4 - added support for identity column reflection. + """ + + dialect_options: NotRequired[Dict[str, Any]] + """Additional dialect-specific options detected for this reflected + object""" + + +class ReflectedConstraint(TypedDict): + """Dictionary representing the reflected elements corresponding to + :class:`.Constraint` + + A base class for all constraints + """ + + name: Optional[str] + """constraint name""" + + comment: NotRequired[Optional[str]] + """comment for the constraint, if present""" + + +class ReflectedCheckConstraint(ReflectedConstraint): + """Dictionary representing the reflected elements corresponding to + :class:`.CheckConstraint`. + + The :class:`.ReflectedCheckConstraint` structure is returned by the + :meth:`.Inspector.get_check_constraints` method. + + """ + + sqltext: str + """the check constraint's SQL expression""" + + dialect_options: NotRequired[Dict[str, Any]] + """Additional dialect-specific options detected for this check constraint + + .. versionadded:: 1.3.8 + """ + + +class ReflectedUniqueConstraint(ReflectedConstraint): + """Dictionary representing the reflected elements corresponding to + :class:`.UniqueConstraint`. + + The :class:`.ReflectedUniqueConstraint` structure is returned by the + :meth:`.Inspector.get_unique_constraints` method. 
+ + """ + + column_names: List[str] + """column names which comprise the unique constraint""" + + duplicates_index: NotRequired[Optional[str]] + "Indicates if this unique constraint duplicates an index with this name" + + dialect_options: NotRequired[Dict[str, Any]] + """Additional dialect-specific options detected for this unique + constraint""" + + +class ReflectedPrimaryKeyConstraint(ReflectedConstraint): + """Dictionary representing the reflected elements corresponding to + :class:`.PrimaryKeyConstraint`. + + The :class:`.ReflectedPrimaryKeyConstraint` structure is returned by the + :meth:`.Inspector.get_pk_constraint` method. + + """ + + constrained_columns: List[str] + """column names which comprise the primary key""" + + dialect_options: NotRequired[Dict[str, Any]] + """Additional dialect-specific options detected for this primary key""" + + +class ReflectedForeignKeyConstraint(ReflectedConstraint): + """Dictionary representing the reflected elements corresponding to + :class:`.ForeignKeyConstraint`. + + The :class:`.ReflectedForeignKeyConstraint` structure is returned by + the :meth:`.Inspector.get_foreign_keys` method. + + """ + + constrained_columns: List[str] + """local column names which comprise the foreign key""" + + referred_schema: Optional[str] + """schema name of the table being referred""" + + referred_table: str + """name of the table being referred""" + + referred_columns: List[str] + """referred column names that correspond to ``constrained_columns``""" + + options: NotRequired[Dict[str, Any]] + """Additional options detected for this foreign key constraint""" + + +class ReflectedIndex(TypedDict): + """Dictionary representing the reflected elements corresponding to + :class:`.Index`. + + The :class:`.ReflectedIndex` structure is returned by the + :meth:`.Inspector.get_indexes` method. + + """ + + name: Optional[str] + """index name""" + + column_names: List[Optional[str]] + """column names which the index references. + An element of this list is ``None`` if it's an expression and is + returned in the ``expressions`` list. + """ + + expressions: NotRequired[List[str]] + """Expressions that compose the index. This list, when present, contains + both plain column names (that are also in ``column_names``) and + expressions (that are ``None`` in ``column_names``). + """ + + unique: bool + """whether or not the index has a unique flag""" + + duplicates_constraint: NotRequired[Optional[str]] + "Indicates if this index mirrors a constraint with this name" + + include_columns: NotRequired[List[str]] + """columns to include in the INCLUDE clause for supporting databases. + + .. deprecated:: 2.0 + + Legacy value, will be replaced with + ``index_dict["dialect_options"]["_include"]`` + + """ + + column_sorting: NotRequired[Dict[str, Tuple[str]]] + """optional dict mapping column names or expressions to tuple of sort + keywords, which may include ``asc``, ``desc``, ``nulls_first``, + ``nulls_last``. + + .. versionadded:: 1.3.5 + """ + + dialect_options: NotRequired[Dict[str, Any]] + """Additional dialect-specific options detected for this index""" + + +class ReflectedTableComment(TypedDict): + """Dictionary representing the reflected comment corresponding to + the :attr:`_schema.Table.comment` attribute. + + The :class:`.ReflectedTableComment` structure is returned by the + :meth:`.Inspector.get_table_comment` method. 
+ + """ + + text: Optional[str] + """text of the comment""" + + +class BindTyping(Enum): + """Define different methods of passing typing information for + bound parameters in a statement to the database driver. + + .. versionadded:: 2.0 + + """ + + NONE = 1 + """No steps are taken to pass typing information to the database driver. + + This is the default behavior for databases such as SQLite, MySQL / MariaDB, + SQL Server. + + """ + + SETINPUTSIZES = 2 + """Use the pep-249 setinputsizes method. + + This is only implemented for DBAPIs that support this method and for which + the SQLAlchemy dialect has the appropriate infrastructure for that + dialect set up. Current dialects include cx_Oracle as well as + optional support for SQL Server using pyodbc. + + When using setinputsizes, dialects also have a means of only using the + method for certain datatypes using include/exclude lists. + + When SETINPUTSIZES is used, the :meth:`.Dialect.do_set_input_sizes` method + is called for each statement executed which has bound parameters. + + """ + + RENDER_CASTS = 3 + """Render casts or other directives in the SQL string. + + This method is used for all PostgreSQL dialects, including asyncpg, + pg8000, psycopg, psycopg2. Dialects which implement this can choose + which kinds of datatypes are explicitly cast in SQL statements and which + aren't. + + When RENDER_CASTS is used, the compiler will invoke the + :meth:`.SQLCompiler.render_bind_cast` method for the rendered + string representation of each :class:`.BindParameter` object whose + dialect-level type sets the :attr:`.TypeEngine.render_bind_cast` attribute. + + The :meth:`.SQLCompiler.render_bind_cast` is also used to render casts + for one form of "insertmanyvalues" query, when both + :attr:`.InsertmanyvaluesSentinelOpts.USE_INSERT_FROM_SELECT` and + :attr:`.InsertmanyvaluesSentinelOpts.RENDER_SELECT_COL_CASTS` are set, + where the casts are applied to the intermediary columns e.g. + "INSERT INTO t (a, b, c) SELECT p0::TYP, p1::TYP, p2::TYP " + "FROM (VALUES (?, ?), (?, ?), ...)". + + .. versionadded:: 2.0.10 - :meth:`.SQLCompiler.render_bind_cast` is now + used within some elements of the "insertmanyvalues" implementation. + + + """ + + +VersionInfoType = Tuple[Union[int, str], ...] +TableKey = Tuple[Optional[str], str] + + +class Dialect(EventTarget): + """Define the behavior of a specific database and DB-API combination. + + Any aspect of metadata definition, SQL query generation, + execution, result-set handling, or anything else which varies + between databases is defined under the general category of the + Dialect. The Dialect acts as a factory for other + database-specific object implementations including + ExecutionContext, Compiled, DefaultGenerator, and TypeEngine. + + .. note:: Third party dialects should not subclass :class:`.Dialect` + directly. Instead, subclass :class:`.default.DefaultDialect` or + descendant class. + + """ + + CACHE_HIT = CacheStats.CACHE_HIT + CACHE_MISS = CacheStats.CACHE_MISS + CACHING_DISABLED = CacheStats.CACHING_DISABLED + NO_CACHE_KEY = CacheStats.NO_CACHE_KEY + NO_DIALECT_SUPPORT = CacheStats.NO_DIALECT_SUPPORT + + dispatch: dispatcher[Dialect] + + name: str + """identifying name for the dialect from a DBAPI-neutral point of view + (i.e. 'sqlite') + """ + + driver: str + """identifying name for the dialect's DBAPI""" + + dialect_description: str + + dbapi: Optional[ModuleType] + """A reference to the DBAPI module object itself. 
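+
+    As a rough sketch of the import pattern described below (the dialect
+    itself is hypothetical)::
+
+        from sqlalchemy.engine.default import DefaultDialect
+
+        class MyDialect(DefaultDialect):
+            @classmethod
+            def import_dbapi(cls):
+                import sqlite3
+                return sqlite3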
+ + SQLAlchemy dialects import DBAPI modules using the classmethod + :meth:`.Dialect.import_dbapi`. The rationale is so that any dialect + module can be imported and used to generate SQL statements without the + need for the actual DBAPI driver to be installed. Only when an + :class:`.Engine` is constructed using :func:`.create_engine` does the + DBAPI get imported; at that point, the creation process will assign + the DBAPI module to this attribute. + + Dialects should therefore implement :meth:`.Dialect.import_dbapi` + which will import the necessary module and return it, and then refer + to ``self.dbapi`` in dialect code in order to refer to the DBAPI module + contents. + + .. versionchanged:: The :attr:`.Dialect.dbapi` attribute is exclusively + used as the per-:class:`.Dialect`-instance reference to the DBAPI + module. The previous not-fully-documented ``.Dialect.dbapi()`` + classmethod is deprecated and replaced by :meth:`.Dialect.import_dbapi`. + + """ + + @util.non_memoized_property + def loaded_dbapi(self) -> ModuleType: + """same as .dbapi, but is never None; will raise an error if no + DBAPI was set up. + + .. versionadded:: 2.0 + + """ + raise NotImplementedError() + + positional: bool + """True if the paramstyle for this Dialect is positional.""" + + paramstyle: str + """the paramstyle to be used (some DB-APIs support multiple + paramstyles). + """ + + compiler_linting: Linting + + statement_compiler: Type[SQLCompiler] + """a :class:`.Compiled` class used to compile SQL statements""" + + ddl_compiler: Type[DDLCompiler] + """a :class:`.Compiled` class used to compile DDL statements""" + + type_compiler_cls: ClassVar[Type[TypeCompiler]] + """a :class:`.Compiled` class used to compile SQL type objects + + .. versionadded:: 2.0 + + """ + + type_compiler_instance: TypeCompiler + """instance of a :class:`.Compiled` class used to compile SQL type + objects + + .. versionadded:: 2.0 + + """ + + type_compiler: Any + """legacy; this is a TypeCompiler class at the class level, a + TypeCompiler instance at the instance level. + + Refer to type_compiler_instance instead. + + """ + + preparer: Type[IdentifierPreparer] + """a :class:`.IdentifierPreparer` class used to + quote identifiers. + """ + + identifier_preparer: IdentifierPreparer + """This element will refer to an instance of :class:`.IdentifierPreparer` + once a :class:`.DefaultDialect` has been constructed. + + """ + + server_version_info: Optional[Tuple[Any, ...]] + """a tuple containing a version number for the DB backend in use. + + This value is only available for supporting dialects, and is + typically populated during the initial connection to the database. + """ + + default_schema_name: Optional[str] + """the name of the default schema. This value is only available for + supporting dialects, and is typically populated during the + initial connection to the database. + + """ + + # NOTE: this does not take into effect engine-level isolation level. 
+ # not clear if this should be changed, seems like it should + default_isolation_level: Optional[IsolationLevel] + """the isolation that is implicitly present on new connections""" + + # create_engine() -> isolation_level currently goes here + _on_connect_isolation_level: Optional[IsolationLevel] + + execution_ctx_cls: Type[ExecutionContext] + """a :class:`.ExecutionContext` class used to handle statement execution""" + + execute_sequence_format: Union[ + Type[Tuple[Any, ...]], Type[Tuple[List[Any]]] + ] + """either the 'tuple' or 'list' type, depending on what cursor.execute() + accepts for the second argument (they vary).""" + + supports_alter: bool + """``True`` if the database supports ``ALTER TABLE`` - used only for + generating foreign key constraints in certain circumstances + """ + + max_identifier_length: int + """The maximum length of identifier names.""" + + supports_server_side_cursors: bool + """indicates if the dialect supports server side cursors""" + + server_side_cursors: bool + """deprecated; indicates if the dialect should attempt to use server + side cursors by default""" + + supports_sane_rowcount: bool + """Indicate whether the dialect properly implements rowcount for + ``UPDATE`` and ``DELETE`` statements. + """ + + supports_sane_multi_rowcount: bool + """Indicate whether the dialect properly implements rowcount for + ``UPDATE`` and ``DELETE`` statements when executed via + executemany. + """ + + supports_empty_insert: bool + """dialect supports INSERT () VALUES (), i.e. a plain INSERT with no + columns in it. + + This is not usually supported; an "empty" insert is typically + suited using either "INSERT..DEFAULT VALUES" or + "INSERT ... (col) VALUES (DEFAULT)". + + """ + + supports_default_values: bool + """dialect supports INSERT... DEFAULT VALUES syntax""" + + supports_default_metavalue: bool + """dialect supports INSERT...(col) VALUES (DEFAULT) syntax. + + Most databases support this in some way, e.g. SQLite supports it using + ``VALUES (NULL)``. MS SQL Server supports the syntax also however + is the only included dialect where we have this disabled, as + MSSQL does not support the field for the IDENTITY column, which is + usually where we like to make use of the feature. + + """ + + default_metavalue_token: str = "DEFAULT" + """for INSERT... VALUES (DEFAULT) syntax, the token to put in the + parenthesis. + + E.g. for SQLite this is the keyword "NULL". + + """ + + supports_multivalues_insert: bool + """Target database supports INSERT...VALUES with multiple value + sets, i.e. INSERT INTO table (cols) VALUES (...), (...), (...), ... + + """ + + insert_executemany_returning: bool + """dialect / driver / database supports some means of providing + INSERT...RETURNING support when dialect.do_executemany() is used. + + """ + + insert_executemany_returning_sort_by_parameter_order: bool + """dialect / driver / database supports some means of providing + INSERT...RETURNING support when dialect.do_executemany() is used + along with the :paramref:`_dml.Insert.returning.sort_by_parameter_order` + parameter being set. + + """ + + update_executemany_returning: bool + """dialect supports UPDATE..RETURNING with executemany.""" + + delete_executemany_returning: bool + """dialect supports DELETE..RETURNING with executemany.""" + + use_insertmanyvalues: bool + """if True, indicates "insertmanyvalues" functionality should be used + to allow for ``insert_executemany_returning`` behavior, if possible. 
+ + In practice, setting this to True means: + + if ``supports_multivalues_insert``, ``insert_returning`` and + ``use_insertmanyvalues`` are all True, the SQL compiler will produce + an INSERT that will be interpreted by the :class:`.DefaultDialect` + as an :attr:`.ExecuteStyle.INSERTMANYVALUES` execution that allows + for INSERT of many rows with RETURNING by rewriting a single-row + INSERT statement to have multiple VALUES clauses, also executing + the statement multiple times for a series of batches when large numbers + of rows are given. + + The parameter is False for the default dialect, and is set to + True for SQLAlchemy internal dialects SQLite, MySQL/MariaDB, PostgreSQL, + SQL Server. It remains at False for Oracle, which provides native + "executemany with RETURNING" support and also does not support + ``supports_multivalues_insert``. For MySQL/MariaDB, those MySQL + dialects that don't support RETURNING will not report + ``insert_executemany_returning`` as True. + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`engine_insertmanyvalues` + + """ + + use_insertmanyvalues_wo_returning: bool + """if True, and use_insertmanyvalues is also True, INSERT statements + that don't include RETURNING will also use "insertmanyvalues". + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`engine_insertmanyvalues` + + """ + + insertmanyvalues_implicit_sentinel: InsertmanyvaluesSentinelOpts + """Options indicating the database supports a form of bulk INSERT where + the autoincrement integer primary key can be reliably used as an ordering + for INSERTed rows. + + .. versionadded:: 2.0.10 + + .. seealso:: + + :ref:`engine_insertmanyvalues_returning_order` + + """ + + insertmanyvalues_page_size: int + """Number of rows to render into an individual INSERT..VALUES() statement + for :attr:`.ExecuteStyle.INSERTMANYVALUES` executions. + + The default dialect defaults this to 1000. + + .. versionadded:: 2.0 + + .. seealso:: + + :paramref:`_engine.Connection.execution_options.insertmanyvalues_page_size` - + execution option available on :class:`_engine.Connection`, statements + + """ # noqa: E501 + + insertmanyvalues_max_parameters: int + """Alternate to insertmanyvalues_page_size, will additionally limit + page size based on number of parameters total in the statement. + + + """ + + preexecute_autoincrement_sequences: bool + """True if 'implicit' primary key functions must be executed separately + in order to get their value, if RETURNING is not used. + + This is currently oriented towards PostgreSQL when the + ``implicit_returning=False`` parameter is used on a :class:`.Table` + object. + + """ + + insert_returning: bool + """if the dialect supports RETURNING with INSERT + + .. versionadded:: 2.0 + + """ + + update_returning: bool + """if the dialect supports RETURNING with UPDATE + + .. versionadded:: 2.0 + + """ + + update_returning_multifrom: bool + """if the dialect supports RETURNING with UPDATE..FROM + + .. versionadded:: 2.0 + + """ + + delete_returning: bool + """if the dialect supports RETURNING with DELETE + + .. versionadded:: 2.0 + + """ + + delete_returning_multifrom: bool + """if the dialect supports RETURNING with DELETE..FROM + + .. versionadded:: 2.0 + + """ + + favor_returning_over_lastrowid: bool + """for backends that support both a lastrowid and a RETURNING insert + strategy, favor RETURNING for simple single-int pk inserts. + + cursor.lastrowid tends to be more performant on most backends. 
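+
+    As an illustration, the RETURNING strategy obtains the new primary key
+    from the INSERT itself (e.g. ``INSERT INTO t (x) VALUES (?) RETURNING id``),
+    whereas the lastrowid strategy runs the INSERT and then reads
+    ``cursor.lastrowid``.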
+
+    """
+
+    supports_identity_columns: bool
+    """target database supports IDENTITY"""
+
+    cte_follows_insert: bool
+    """target database, when given a CTE with an INSERT statement, needs
+    the CTE to be below the INSERT"""
+
+    colspecs: MutableMapping[Type[TypeEngine[Any]], Type[TypeEngine[Any]]]
+    """A dictionary of TypeEngine classes from sqlalchemy.types mapped
+    to subclasses that are specific to the dialect class.  This
+    dictionary is class-level only and is not accessed from the
+    dialect instance itself.
+    """
+
+    supports_sequences: bool
+    """Indicates if the dialect supports CREATE SEQUENCE or similar."""
+
+    sequences_optional: bool
+    """If True, indicates if the :paramref:`_schema.Sequence.optional`
+    parameter on the :class:`_schema.Sequence` construct
+    should signal to not generate a CREATE SEQUENCE.  Applies only to
+    dialects that support sequences.  Currently used only to allow PostgreSQL
+    SERIAL to be used on a column that specifies Sequence() for usage on
+    other backends.
+    """
+
+    default_sequence_base: int
+    """the default value that will be rendered as the "START WITH" portion of
+    a CREATE SEQUENCE DDL statement.
+
+    """
+
+    supports_native_enum: bool
+    """Indicates if the dialect supports a native ENUM construct.
+    This will prevent :class:`_types.Enum` from generating a CHECK
+    constraint when that type is used in "native" mode.
+    """
+
+    supports_native_boolean: bool
+    """Indicates if the dialect supports a native boolean construct.
+    This will prevent :class:`_types.Boolean` from generating a CHECK
+    constraint when that type is used.
+    """
+
+    supports_native_decimal: bool
+    """indicates if Decimal objects are handled and returned for precision
+    numeric types, or if floats are returned"""
+
+    supports_native_uuid: bool
+    """indicates if Python UUID() objects are handled natively by the
+    driver for SQL UUID datatypes.
+
+    .. versionadded:: 2.0
+
+    """
+
+    returns_native_bytes: bool
+    """indicates if Python bytes() objects are returned natively by the
+    driver for SQL "binary" datatypes.
+
+    .. versionadded:: 2.0.11
+
+    """
+
+    construct_arguments: Optional[
+        List[Tuple[Type[Union[SchemaItem, ClauseElement]], Mapping[str, Any]]]
+    ] = None
+    """Optional set of argument specifiers for various SQLAlchemy
+    constructs, typically schema items.
+
+    To implement, establish as a series of tuples, as in::
+
+        construct_arguments = [
+            (schema.Index, {
+                "using": False,
+                "where": None,
+                "ops": None
+            })
+        ]
+
+    If the above construct is established on the PostgreSQL dialect,
+    the :class:`.Index` construct will now accept the keyword arguments
+    ``postgresql_using``, ``postgresql_where``, and ``postgresql_ops``.
+    Any other argument specified to the constructor of :class:`.Index`
+    which is prefixed with ``postgresql_`` will raise :class:`.ArgumentError`.
+
+    A dialect which does not include a ``construct_arguments`` member will
+    not participate in the argument validation system.  For such a dialect,
+    any argument name is accepted by all participating constructs, within
+    the namespace of arguments prefixed with that dialect name.  The rationale
+    here is so that third-party dialects that haven't yet implemented this
+    feature continue to function in the old way.
+
+    .. seealso::
+
+        :class:`.DialectKWArgs` - implementing base class which consumes
+        :attr:`.DefaultDialect.construct_arguments`
+
+
+    """
+
+    reflection_options: Sequence[str] = ()
+    """Sequence of string names indicating keyword arguments that can be
+    established on a :class:`.Table` object which will be passed as
+    "reflection options" when using :paramref:`.Table.autoload_with`.
+
+    Current example is "oracle_resolve_synonyms" in the Oracle dialect.
+
+    """
+
+    dbapi_exception_translation_map: Mapping[str, str] = util.EMPTY_DICT
+    """A dictionary of names that will contain as values the names of
+    pep-249 exceptions ("IntegrityError", "OperationalError", etc)
+    keyed to alternate class names, to support the case where a
+    DBAPI has exception classes that aren't named as they are
+    referred to (e.g. IntegrityError = MyException).  In the vast
+    majority of cases this dictionary is empty.
+    """
+
+    supports_comments: bool
+    """Indicates the dialect supports comment DDL on tables and columns."""
+
+    inline_comments: bool
+    """Indicates the dialect supports comment DDL that's inline with the
+    definition of a Table or Column.  If False, this implies that ALTER must
+    be used to set table and column comments."""
+
+    supports_constraint_comments: bool
+    """Indicates if the dialect supports comment DDL on constraints.
+
+    .. versionadded:: 2.0
+    """
+
+    _has_events = False
+
+    supports_statement_cache: bool = True
+    """indicates if this dialect supports caching.
+
+    All dialects that are compatible with statement caching should set this
+    flag to True directly on each dialect class and subclass that supports
+    it.  SQLAlchemy tests that this flag is locally present on each dialect
+    subclass before it will use statement caching.  This is to provide
+    safety for legacy or new dialects that are not yet fully tested to be
+    compliant with SQL statement caching.
+
+    .. versionadded:: 1.4.5
+
+    .. seealso::
+
+        :ref:`engine_thirdparty_caching`
+
+    """
+
+    _supports_statement_cache: bool
+    """internal evaluation for supports_statement_cache"""
+
+    bind_typing = BindTyping.NONE
+    """define a means of passing typing information to the database and/or
+    driver for bound parameters.
+
+    See :class:`.BindTyping` for values.
+
+    .. versionadded:: 2.0
+
+    """
+
+    is_async: bool
+    """Whether or not this dialect is intended for asyncio use."""
+
+    has_terminate: bool
+    """Whether or not this dialect has a separate "terminate" implementation
+    that does not block or require awaiting."""
+
+    engine_config_types: Mapping[str, Any]
+    """a mapping of string keys that can be in an engine config linked to
+    type conversion functions.
+
+    """
+
+    label_length: Optional[int]
+    """optional user-defined max length for SQL labels"""
+
+    include_set_input_sizes: Optional[Set[Any]]
+    """set of DBAPI type objects that should be included in
+    automatic cursor.setinputsizes() calls.
+
+    This is only used if bind_typing is BindTyping.SETINPUTSIZES
+
+    """
+
+    exclude_set_input_sizes: Optional[Set[Any]]
+    """set of DBAPI type objects that should be excluded in
+    automatic cursor.setinputsizes() calls.
+
+    This is only used if bind_typing is BindTyping.SETINPUTSIZES
+
+    """
+
+    supports_simple_order_by_label: bool
+    """target database supports ORDER BY <labelname>, where <labelname>
+    refers to a label in the columns clause of the SELECT"""
+
+    div_is_floordiv: bool
+    """target database treats the / division operator as "floor division" """
+
+    tuple_in_values: bool
+    """target database supports tuple IN, i.e.
(x, y) IN ((q, p), (r, z))""" + + _bind_typing_render_casts: bool + + _type_memos: MutableMapping[TypeEngine[Any], _TypeMemoDict] + + def _builtin_onconnect(self) -> Optional[_ListenerFnType]: + raise NotImplementedError() + + def create_connect_args(self, url: URL) -> ConnectArgsType: + """Build DB-API compatible connection arguments. + + Given a :class:`.URL` object, returns a tuple + consisting of a ``(*args, **kwargs)`` suitable to send directly + to the dbapi's connect function. The arguments are sent to the + :meth:`.Dialect.connect` method which then runs the DBAPI-level + ``connect()`` function. + + The method typically makes use of the + :meth:`.URL.translate_connect_args` + method in order to generate a dictionary of options. + + The default implementation is:: + + def create_connect_args(self, url): + opts = url.translate_connect_args() + opts.update(url.query) + return ([], opts) + + :param url: a :class:`.URL` object + + :return: a tuple of ``(*args, **kwargs)`` which will be passed to the + :meth:`.Dialect.connect` method. + + .. seealso:: + + :meth:`.URL.translate_connect_args` + + """ + + raise NotImplementedError() + + @classmethod + def import_dbapi(cls) -> ModuleType: + """Import the DBAPI module that is used by this dialect. + + The Python module object returned here will be assigned as an + instance variable to a constructed dialect under the name + ``.dbapi``. + + .. versionchanged:: 2.0 The :meth:`.Dialect.import_dbapi` class + method is renamed from the previous method ``.Dialect.dbapi()``, + which would be replaced at dialect instantiation time by the + DBAPI module itself, thus using the same name in two different ways. + If a ``.Dialect.dbapi()`` classmethod is present on a third-party + dialect, it will be used and a deprecation warning will be emitted. + + """ + raise NotImplementedError() + + def type_descriptor(self, typeobj: TypeEngine[_T]) -> TypeEngine[_T]: + """Transform a generic type to a dialect-specific type. + + Dialect classes will usually use the + :func:`_types.adapt_type` function in the types module to + accomplish this. + + The returned result is cached *per dialect class* so can + contain no dialect-instance state. + + """ + + raise NotImplementedError() + + def initialize(self, connection: Connection) -> None: + """Called during strategized creation of the dialect with a + connection. + + Allows dialects to configure options based on server version info or + other properties. + + The connection passed here is a SQLAlchemy Connection object, + with full capabilities. + + The initialize() method of the base dialect should be called via + super(). + + .. note:: as of SQLAlchemy 1.4, this method is called **before** + any :meth:`_engine.Dialect.on_connect` hooks are called. + + """ + + pass + + if TYPE_CHECKING: + + def _overrides_default(self, method_name: str) -> bool: ... + + def get_columns( + self, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> List[ReflectedColumn]: + """Return information about columns in ``table_name``. + + Given a :class:`_engine.Connection`, a string + ``table_name``, and an optional string ``schema``, return column + information as a list of dictionaries + corresponding to the :class:`.ReflectedColumn` dictionary. + + This is an internal dialect method. Applications should use + :meth:`.Inspector.get_columns`. 
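+
+        From the application side, a minimal sketch of the corresponding
+        public call (``engine`` and ``some_table`` are hypothetical
+        placeholders)::
+
+            from sqlalchemy import inspect
+
+            insp = inspect(engine)
+            for col in insp.get_columns("some_table"):
+                print(col["name"], col["type"], col["nullable"])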
+
+        """
+
+        raise NotImplementedError()
+
+    def get_multi_columns(
+        self,
+        connection: Connection,
+        *,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        **kw: Any,
+    ) -> Iterable[Tuple[TableKey, List[ReflectedColumn]]]:
+        """Return information about columns in all tables in the
+        given ``schema``.
+
+        This is an internal dialect method.  Applications should use
+        :meth:`.Inspector.get_multi_columns`.
+
+        .. note:: The :class:`_engine.DefaultDialect` provides a default
+           implementation that will call the single table method for
+           each object returned by :meth:`Dialect.get_table_names`,
+           :meth:`Dialect.get_view_names` or
+           :meth:`Dialect.get_materialized_view_names` depending on the
+           provided ``kind``.  Dialects that want to support a faster
+           implementation should implement this method.
+
+        .. versionadded:: 2.0
+
+        """
+
+        raise NotImplementedError()
+
+    def get_pk_constraint(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> ReflectedPrimaryKeyConstraint:
+        """Return information about the primary key constraint on
+        ``table_name``.
+
+        Given a :class:`_engine.Connection`, a string
+        ``table_name``, and an optional string ``schema``, return primary
+        key information as a dictionary corresponding to the
+        :class:`.ReflectedPrimaryKeyConstraint` dictionary.
+
+        This is an internal dialect method.  Applications should use
+        :meth:`.Inspector.get_pk_constraint`.
+
+        """
+        raise NotImplementedError()
+
+    def get_multi_pk_constraint(
+        self,
+        connection: Connection,
+        *,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        **kw: Any,
+    ) -> Iterable[Tuple[TableKey, ReflectedPrimaryKeyConstraint]]:
+        """Return information about primary key constraints in
+        all tables in the given ``schema``.
+
+        This is an internal dialect method.  Applications should use
+        :meth:`.Inspector.get_multi_pk_constraint`.
+
+        .. note:: The :class:`_engine.DefaultDialect` provides a default
+           implementation that will call the single table method for
+           each object returned by :meth:`Dialect.get_table_names`,
+           :meth:`Dialect.get_view_names` or
+           :meth:`Dialect.get_materialized_view_names` depending on the
+           provided ``kind``.  Dialects that want to support a faster
+           implementation should implement this method.
+
+        .. versionadded:: 2.0
+
+        """
+        raise NotImplementedError()
+
+    def get_foreign_keys(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> List[ReflectedForeignKeyConstraint]:
+        """Return information about foreign_keys in ``table_name``.
+
+        Given a :class:`_engine.Connection`, a string
+        ``table_name``, and an optional string ``schema``, return foreign
+        key information as a list of dicts corresponding to the
+        :class:`.ReflectedForeignKeyConstraint` dictionary.
+
+        This is an internal dialect method.  Applications should use
+        :meth:`_engine.Inspector.get_foreign_keys`.
+        """
+
+        raise NotImplementedError()
+
+    def get_multi_foreign_keys(
+        self,
+        connection: Connection,
+        *,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        **kw: Any,
+    ) -> Iterable[Tuple[TableKey, List[ReflectedForeignKeyConstraint]]]:
+        """Return information about foreign_keys in all tables
+        in the given ``schema``.
+
+        This is an internal dialect method.  Applications should use
+        :meth:`_engine.Inspector.get_multi_foreign_keys`.
+
+        ..
note:: The :class:`_engine.DefaultDialect` provides a default + implementation that will call the single table method for + each object returned by :meth:`Dialect.get_table_names`, + :meth:`Dialect.get_view_names` or + :meth:`Dialect.get_materialized_view_names` depending on the + provided ``kind``. Dialects that want to support a faster + implementation should implement this method. + + .. versionadded:: 2.0 + + """ + + raise NotImplementedError() + + def get_table_names( + self, connection: Connection, schema: Optional[str] = None, **kw: Any + ) -> List[str]: + """Return a list of table names for ``schema``. + + This is an internal dialect method. Applications should use + :meth:`_engine.Inspector.get_table_names`. + + """ + + raise NotImplementedError() + + def get_temp_table_names( + self, connection: Connection, schema: Optional[str] = None, **kw: Any + ) -> List[str]: + """Return a list of temporary table names on the given connection, + if supported by the underlying backend. + + This is an internal dialect method. Applications should use + :meth:`_engine.Inspector.get_temp_table_names`. + + """ + + raise NotImplementedError() + + def get_view_names( + self, connection: Connection, schema: Optional[str] = None, **kw: Any + ) -> List[str]: + """Return a list of all non-materialized view names available in the + database. + + This is an internal dialect method. Applications should use + :meth:`_engine.Inspector.get_view_names`. + + :param schema: schema name to query, if not the default schema. + + """ + + raise NotImplementedError() + + def get_materialized_view_names( + self, connection: Connection, schema: Optional[str] = None, **kw: Any + ) -> List[str]: + """Return a list of all materialized view names available in the + database. + + This is an internal dialect method. Applications should use + :meth:`_engine.Inspector.get_materialized_view_names`. + + :param schema: schema name to query, if not the default schema. + + .. versionadded:: 2.0 + + """ + + raise NotImplementedError() + + def get_sequence_names( + self, connection: Connection, schema: Optional[str] = None, **kw: Any + ) -> List[str]: + """Return a list of all sequence names available in the database. + + This is an internal dialect method. Applications should use + :meth:`_engine.Inspector.get_sequence_names`. + + :param schema: schema name to query, if not the default schema. + + .. versionadded:: 1.4 + """ + + raise NotImplementedError() + + def get_temp_view_names( + self, connection: Connection, schema: Optional[str] = None, **kw: Any + ) -> List[str]: + """Return a list of temporary view names on the given connection, + if supported by the underlying backend. + + This is an internal dialect method. Applications should use + :meth:`_engine.Inspector.get_temp_view_names`. + + """ + + raise NotImplementedError() + + def get_schema_names(self, connection: Connection, **kw: Any) -> List[str]: + """Return a list of all schema names available in the database. + + This is an internal dialect method. Applications should use + :meth:`_engine.Inspector.get_schema_names`. + """ + raise NotImplementedError() + + def get_view_definition( + self, + connection: Connection, + view_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> str: + """Return plain or materialized view definition. + + This is an internal dialect method. Applications should use + :meth:`_engine.Inspector.get_view_definition`. 
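+
+        From the application side, a minimal sketch of the corresponding
+        public call (``engine`` and ``some_view`` are hypothetical
+        placeholders)::
+
+            from sqlalchemy import inspect
+
+            print(inspect(engine).get_view_definition("some_view"))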
+
+        Given a :class:`_engine.Connection`, a string
+        ``view_name``, and an optional string ``schema``, return the view
+        definition.
+        """
+
+        raise NotImplementedError()
+
+    def get_indexes(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> List[ReflectedIndex]:
+        """Return information about indexes in ``table_name``.
+
+        Given a :class:`_engine.Connection`, a string
+        ``table_name`` and an optional string ``schema``, return index
+        information as a list of dictionaries corresponding to the
+        :class:`.ReflectedIndex` dictionary.
+
+        This is an internal dialect method.  Applications should use
+        :meth:`.Inspector.get_indexes`.
+        """
+
+        raise NotImplementedError()
+
+    def get_multi_indexes(
+        self,
+        connection: Connection,
+        *,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        **kw: Any,
+    ) -> Iterable[Tuple[TableKey, List[ReflectedIndex]]]:
+        """Return information about indexes in all tables
+        in the given ``schema``.
+
+        This is an internal dialect method.  Applications should use
+        :meth:`.Inspector.get_multi_indexes`.
+
+        .. note:: The :class:`_engine.DefaultDialect` provides a default
+           implementation that will call the single table method for
+           each object returned by :meth:`Dialect.get_table_names`,
+           :meth:`Dialect.get_view_names` or
+           :meth:`Dialect.get_materialized_view_names` depending on the
+           provided ``kind``.  Dialects that want to support a faster
+           implementation should implement this method.
+
+        .. versionadded:: 2.0
+
+        """
+
+        raise NotImplementedError()
+
+    def get_unique_constraints(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> List[ReflectedUniqueConstraint]:
+        r"""Return information about unique constraints in ``table_name``.
+
+        Given a string ``table_name`` and an optional string ``schema``, return
+        unique constraint information as a list of dicts corresponding
+        to the :class:`.ReflectedUniqueConstraint` dictionary.
+
+        This is an internal dialect method.  Applications should use
+        :meth:`.Inspector.get_unique_constraints`.
+        """
+
+        raise NotImplementedError()
+
+    def get_multi_unique_constraints(
+        self,
+        connection: Connection,
+        *,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        **kw: Any,
+    ) -> Iterable[Tuple[TableKey, List[ReflectedUniqueConstraint]]]:
+        """Return information about unique constraints in all tables
+        in the given ``schema``.
+
+        This is an internal dialect method.  Applications should use
+        :meth:`.Inspector.get_multi_unique_constraints`.
+
+        .. note:: The :class:`_engine.DefaultDialect` provides a default
+           implementation that will call the single table method for
+           each object returned by :meth:`Dialect.get_table_names`,
+           :meth:`Dialect.get_view_names` or
+           :meth:`Dialect.get_materialized_view_names` depending on the
+           provided ``kind``.  Dialects that want to support a faster
+           implementation should implement this method.
+
+        .. versionadded:: 2.0
+
+        """
+
+        raise NotImplementedError()
+
+    def get_check_constraints(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> List[ReflectedCheckConstraint]:
+        r"""Return information about check constraints in ``table_name``.
+
+        Given a string ``table_name`` and an optional string ``schema``, return
+        check constraint information as a list of dicts corresponding
+        to the :class:`.ReflectedCheckConstraint` dictionary.
+
+        This is an internal dialect method.
Applications should use + :meth:`.Inspector.get_check_constraints`. + + """ + + raise NotImplementedError() + + def get_multi_check_constraints( + self, + connection: Connection, + *, + schema: Optional[str] = None, + filter_names: Optional[Collection[str]] = None, + **kw: Any, + ) -> Iterable[Tuple[TableKey, List[ReflectedCheckConstraint]]]: + """Return information about check constraints in all tables + in the given ``schema``. + + This is an internal dialect method. Applications should use + :meth:`.Inspector.get_multi_check_constraints`. + + .. note:: The :class:`_engine.DefaultDialect` provides a default + implementation that will call the single table method for + each object returned by :meth:`Dialect.get_table_names`, + :meth:`Dialect.get_view_names` or + :meth:`Dialect.get_materialized_view_names` depending on the + provided ``kind``. Dialects that want to support a faster + implementation should implement this method. + + .. versionadded:: 2.0 + + """ + + raise NotImplementedError() + + def get_table_options( + self, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> Dict[str, Any]: + """Return a dictionary of options specified when ``table_name`` + was created. + + This is an internal dialect method. Applications should use + :meth:`_engine.Inspector.get_table_options`. + """ + raise NotImplementedError() + + def get_multi_table_options( + self, + connection: Connection, + *, + schema: Optional[str] = None, + filter_names: Optional[Collection[str]] = None, + **kw: Any, + ) -> Iterable[Tuple[TableKey, Dict[str, Any]]]: + """Return a dictionary of options specified when the tables in the + given schema were created. + + This is an internal dialect method. Applications should use + :meth:`_engine.Inspector.get_multi_table_options`. + + .. note:: The :class:`_engine.DefaultDialect` provides a default + implementation that will call the single table method for + each object returned by :meth:`Dialect.get_table_names`, + :meth:`Dialect.get_view_names` or + :meth:`Dialect.get_materialized_view_names` depending on the + provided ``kind``. Dialects that want to support a faster + implementation should implement this method. + + .. versionadded:: 2.0 + + """ + raise NotImplementedError() + + def get_table_comment( + self, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> ReflectedTableComment: + r"""Return the "comment" for the table identified by ``table_name``. + + Given a string ``table_name`` and an optional string ``schema``, return + table comment information as a dictionary corresponding to the + :class:`.ReflectedTableComment` dictionary. + + This is an internal dialect method. Applications should use + :meth:`.Inspector.get_table_comment`. + + :raise: ``NotImplementedError`` for dialects that don't support + comments. + + .. versionadded:: 1.2 + + """ + + raise NotImplementedError() + + def get_multi_table_comment( + self, + connection: Connection, + *, + schema: Optional[str] = None, + filter_names: Optional[Collection[str]] = None, + **kw: Any, + ) -> Iterable[Tuple[TableKey, ReflectedTableComment]]: + """Return information about the table comment in all tables + in the given ``schema``. + + This is an internal dialect method. Applications should use + :meth:`_engine.Inspector.get_multi_table_comment`. + + .. 
note:: The :class:`_engine.DefaultDialect` provides a default
+           implementation that will call the single table method for
+           each object returned by :meth:`Dialect.get_table_names`,
+           :meth:`Dialect.get_view_names` or
+           :meth:`Dialect.get_materialized_view_names` depending on the
+           provided ``kind``.  Dialects that want to support a faster
+           implementation should implement this method.
+
+        .. versionadded:: 2.0
+
+        """
+
+        raise NotImplementedError()
+
+    def normalize_name(self, name: str) -> str:
+        """convert the given name to lowercase if it is detected as
+        case insensitive.
+
+        This method is only used if the dialect defines
+        requires_name_normalize=True.
+
+        """
+        raise NotImplementedError()
+
+    def denormalize_name(self, name: str) -> str:
+        """convert the given name to a case insensitive identifier
+        for the backend if it is an all-lowercase name.
+
+        This method is only used if the dialect defines
+        requires_name_normalize=True.
+
+        """
+        raise NotImplementedError()
+
+    def has_table(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> bool:
+        """For internal dialect use, check the existence of a particular table
+        or view in the database.
+
+        Given a :class:`_engine.Connection` object, a string table_name and
+        optional schema name, return True if the given table exists in the
+        database, False otherwise.
+
+        This method serves as the underlying implementation of the
+        public facing :meth:`.Inspector.has_table` method, and is also used
+        internally to implement the "checkfirst" behavior for methods like
+        :meth:`_schema.Table.create` and :meth:`_schema.MetaData.create_all`.
+
+        .. note:: This method is used internally by SQLAlchemy, and is
+           published so that third-party dialects may provide an
+           implementation.  It is **not** the public API for checking for table
+           presence.  Please use the :meth:`.Inspector.has_table` method.
+
+        .. versionchanged:: 2.0 :meth:`_engine.Dialect.has_table` now
+           formally supports checking for additional table-like objects:
+
+           * any type of views (plain or materialized)
+           * temporary tables of any kind
+
+           Previously, these two checks were not formally specified and
+           different dialects would vary in their behavior.  The dialect
+           testing suite now includes tests for all of these object types,
+           and dialects to the degree that the backing database supports views
+           or temporary tables should seek to support locating these objects
+           for full compliance.
+
+        """
+
+        raise NotImplementedError()
+
+    def has_index(
+        self,
+        connection: Connection,
+        table_name: str,
+        index_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> bool:
+        """Check the existence of a particular index name in the database.
+
+        Given a :class:`_engine.Connection` object, a string
+        ``table_name`` and string index name, return ``True`` if an index of
+        the given name on the given table exists, ``False`` otherwise.
+
+        The :class:`.DefaultDialect` implements this in terms of the
+        :meth:`.Dialect.has_table` and :meth:`.Dialect.get_indexes` methods,
+        however dialects can implement a more performant version.
+
+        This is an internal dialect method.  Applications should use
+        :meth:`_engine.Inspector.has_index`.
+
+        .. versionadded:: 1.4
+
+        """
+
+        raise NotImplementedError()
+
+    def has_sequence(
+        self,
+        connection: Connection,
+        sequence_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> bool:
+        """Check the existence of a particular sequence in the database.
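+
+        From the application side, a minimal sketch of the corresponding
+        public call (``engine`` and the sequence name are hypothetical)::
+
+            from sqlalchemy import inspect
+
+            if inspect(engine).has_sequence("user_id_seq"):
+                ...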
+
+        Given a :class:`_engine.Connection` object and a string
+        ``sequence_name``, return ``True`` if the given sequence exists in
+        the database, ``False`` otherwise.
+
+        This is an internal dialect method.  Applications should use
+        :meth:`_engine.Inspector.has_sequence`.
+        """
+
+        raise NotImplementedError()
+
+    def has_schema(
+        self, connection: Connection, schema_name: str, **kw: Any
+    ) -> bool:
+        """Check the existence of a particular schema name in the database.
+
+        Given a :class:`_engine.Connection` object, a string
+        ``schema_name``, return ``True`` if a schema of the
+        given name exists, ``False`` otherwise.
+
+        The :class:`.DefaultDialect` implements this by checking
+        the presence of ``schema_name`` among the schemas returned by
+        :meth:`.Dialect.get_schema_names`,
+        however dialects can implement a more performant version.
+
+        This is an internal dialect method.  Applications should use
+        :meth:`_engine.Inspector.has_schema`.
+
+        .. versionadded:: 2.0
+
+        """
+
+        raise NotImplementedError()
+
+    def _get_server_version_info(self, connection: Connection) -> Any:
+        """Retrieve the server version info from the given connection.
+
+        This is used by the default implementation to populate the
+        "server_version_info" attribute and is called exactly
+        once upon first connect.
+
+        """
+
+        raise NotImplementedError()
+
+    def _get_default_schema_name(self, connection: Connection) -> str:
+        """Return the string name of the currently selected schema from
+        the given connection.
+
+        This is used by the default implementation to populate the
+        "default_schema_name" attribute and is called exactly
+        once upon first connect.
+
+        """
+
+        raise NotImplementedError()
+
+    def do_begin(self, dbapi_connection: PoolProxiedConnection) -> None:
+        """Provide an implementation of ``connection.begin()``, given a
+        DB-API connection.
+
+        The DBAPI has no dedicated "begin" method and it is expected
+        that transactions are implicit.  This hook is provided for those
+        DBAPIs that might need additional help in this area.
+
+        :param dbapi_connection: a DBAPI connection, typically
+         proxied within a :class:`.ConnectionFairy`.
+
+        """
+
+        raise NotImplementedError()
+
+    def do_rollback(self, dbapi_connection: PoolProxiedConnection) -> None:
+        """Provide an implementation of ``connection.rollback()``, given
+        a DB-API connection.
+
+        :param dbapi_connection: a DBAPI connection, typically
+         proxied within a :class:`.ConnectionFairy`.
+
+        """
+
+        raise NotImplementedError()
+
+    def do_commit(self, dbapi_connection: PoolProxiedConnection) -> None:
+        """Provide an implementation of ``connection.commit()``, given a
+        DB-API connection.
+
+        :param dbapi_connection: a DBAPI connection, typically
+         proxied within a :class:`.ConnectionFairy`.
+
+        """
+
+        raise NotImplementedError()
+
+    def do_terminate(self, dbapi_connection: DBAPIConnection) -> None:
+        """Provide an implementation of ``connection.close()`` that tries as
+        much as possible to not block, given a DBAPI
+        connection.
+
+        In the vast majority of cases this just calls .close(), however
+        some asyncio dialects may call upon different API features.
+
+        This hook is called by the :class:`_pool.Pool`
+        when a connection is being recycled or has been invalidated.
+
+        .. versionadded:: 1.4.41
+
+        """
+
+        raise NotImplementedError()
+
+    def do_close(self, dbapi_connection: DBAPIConnection) -> None:
+        """Provide an implementation of ``connection.close()``, given a DBAPI
+        connection.
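+
+        For most DBAPIs this is typically equivalent to the following
+        minimal sketch::
+
+            def do_close(self, dbapi_connection):
+                dbapi_connection.close()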
+ + This hook is called by the :class:`_pool.Pool` + when a connection has been + detached from the pool, or is being returned beyond the normal + capacity of the pool. + + """ + + raise NotImplementedError() + + def _do_ping_w_event(self, dbapi_connection: DBAPIConnection) -> bool: + raise NotImplementedError() + + def do_ping(self, dbapi_connection: DBAPIConnection) -> bool: + """ping the DBAPI connection and return True if the connection is + usable.""" + raise NotImplementedError() + + def do_set_input_sizes( + self, + cursor: DBAPICursor, + list_of_tuples: _GenericSetInputSizesType, + context: ExecutionContext, + ) -> Any: + """invoke the cursor.setinputsizes() method with appropriate arguments + + This hook is called if the :attr:`.Dialect.bind_typing` attribute is + set to the + :attr:`.BindTyping.SETINPUTSIZES` value. + Parameter data is passed in a list of tuples (paramname, dbtype, + sqltype), where ``paramname`` is the key of the parameter in the + statement, ``dbtype`` is the DBAPI datatype and ``sqltype`` is the + SQLAlchemy type. The order of tuples is in the correct parameter order. + + .. versionadded:: 1.4 + + .. versionchanged:: 2.0 - setinputsizes mode is now enabled by + setting :attr:`.Dialect.bind_typing` to + :attr:`.BindTyping.SETINPUTSIZES`. Dialects which accept + a ``use_setinputsizes`` parameter should set this value + appropriately. + + + """ + raise NotImplementedError() + + def create_xid(self) -> Any: + """Create a two-phase transaction ID. + + This id will be passed to do_begin_twophase(), + do_rollback_twophase(), do_commit_twophase(). Its format is + unspecified. + """ + + raise NotImplementedError() + + def do_savepoint(self, connection: Connection, name: str) -> None: + """Create a savepoint with the given name. + + :param connection: a :class:`_engine.Connection`. + :param name: savepoint name. + + """ + + raise NotImplementedError() + + def do_rollback_to_savepoint( + self, connection: Connection, name: str + ) -> None: + """Rollback a connection to the named savepoint. + + :param connection: a :class:`_engine.Connection`. + :param name: savepoint name. + + """ + + raise NotImplementedError() + + def do_release_savepoint(self, connection: Connection, name: str) -> None: + """Release the named savepoint on a connection. + + :param connection: a :class:`_engine.Connection`. + :param name: savepoint name. + """ + + raise NotImplementedError() + + def do_begin_twophase(self, connection: Connection, xid: Any) -> None: + """Begin a two phase transaction on the given connection. + + :param connection: a :class:`_engine.Connection`. + :param xid: xid + + """ + + raise NotImplementedError() + + def do_prepare_twophase(self, connection: Connection, xid: Any) -> None: + """Prepare a two phase transaction on the given connection. + + :param connection: a :class:`_engine.Connection`. + :param xid: xid + + """ + + raise NotImplementedError() + + def do_rollback_twophase( + self, + connection: Connection, + xid: Any, + is_prepared: bool = True, + recover: bool = False, + ) -> None: + """Rollback a two phase transaction on the given connection. + + :param connection: a :class:`_engine.Connection`. + :param xid: xid + :param is_prepared: whether or not + :meth:`.TwoPhaseTransaction.prepare` was called. + :param recover: if the recover flag was passed. 
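+
+        As an illustration only, a PostgreSQL-style backend might emit
+        ``ROLLBACK PREPARED '<xid>'`` for a prepared transaction; the
+        actual SQL is backend-specific.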
+ + """ + + raise NotImplementedError() + + def do_commit_twophase( + self, + connection: Connection, + xid: Any, + is_prepared: bool = True, + recover: bool = False, + ) -> None: + """Commit a two phase transaction on the given connection. + + + :param connection: a :class:`_engine.Connection`. + :param xid: xid + :param is_prepared: whether or not + :meth:`.TwoPhaseTransaction.prepare` was called. + :param recover: if the recover flag was passed. + + """ + + raise NotImplementedError() + + def do_recover_twophase(self, connection: Connection) -> List[Any]: + """Recover list of uncommitted prepared two phase transaction + identifiers on the given connection. + + :param connection: a :class:`_engine.Connection`. + + """ + + raise NotImplementedError() + + def _deliver_insertmanyvalues_batches( + self, + connection: Connection, + cursor: DBAPICursor, + statement: str, + parameters: _DBAPIMultiExecuteParams, + generic_setinputsizes: Optional[_GenericSetInputSizesType], + context: ExecutionContext, + ) -> Iterator[_InsertManyValuesBatch]: + """convert executemany parameters for an INSERT into an iterator + of statement/single execute values, used by the insertmanyvalues + feature. + + """ + raise NotImplementedError() + + def do_executemany( + self, + cursor: DBAPICursor, + statement: str, + parameters: _DBAPIMultiExecuteParams, + context: Optional[ExecutionContext] = None, + ) -> None: + """Provide an implementation of ``cursor.executemany(statement, + parameters)``.""" + + raise NotImplementedError() + + def do_execute( + self, + cursor: DBAPICursor, + statement: str, + parameters: Optional[_DBAPISingleExecuteParams], + context: Optional[ExecutionContext] = None, + ) -> None: + """Provide an implementation of ``cursor.execute(statement, + parameters)``.""" + + raise NotImplementedError() + + def do_execute_no_params( + self, + cursor: DBAPICursor, + statement: str, + context: Optional[ExecutionContext] = None, + ) -> None: + """Provide an implementation of ``cursor.execute(statement)``. + + The parameter collection should not be sent. + + """ + + raise NotImplementedError() + + def is_disconnect( + self, + e: Exception, + connection: Optional[Union[PoolProxiedConnection, DBAPIConnection]], + cursor: Optional[DBAPICursor], + ) -> bool: + """Return True if the given DB-API error indicates an invalid + connection""" + + raise NotImplementedError() + + def connect(self, *cargs: Any, **cparams: Any) -> DBAPIConnection: + r"""Establish a connection using this dialect's DBAPI. + + The default implementation of this method is:: + + def connect(self, *cargs, **cparams): + return self.dbapi.connect(*cargs, **cparams) + + The ``*cargs, **cparams`` parameters are generated directly + from this dialect's :meth:`.Dialect.create_connect_args` method. + + This method may be used for dialects that need to perform programmatic + per-connection steps when a new connection is procured from the + DBAPI. + + + :param \*cargs: positional parameters returned from the + :meth:`.Dialect.create_connect_args` method + + :param \*\*cparams: keyword parameters returned from the + :meth:`.Dialect.create_connect_args` method. + + :return: a DBAPI connection, typically from the :pep:`249` module + level ``.connect()`` function. + + .. seealso:: + + :meth:`.Dialect.create_connect_args` + + :meth:`.Dialect.on_connect` + + """ + raise NotImplementedError() + + def on_connect_url(self, url: URL) -> Optional[Callable[[Any], Any]]: + """return a callable which sets up a newly created DBAPI connection. 
+
+        This method is a new hook that supersedes the
+        :meth:`_engine.Dialect.on_connect` method when implemented by a
+        dialect.  When not implemented by a dialect, it invokes the
+        :meth:`_engine.Dialect.on_connect` method directly to maintain
+        compatibility with existing dialects.  There is no deprecation
+        for :meth:`_engine.Dialect.on_connect` expected.
+
+        The callable should accept a single argument "conn" which is the
+        DBAPI connection itself.  The inner callable has no
+        return value.
+
+        E.g.::
+
+            class MyDialect(default.DefaultDialect):
+                # ...
+
+                def on_connect_url(self, url):
+                    def do_on_connect(connection):
+                        connection.execute("SET SPECIAL FLAGS etc")
+
+                    return do_on_connect
+
+        This is used to set dialect-wide per-connection options such as
+        isolation modes, Unicode modes, etc.
+
+        This method differs from :meth:`_engine.Dialect.on_connect` in that
+        it is passed the :class:`_engine.URL` object that's relevant to the
+        connect args.  Normally the only way to get this from the
+        :meth:`_engine.Dialect.on_connect` hook is to look on the
+        :class:`_engine.Engine` itself, however this URL object may have been
+        replaced by plugins.
+
+        .. note::
+
+            The default implementation of
+            :meth:`_engine.Dialect.on_connect_url` is to invoke the
+            :meth:`_engine.Dialect.on_connect` method.  Therefore if a dialect
+            implements this method, the :meth:`_engine.Dialect.on_connect`
+            method **will not be called** unless the overriding dialect calls
+            it directly from here.
+
+        .. versionadded:: 1.4.3 added :meth:`_engine.Dialect.on_connect_url`
+           which normally calls into :meth:`_engine.Dialect.on_connect`.
+
+        :param url: a :class:`_engine.URL` object representing the
+         :class:`_engine.URL` that was passed to the
+         :meth:`_engine.Dialect.create_connect_args` method.
+
+        :return: a callable that accepts a single DBAPI connection as an
+         argument, or None.
+
+        .. seealso::
+
+            :meth:`_engine.Dialect.on_connect`
+
+        """
+        return self.on_connect()
+
+    def on_connect(self) -> Optional[Callable[[Any], Any]]:
+        """return a callable which sets up a newly created DBAPI connection.
+
+        The callable should accept a single argument "conn" which is the
+        DBAPI connection itself.  The inner callable has no
+        return value.
+
+        E.g.::
+
+            class MyDialect(default.DefaultDialect):
+                # ...
+
+                def on_connect(self):
+                    def do_on_connect(connection):
+                        connection.execute("SET SPECIAL FLAGS etc")
+
+                    return do_on_connect
+
+        This is used to set dialect-wide per-connection options such as
+        isolation modes, Unicode modes, etc.
+
+        The "do_on_connect" callable is invoked by using the
+        :meth:`_events.PoolEvents.connect` event
+        hook, then unwrapping the DBAPI connection and passing it into the
+        callable.
+
+        .. versionchanged:: 1.4 the on_connect hook is no longer called twice
+           for the first connection of a dialect.  The on_connect hook is still
+           called before the :meth:`_engine.Dialect.initialize` method however.
+
+        .. versionchanged:: 1.4.3 the on_connect hook is invoked from a new
+           method on_connect_url that passes the URL that was used to create
+           the connect args.  Dialects can implement on_connect_url instead
+           of on_connect if they need the URL object that was used for the
+           connection in order to get additional context.
+
+        If None is returned, no event listener is generated.
+
+        :return: a callable that accepts a single DBAPI connection as an
+         argument, or None.
+
+        .. seealso::
+
+            :meth:`.Dialect.connect` - allows the DBAPI ``connect()`` sequence
+            itself to be controlled.
+ + :meth:`.Dialect.on_connect_url` - supersedes + :meth:`.Dialect.on_connect` to also receive the + :class:`_engine.URL` object in context. + + """ + return None + + def reset_isolation_level(self, dbapi_connection: DBAPIConnection) -> None: + """Given a DBAPI connection, revert its isolation to the default. + + Note that this is a dialect-level method which is used as part + of the implementation of the :class:`_engine.Connection` and + :class:`_engine.Engine` + isolation level facilities; these APIs should be preferred for + most typical use cases. + + .. seealso:: + + :meth:`_engine.Connection.get_isolation_level` + - view current level + + :attr:`_engine.Connection.default_isolation_level` + - view default level + + :paramref:`.Connection.execution_options.isolation_level` - + set per :class:`_engine.Connection` isolation level + + :paramref:`_sa.create_engine.isolation_level` - + set per :class:`_engine.Engine` isolation level + + """ + + raise NotImplementedError() + + def set_isolation_level( + self, dbapi_connection: DBAPIConnection, level: IsolationLevel + ) -> None: + """Given a DBAPI connection, set its isolation level. + + Note that this is a dialect-level method which is used as part + of the implementation of the :class:`_engine.Connection` and + :class:`_engine.Engine` + isolation level facilities; these APIs should be preferred for + most typical use cases. + + If the dialect also implements the + :meth:`.Dialect.get_isolation_level_values` method, then the given + level is guaranteed to be one of the string names within that sequence, + and the method will not need to anticipate a lookup failure. + + .. seealso:: + + :meth:`_engine.Connection.get_isolation_level` + - view current level + + :attr:`_engine.Connection.default_isolation_level` + - view default level + + :paramref:`.Connection.execution_options.isolation_level` - + set per :class:`_engine.Connection` isolation level + + :paramref:`_sa.create_engine.isolation_level` - + set per :class:`_engine.Engine` isolation level + + """ + + raise NotImplementedError() + + def get_isolation_level( + self, dbapi_connection: DBAPIConnection + ) -> IsolationLevel: + """Given a DBAPI connection, return its isolation level. + + When working with a :class:`_engine.Connection` object, + the corresponding + DBAPI connection may be procured using the + :attr:`_engine.Connection.connection` accessor. + + Note that this is a dialect-level method which is used as part + of the implementation of the :class:`_engine.Connection` and + :class:`_engine.Engine` isolation level facilities; + these APIs should be preferred for most typical use cases. + + + .. seealso:: + + :meth:`_engine.Connection.get_isolation_level` + - view current level + + :attr:`_engine.Connection.default_isolation_level` + - view default level + + :paramref:`.Connection.execution_options.isolation_level` - + set per :class:`_engine.Connection` isolation level + + :paramref:`_sa.create_engine.isolation_level` - + set per :class:`_engine.Engine` isolation level + + + """ + + raise NotImplementedError() + + def get_default_isolation_level( + self, dbapi_conn: DBAPIConnection + ) -> IsolationLevel: + """Given a DBAPI connection, return its isolation level, or + a default isolation level if one cannot be retrieved. + + This method may only raise NotImplementedError and + **must not raise any other exception**, as it is used implicitly upon + first connect. 
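+
+        A dialect that can always interrogate the live connection might
+        implement this with a minimal sketch such as::
+
+            def get_default_isolation_level(self, dbapi_conn):
+                return self.get_isolation_level(dbapi_conn)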
+
+        The method **must return a value** for a dialect that supports
+        isolation level settings, as this level is what will be reverted
+        towards when a per-connection isolation level change is made.
+
+        The method defaults to using the :meth:`.Dialect.get_isolation_level`
+        method unless overridden by a dialect.
+
+        .. versionadded:: 1.3.22
+
+        """
+        raise NotImplementedError()
+
+    def get_isolation_level_values(
+        self, dbapi_conn: DBAPIConnection
+    ) -> List[IsolationLevel]:
+        """return a sequence of string isolation level names that are accepted
+        by this dialect.
+
+        The available names should use the following conventions:
+
+        * use UPPERCASE names.  isolation level methods will accept lowercase
+          names but these are normalized into UPPERCASE before being passed
+          along to the dialect.
+        * separate words should be separated by spaces, not underscores, e.g.
+          ``REPEATABLE READ``.  isolation level names will have underscores
+          converted to spaces before being passed along to the dialect.
+        * The names for the four standard isolation levels, to the extent
+          that they are supported by the backend, should be
+          ``READ UNCOMMITTED``, ``READ COMMITTED``, ``REPEATABLE READ``,
+          ``SERIALIZABLE``
+        * if the dialect supports an autocommit option it should be provided
+          using the isolation level name ``AUTOCOMMIT``.
+        * Other isolation modes may also be present, provided that they
+          are named in UPPERCASE and use spaces not underscores.
+
+        This function is used so that the default dialect can check that
+        a given isolation level parameter is valid, else raises an
+        :class:`_exc.ArgumentError`.
+
+        A DBAPI connection is passed to the method, in the unlikely event that
+        the dialect needs to interrogate the connection itself to determine
+        this list, however it is expected that most backends will return
+        a hardcoded list of values.  If the dialect supports "AUTOCOMMIT",
+        that value should also be present in the sequence returned.
+
+        The method raises ``NotImplementedError`` by default.  If a dialect
+        does not implement this method, then the default dialect will not
+        perform any checking on a given isolation level value before passing
+        it onto the :meth:`.Dialect.set_isolation_level` method.  This is
+        to allow backwards-compatibility with third party dialects that may
+        not yet be implementing this method.
+
+        .. versionadded:: 2.0
+
+        """
+        raise NotImplementedError()
+
+    def _assert_and_set_isolation_level(
+        self, dbapi_conn: DBAPIConnection, level: IsolationLevel
+    ) -> None:
+        raise NotImplementedError()
+
+    @classmethod
+    def get_dialect_cls(cls, url: URL) -> Type[Dialect]:
+        """Given a URL, return the :class:`.Dialect` that will be used.
+
+        This is a hook that allows an external plugin to provide functionality
+        around an existing dialect, by allowing the plugin to be loaded
+        from the url based on an entrypoint, and then the plugin returns
+        the actual dialect to be used.
+
+        By default this just returns the cls.
+
+        """
+        return cls
+
+    @classmethod
+    def get_async_dialect_cls(cls, url: URL) -> Type[Dialect]:
+        """Given a URL, return the :class:`.Dialect` that will be used by
+        an async engine.
+
+        By default this is an alias of :meth:`.Dialect.get_dialect_cls` and
+        just returns the cls.  It may be used if a dialect provides
+        both a sync and async version under the same name, like the
+        ``psycopg`` driver.
+
+        .. versionadded:: 2.0
+
+        ..
seealso:: + + :meth:`.Dialect.get_dialect_cls` + + """ + return cls.get_dialect_cls(url) + + @classmethod + def load_provisioning(cls) -> None: + """set up the provision.py module for this dialect. + + For dialects that include a provision.py module that sets up + provisioning followers, this method should initiate that process. + + A typical implementation would be:: + + @classmethod + def load_provisioning(cls): + __import__("mydialect.provision") + + The default method assumes a module named ``provision.py`` inside + the owning package of the current dialect, based on the ``__module__`` + attribute:: + + @classmethod + def load_provisioning(cls): + package = ".".join(cls.__module__.split(".")[0:-1]) + try: + __import__(package + ".provision") + except ImportError: + pass + + .. versionadded:: 1.3.14 + + """ + + @classmethod + def engine_created(cls, engine: Engine) -> None: + """A convenience hook called before returning the final + :class:`_engine.Engine`. + + If the dialect returned a different class from the + :meth:`.get_dialect_cls` + method, then the hook is called on both classes, first on + the dialect class returned by the :meth:`.get_dialect_cls` method and + then on the class on which the method was called. + + The hook should be used by dialects and/or wrappers to apply special + events to the engine or its components. In particular, it allows + a dialect-wrapping class to apply dialect-level events. + + """ + + def get_driver_connection(self, connection: DBAPIConnection) -> Any: + """Returns the connection object as returned by the external driver + package. + + For normal dialects that use a DBAPI compliant driver this call + will just return the ``connection`` passed as argument. + For dialects that instead adapt a non DBAPI compliant driver, like + when adapting an asyncio driver, this call will return the + connection-like object as returned by the driver. + + .. versionadded:: 1.4.24 + + """ + raise NotImplementedError() + + def set_engine_execution_options( + self, engine: Engine, opts: CoreExecuteOptionsParameter + ) -> None: + """Establish execution options for a given engine. + + This is implemented by :class:`.DefaultDialect` to establish + event hooks for new :class:`.Connection` instances created + by the given :class:`.Engine` which will then invoke the + :meth:`.Dialect.set_connection_execution_options` method for that + connection. + + """ + raise NotImplementedError() + + def set_connection_execution_options( + self, connection: Connection, opts: CoreExecuteOptionsParameter + ) -> None: + """Establish execution options for a given connection. + + This is implemented by :class:`.DefaultDialect` in order to implement + the :paramref:`_engine.Connection.execution_options.isolation_level` + execution option. Dialects can intercept various execution options + which may need to modify state on a particular DBAPI connection. + + .. versionadded:: 1.4 + + """ + raise NotImplementedError() + + def get_dialect_pool_class(self, url: URL) -> Type[Pool]: + """return a Pool class to use for a given URL""" + raise NotImplementedError() + + +class CreateEnginePlugin: + """A set of hooks intended to augment the construction of an + :class:`_engine.Engine` object based on entrypoint names in a URL. + + The purpose of :class:`_engine.CreateEnginePlugin` is to allow third-party + systems to apply engine, pool and dialect level event listeners without + the need for the target application to be modified; instead, the plugin + names can be added to the database URL. 
+    Target applications for :class:`_engine.CreateEnginePlugin` include:
+
+    * connection and SQL performance tools, e.g. those which use events to
+      track number of checkouts and/or time spent with statements
+
+    * connectivity plugins such as proxies
+
+    A rudimentary :class:`_engine.CreateEnginePlugin` that attaches a logger
+    to an :class:`_engine.Engine` object might look like::
+
+        import logging
+
+        from sqlalchemy.engine import CreateEnginePlugin
+        from sqlalchemy import event
+
+        class LogCursorEventsPlugin(CreateEnginePlugin):
+            def __init__(self, url, kwargs):
+                # consume the parameter "log_cursor_logging_name" from the
+                # URL query
+                logging_name = url.query.get("log_cursor_logging_name", "log_cursor")
+
+                self.log = logging.getLogger(logging_name)
+
+            def update_url(self, url):
+                "update the URL to one that no longer includes our parameters"
+                return url.difference_update_query(["log_cursor_logging_name"])
+
+            def engine_created(self, engine):
+                "attach an event listener after the new Engine is constructed"
+                event.listen(engine, "before_cursor_execute", self._log_event)
+
+            def _log_event(
+                self,
+                conn,
+                cursor,
+                statement,
+                parameters,
+                context,
+                executemany):
+
+                self.log.info("Plugin logged cursor event: %s", statement)
+
+    Plugins are registered using entry points in a similar way as that
+    of dialects::
+
+        entry_points={
+            'sqlalchemy.plugins': [
+                'log_cursor_plugin = myapp.plugins:LogCursorEventsPlugin'
+            ]
+        }
+
+    A plugin that uses the above names would be invoked from a database
+    URL as in::
+
+        from sqlalchemy import create_engine
+
+        engine = create_engine(
+            "mysql+pymysql://scott:tiger@localhost/test?"
+            "plugin=log_cursor_plugin&log_cursor_logging_name=mylogger"
+        )
+
+    The ``plugin`` URL parameter supports multiple instances, so that a URL
+    may specify multiple plugins; they are loaded in the order stated
+    in the URL::
+
+        engine = create_engine(
+            "mysql+pymysql://scott:tiger@localhost/test?"
+            "plugin=plugin_one&plugin=plugin_two&plugin=plugin_three")
+
+    The plugin names may also be passed directly to :func:`_sa.create_engine`
+    using the :paramref:`_sa.create_engine.plugins` argument::
+
+        engine = create_engine(
+            "mysql+pymysql://scott:tiger@localhost/test",
+            plugins=["myplugin"])
+
+    .. versionadded:: 1.2.3 plugin names can also be specified
+       to :func:`_sa.create_engine` as a list
+
+    A plugin may consume plugin-specific arguments from the
+    :class:`_engine.URL` object as well as the ``kwargs`` dictionary, which is
+    the dictionary of arguments passed to the :func:`_sa.create_engine`
+    call.  "Consuming" these arguments means that they must be removed
+    when the plugin initializes, so that the arguments are not passed along
+    to the :class:`_engine.Dialect` constructor, where they will raise an
+    :class:`_exc.ArgumentError` because they are not known by the dialect.
+
+    As of version 1.4 of SQLAlchemy, arguments should continue to be consumed
+    from the ``kwargs`` dictionary directly, by removing the values with a
+    method such as ``dict.pop``.
Arguments from the :class:`_engine.URL` object + should be consumed by implementing the + :meth:`_engine.CreateEnginePlugin.update_url` method, returning a new copy + of the :class:`_engine.URL` with plugin-specific parameters removed:: + + class MyPlugin(CreateEnginePlugin): + def __init__(self, url, kwargs): + self.my_argument_one = url.query['my_argument_one'] + self.my_argument_two = url.query['my_argument_two'] + self.my_argument_three = kwargs.pop('my_argument_three', None) + + def update_url(self, url): + return url.difference_update_query( + ["my_argument_one", "my_argument_two"] + ) + + Arguments like those illustrated above would be consumed from a + :func:`_sa.create_engine` call such as:: + + from sqlalchemy import create_engine + + engine = create_engine( + "mysql+pymysql://scott:tiger@localhost/test?" + "plugin=myplugin&my_argument_one=foo&my_argument_two=bar", + my_argument_three='bat' + ) + + .. versionchanged:: 1.4 + + The :class:`_engine.URL` object is now immutable; a + :class:`_engine.CreateEnginePlugin` that needs to alter the + :class:`_engine.URL` should implement the newly added + :meth:`_engine.CreateEnginePlugin.update_url` method, which + is invoked after the plugin is constructed. + + For migration, construct the plugin in the following way, checking + for the existence of the :meth:`_engine.CreateEnginePlugin.update_url` + method to detect which version is running:: + + class MyPlugin(CreateEnginePlugin): + def __init__(self, url, kwargs): + if hasattr(CreateEnginePlugin, "update_url"): + # detect the 1.4 API + self.my_argument_one = url.query['my_argument_one'] + self.my_argument_two = url.query['my_argument_two'] + else: + # detect the 1.3 and earlier API - mutate the + # URL directly + self.my_argument_one = url.query.pop('my_argument_one') + self.my_argument_two = url.query.pop('my_argument_two') + + self.my_argument_three = kwargs.pop('my_argument_three', None) + + def update_url(self, url): + # this method is only called in the 1.4 version + return url.difference_update_query( + ["my_argument_one", "my_argument_two"] + ) + + .. seealso:: + + :ref:`change_5526` - overview of the :class:`_engine.URL` change which + also includes notes regarding :class:`_engine.CreateEnginePlugin`. + + + When the engine creation process completes and produces the + :class:`_engine.Engine` object, it is again passed to the plugin via the + :meth:`_engine.CreateEnginePlugin.engine_created` hook. In this hook, additional + changes can be made to the engine, most typically involving setup of + events (e.g. those defined in :ref:`core_event_toplevel`). + + """ # noqa: E501 + + def __init__(self, url: URL, kwargs: Dict[str, Any]): + """Construct a new :class:`.CreateEnginePlugin`. + + The plugin object is instantiated individually for each call + to :func:`_sa.create_engine`. A single :class:`_engine. + Engine` will be + passed to the :meth:`.CreateEnginePlugin.engine_created` method + corresponding to this URL. + + :param url: the :class:`_engine.URL` object. The plugin may inspect + the :class:`_engine.URL` for arguments. Arguments used by the + plugin should be removed, by returning an updated :class:`_engine.URL` + from the :meth:`_engine.CreateEnginePlugin.update_url` method. + + .. versionchanged:: 1.4 + + The :class:`_engine.URL` object is now immutable, so a + :class:`_engine.CreateEnginePlugin` that needs to alter the + :class:`_engine.URL` object should implement the + :meth:`_engine.CreateEnginePlugin.update_url` method. 
+ + :param kwargs: The keyword arguments passed to + :func:`_sa.create_engine`. + + """ + self.url = url + + def update_url(self, url: URL) -> URL: + """Update the :class:`_engine.URL`. + + A new :class:`_engine.URL` should be returned. This method is + typically used to consume configuration arguments from the + :class:`_engine.URL` which must be removed, as they will not be + recognized by the dialect. The + :meth:`_engine.URL.difference_update_query` method is available + to remove these arguments. See the docstring at + :class:`_engine.CreateEnginePlugin` for an example. + + + .. versionadded:: 1.4 + + """ + raise NotImplementedError() + + def handle_dialect_kwargs( + self, dialect_cls: Type[Dialect], dialect_args: Dict[str, Any] + ) -> None: + """parse and modify dialect kwargs""" + + def handle_pool_kwargs( + self, pool_cls: Type[Pool], pool_args: Dict[str, Any] + ) -> None: + """parse and modify pool kwargs""" + + def engine_created(self, engine: Engine) -> None: + """Receive the :class:`_engine.Engine` + object when it is fully constructed. + + The plugin may make additional changes to the engine, such as + registering engine or connection pool events. + + """ + + +class ExecutionContext: + """A messenger object for a Dialect that corresponds to a single + execution. + + """ + + engine: Engine + """engine which the Connection is associated with""" + + connection: Connection + """Connection object which can be freely used by default value + generators to execute SQL. This Connection should reference the + same underlying connection/transactional resources of + root_connection.""" + + root_connection: Connection + """Connection object which is the source of this ExecutionContext.""" + + dialect: Dialect + """dialect which created this ExecutionContext.""" + + cursor: DBAPICursor + """DB-API cursor procured from the connection""" + + compiled: Optional[Compiled] + """if passed to constructor, sqlalchemy.engine.base.Compiled object + being executed""" + + statement: str + """string version of the statement to be executed. Is either + passed to the constructor, or must be created from the + sql.Compiled object by the time pre_exec() has completed.""" + + invoked_statement: Optional[Executable] + """The Executable statement object that was given in the first place. + + This should be structurally equivalent to compiled.statement, but not + necessarily the same object as in a caching scenario the compiled form + will have been extracted from the cache. + + """ + + parameters: _AnyMultiExecuteParams + """bind parameters passed to the execute() or exec_driver_sql() methods. + + These are always stored as a list of parameter entries. A single-element + list corresponds to a ``cursor.execute()`` call and a multiple-element + list corresponds to ``cursor.executemany()``, except in the case + of :attr:`.ExecuteStyle.INSERTMANYVALUES` which will use + ``cursor.execute()`` one or more times. + + """ + + no_parameters: bool + """True if the execution style does not use parameters""" + + isinsert: bool + """True if the statement is an INSERT.""" + + isupdate: bool + """True if the statement is an UPDATE.""" + + execute_style: ExecuteStyle + """the style of DBAPI cursor method that will be used to execute + a statement. + + .. versionadded:: 2.0 + + """ + + executemany: bool + """True if the context has a list of more than one parameter set. + + Historically this attribute links to whether ``cursor.execute()`` or + ``cursor.executemany()`` will be used. 
It also can now mean that + "insertmanyvalues" may be used which indicates one or more + ``cursor.execute()`` calls. + + """ + + prefetch_cols: util.generic_fn_descriptor[Optional[Sequence[Column[Any]]]] + """a list of Column objects for which a client-side default + was fired off. Applies to inserts and updates.""" + + postfetch_cols: util.generic_fn_descriptor[Optional[Sequence[Column[Any]]]] + """a list of Column objects for which a server-side default or + inline SQL expression value was fired off. Applies to inserts + and updates.""" + + execution_options: _ExecuteOptions + """Execution options associated with the current statement execution""" + + @classmethod + def _init_ddl( + cls, + dialect: Dialect, + connection: Connection, + dbapi_connection: PoolProxiedConnection, + execution_options: _ExecuteOptions, + compiled_ddl: DDLCompiler, + ) -> ExecutionContext: + raise NotImplementedError() + + @classmethod + def _init_compiled( + cls, + dialect: Dialect, + connection: Connection, + dbapi_connection: PoolProxiedConnection, + execution_options: _ExecuteOptions, + compiled: SQLCompiler, + parameters: _CoreMultiExecuteParams, + invoked_statement: Executable, + extracted_parameters: Optional[Sequence[BindParameter[Any]]], + cache_hit: CacheStats = CacheStats.CACHING_DISABLED, + ) -> ExecutionContext: + raise NotImplementedError() + + @classmethod + def _init_statement( + cls, + dialect: Dialect, + connection: Connection, + dbapi_connection: PoolProxiedConnection, + execution_options: _ExecuteOptions, + statement: str, + parameters: _DBAPIMultiExecuteParams, + ) -> ExecutionContext: + raise NotImplementedError() + + @classmethod + def _init_default( + cls, + dialect: Dialect, + connection: Connection, + dbapi_connection: PoolProxiedConnection, + execution_options: _ExecuteOptions, + ) -> ExecutionContext: + raise NotImplementedError() + + def _exec_default( + self, + column: Optional[Column[Any]], + default: DefaultGenerator, + type_: Optional[TypeEngine[Any]], + ) -> Any: + raise NotImplementedError() + + def _prepare_set_input_sizes( + self, + ) -> Optional[List[Tuple[str, Any, TypeEngine[Any]]]]: + raise NotImplementedError() + + def _get_cache_stats(self) -> str: + raise NotImplementedError() + + def _setup_result_proxy(self) -> CursorResult[Any]: + raise NotImplementedError() + + def fire_sequence(self, seq: Sequence_SchemaItem, type_: Integer) -> int: + """given a :class:`.Sequence`, invoke it and return the next int + value""" + raise NotImplementedError() + + def create_cursor(self) -> DBAPICursor: + """Return a new cursor generated from this ExecutionContext's + connection. + + Some dialects may wish to change the behavior of + connection.cursor(), such as postgresql which may return a PG + "server side" cursor. + """ + + raise NotImplementedError() + + def pre_exec(self) -> None: + """Called before an execution of a compiled statement. + + If a compiled statement was passed to this ExecutionContext, + the `statement` and `parameters` datamembers must be + initialized after this statement is complete. + """ + + raise NotImplementedError() + + def get_out_parameter_values( + self, out_param_names: Sequence[str] + ) -> Sequence[Any]: + """Return a sequence of OUT parameter values from a cursor. + + For dialects that support OUT parameters, this method will be called + when there is a :class:`.SQLCompiler` object which has the + :attr:`.SQLCompiler.has_out_parameters` flag set. 
This flag in turn + will be set to True if the statement itself has :class:`.BindParameter` + objects that have the ``.isoutparam`` flag set which are consumed by + the :meth:`.SQLCompiler.visit_bindparam` method. If the dialect + compiler produces :class:`.BindParameter` objects with ``.isoutparam`` + set which are not handled by :meth:`.SQLCompiler.visit_bindparam`, it + should set this flag explicitly. + + The list of names that were rendered for each bound parameter + is passed to the method. The method should then return a sequence of + values corresponding to the list of parameter objects. Unlike in + previous SQLAlchemy versions, the values can be the **raw values** from + the DBAPI; the execution context will apply the appropriate type + handler based on what's present in self.compiled.binds and update the + values. The processed dictionary will then be made available via the + ``.out_parameters`` collection on the result object. Note that + SQLAlchemy 1.4 has multiple kinds of result object as part of the 2.0 + transition. + + .. versionadded:: 1.4 - added + :meth:`.ExecutionContext.get_out_parameter_values`, which is invoked + automatically by the :class:`.DefaultExecutionContext` when there + are :class:`.BindParameter` objects with the ``.isoutparam`` flag + set. This replaces the practice of setting out parameters within + the now-removed ``get_result_proxy()`` method. + + """ + raise NotImplementedError() + + def post_exec(self) -> None: + """Called after the execution of a compiled statement. + + If a compiled statement was passed to this ExecutionContext, + the `last_insert_ids`, `last_inserted_params`, etc. + datamembers should be available after this method completes. + """ + + raise NotImplementedError() + + def handle_dbapi_exception(self, e: BaseException) -> None: + """Receive a DBAPI exception which occurred upon execute, result + fetch, etc.""" + + raise NotImplementedError() + + def lastrow_has_defaults(self) -> bool: + """Return True if the last INSERT or UPDATE row contained + inlined or database-side defaults. + """ + + raise NotImplementedError() + + def get_rowcount(self) -> Optional[int]: + """Return the DBAPI ``cursor.rowcount`` value, or in some + cases an interpreted value. + + See :attr:`_engine.CursorResult.rowcount` for details on this. + + """ + + raise NotImplementedError() + + def fetchall_for_returning(self, cursor: DBAPICursor) -> Sequence[Any]: + """For a RETURNING result, deliver cursor.fetchall() from the + DBAPI cursor. + + This is a dialect-specific hook for dialects that have special + considerations when calling upon the rows delivered for a + "RETURNING" statement. Default implementation is + ``cursor.fetchall()``. + + This hook is currently used only by the :term:`insertmanyvalues` + feature. Dialects that don't set ``use_insertmanyvalues=True`` + don't need to consider this hook. + + .. versionadded:: 2.0.10 + + """ + raise NotImplementedError() + + +class ConnectionEventsTarget(EventTarget): + """An object which can accept events from :class:`.ConnectionEvents`. + + Includes :class:`_engine.Connection` and :class:`_engine.Engine`. + + .. versionadded:: 2.0 + + """ + + dispatch: dispatcher[ConnectionEventsTarget] + + +Connectable = ConnectionEventsTarget + + +class ExceptionContext: + """Encapsulate information about an error condition in progress. + + This object exists solely to be passed to the + :meth:`_events.DialectEvents.handle_error` event, + supporting an interface that + can be extended without backwards-incompatibility. 
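+
+    For example, a minimal sketch of a
+    :meth:`_events.DialectEvents.handle_error` listener that receives this
+    object and logs it (``engine`` and ``log`` are assumed to be defined
+    elsewhere)::
+
+        from sqlalchemy import event
+
+        @event.listens_for(engine, "handle_error")
+        def receive_handle_error(context):
+            # ``context`` is an ExceptionContext instance
+            log.warning(
+                "error %s on statement %r",
+                context.original_exception,
+                context.statement,
+            )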
+
+
+    """
+
+    __slots__ = ()
+
+    dialect: Dialect
+    """The :class:`_engine.Dialect` in use.
+
+    This member is present for all invocations of the event hook.
+
+    .. versionadded:: 2.0
+
+    """
+
+    connection: Optional[Connection]
+    """The :class:`_engine.Connection` in use during the exception.
+
+    This member is present, except in the case of a failure when
+    first connecting.
+
+    .. seealso::
+
+        :attr:`.ExceptionContext.engine`
+
+
+    """
+
+    engine: Optional[Engine]
+    """The :class:`_engine.Engine` in use during the exception.
+
+    This member is present in all cases except for when handling an error
+    within the connection pool "pre-ping" process.
+
+    """
+
+    cursor: Optional[DBAPICursor]
+    """The DBAPI cursor object.
+
+    May be None.
+
+    """
+
+    statement: Optional[str]
+    """String SQL statement that was emitted directly to the DBAPI.
+
+    May be None.
+
+    """
+
+    parameters: Optional[_DBAPIAnyExecuteParams]
+    """Parameter collection that was emitted directly to the DBAPI.
+
+    May be None.
+
+    """
+
+    original_exception: BaseException
+    """The exception object which was caught.
+
+    This member is always present.
+
+    """
+
+    sqlalchemy_exception: Optional[StatementError]
+    """The :class:`sqlalchemy.exc.StatementError` which wraps the original,
+    and will be raised if exception handling is not circumvented by the event.
+
+    May be None, as not all exception types are wrapped by SQLAlchemy.
+    For DBAPI-level exceptions that subclass the dbapi's Error class, this
+    field will always be present.
+
+    """
+
+    chained_exception: Optional[BaseException]
+    """The exception that was returned by the previous handler in the
+    exception chain, if any.
+
+    If present, this exception will be the one ultimately raised by
+    SQLAlchemy unless a subsequent handler replaces it.
+
+    May be None.
+
+    """
+
+    execution_context: Optional[ExecutionContext]
+    """The :class:`.ExecutionContext` corresponding to the execution
+    operation in progress.
+
+    This is present for statement execution operations, but not for
+    operations such as transaction begin/end. It also is not present when
+    the exception was raised before the :class:`.ExecutionContext`
+    could be constructed.
+
+    Note that the :attr:`.ExceptionContext.statement` and
+    :attr:`.ExceptionContext.parameters` members may represent a
+    different value than that of the :class:`.ExecutionContext`,
+    potentially in the case where a
+    :meth:`_events.ConnectionEvents.before_cursor_execute` event or similar
+    modified the statement/parameters to be sent.
+
+    May be None.
+
+    """
+
+    is_disconnect: bool
+    """Represents whether the exception that occurred indicates a
+    "disconnect" condition.
+
+    This flag will always be True or False within the scope of the
+    :meth:`_events.DialectEvents.handle_error` handler.
+
+    SQLAlchemy will defer to this flag in order to determine whether or not
+    the connection should be invalidated subsequently. That is, by
+    assigning to this flag, a "disconnect" event which then results in
+    a connection and pool invalidation can be invoked or prevented by
+    changing this flag.
+
+
+    .. note:: The pool "pre_ping" handler enabled using the
+       :paramref:`_sa.create_engine.pool_pre_ping` parameter does **not**
+       consult this event before deciding if the "ping" returned false,
+       as opposed to receiving an unhandled error. For this use case, the
+       :ref:`legacy recipe based on engine_connect() may be used
+       <pool_disconnects_pessimistic_custom>`. A future API will allow more
+       comprehensive customization of the "disconnect" detection mechanism
+       across all functions.
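+
+    For example, a sketch that forces the "disconnect" path for a
+    hypothetical driver exception (``MyDriverTimeoutError`` is illustrative,
+    not a real exception class)::
+
+        from sqlalchemy import event
+
+        @event.listens_for(engine, "handle_error")
+        def force_disconnect(context):
+            if isinstance(context.original_exception, MyDriverTimeoutError):
+                # treat this error as a disconnect; the connection
+                # will be invalidated
+                context.is_disconnect = True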
+
+    """
+
+    invalidate_pool_on_disconnect: bool
+    """Represents whether all connections in the pool should be invalidated
+    when a "disconnect" condition is in effect.
+
+    Setting this flag to False within the scope of the
+    :meth:`_events.DialectEvents.handle_error`
+    event will have the effect that
+    the full collection of connections in the pool will not be
+    invalidated during a disconnect; only the current connection that is the
+    subject of the error will actually be invalidated.
+
+    The purpose of this flag is for custom disconnect-handling schemes where
+    the invalidation of other connections in the pool is to be performed
+    based on other conditions, or even on a per-connection basis.
+
+    """
+
+    is_pre_ping: bool
+    """Indicates if this error is occurring within the "pre-ping" step
+    performed when :paramref:`_sa.create_engine.pool_pre_ping` is set to
+    ``True``. In this mode, the :attr:`.ExceptionContext.engine` attribute
+    will be ``None``. The dialect in use is accessible via the
+    :attr:`.ExceptionContext.dialect` attribute.
+
+    .. versionadded:: 2.0.5
+
+    """
+
+
+class AdaptedConnection:
+    """Interface of an adapted connection object to support the DBAPI protocol.
+
+    Used by asyncio dialects to provide a sync-style pep-249 facade on top
+    of the asyncio connection/cursor API provided by the driver.
+
+    .. versionadded:: 1.4.24
+
+    """
+
+    __slots__ = ("_connection",)
+
+    _connection: Any
+
+    @property
+    def driver_connection(self) -> Any:
+        """The connection object as returned by the driver after a connect."""
+        return self._connection
+
+    def run_async(self, fn: Callable[[Any], Awaitable[_T]]) -> _T:
+        """Run the awaitable returned by the given function, which is passed
+        the raw asyncio driver connection.
+
+        This is used to invoke awaitable-only methods on the driver connection
+        within the context of a "synchronous" method, like a connection
+        pool event handler.
+
+        E.g.::
+
+            engine = create_async_engine(...)
+
+            @event.listens_for(engine.sync_engine, "connect")
+            def register_custom_types(dbapi_connection, ...):
+                dbapi_connection.run_async(
+                    lambda connection: connection.set_type_codec(
+                        'MyCustomType', encoder, decoder, ...
+                    )
+                )
+
+        .. versionadded:: 1.4.30
+
+        .. seealso::
+
+            :ref:`asyncio_events_run_async`
+
+        """
+        return await_only(fn(self._connection))
+
+    def __repr__(self) -> str:
+        return "<AdaptedConnection %s>" % self._connection
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/mock.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/mock.py
new file mode 100644
index 00000000..c9fa5eb3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/mock.py
@@ -0,0 +1,131 @@
+# engine/mock.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+from operator import attrgetter
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Optional
+from typing import Type
+from typing import Union
+
+from . import url as _url
+from ..
import util + + +if typing.TYPE_CHECKING: + from .base import Engine + from .interfaces import _CoreAnyExecuteParams + from .interfaces import CoreExecuteOptionsParameter + from .interfaces import Dialect + from .url import URL + from ..sql.base import Executable + from ..sql.ddl import SchemaDropper + from ..sql.ddl import SchemaGenerator + from ..sql.schema import HasSchemaAttr + from ..sql.schema import SchemaItem + + +class MockConnection: + def __init__(self, dialect: Dialect, execute: Callable[..., Any]): + self._dialect = dialect + self._execute_impl = execute + + engine: Engine = cast(Any, property(lambda s: s)) + dialect: Dialect = cast(Any, property(attrgetter("_dialect"))) + name: str = cast(Any, property(lambda s: s._dialect.name)) + + def connect(self, **kwargs: Any) -> MockConnection: + return self + + def schema_for_object(self, obj: HasSchemaAttr) -> Optional[str]: + return obj.schema + + def execution_options(self, **kw: Any) -> MockConnection: + return self + + def _run_ddl_visitor( + self, + visitorcallable: Type[Union[SchemaGenerator, SchemaDropper]], + element: SchemaItem, + **kwargs: Any, + ) -> None: + kwargs["checkfirst"] = False + visitorcallable(self.dialect, self, **kwargs).traverse_single(element) + + def execute( + self, + obj: Executable, + parameters: Optional[_CoreAnyExecuteParams] = None, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> Any: + return self._execute_impl(obj, parameters) + + +def create_mock_engine( + url: Union[str, URL], executor: Any, **kw: Any +) -> MockConnection: + """Create a "mock" engine used for echoing DDL. + + This is a utility function used for debugging or storing the output of DDL + sequences as generated by :meth:`_schema.MetaData.create_all` + and related methods. + + The function accepts a URL which is used only to determine the kind of + dialect to be used, as well as an "executor" callable function which + will receive a SQL expression object and parameters, which can then be + echoed or otherwise printed. The executor's return value is not handled, + nor does the engine allow regular string statements to be invoked, and + is therefore only useful for DDL that is sent to the database without + receiving any results. + + E.g.:: + + from sqlalchemy import create_mock_engine + + def dump(sql, *multiparams, **params): + print(sql.compile(dialect=engine.dialect)) + + engine = create_mock_engine('postgresql+psycopg2://', dump) + metadata.create_all(engine, checkfirst=False) + + :param url: A string URL which typically needs to contain only the + database backend name. + + :param executor: a callable which receives the arguments ``sql``, + ``*multiparams`` and ``**params``. The ``sql`` parameter is typically + an instance of :class:`.ExecutableDDLElement`, which can then be compiled + into a string using :meth:`.ExecutableDDLElement.compile`. + + .. versionadded:: 1.4 - the :func:`.create_mock_engine` function replaces + the previous "mock" engine strategy used with + :func:`_sa.create_engine`. + + .. 
seealso:: + + :ref:`faq_ddl_as_string` + + """ + + # create url.URL object + u = _url.make_url(url) + + dialect_cls = u.get_dialect() + + dialect_args = {} + # consume dialect arguments from kwargs + for k in util.get_cls_kwargs(dialect_cls): + if k in kw: + dialect_args[k] = kw.pop(k) + + # create dialect + dialect = dialect_cls(**dialect_args) + + return MockConnection(dialect, executor) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/processors.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/processors.py new file mode 100644 index 00000000..610e03d5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/processors.py @@ -0,0 +1,61 @@ +# engine/processors.py +# Copyright (C) 2010-2024 the SQLAlchemy authors and contributors +# +# Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""defines generic type conversion functions, as used in bind and result +processors. + +They all share one common characteristic: None is passed through unchanged. + +""" +from __future__ import annotations + +import typing + +from ._py_processors import str_to_datetime_processor_factory # noqa +from ..util._has_cy import HAS_CYEXTENSION + +if typing.TYPE_CHECKING or not HAS_CYEXTENSION: + from ._py_processors import int_to_boolean as int_to_boolean + from ._py_processors import str_to_date as str_to_date + from ._py_processors import str_to_datetime as str_to_datetime + from ._py_processors import str_to_time as str_to_time + from ._py_processors import ( + to_decimal_processor_factory as to_decimal_processor_factory, + ) + from ._py_processors import to_float as to_float + from ._py_processors import to_str as to_str +else: + from sqlalchemy.cyextension.processors import ( + DecimalResultProcessor, + ) + from sqlalchemy.cyextension.processors import ( # noqa: F401 + int_to_boolean as int_to_boolean, + ) + from sqlalchemy.cyextension.processors import ( # noqa: F401,E501 + str_to_date as str_to_date, + ) + from sqlalchemy.cyextension.processors import ( # noqa: F401 + str_to_datetime as str_to_datetime, + ) + from sqlalchemy.cyextension.processors import ( # noqa: F401,E501 + str_to_time as str_to_time, + ) + from sqlalchemy.cyextension.processors import ( # noqa: F401,E501 + to_float as to_float, + ) + from sqlalchemy.cyextension.processors import ( # noqa: F401,E501 + to_str as to_str, + ) + + def to_decimal_processor_factory(target_class, scale): + # Note that the scale argument is not taken into account for integer + # values in the C implementation while it is in the Python one. + # For example, the Python implementation might return + # Decimal('5.00000') whereas the C implementation will + # return Decimal('5'). These are equivalent of course. + return DecimalResultProcessor(target_class, "%%.%df" % scale).process diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/reflection.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/reflection.py new file mode 100644 index 00000000..09b09880 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/reflection.py @@ -0,0 +1,2098 @@ +# engine/reflection.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Provides an abstraction for obtaining database schema information. 
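+
+For example, a minimal sketch of typical inspector usage (the SQLite URL
+is illustrative)::
+
+    from sqlalchemy import create_engine, inspect
+
+    engine = create_engine("sqlite://")
+    insp = inspect(engine)
+    print(insp.get_table_names())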
+ +Usage Notes: + +Here are some general conventions when accessing the low level inspector +methods such as get_table_names, get_columns, etc. + +1. Inspector methods return lists of dicts in most cases for the following + reasons: + + * They're both standard types that can be serialized. + * Using a dict instead of a tuple allows easy expansion of attributes. + * Using a list for the outer structure maintains order and is easy to work + with (e.g. list comprehension [d['name'] for d in cols]). + +2. Records that contain a name, such as the column name in a column record + use the key 'name'. So for most return values, each record will have a + 'name' attribute.. +""" +from __future__ import annotations + +import contextlib +from dataclasses import dataclass +from enum import auto +from enum import Flag +from enum import unique +from typing import Any +from typing import Callable +from typing import Collection +from typing import Dict +from typing import Generator +from typing import Iterable +from typing import List +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from .base import Connection +from .base import Engine +from .. import exc +from .. import inspection +from .. import sql +from .. import util +from ..sql import operators +from ..sql import schema as sa_schema +from ..sql.cache_key import _ad_hoc_cache_key_from_args +from ..sql.elements import quoted_name +from ..sql.elements import TextClause +from ..sql.type_api import TypeEngine +from ..sql.visitors import InternalTraversal +from ..util import topological +from ..util.typing import final + +if TYPE_CHECKING: + from .interfaces import Dialect + from .interfaces import ReflectedCheckConstraint + from .interfaces import ReflectedColumn + from .interfaces import ReflectedForeignKeyConstraint + from .interfaces import ReflectedIndex + from .interfaces import ReflectedPrimaryKeyConstraint + from .interfaces import ReflectedTableComment + from .interfaces import ReflectedUniqueConstraint + from .interfaces import TableKey + +_R = TypeVar("_R") + + +@util.decorator +def cache( + fn: Callable[..., _R], + self: Dialect, + con: Connection, + *args: Any, + **kw: Any, +) -> _R: + info_cache = kw.get("info_cache", None) + if info_cache is None: + return fn(self, con, *args, **kw) + exclude = {"info_cache", "unreflectable"} + key = ( + fn.__name__, + tuple( + (str(a), a.quote) if isinstance(a, quoted_name) else a + for a in args + if isinstance(a, str) + ), + tuple( + (k, (str(v), v.quote) if isinstance(v, quoted_name) else v) + for k, v in kw.items() + if k not in exclude + ), + ) + ret: _R = info_cache.get(key) + if ret is None: + ret = fn(self, con, *args, **kw) + info_cache[key] = ret + return ret + + +def flexi_cache( + *traverse_args: Tuple[str, InternalTraversal] +) -> Callable[[Callable[..., _R]], Callable[..., _R]]: + @util.decorator + def go( + fn: Callable[..., _R], + self: Dialect, + con: Connection, + *args: Any, + **kw: Any, + ) -> _R: + info_cache = kw.get("info_cache", None) + if info_cache is None: + return fn(self, con, *args, **kw) + key = _ad_hoc_cache_key_from_args((fn.__name__,), traverse_args, args) + ret: _R = info_cache.get(key) + if ret is None: + ret = fn(self, con, *args, **kw) + info_cache[key] = ret + return ret + + return go + + +@unique +class ObjectKind(Flag): + """Enumerator that indicates which kind of object to return when calling + the ``get_multi`` 
methods.
+
+    This is a Flag enum, so custom combinations can be passed. For example,
+    to reflect tables and plain views ``ObjectKind.TABLE | ObjectKind.VIEW``
+    may be used.
+
+    .. note::
+        Not all dialects support every kind of object. If a dialect does
+        not support a particular object, an empty dict is returned.
+        If a dialect supports an object, but the requested method
+        is not applicable for the specified kind, the default value
+        is returned for each reflected object. For example, reflecting
+        check constraints of views returns a dict with all the views as
+        keys and empty lists as values.
+    """
+
+    TABLE = auto()
+    "Reflect table objects"
+    VIEW = auto()
+    "Reflect plain view objects"
+    MATERIALIZED_VIEW = auto()
+    "Reflect materialized view objects"
+
+    ANY_VIEW = VIEW | MATERIALIZED_VIEW
+    "Reflect any kind of view objects"
+    ANY = TABLE | VIEW | MATERIALIZED_VIEW
+    "Reflect all types of objects"
+
+
+@unique
+class ObjectScope(Flag):
+    """Enumerator that indicates which scope to use when calling
+    the ``get_multi`` methods.
+    """
+
+    DEFAULT = auto()
+    "Include default scope"
+    TEMPORARY = auto()
+    "Include only temp scope"
+    ANY = DEFAULT | TEMPORARY
+    "Include both default and temp scope"
+
+
+@inspection._self_inspects
+class Inspector(inspection.Inspectable["Inspector"]):
+    """Performs database schema inspection.
+
+    The Inspector acts as a proxy to the reflection methods of the
+    :class:`~sqlalchemy.engine.interfaces.Dialect`, providing a
+    consistent interface as well as caching support for previously
+    fetched metadata.
+
+    A :class:`_reflection.Inspector` object is usually created via the
+    :func:`_sa.inspect` function, which may be passed an
+    :class:`_engine.Engine`
+    or a :class:`_engine.Connection`::
+
+        from sqlalchemy import inspect, create_engine
+        engine = create_engine('...')
+        insp = inspect(engine)
+
+    Where above, the :class:`~sqlalchemy.engine.interfaces.Dialect` associated
+    with the engine may opt to return an :class:`_reflection.Inspector`
+    subclass that
+    provides additional methods specific to the dialect's target database.
+
+    """
+
+    bind: Union[Engine, Connection]
+    engine: Engine
+    _op_context_requires_connect: bool
+    dialect: Dialect
+    info_cache: Dict[Any, Any]
+
+    @util.deprecated(
+        "1.4",
+        "The __init__() method on :class:`_reflection.Inspector` "
+        "is deprecated and "
+        "will be removed in a future release. Please use the "
+        ":func:`.sqlalchemy.inspect` "
+        "function on an :class:`_engine.Engine` or "
+        ":class:`_engine.Connection` "
+        "in order to "
+        "acquire an :class:`_reflection.Inspector`.",
+    )
+    def __init__(self, bind: Union[Engine, Connection]):
+        """Initialize a new :class:`_reflection.Inspector`.
+
+        :param bind: a :class:`~sqlalchemy.engine.Connection`,
+          which is typically an instance of
+          :class:`~sqlalchemy.engine.Engine` or
+          :class:`~sqlalchemy.engine.Connection`.
+
+
+        For a dialect-specific instance of :class:`_reflection.Inspector`, see
+        :meth:`_reflection.Inspector.from_engine`.
+
+        """
+        self._init_legacy(bind)
+
+    @classmethod
+    def _construct(
+        cls, init: Callable[..., Any], bind: Union[Engine, Connection]
+    ) -> Inspector:
+        if hasattr(bind.dialect, "inspector"):
+            cls = bind.dialect.inspector
+
+        self = cls.__new__(cls)
+        init(self, bind)
+        return self
+
+    def _init_legacy(self, bind: Union[Engine, Connection]) -> None:
+        if hasattr(bind, "exec_driver_sql"):
+            self._init_connection(bind)  # type: ignore[arg-type]
+        else:
+            self._init_engine(bind)
+
+    def _init_engine(self, engine: Engine) -> None:
+        self.bind = self.engine = engine
+        engine.connect().close()
+        self._op_context_requires_connect = True
+        self.dialect = self.engine.dialect
+        self.info_cache = {}
+
+    def _init_connection(self, connection: Connection) -> None:
+        self.bind = connection
+        self.engine = connection.engine
+        self._op_context_requires_connect = False
+        self.dialect = self.engine.dialect
+        self.info_cache = {}
+
+    def clear_cache(self) -> None:
+        """reset the cache for this :class:`.Inspector`.
+
+        Inspection methods that have data cached will emit SQL queries
+        when next called to get new data.
+
+        .. versionadded:: 2.0
+
+        """
+        self.info_cache.clear()
+
+    @classmethod
+    @util.deprecated(
+        "1.4",
+        "The from_engine() method on :class:`_reflection.Inspector` "
+        "is deprecated and "
+        "will be removed in a future release. Please use the "
+        ":func:`.sqlalchemy.inspect` "
+        "function on an :class:`_engine.Engine` or "
+        ":class:`_engine.Connection` "
+        "in order to "
+        "acquire an :class:`_reflection.Inspector`.",
+    )
+    def from_engine(cls, bind: Engine) -> Inspector:
+        """Construct a new dialect-specific Inspector object from the given
+        engine or connection.
+
+        :param bind: a :class:`~sqlalchemy.engine.Connection`
+          or :class:`~sqlalchemy.engine.Engine`.
+
+        This method differs from a direct constructor call of
+        :class:`_reflection.Inspector` in that the
+        :class:`~sqlalchemy.engine.interfaces.Dialect` is given a chance to
+        provide a dialect-specific :class:`_reflection.Inspector` instance,
+        which may
+        provide additional methods.
+
+        See the example at :class:`_reflection.Inspector`.
+
+        """
+        return cls._construct(cls._init_legacy, bind)
+
+    @inspection._inspects(Engine)
+    def _engine_insp(bind: Engine) -> Inspector:  # type: ignore[misc]
+        return Inspector._construct(Inspector._init_engine, bind)
+
+    @inspection._inspects(Connection)
+    def _connection_insp(bind: Connection) -> Inspector:  # type: ignore[misc]
+        return Inspector._construct(Inspector._init_connection, bind)
+
+    @contextlib.contextmanager
+    def _operation_context(self) -> Generator[Connection, None, None]:
+        """Return a context that optimizes for multiple operations on a single
+        transaction.
+
+        This essentially allows connect()/close() to be called if we detected
+        that we're against an :class:`_engine.Engine` and not a
+        :class:`_engine.Connection`.
+
+        """
+        conn: Connection
+        if self._op_context_requires_connect:
+            conn = self.bind.connect()  # type: ignore[union-attr]
+        else:
+            conn = self.bind  # type: ignore[assignment]
+        try:
+            yield conn
+        finally:
+            if self._op_context_requires_connect:
+                conn.close()
+
+    @contextlib.contextmanager
+    def _inspection_context(self) -> Generator[Inspector, None, None]:
+        """Return an :class:`_reflection.Inspector`
+        from this one that will run all
+        operations on a single connection.
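+
+        E.g., a sketch of the intended batching pattern (``insp`` is an
+        existing :class:`.Inspector`; note this is a private helper, shown
+        for illustration only)::
+
+            with insp._inspection_context() as sub_insp:
+                # every call here reuses a single connection and shares
+                # the same info_cache
+                names = sub_insp.get_table_names()
+                columns = {name: sub_insp.get_columns(name) for name in names}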
+ + """ + + with self._operation_context() as conn: + sub_insp = self._construct(self.__class__._init_connection, conn) + sub_insp.info_cache = self.info_cache + yield sub_insp + + @property + def default_schema_name(self) -> Optional[str]: + """Return the default schema name presented by the dialect + for the current engine's database user. + + E.g. this is typically ``public`` for PostgreSQL and ``dbo`` + for SQL Server. + + """ + return self.dialect.default_schema_name + + def get_schema_names(self, **kw: Any) -> List[str]: + r"""Return all schema names. + + :param \**kw: Additional keyword argument to pass to the dialect + specific implementation. See the documentation of the dialect + in use for more information. + """ + + with self._operation_context() as conn: + return self.dialect.get_schema_names( + conn, info_cache=self.info_cache, **kw + ) + + def get_table_names( + self, schema: Optional[str] = None, **kw: Any + ) -> List[str]: + r"""Return all table names within a particular schema. + + The names are expected to be real tables only, not views. + Views are instead returned using the + :meth:`_reflection.Inspector.get_view_names` and/or + :meth:`_reflection.Inspector.get_materialized_view_names` + methods. + + :param schema: Schema name. If ``schema`` is left at ``None``, the + database's default schema is + used, else the named schema is searched. If the database does not + support named schemas, behavior is undefined if ``schema`` is not + passed as ``None``. For special quoting, use :class:`.quoted_name`. + :param \**kw: Additional keyword argument to pass to the dialect + specific implementation. See the documentation of the dialect + in use for more information. + + .. seealso:: + + :meth:`_reflection.Inspector.get_sorted_table_and_fkc_names` + + :attr:`_schema.MetaData.sorted_tables` + + """ + + with self._operation_context() as conn: + return self.dialect.get_table_names( + conn, schema, info_cache=self.info_cache, **kw + ) + + def has_table( + self, table_name: str, schema: Optional[str] = None, **kw: Any + ) -> bool: + r"""Return True if the backend has a table, view, or temporary + table of the given name. + + :param table_name: name of the table to check + :param schema: schema name to query, if not the default schema. + :param \**kw: Additional keyword argument to pass to the dialect + specific implementation. See the documentation of the dialect + in use for more information. + + .. versionadded:: 1.4 - the :meth:`.Inspector.has_table` method + replaces the :meth:`_engine.Engine.has_table` method. + + .. versionchanged:: 2.0:: :meth:`.Inspector.has_table` now formally + supports checking for additional table-like objects: + + * any type of views (plain or materialized) + * temporary tables of any kind + + Previously, these two checks were not formally specified and + different dialects would vary in their behavior. The dialect + testing suite now includes tests for all of these object types + and should be supported by all SQLAlchemy-included dialects. + Support among third party dialects may be lagging, however. + + """ + with self._operation_context() as conn: + return self.dialect.has_table( + conn, table_name, schema, info_cache=self.info_cache, **kw + ) + + def has_sequence( + self, sequence_name: str, schema: Optional[str] = None, **kw: Any + ) -> bool: + r"""Return True if the backend has a sequence with the given name. + + :param sequence_name: name of the sequence to check + :param schema: schema name to query, if not the default schema. 
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        .. versionadded:: 1.4
+
+        """
+        with self._operation_context() as conn:
+            return self.dialect.has_sequence(
+                conn, sequence_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def has_index(
+        self,
+        table_name: str,
+        index_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> bool:
+        r"""Check the existence of a particular index name in the database.
+
+        :param table_name: the name of the table the index belongs to
+        :param index_name: the name of the index to check
+        :param schema: schema name to query, if not the default schema.
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        .. versionadded:: 2.0
+
+        """
+        with self._operation_context() as conn:
+            return self.dialect.has_index(
+                conn,
+                table_name,
+                index_name,
+                schema,
+                info_cache=self.info_cache,
+                **kw,
+            )
+
+    def has_schema(self, schema_name: str, **kw: Any) -> bool:
+        r"""Return True if the backend has a schema with the given name.
+
+        :param schema_name: name of the schema to check
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        .. versionadded:: 2.0
+
+        """
+        with self._operation_context() as conn:
+            return self.dialect.has_schema(
+                conn, schema_name, info_cache=self.info_cache, **kw
+            )
+
+    def get_sorted_table_and_fkc_names(
+        self,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> List[Tuple[Optional[str], List[Tuple[str, Optional[str]]]]]:
+        r"""Return dependency-sorted table and foreign key constraint names
+        referred to within a particular schema.
+
+        This will yield 2-tuples of
+        ``(tablename, [(tname, fkname), (tname, fkname), ...])``
+        consisting of table names in CREATE order grouped with the foreign key
+        constraint names that are not detected as belonging to a cycle.
+        The final element
+        will be ``(None, [(tname, fkname), (tname, fkname), ...])``
+        which will consist of remaining
+        foreign key constraint names that would require a separate CREATE
+        step after-the-fact, based on dependencies between tables.
+
+        :param schema: schema name to query, if not the default schema.
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        .. seealso::
+
+            :meth:`_reflection.Inspector.get_table_names`
+
+            :func:`.sort_tables_and_constraints` - similar method which works
+            with an already-given :class:`_schema.MetaData`.
+
+        """
+
+        return [
+            (
+                table_key[1] if table_key else None,
+                [(tname, fks) for (_, tname), fks in fk_collection],
+            )
+            for (
+                table_key,
+                fk_collection,
+            ) in self.sort_tables_on_foreign_key_dependency(
+                consider_schemas=(schema,)
+            )
+        ]
+
+    def sort_tables_on_foreign_key_dependency(
+        self,
+        consider_schemas: Collection[Optional[str]] = (None,),
+        **kw: Any,
+    ) -> List[
+        Tuple[
+            Optional[Tuple[Optional[str], str]],
+            List[Tuple[Tuple[Optional[str], str], Optional[str]]],
+        ]
+    ]:
+        r"""Return dependency-sorted table and foreign key constraint names
+        referred to within multiple schemas.
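+
+        For example, a sketch that prints tables in dependency order
+        across two schemas (``insp`` is an :class:`.Inspector`; the schema
+        names are illustrative)::
+
+            for table_key, fkcs in insp.sort_tables_on_foreign_key_dependency(
+                consider_schemas=("public", "archive")
+            ):
+                if table_key is not None:
+                    schema, table = table_key
+                    print(schema, table)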
+
+        This method may be compared to
+        :meth:`.Inspector.get_sorted_table_and_fkc_names`, which
+        works on one schema at a time; here, the method is a generalization
+        that will consider multiple schemas at once, including resolution of
+        cross-schema foreign keys.
+
+        .. versionadded:: 2.0
+
+        """
+        SchemaTab = Tuple[Optional[str], str]
+
+        tuples: Set[Tuple[SchemaTab, SchemaTab]] = set()
+        remaining_fkcs: Set[Tuple[SchemaTab, Optional[str]]] = set()
+        fknames_for_table: Dict[SchemaTab, Set[Optional[str]]] = {}
+        tnames: List[SchemaTab] = []
+
+        for schname in consider_schemas:
+            schema_fkeys = self.get_multi_foreign_keys(schname, **kw)
+            tnames.extend(schema_fkeys)
+            for (_, tname), fkeys in schema_fkeys.items():
+                fknames_for_table[(schname, tname)] = {
+                    fk["name"] for fk in fkeys
+                }
+                for fkey in fkeys:
+                    if (
+                        tname != fkey["referred_table"]
+                        or schname != fkey["referred_schema"]
+                    ):
+                        tuples.add(
+                            (
+                                (
+                                    fkey["referred_schema"],
+                                    fkey["referred_table"],
+                                ),
+                                (schname, tname),
+                            )
+                        )
+        try:
+            candidate_sort = list(topological.sort(tuples, tnames))
+        except exc.CircularDependencyError as err:
+            edge: Tuple[SchemaTab, SchemaTab]
+            for edge in err.edges:
+                tuples.remove(edge)
+                remaining_fkcs.update(
+                    (edge[1], fkc) for fkc in fknames_for_table[edge[1]]
+                )
+
+            candidate_sort = list(topological.sort(tuples, tnames))
+        ret: List[
+            Tuple[Optional[SchemaTab], List[Tuple[SchemaTab, Optional[str]]]]
+        ]
+        ret = [
+            (
+                (schname, tname),
+                [
+                    ((schname, tname), fk)
+                    for fk in fknames_for_table[(schname, tname)].difference(
+                        name for _, name in remaining_fkcs
+                    )
+                ],
+            )
+            for (schname, tname) in candidate_sort
+        ]
+        return ret + [(None, list(remaining_fkcs))]
+
+    def get_temp_table_names(self, **kw: Any) -> List[str]:
+        r"""Return a list of temporary table names for the current bind.
+
+        This method is unsupported by most dialects; currently
+        only Oracle, PostgreSQL and SQLite implement it.
+
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_temp_table_names(
+                conn, info_cache=self.info_cache, **kw
+            )
+
+    def get_temp_view_names(self, **kw: Any) -> List[str]:
+        r"""Return a list of temporary view names for the current bind.
+
+        This method is unsupported by most dialects; currently
+        only PostgreSQL and SQLite implement it.
+
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        """
+        with self._operation_context() as conn:
+            return self.dialect.get_temp_view_names(
+                conn, info_cache=self.info_cache, **kw
+            )
+
+    def get_table_options(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> Dict[str, Any]:
+        r"""Return a dictionary of options specified when the table of the
+        given name was created.
+
+        This currently includes some options that apply to MySQL and Oracle
+        tables.
+
+        :param table_name: string name of the table. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dict with the table options.
The returned keys depend on the + dialect in use. Each one is prefixed with the dialect name. + + .. seealso:: :meth:`Inspector.get_multi_table_options` + + """ + with self._operation_context() as conn: + return self.dialect.get_table_options( + conn, table_name, schema, info_cache=self.info_cache, **kw + ) + + def get_multi_table_options( + self, + schema: Optional[str] = None, + filter_names: Optional[Sequence[str]] = None, + kind: ObjectKind = ObjectKind.TABLE, + scope: ObjectScope = ObjectScope.DEFAULT, + **kw: Any, + ) -> Dict[TableKey, Dict[str, Any]]: + r"""Return a dictionary of options specified when the tables in the + given schema were created. + + The tables can be filtered by passing the names to use to + ``filter_names``. + + This currently includes some options that apply to MySQL and Oracle + tables. + + :param schema: string schema name; if omitted, uses the default schema + of the database connection. For special quoting, + use :class:`.quoted_name`. + + :param filter_names: optionally return information only for the + objects listed here. + + :param kind: a :class:`.ObjectKind` that specifies the type of objects + to reflect. Defaults to ``ObjectKind.TABLE``. + + :param scope: a :class:`.ObjectScope` that specifies if options of + default, temporary or any tables should be reflected. + Defaults to ``ObjectScope.DEFAULT``. + + :param \**kw: Additional keyword argument to pass to the dialect + specific implementation. See the documentation of the dialect + in use for more information. + + :return: a dictionary where the keys are two-tuple schema,table-name + and the values are dictionaries with the table options. + The returned keys in each dict depend on the + dialect in use. Each one is prefixed with the dialect name. + The schema is ``None`` if no schema is provided. + + .. versionadded:: 2.0 + + .. seealso:: :meth:`Inspector.get_table_options` + """ + with self._operation_context() as conn: + res = self.dialect.get_multi_table_options( + conn, + schema=schema, + filter_names=filter_names, + kind=kind, + scope=scope, + info_cache=self.info_cache, + **kw, + ) + return dict(res) + + def get_view_names( + self, schema: Optional[str] = None, **kw: Any + ) -> List[str]: + r"""Return all non-materialized view names in `schema`. + + :param schema: Optional, retrieve names from a non-default schema. + For special quoting, use :class:`.quoted_name`. + :param \**kw: Additional keyword argument to pass to the dialect + specific implementation. See the documentation of the dialect + in use for more information. + + + .. versionchanged:: 2.0 For those dialects that previously included + the names of materialized views in this list (currently PostgreSQL), + this method no longer returns the names of materialized views. + the :meth:`.Inspector.get_materialized_view_names` method should + be used instead. + + .. seealso:: + + :meth:`.Inspector.get_materialized_view_names` + + """ + + with self._operation_context() as conn: + return self.dialect.get_view_names( + conn, schema, info_cache=self.info_cache, **kw + ) + + def get_materialized_view_names( + self, schema: Optional[str] = None, **kw: Any + ) -> List[str]: + r"""Return all materialized view names in `schema`. + + :param schema: Optional, retrieve names from a non-default schema. + For special quoting, use :class:`.quoted_name`. + :param \**kw: Additional keyword argument to pass to the dialect + specific implementation. See the documentation of the dialect + in use for more information. + + .. versionadded:: 2.0 + + .. 
seealso:: + + :meth:`.Inspector.get_view_names` + + """ + + with self._operation_context() as conn: + return self.dialect.get_materialized_view_names( + conn, schema, info_cache=self.info_cache, **kw + ) + + def get_sequence_names( + self, schema: Optional[str] = None, **kw: Any + ) -> List[str]: + r"""Return all sequence names in `schema`. + + :param schema: Optional, retrieve names from a non-default schema. + For special quoting, use :class:`.quoted_name`. + :param \**kw: Additional keyword argument to pass to the dialect + specific implementation. See the documentation of the dialect + in use for more information. + + """ + + with self._operation_context() as conn: + return self.dialect.get_sequence_names( + conn, schema, info_cache=self.info_cache, **kw + ) + + def get_view_definition( + self, view_name: str, schema: Optional[str] = None, **kw: Any + ) -> str: + r"""Return definition for the plain or materialized view called + ``view_name``. + + :param view_name: Name of the view. + :param schema: Optional, retrieve names from a non-default schema. + For special quoting, use :class:`.quoted_name`. + :param \**kw: Additional keyword argument to pass to the dialect + specific implementation. See the documentation of the dialect + in use for more information. + + """ + + with self._operation_context() as conn: + return self.dialect.get_view_definition( + conn, view_name, schema, info_cache=self.info_cache, **kw + ) + + def get_columns( + self, table_name: str, schema: Optional[str] = None, **kw: Any + ) -> List[ReflectedColumn]: + r"""Return information about columns in ``table_name``. + + Given a string ``table_name`` and an optional string ``schema``, + return column information as a list of :class:`.ReflectedColumn`. + + :param table_name: string name of the table. For special quoting, + use :class:`.quoted_name`. + + :param schema: string schema name; if omitted, uses the default schema + of the database connection. For special quoting, + use :class:`.quoted_name`. + + :param \**kw: Additional keyword argument to pass to the dialect + specific implementation. See the documentation of the dialect + in use for more information. + + :return: list of dictionaries, each representing the definition of + a database column. + + .. seealso:: :meth:`Inspector.get_multi_columns`. + + """ + + with self._operation_context() as conn: + col_defs = self.dialect.get_columns( + conn, table_name, schema, info_cache=self.info_cache, **kw + ) + if col_defs: + self._instantiate_types([col_defs]) + return col_defs + + def _instantiate_types( + self, data: Iterable[List[ReflectedColumn]] + ) -> None: + # make this easy and only return instances for coltype + for col_defs in data: + for col_def in col_defs: + coltype = col_def["type"] + if not isinstance(coltype, TypeEngine): + col_def["type"] = coltype() + + def get_multi_columns( + self, + schema: Optional[str] = None, + filter_names: Optional[Sequence[str]] = None, + kind: ObjectKind = ObjectKind.TABLE, + scope: ObjectScope = ObjectScope.DEFAULT, + **kw: Any, + ) -> Dict[TableKey, List[ReflectedColumn]]: + r"""Return information about columns in all objects in the given + schema. + + The objects can be filtered by passing the names to use to + ``filter_names``. + + For each table the value is a list of :class:`.ReflectedColumn`. + + :param schema: string schema name; if omitted, uses the default schema + of the database connection. For special quoting, + use :class:`.quoted_name`. 
+ + :param filter_names: optionally return information only for the + objects listed here. + + :param kind: a :class:`.ObjectKind` that specifies the type of objects + to reflect. Defaults to ``ObjectKind.TABLE``. + + :param scope: a :class:`.ObjectScope` that specifies if columns of + default, temporary or any tables should be reflected. + Defaults to ``ObjectScope.DEFAULT``. + + :param \**kw: Additional keyword argument to pass to the dialect + specific implementation. See the documentation of the dialect + in use for more information. + + :return: a dictionary where the keys are two-tuple schema,table-name + and the values are list of dictionaries, each representing the + definition of a database column. + The schema is ``None`` if no schema is provided. + + .. versionadded:: 2.0 + + .. seealso:: :meth:`Inspector.get_columns` + """ + + with self._operation_context() as conn: + table_col_defs = dict( + self.dialect.get_multi_columns( + conn, + schema=schema, + filter_names=filter_names, + kind=kind, + scope=scope, + info_cache=self.info_cache, + **kw, + ) + ) + self._instantiate_types(table_col_defs.values()) + return table_col_defs + + def get_pk_constraint( + self, table_name: str, schema: Optional[str] = None, **kw: Any + ) -> ReflectedPrimaryKeyConstraint: + r"""Return information about primary key constraint in ``table_name``. + + Given a string ``table_name``, and an optional string `schema`, return + primary key information as a :class:`.ReflectedPrimaryKeyConstraint`. + + :param table_name: string name of the table. For special quoting, + use :class:`.quoted_name`. + + :param schema: string schema name; if omitted, uses the default schema + of the database connection. For special quoting, + use :class:`.quoted_name`. + + :param \**kw: Additional keyword argument to pass to the dialect + specific implementation. See the documentation of the dialect + in use for more information. + + :return: a dictionary representing the definition of + a primary key constraint. + + .. seealso:: :meth:`Inspector.get_multi_pk_constraint` + """ + with self._operation_context() as conn: + return self.dialect.get_pk_constraint( + conn, table_name, schema, info_cache=self.info_cache, **kw + ) + + def get_multi_pk_constraint( + self, + schema: Optional[str] = None, + filter_names: Optional[Sequence[str]] = None, + kind: ObjectKind = ObjectKind.TABLE, + scope: ObjectScope = ObjectScope.DEFAULT, + **kw: Any, + ) -> Dict[TableKey, ReflectedPrimaryKeyConstraint]: + r"""Return information about primary key constraints in + all tables in the given schema. + + The tables can be filtered by passing the names to use to + ``filter_names``. + + For each table the value is a :class:`.ReflectedPrimaryKeyConstraint`. + + :param schema: string schema name; if omitted, uses the default schema + of the database connection. For special quoting, + use :class:`.quoted_name`. + + :param filter_names: optionally return information only for the + objects listed here. + + :param kind: a :class:`.ObjectKind` that specifies the type of objects + to reflect. Defaults to ``ObjectKind.TABLE``. + + :param scope: a :class:`.ObjectScope` that specifies if primary keys of + default, temporary or any tables should be reflected. + Defaults to ``ObjectScope.DEFAULT``. + + :param \**kw: Additional keyword argument to pass to the dialect + specific implementation. See the documentation of the dialect + in use for more information. 
+
+        :return: a dictionary where the keys are two-tuple schema,table-name
+         and the values are dictionaries, each representing the
+         definition of a primary key constraint.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_pk_constraint`
+        """
+        with self._operation_context() as conn:
+            return dict(
+                self.dialect.get_multi_pk_constraint(
+                    conn,
+                    schema=schema,
+                    filter_names=filter_names,
+                    kind=kind,
+                    scope=scope,
+                    info_cache=self.info_cache,
+                    **kw,
+                )
+            )
+
+    def get_foreign_keys(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> List[ReflectedForeignKeyConstraint]:
+        r"""Return information about foreign_keys in ``table_name``.
+
+        Given a string ``table_name``, and an optional string `schema`, return
+        foreign key information as a list of
+        :class:`.ReflectedForeignKeyConstraint`.
+
+        :param table_name: string name of the table. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a list of dictionaries, each representing
+         a foreign key definition.
+
+        .. seealso:: :meth:`Inspector.get_multi_foreign_keys`
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_foreign_keys(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_multi_foreign_keys(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Sequence[str]] = None,
+        kind: ObjectKind = ObjectKind.TABLE,
+        scope: ObjectScope = ObjectScope.DEFAULT,
+        **kw: Any,
+    ) -> Dict[TableKey, List[ReflectedForeignKeyConstraint]]:
+        r"""Return information about foreign_keys in all tables
+        in the given schema.
+
+        The tables can be filtered by passing the names to use to
+        ``filter_names``.
+
+        For each table the value is a list of
+        :class:`.ReflectedForeignKeyConstraint`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param filter_names: optionally return information only for the
+         objects listed here.
+
+        :param kind: a :class:`.ObjectKind` that specifies the type of objects
+         to reflect. Defaults to ``ObjectKind.TABLE``.
+
+        :param scope: a :class:`.ObjectScope` that specifies if foreign keys of
+         default, temporary or any tables should be reflected.
+         Defaults to ``ObjectScope.DEFAULT``.
+
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary where the keys are two-tuple schema,table-name
+         and the values are list of dictionaries, each representing
+         a foreign key definition.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_foreign_keys`
+        """
+
+        with self._operation_context() as conn:
+            return dict(
+                self.dialect.get_multi_foreign_keys(
+                    conn,
+                    schema=schema,
+                    filter_names=filter_names,
+                    kind=kind,
+                    scope=scope,
+                    info_cache=self.info_cache,
+                    **kw,
+                )
+            )
+
+    def get_indexes(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> List[ReflectedIndex]:
+        r"""Return information about indexes in ``table_name``.
+
+        Given a string ``table_name`` and an optional string `schema`, return
+        index information as a list of :class:`.ReflectedIndex`.
+
+        :param table_name: string name of the table. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a list of dictionaries, each representing the
+         definition of an index.
+
+        .. seealso:: :meth:`Inspector.get_multi_indexes`
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_indexes(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_multi_indexes(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Sequence[str]] = None,
+        kind: ObjectKind = ObjectKind.TABLE,
+        scope: ObjectScope = ObjectScope.DEFAULT,
+        **kw: Any,
+    ) -> Dict[TableKey, List[ReflectedIndex]]:
+        r"""Return information about indexes in all objects
+        in the given schema.
+
+        The objects can be filtered by passing the names to use to
+        ``filter_names``.
+
+        For each table the value is a list of :class:`.ReflectedIndex`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param filter_names: optionally return information only for the
+         objects listed here.
+
+        :param kind: a :class:`.ObjectKind` that specifies the type of objects
+         to reflect. Defaults to ``ObjectKind.TABLE``.
+
+        :param scope: a :class:`.ObjectScope` that specifies if indexes of
+         default, temporary or any tables should be reflected.
+         Defaults to ``ObjectScope.DEFAULT``.
+
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary where the keys are two-tuple schema,table-name
+         and the values are list of dictionaries, each representing the
+         definition of an index.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_indexes`
+        """
+
+        with self._operation_context() as conn:
+            return dict(
+                self.dialect.get_multi_indexes(
+                    conn,
+                    schema=schema,
+                    filter_names=filter_names,
+                    kind=kind,
+                    scope=scope,
+                    info_cache=self.info_cache,
+                    **kw,
+                )
+            )
+
+    def get_unique_constraints(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> List[ReflectedUniqueConstraint]:
+        r"""Return information about unique constraints in ``table_name``.
+
+        Given a string ``table_name`` and an optional string `schema`, return
+        unique constraint information as a list of
+        :class:`.ReflectedUniqueConstraint`.
+
+        :param table_name: string name of the table. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a list of dictionaries, each representing the
+         definition of a unique constraint.
+
+        .. seealso:: :meth:`Inspector.get_multi_unique_constraints`
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_unique_constraints(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_multi_unique_constraints(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Sequence[str]] = None,
+        kind: ObjectKind = ObjectKind.TABLE,
+        scope: ObjectScope = ObjectScope.DEFAULT,
+        **kw: Any,
+    ) -> Dict[TableKey, List[ReflectedUniqueConstraint]]:
+        r"""Return information about unique constraints in all tables
+        in the given schema.
+
+        The tables can be filtered by passing the names to use to
+        ``filter_names``.
+
+        For each table the value is a list of
+        :class:`.ReflectedUniqueConstraint`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param filter_names: optionally return information only for the
+         objects listed here.
+
+        :param kind: a :class:`.ObjectKind` that specifies the type of objects
+         to reflect. Defaults to ``ObjectKind.TABLE``.
+
+        :param scope: a :class:`.ObjectScope` that specifies if constraints of
+         default, temporary or any tables should be reflected.
+         Defaults to ``ObjectScope.DEFAULT``.
+
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary where the keys are two-tuple schema,table-name
+         and the values are list of dictionaries, each representing the
+         definition of a unique constraint.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_unique_constraints`
+        """
+
+        with self._operation_context() as conn:
+            return dict(
+                self.dialect.get_multi_unique_constraints(
+                    conn,
+                    schema=schema,
+                    filter_names=filter_names,
+                    kind=kind,
+                    scope=scope,
+                    info_cache=self.info_cache,
+                    **kw,
+                )
+            )
+
+    def get_table_comment(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> ReflectedTableComment:
+        r"""Return information about the table comment for ``table_name``.
+
+        Given a string ``table_name`` and an optional string ``schema``,
+        return table comment information as a :class:`.ReflectedTableComment`.
+
+        Raises ``NotImplementedError`` for a dialect that does not support
+        comments.
+
+        :param table_name: string name of the table. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary, with the table comment.
+
+        .. versionadded:: 1.2
+
+        .. seealso:: :meth:`Inspector.get_multi_table_comment`
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_table_comment(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_multi_table_comment(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Sequence[str]] = None,
+        kind: ObjectKind = ObjectKind.TABLE,
+        scope: ObjectScope = ObjectScope.DEFAULT,
+        **kw: Any,
+    ) -> Dict[TableKey, ReflectedTableComment]:
+        r"""Return information about the table comment in all objects
+        in the given schema.
+
+        The objects can be filtered by passing the names to use to
+        ``filter_names``.
+
+        For each table the value is a :class:`.ReflectedTableComment`.
+
+        Raises ``NotImplementedError`` for a dialect that does not support
+        comments.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param filter_names: optionally return information only for the
+         objects listed here.
+
+        :param kind: a :class:`.ObjectKind` that specifies the type of objects
+         to reflect. Defaults to ``ObjectKind.TABLE``.
+
+        :param scope: a :class:`.ObjectScope` that specifies if comments of
+         default, temporary or any tables should be reflected.
+         Defaults to ``ObjectScope.DEFAULT``.
+
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary where the keys are two-tuple schema,table-name
+         and the values are dictionaries, representing the
+         table comments.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_table_comment`
+        """
+
+        with self._operation_context() as conn:
+            return dict(
+                self.dialect.get_multi_table_comment(
+                    conn,
+                    schema=schema,
+                    filter_names=filter_names,
+                    kind=kind,
+                    scope=scope,
+                    info_cache=self.info_cache,
+                    **kw,
+                )
+            )
+
+    def get_check_constraints(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> List[ReflectedCheckConstraint]:
+        r"""Return information about check constraints in ``table_name``.
+
+        Given a string ``table_name`` and an optional string `schema`, return
+        check constraint information as a list of
+        :class:`.ReflectedCheckConstraint`.
+
+        :param table_name: string name of the table. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a list of dictionaries, each representing the
+         definition of a check constraint.
+
+        .. seealso:: :meth:`Inspector.get_multi_check_constraints`
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_check_constraints(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_multi_check_constraints(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Sequence[str]] = None,
+        kind: ObjectKind = ObjectKind.TABLE,
+        scope: ObjectScope = ObjectScope.DEFAULT,
+        **kw: Any,
+    ) -> Dict[TableKey, List[ReflectedCheckConstraint]]:
+        r"""Return information about check constraints in all tables
+        in the given schema.
+
+        The tables can be filtered by passing the names to use to
+        ``filter_names``.
+
+        For each table the value is a list of
+        :class:`.ReflectedCheckConstraint`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection. For special quoting,
+         use :class:`.quoted_name`.
+
+        :param filter_names: optionally return information only for the
+         objects listed here.
+
+        :param kind: a :class:`.ObjectKind` that specifies the type of objects
+         to reflect. Defaults to ``ObjectKind.TABLE``.
+
+        :param scope: a :class:`.ObjectScope` that specifies if constraints of
+         default, temporary or any tables should be reflected.
+         Defaults to ``ObjectScope.DEFAULT``.
+
+        :param \**kw: Additional keyword argument to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary where the keys are two-tuple schema,table-name
+         and the values are list of dictionaries, each representing the
+         definition of a check constraint.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_check_constraints`
+        """
+
+        with self._operation_context() as conn:
+            return dict(
+                self.dialect.get_multi_check_constraints(
+                    conn,
+                    schema=schema,
+                    filter_names=filter_names,
+                    kind=kind,
+                    scope=scope,
+                    info_cache=self.info_cache,
+                    **kw,
+                )
+            )
+
+    def reflect_table(
+        self,
+        table: sa_schema.Table,
+        include_columns: Optional[Collection[str]],
+        exclude_columns: Collection[str] = (),
+        resolve_fks: bool = True,
+        _extend_on: Optional[Set[sa_schema.Table]] = None,
+        _reflect_info: Optional[_ReflectionInfo] = None,
+    ) -> None:
+        """Given a :class:`_schema.Table` object, load its internal
+        constructs based on introspection.
+
+        This is the underlying method used by most dialects to produce
+        table reflection.  Direct usage is like::
+
+            from sqlalchemy import create_engine, MetaData, Table
+            from sqlalchemy import inspect
+
+            engine = create_engine('...')
+            meta = MetaData()
+            user_table = Table('user', meta)
+            insp = inspect(engine)
+            insp.reflect_table(user_table, None)
+
+        .. versionchanged:: 1.4 Renamed from ``reflecttable`` to
+           ``reflect_table``
+
+        :param table: a :class:`~sqlalchemy.schema.Table` instance.
+        :param include_columns: a list of string column names to include
+          in the reflection process.  If ``None``, all columns are reflected.
+
+        """
+
+        if _extend_on is not None:
+            if table in _extend_on:
+                return
+            else:
+                _extend_on.add(table)
+
+        dialect = self.bind.dialect
+
+        with self._operation_context() as conn:
+            schema = conn.schema_for_object(table)
+
+            table_name = table.name
+
+            # get table-level arguments that are specifically
+            # intended for reflection, e.g. oracle_resolve_synonyms.
+ # these are unconditionally passed to related Table + # objects + reflection_options = { + k: table.dialect_kwargs.get(k) + for k in dialect.reflection_options + if k in table.dialect_kwargs + } + + table_key = (schema, table_name) + if _reflect_info is None or table_key not in _reflect_info.columns: + _reflect_info = self._get_reflection_info( + schema, + filter_names=[table_name], + kind=ObjectKind.ANY, + scope=ObjectScope.ANY, + _reflect_info=_reflect_info, + **table.dialect_kwargs, + ) + if table_key in _reflect_info.unreflectable: + raise _reflect_info.unreflectable[table_key] + + if table_key not in _reflect_info.columns: + raise exc.NoSuchTableError(table_name) + + # reflect table options, like mysql_engine + if _reflect_info.table_options: + tbl_opts = _reflect_info.table_options.get(table_key) + if tbl_opts: + # add additional kwargs to the Table if the dialect + # returned them + table._validate_dialect_kwargs(tbl_opts) + + found_table = False + cols_by_orig_name: Dict[str, sa_schema.Column[Any]] = {} + + for col_d in _reflect_info.columns[table_key]: + found_table = True + + self._reflect_column( + table, + col_d, + include_columns, + exclude_columns, + cols_by_orig_name, + ) + + # NOTE: support tables/views with no columns + if not found_table and not self.has_table(table_name, schema): + raise exc.NoSuchTableError(table_name) + + self._reflect_pk( + _reflect_info, table_key, table, cols_by_orig_name, exclude_columns + ) + + self._reflect_fk( + _reflect_info, + table_key, + table, + cols_by_orig_name, + include_columns, + exclude_columns, + resolve_fks, + _extend_on, + reflection_options, + ) + + self._reflect_indexes( + _reflect_info, + table_key, + table, + cols_by_orig_name, + include_columns, + exclude_columns, + reflection_options, + ) + + self._reflect_unique_constraints( + _reflect_info, + table_key, + table, + cols_by_orig_name, + include_columns, + exclude_columns, + reflection_options, + ) + + self._reflect_check_constraints( + _reflect_info, + table_key, + table, + cols_by_orig_name, + include_columns, + exclude_columns, + reflection_options, + ) + + self._reflect_table_comment( + _reflect_info, + table_key, + table, + reflection_options, + ) + + def _reflect_column( + self, + table: sa_schema.Table, + col_d: ReflectedColumn, + include_columns: Optional[Collection[str]], + exclude_columns: Collection[str], + cols_by_orig_name: Dict[str, sa_schema.Column[Any]], + ) -> None: + orig_name = col_d["name"] + + table.metadata.dispatch.column_reflect(self, table, col_d) + table.dispatch.column_reflect(self, table, col_d) + + # fetch name again as column_reflect is allowed to + # change it + name = col_d["name"] + if (include_columns and name not in include_columns) or ( + exclude_columns and name in exclude_columns + ): + return + + coltype = col_d["type"] + + col_kw = { + k: col_d[k] # type: ignore[literal-required] + for k in [ + "nullable", + "autoincrement", + "quote", + "info", + "key", + "comment", + ] + if k in col_d + } + + if "dialect_options" in col_d: + col_kw.update(col_d["dialect_options"]) + + colargs = [] + default: Any + if col_d.get("default") is not None: + default_text = col_d["default"] + assert default_text is not None + if isinstance(default_text, TextClause): + default = sa_schema.DefaultClause( + default_text, _reflected=True + ) + elif not isinstance(default_text, sa_schema.FetchedValue): + default = sa_schema.DefaultClause( + sql.text(default_text), _reflected=True + ) + else: + default = default_text + colargs.append(default) + + if "computed" in 
col_d: + computed = sa_schema.Computed(**col_d["computed"]) + colargs.append(computed) + + if "identity" in col_d: + identity = sa_schema.Identity(**col_d["identity"]) + colargs.append(identity) + + cols_by_orig_name[orig_name] = col = sa_schema.Column( + name, coltype, *colargs, **col_kw + ) + + if col.key in table.primary_key: + col.primary_key = True + table.append_column(col, replace_existing=True) + + def _reflect_pk( + self, + _reflect_info: _ReflectionInfo, + table_key: TableKey, + table: sa_schema.Table, + cols_by_orig_name: Dict[str, sa_schema.Column[Any]], + exclude_columns: Collection[str], + ) -> None: + pk_cons = _reflect_info.pk_constraint.get(table_key) + if pk_cons: + pk_cols = [ + cols_by_orig_name[pk] + for pk in pk_cons["constrained_columns"] + if pk in cols_by_orig_name and pk not in exclude_columns + ] + + # update pk constraint name and comment + table.primary_key.name = pk_cons.get("name") + table.primary_key.comment = pk_cons.get("comment", None) + + # tell the PKConstraint to re-initialize + # its column collection + table.primary_key._reload(pk_cols) + + def _reflect_fk( + self, + _reflect_info: _ReflectionInfo, + table_key: TableKey, + table: sa_schema.Table, + cols_by_orig_name: Dict[str, sa_schema.Column[Any]], + include_columns: Optional[Collection[str]], + exclude_columns: Collection[str], + resolve_fks: bool, + _extend_on: Optional[Set[sa_schema.Table]], + reflection_options: Dict[str, Any], + ) -> None: + fkeys = _reflect_info.foreign_keys.get(table_key, []) + for fkey_d in fkeys: + conname = fkey_d["name"] + # look for columns by orig name in cols_by_orig_name, + # but support columns that are in-Python only as fallback + constrained_columns = [ + cols_by_orig_name[c].key if c in cols_by_orig_name else c + for c in fkey_d["constrained_columns"] + ] + + if ( + exclude_columns + and set(constrained_columns).intersection(exclude_columns) + or ( + include_columns + and set(constrained_columns).difference(include_columns) + ) + ): + continue + + referred_schema = fkey_d["referred_schema"] + referred_table = fkey_d["referred_table"] + referred_columns = fkey_d["referred_columns"] + refspec = [] + if referred_schema is not None: + if resolve_fks: + sa_schema.Table( + referred_table, + table.metadata, + schema=referred_schema, + autoload_with=self.bind, + _extend_on=_extend_on, + _reflect_info=_reflect_info, + **reflection_options, + ) + for column in referred_columns: + refspec.append( + ".".join([referred_schema, referred_table, column]) + ) + else: + if resolve_fks: + sa_schema.Table( + referred_table, + table.metadata, + autoload_with=self.bind, + schema=sa_schema.BLANK_SCHEMA, + _extend_on=_extend_on, + _reflect_info=_reflect_info, + **reflection_options, + ) + for column in referred_columns: + refspec.append(".".join([referred_table, column])) + if "options" in fkey_d: + options = fkey_d["options"] + else: + options = {} + + try: + table.append_constraint( + sa_schema.ForeignKeyConstraint( + constrained_columns, + refspec, + conname, + link_to_name=True, + comment=fkey_d.get("comment"), + **options, + ) + ) + except exc.ConstraintColumnNotFoundError: + util.warn( + f"On reflected table {table.name}, skipping reflection of " + "foreign key constraint " + f"{conname}; one or more subject columns within " + f"name(s) {', '.join(constrained_columns)} are not " + "present in the table" + ) + + _index_sort_exprs = { + "asc": operators.asc_op, + "desc": operators.desc_op, + "nulls_first": operators.nulls_first_op, + "nulls_last": operators.nulls_last_op, + } + + 
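+    # NOTE (descriptive sketch, not upstream commentary): _reflect_indexes
+    # below consumes the mapping above per column.  For a reflected entry
+    # such as
+    #     {"name": "ix_x", "column_names": ["x"], "unique": False,
+    #      "column_sorting": {"x": ("desc", "nulls_last")}}
+    # each flag wraps the column expression in the order given, producing
+    # nulls_last_op(desc_op(table.c.x)) before it is passed to Index().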
def _reflect_indexes( + self, + _reflect_info: _ReflectionInfo, + table_key: TableKey, + table: sa_schema.Table, + cols_by_orig_name: Dict[str, sa_schema.Column[Any]], + include_columns: Optional[Collection[str]], + exclude_columns: Collection[str], + reflection_options: Dict[str, Any], + ) -> None: + # Indexes + indexes = _reflect_info.indexes.get(table_key, []) + for index_d in indexes: + name = index_d["name"] + columns = index_d["column_names"] + expressions = index_d.get("expressions") + column_sorting = index_d.get("column_sorting", {}) + unique = index_d["unique"] + flavor = index_d.get("type", "index") + dialect_options = index_d.get("dialect_options", {}) + + duplicates = index_d.get("duplicates_constraint") + if include_columns and not set(columns).issubset(include_columns): + continue + if duplicates: + continue + # look for columns by orig name in cols_by_orig_name, + # but support columns that are in-Python only as fallback + idx_element: Any + idx_elements = [] + for index, c in enumerate(columns): + if c is None: + if not expressions: + util.warn( + f"Skipping {flavor} {name!r} because key " + f"{index + 1} reflected as None but no " + "'expressions' were returned" + ) + break + idx_element = sql.text(expressions[index]) + else: + try: + if c in cols_by_orig_name: + idx_element = cols_by_orig_name[c] + else: + idx_element = table.c[c] + except KeyError: + util.warn( + f"{flavor} key {c!r} was not located in " + f"columns for table {table.name!r}" + ) + continue + for option in column_sorting.get(c, ()): + if option in self._index_sort_exprs: + op = self._index_sort_exprs[option] + idx_element = op(idx_element) + idx_elements.append(idx_element) + else: + sa_schema.Index( + name, + *idx_elements, + _table=table, + unique=unique, + **dialect_options, + ) + + def _reflect_unique_constraints( + self, + _reflect_info: _ReflectionInfo, + table_key: TableKey, + table: sa_schema.Table, + cols_by_orig_name: Dict[str, sa_schema.Column[Any]], + include_columns: Optional[Collection[str]], + exclude_columns: Collection[str], + reflection_options: Dict[str, Any], + ) -> None: + constraints = _reflect_info.unique_constraints.get(table_key, []) + # Unique Constraints + for const_d in constraints: + conname = const_d["name"] + columns = const_d["column_names"] + comment = const_d.get("comment") + duplicates = const_d.get("duplicates_index") + dialect_options = const_d.get("dialect_options", {}) + if include_columns and not set(columns).issubset(include_columns): + continue + if duplicates: + continue + # look for columns by orig name in cols_by_orig_name, + # but support columns that are in-Python only as fallback + constrained_cols = [] + for c in columns: + try: + constrained_col = ( + cols_by_orig_name[c] + if c in cols_by_orig_name + else table.c[c] + ) + except KeyError: + util.warn( + "unique constraint key '%s' was not located in " + "columns for table '%s'" % (c, table.name) + ) + else: + constrained_cols.append(constrained_col) + table.append_constraint( + sa_schema.UniqueConstraint( + *constrained_cols, + name=conname, + comment=comment, + **dialect_options, + ) + ) + + def _reflect_check_constraints( + self, + _reflect_info: _ReflectionInfo, + table_key: TableKey, + table: sa_schema.Table, + cols_by_orig_name: Dict[str, sa_schema.Column[Any]], + include_columns: Optional[Collection[str]], + exclude_columns: Collection[str], + reflection_options: Dict[str, Any], + ) -> None: + constraints = _reflect_info.check_constraints.get(table_key, []) + for const_d in constraints: + 
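+            # each const_d is a reflected dictionary (``name``, ``sqltext``,
+            # possibly ``comment``); its keys are passed straight through as
+            # CheckConstraint() keyword arguments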
table.append_constraint(sa_schema.CheckConstraint(**const_d)) + + def _reflect_table_comment( + self, + _reflect_info: _ReflectionInfo, + table_key: TableKey, + table: sa_schema.Table, + reflection_options: Dict[str, Any], + ) -> None: + comment_dict = _reflect_info.table_comment.get(table_key) + if comment_dict: + table.comment = comment_dict["text"] + + def _get_reflection_info( + self, + schema: Optional[str] = None, + filter_names: Optional[Collection[str]] = None, + available: Optional[Collection[str]] = None, + _reflect_info: Optional[_ReflectionInfo] = None, + **kw: Any, + ) -> _ReflectionInfo: + kw["schema"] = schema + + if filter_names and available and len(filter_names) > 100: + fraction = len(filter_names) / len(available) + else: + fraction = None + + unreflectable: Dict[TableKey, exc.UnreflectableTableError] + kw["unreflectable"] = unreflectable = {} + + has_result: bool = True + + def run( + meth: Any, + *, + optional: bool = False, + check_filter_names_from_meth: bool = False, + ) -> Any: + nonlocal has_result + # simple heuristic to improve reflection performance if a + # dialect implements multi_reflection: + # if more than 50% of the tables in the db are in filter_names + # load all the tables, since it's most likely faster to avoid + # a filter on that many tables. + if ( + fraction is None + or fraction <= 0.5 + or not self.dialect._overrides_default(meth.__name__) + ): + _fn = filter_names + else: + _fn = None + try: + if has_result: + res = meth(filter_names=_fn, **kw) + if check_filter_names_from_meth and not res: + # method returned no result data. + # skip any future call methods + has_result = False + else: + res = {} + except NotImplementedError: + if not optional: + raise + res = {} + return res + + info = _ReflectionInfo( + columns=run( + self.get_multi_columns, check_filter_names_from_meth=True + ), + pk_constraint=run(self.get_multi_pk_constraint), + foreign_keys=run(self.get_multi_foreign_keys), + indexes=run(self.get_multi_indexes), + unique_constraints=run( + self.get_multi_unique_constraints, optional=True + ), + table_comment=run(self.get_multi_table_comment, optional=True), + check_constraints=run( + self.get_multi_check_constraints, optional=True + ), + table_options=run(self.get_multi_table_options, optional=True), + unreflectable=unreflectable, + ) + if _reflect_info: + _reflect_info.update(info) + return _reflect_info + else: + return info + + +@final +class ReflectionDefaults: + """provides blank default values for reflection methods.""" + + @classmethod + def columns(cls) -> List[ReflectedColumn]: + return [] + + @classmethod + def pk_constraint(cls) -> ReflectedPrimaryKeyConstraint: + return { + "name": None, + "constrained_columns": [], + } + + @classmethod + def foreign_keys(cls) -> List[ReflectedForeignKeyConstraint]: + return [] + + @classmethod + def indexes(cls) -> List[ReflectedIndex]: + return [] + + @classmethod + def unique_constraints(cls) -> List[ReflectedUniqueConstraint]: + return [] + + @classmethod + def check_constraints(cls) -> List[ReflectedCheckConstraint]: + return [] + + @classmethod + def table_options(cls) -> Dict[str, Any]: + return {} + + @classmethod + def table_comment(cls) -> ReflectedTableComment: + return {"text": None} + + +@dataclass +class _ReflectionInfo: + columns: Dict[TableKey, List[ReflectedColumn]] + pk_constraint: Dict[TableKey, Optional[ReflectedPrimaryKeyConstraint]] + foreign_keys: Dict[TableKey, List[ReflectedForeignKeyConstraint]] + indexes: Dict[TableKey, List[ReflectedIndex]] + # optionals + 
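+    # the optional collections below may be left empty when the dialect does
+    # not implement the corresponding multi-reflection method; see
+    # _get_reflection_info above, which invokes them with optional=True and
+    # substitutes an empty mapping on NotImplementedError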
unique_constraints: Dict[TableKey, List[ReflectedUniqueConstraint]] + table_comment: Dict[TableKey, Optional[ReflectedTableComment]] + check_constraints: Dict[TableKey, List[ReflectedCheckConstraint]] + table_options: Dict[TableKey, Dict[str, Any]] + unreflectable: Dict[TableKey, exc.UnreflectableTableError] + + def update(self, other: _ReflectionInfo) -> None: + for k, v in self.__dict__.items(): + ov = getattr(other, k) + if ov is not None: + if v is None: + setattr(self, k, ov) + else: + v.update(ov) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/result.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/result.py new file mode 100644 index 00000000..56b3a68b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/result.py @@ -0,0 +1,2382 @@ +# engine/result.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Define generic result set constructs.""" + +from __future__ import annotations + +from enum import Enum +import functools +import itertools +import operator +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Mapping +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from .row import Row +from .row import RowMapping +from .. import exc +from .. import util +from ..sql.base import _generative +from ..sql.base import HasMemoized +from ..sql.base import InPlaceGenerative +from ..util import HasMemoized_ro_memoized_attribute +from ..util import NONE_SET +from ..util._has_cy import HAS_CYEXTENSION +from ..util.typing import Literal +from ..util.typing import Self + +if typing.TYPE_CHECKING or not HAS_CYEXTENSION: + from ._py_row import tuplegetter as tuplegetter +else: + from sqlalchemy.cyextension.resultproxy import tuplegetter as tuplegetter + +if typing.TYPE_CHECKING: + from ..sql.schema import Column + from ..sql.type_api import _ResultProcessorType + +_KeyType = Union[str, "Column[Any]"] +_KeyIndexType = Union[str, "Column[Any]", int] + +# is overridden in cursor using _CursorKeyMapRecType +_KeyMapRecType = Any + +_KeyMapType = Mapping[_KeyType, _KeyMapRecType] + + +_RowData = Union[Row[Any], RowMapping, Any] +"""A generic form of "row" that accommodates for the different kinds of +"rows" that different result objects return, including row, row mapping, and +scalar values""" + +_RawRowType = Tuple[Any, ...] 
+"""represents the kind of row we get from a DBAPI cursor""" + +_R = TypeVar("_R", bound=_RowData) +_T = TypeVar("_T", bound=Any) +_TP = TypeVar("_TP", bound=Tuple[Any, ...]) + +_InterimRowType = Union[_R, _RawRowType] +"""a catchall "anything" kind of return type that can be applied +across all the result types + +""" + +_InterimSupportsScalarsRowType = Union[Row[Any], Any] + +_ProcessorsType = Sequence[Optional["_ResultProcessorType[Any]"]] +_TupleGetterType = Callable[[Sequence[Any]], Sequence[Any]] +_UniqueFilterType = Callable[[Any], Any] +_UniqueFilterStateType = Tuple[Set[Any], Optional[_UniqueFilterType]] + + +class ResultMetaData: + """Base for metadata about result rows.""" + + __slots__ = () + + _tuplefilter: Optional[_TupleGetterType] = None + _translated_indexes: Optional[Sequence[int]] = None + _unique_filters: Optional[Sequence[Callable[[Any], Any]]] = None + _keymap: _KeyMapType + _keys: Sequence[str] + _processors: Optional[_ProcessorsType] + _key_to_index: Mapping[_KeyType, int] + + @property + def keys(self) -> RMKeyView: + return RMKeyView(self) + + def _has_key(self, key: object) -> bool: + raise NotImplementedError() + + def _for_freeze(self) -> ResultMetaData: + raise NotImplementedError() + + @overload + def _key_fallback( + self, key: Any, err: Optional[Exception], raiseerr: Literal[True] = ... + ) -> NoReturn: ... + + @overload + def _key_fallback( + self, + key: Any, + err: Optional[Exception], + raiseerr: Literal[False] = ..., + ) -> None: ... + + @overload + def _key_fallback( + self, key: Any, err: Optional[Exception], raiseerr: bool = ... + ) -> Optional[NoReturn]: ... + + def _key_fallback( + self, key: Any, err: Optional[Exception], raiseerr: bool = True + ) -> Optional[NoReturn]: + assert raiseerr + raise KeyError(key) from err + + def _raise_for_ambiguous_column_name( + self, rec: _KeyMapRecType + ) -> NoReturn: + raise NotImplementedError( + "ambiguous column name logic is implemented for " + "CursorResultMetaData" + ) + + def _index_for_key( + self, key: _KeyIndexType, raiseerr: bool + ) -> Optional[int]: + raise NotImplementedError() + + def _indexes_for_keys( + self, keys: Sequence[_KeyIndexType] + ) -> Sequence[int]: + raise NotImplementedError() + + def _metadata_for_keys( + self, keys: Sequence[_KeyIndexType] + ) -> Iterator[_KeyMapRecType]: + raise NotImplementedError() + + def _reduce(self, keys: Sequence[_KeyIndexType]) -> ResultMetaData: + raise NotImplementedError() + + def _getter( + self, key: Any, raiseerr: bool = True + ) -> Optional[Callable[[Row[Any]], Any]]: + index = self._index_for_key(key, raiseerr) + + if index is not None: + return operator.itemgetter(index) + else: + return None + + def _row_as_tuple_getter( + self, keys: Sequence[_KeyIndexType] + ) -> _TupleGetterType: + indexes = self._indexes_for_keys(keys) + return tuplegetter(*indexes) + + def _make_key_to_index( + self, keymap: Mapping[_KeyType, Sequence[Any]], index: int + ) -> Mapping[_KeyType, int]: + return { + key: rec[index] + for key, rec in keymap.items() + if rec[index] is not None + } + + def _key_not_found(self, key: Any, attr_error: bool) -> NoReturn: + if key in self._keymap: + # the index must be none in this case + self._raise_for_ambiguous_column_name(self._keymap[key]) + else: + # unknown key + if attr_error: + try: + self._key_fallback(key, None) + except KeyError as ke: + raise AttributeError(ke.args[0]) from ke + else: + self._key_fallback(key, None) + + @property + def _effective_processors(self) -> Optional[_ProcessorsType]: + if not self._processors or 
NONE_SET.issuperset(self._processors): + return None + else: + return self._processors + + +class RMKeyView(typing.KeysView[Any]): + __slots__ = ("_parent", "_keys") + + _parent: ResultMetaData + _keys: Sequence[str] + + def __init__(self, parent: ResultMetaData): + self._parent = parent + self._keys = [k for k in parent._keys if k is not None] + + def __len__(self) -> int: + return len(self._keys) + + def __repr__(self) -> str: + return "{0.__class__.__name__}({0._keys!r})".format(self) + + def __iter__(self) -> Iterator[str]: + return iter(self._keys) + + def __contains__(self, item: Any) -> bool: + if isinstance(item, int): + return False + + # note this also includes special key fallback behaviors + # which also don't seem to be tested in test_resultset right now + return self._parent._has_key(item) + + def __eq__(self, other: Any) -> bool: + return list(other) == list(self) + + def __ne__(self, other: Any) -> bool: + return list(other) != list(self) + + +class SimpleResultMetaData(ResultMetaData): + """result metadata for in-memory collections.""" + + __slots__ = ( + "_keys", + "_keymap", + "_processors", + "_tuplefilter", + "_translated_indexes", + "_unique_filters", + "_key_to_index", + ) + + _keys: Sequence[str] + + def __init__( + self, + keys: Sequence[str], + extra: Optional[Sequence[Any]] = None, + _processors: Optional[_ProcessorsType] = None, + _tuplefilter: Optional[_TupleGetterType] = None, + _translated_indexes: Optional[Sequence[int]] = None, + _unique_filters: Optional[Sequence[Callable[[Any], Any]]] = None, + ): + self._keys = list(keys) + self._tuplefilter = _tuplefilter + self._translated_indexes = _translated_indexes + self._unique_filters = _unique_filters + if extra: + recs_names = [ + ( + (name,) + (extras if extras else ()), + (index, name, extras), + ) + for index, (name, extras) in enumerate(zip(self._keys, extra)) + ] + else: + recs_names = [ + ((name,), (index, name, ())) + for index, name in enumerate(self._keys) + ] + + self._keymap = {key: rec for keys, rec in recs_names for key in keys} + + self._processors = _processors + + self._key_to_index = self._make_key_to_index(self._keymap, 0) + + def _has_key(self, key: object) -> bool: + return key in self._keymap + + def _for_freeze(self) -> ResultMetaData: + unique_filters = self._unique_filters + if unique_filters and self._tuplefilter: + unique_filters = self._tuplefilter(unique_filters) + + # TODO: are we freezing the result with or without uniqueness + # applied? 
+ return SimpleResultMetaData( + self._keys, + extra=[self._keymap[key][2] for key in self._keys], + _unique_filters=unique_filters, + ) + + def __getstate__(self) -> Dict[str, Any]: + return { + "_keys": self._keys, + "_translated_indexes": self._translated_indexes, + } + + def __setstate__(self, state: Dict[str, Any]) -> None: + if state["_translated_indexes"]: + _translated_indexes = state["_translated_indexes"] + _tuplefilter = tuplegetter(*_translated_indexes) + else: + _translated_indexes = _tuplefilter = None + self.__init__( # type: ignore + state["_keys"], + _translated_indexes=_translated_indexes, + _tuplefilter=_tuplefilter, + ) + + def _index_for_key(self, key: Any, raiseerr: bool = True) -> int: + if int in key.__class__.__mro__: + key = self._keys[key] + try: + rec = self._keymap[key] + except KeyError as ke: + rec = self._key_fallback(key, ke, raiseerr) + + return rec[0] # type: ignore[no-any-return] + + def _indexes_for_keys(self, keys: Sequence[Any]) -> Sequence[int]: + return [self._keymap[key][0] for key in keys] + + def _metadata_for_keys( + self, keys: Sequence[Any] + ) -> Iterator[_KeyMapRecType]: + for key in keys: + if int in key.__class__.__mro__: + key = self._keys[key] + + try: + rec = self._keymap[key] + except KeyError as ke: + rec = self._key_fallback(key, ke, True) + + yield rec + + def _reduce(self, keys: Sequence[Any]) -> ResultMetaData: + try: + metadata_for_keys = [ + self._keymap[ + self._keys[key] if int in key.__class__.__mro__ else key + ] + for key in keys + ] + except KeyError as ke: + self._key_fallback(ke.args[0], ke, True) + + indexes: Sequence[int] + new_keys: Sequence[str] + extra: Sequence[Any] + indexes, new_keys, extra = zip(*metadata_for_keys) + + if self._translated_indexes: + indexes = [self._translated_indexes[idx] for idx in indexes] + + tup = tuplegetter(*indexes) + + new_metadata = SimpleResultMetaData( + new_keys, + extra=extra, + _tuplefilter=tup, + _translated_indexes=indexes, + _processors=self._processors, + _unique_filters=self._unique_filters, + ) + + return new_metadata + + +def result_tuple( + fields: Sequence[str], extra: Optional[Any] = None +) -> Callable[[Iterable[Any]], Row[Any]]: + parent = SimpleResultMetaData(fields, extra) + return functools.partial( + Row, parent, parent._effective_processors, parent._key_to_index + ) + + +# a symbol that indicates to internal Result methods that +# "no row is returned". We can't use None for those cases where a scalar +# filter is applied to rows. 
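+# Using a single-member Enum provides a sentinel object that is distinct from
+# any possible row or scalar value and can be compared with ``is``; e.g.
+# _next_impl() below translates ``row is _NO_ROW`` into StopIteration.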
+class _NoRow(Enum): + _NO_ROW = 0 + + +_NO_ROW = _NoRow._NO_ROW + + +class ResultInternal(InPlaceGenerative, Generic[_R]): + __slots__ = () + + _real_result: Optional[Result[Any]] = None + _generate_rows: bool = True + _row_logging_fn: Optional[Callable[[Any], Any]] + + _unique_filter_state: Optional[_UniqueFilterStateType] = None + _post_creational_filter: Optional[Callable[[Any], Any]] = None + _is_cursor = False + + _metadata: ResultMetaData + + _source_supports_scalars: bool + + def _fetchiter_impl(self) -> Iterator[_InterimRowType[Row[Any]]]: + raise NotImplementedError() + + def _fetchone_impl( + self, hard_close: bool = False + ) -> Optional[_InterimRowType[Row[Any]]]: + raise NotImplementedError() + + def _fetchmany_impl( + self, size: Optional[int] = None + ) -> List[_InterimRowType[Row[Any]]]: + raise NotImplementedError() + + def _fetchall_impl(self) -> List[_InterimRowType[Row[Any]]]: + raise NotImplementedError() + + def _soft_close(self, hard: bool = False) -> None: + raise NotImplementedError() + + @HasMemoized_ro_memoized_attribute + def _row_getter(self) -> Optional[Callable[..., _R]]: + real_result: Result[Any] = ( + self._real_result + if self._real_result + else cast("Result[Any]", self) + ) + + if real_result._source_supports_scalars: + if not self._generate_rows: + return None + else: + _proc = Row + + def process_row( + metadata: ResultMetaData, + processors: Optional[_ProcessorsType], + key_to_index: Mapping[_KeyType, int], + scalar_obj: Any, + ) -> Row[Any]: + return _proc( + metadata, processors, key_to_index, (scalar_obj,) + ) + + else: + process_row = Row # type: ignore + + metadata = self._metadata + + key_to_index = metadata._key_to_index + processors = metadata._effective_processors + tf = metadata._tuplefilter + + if tf and not real_result._source_supports_scalars: + if processors: + processors = tf(processors) + + _make_row_orig: Callable[..., _R] = functools.partial( # type: ignore # noqa E501 + process_row, metadata, processors, key_to_index + ) + + fixed_tf = tf + + def make_row(row: _InterimRowType[Row[Any]]) -> _R: + return _make_row_orig(fixed_tf(row)) + + else: + make_row = functools.partial( # type: ignore + process_row, metadata, processors, key_to_index + ) + + if real_result._row_logging_fn: + _log_row = real_result._row_logging_fn + _make_row = make_row + + def make_row(row: _InterimRowType[Row[Any]]) -> _R: + return _log_row(_make_row(row)) # type: ignore + + return make_row + + @HasMemoized_ro_memoized_attribute + def _iterator_getter(self) -> Callable[..., Iterator[_R]]: + make_row = self._row_getter + + post_creational_filter = self._post_creational_filter + + if self._unique_filter_state: + uniques, strategy = self._unique_strategy + + def iterrows(self: Result[Any]) -> Iterator[_R]: + for raw_row in self._fetchiter_impl(): + obj: _InterimRowType[Any] = ( + make_row(raw_row) if make_row else raw_row + ) + hashed = strategy(obj) if strategy else obj + if hashed in uniques: + continue + uniques.add(hashed) + if post_creational_filter: + obj = post_creational_filter(obj) + yield obj # type: ignore + + else: + + def iterrows(self: Result[Any]) -> Iterator[_R]: + for raw_row in self._fetchiter_impl(): + row: _InterimRowType[Any] = ( + make_row(raw_row) if make_row else raw_row + ) + if post_creational_filter: + row = post_creational_filter(row) + yield row # type: ignore + + return iterrows + + def _raw_all_rows(self) -> List[_R]: + make_row = self._row_getter + assert make_row is not None + rows = self._fetchall_impl() + return [make_row(row) 
for row in rows] + + def _allrows(self) -> List[_R]: + post_creational_filter = self._post_creational_filter + + make_row = self._row_getter + + rows = self._fetchall_impl() + made_rows: List[_InterimRowType[_R]] + if make_row: + made_rows = [make_row(row) for row in rows] + else: + made_rows = rows # type: ignore + + interim_rows: List[_R] + + if self._unique_filter_state: + uniques, strategy = self._unique_strategy + + interim_rows = [ + made_row # type: ignore + for made_row, sig_row in [ + ( + made_row, + strategy(made_row) if strategy else made_row, + ) + for made_row in made_rows + ] + if sig_row not in uniques and not uniques.add(sig_row) # type: ignore # noqa: E501 + ] + else: + interim_rows = made_rows # type: ignore + + if post_creational_filter: + interim_rows = [ + post_creational_filter(row) for row in interim_rows + ] + return interim_rows + + @HasMemoized_ro_memoized_attribute + def _onerow_getter( + self, + ) -> Callable[..., Union[Literal[_NoRow._NO_ROW], _R]]: + make_row = self._row_getter + + post_creational_filter = self._post_creational_filter + + if self._unique_filter_state: + uniques, strategy = self._unique_strategy + + def onerow(self: Result[Any]) -> Union[_NoRow, _R]: + _onerow = self._fetchone_impl + while True: + row = _onerow() + if row is None: + return _NO_ROW + else: + obj: _InterimRowType[Any] = ( + make_row(row) if make_row else row + ) + hashed = strategy(obj) if strategy else obj + if hashed in uniques: + continue + else: + uniques.add(hashed) + if post_creational_filter: + obj = post_creational_filter(obj) + return obj # type: ignore + + else: + + def onerow(self: Result[Any]) -> Union[_NoRow, _R]: + row = self._fetchone_impl() + if row is None: + return _NO_ROW + else: + interim_row: _InterimRowType[Any] = ( + make_row(row) if make_row else row + ) + if post_creational_filter: + interim_row = post_creational_filter(interim_row) + return interim_row # type: ignore + + return onerow + + @HasMemoized_ro_memoized_attribute + def _manyrow_getter(self) -> Callable[..., List[_R]]: + make_row = self._row_getter + + post_creational_filter = self._post_creational_filter + + if self._unique_filter_state: + uniques, strategy = self._unique_strategy + + def filterrows( + make_row: Optional[Callable[..., _R]], + rows: List[Any], + strategy: Optional[Callable[[List[Any]], Any]], + uniques: Set[Any], + ) -> List[_R]: + if make_row: + rows = [make_row(row) for row in rows] + + if strategy: + made_rows = ( + (made_row, strategy(made_row)) for made_row in rows + ) + else: + made_rows = ((made_row, made_row) for made_row in rows) + return [ + made_row + for made_row, sig_row in made_rows + if sig_row not in uniques and not uniques.add(sig_row) # type: ignore # noqa: E501 + ] + + def manyrows( + self: ResultInternal[_R], num: Optional[int] + ) -> List[_R]: + collect: List[_R] = [] + + _manyrows = self._fetchmany_impl + + if num is None: + # if None is passed, we don't know the default + # manyrows number, DBAPI has this as cursor.arraysize + # different DBAPIs / fetch strategies may be different. + # do a fetch to find what the number is. if there are + # only fewer rows left, then it doesn't matter. 
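+                    # (an explicitly configured yield_per takes precedence;
+                    # otherwise the size of the first fetch below becomes the
+                    # effective batch size)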
+ real_result = ( + self._real_result + if self._real_result + else cast("Result[Any]", self) + ) + if real_result._yield_per: + num_required = num = real_result._yield_per + else: + rows = _manyrows(num) + num = len(rows) + assert make_row is not None + collect.extend( + filterrows(make_row, rows, strategy, uniques) + ) + num_required = num - len(collect) + else: + num_required = num + + assert num is not None + + while num_required: + rows = _manyrows(num_required) + if not rows: + break + + collect.extend( + filterrows(make_row, rows, strategy, uniques) + ) + num_required = num - len(collect) + + if post_creational_filter: + collect = [post_creational_filter(row) for row in collect] + return collect + + else: + + def manyrows( + self: ResultInternal[_R], num: Optional[int] + ) -> List[_R]: + if num is None: + real_result = ( + self._real_result + if self._real_result + else cast("Result[Any]", self) + ) + num = real_result._yield_per + + rows: List[_InterimRowType[Any]] = self._fetchmany_impl(num) + if make_row: + rows = [make_row(row) for row in rows] + if post_creational_filter: + rows = [post_creational_filter(row) for row in rows] + return rows # type: ignore + + return manyrows + + @overload + def _only_one_row( + self, + raise_for_second_row: bool, + raise_for_none: Literal[True], + scalar: bool, + ) -> _R: ... + + @overload + def _only_one_row( + self, + raise_for_second_row: bool, + raise_for_none: bool, + scalar: bool, + ) -> Optional[_R]: ... + + def _only_one_row( + self, + raise_for_second_row: bool, + raise_for_none: bool, + scalar: bool, + ) -> Optional[_R]: + onerow = self._fetchone_impl + + row: Optional[_InterimRowType[Any]] = onerow(hard_close=True) + if row is None: + if raise_for_none: + raise exc.NoResultFound( + "No row was found when one was required" + ) + else: + return None + + if scalar and self._source_supports_scalars: + self._generate_rows = False + make_row = None + else: + make_row = self._row_getter + + try: + row = make_row(row) if make_row else row + except: + self._soft_close(hard=True) + raise + + if raise_for_second_row: + if self._unique_filter_state: + # for no second row but uniqueness, need to essentially + # consume the entire result :( + uniques, strategy = self._unique_strategy + + existing_row_hash = strategy(row) if strategy else row + + while True: + next_row: Any = onerow(hard_close=True) + if next_row is None: + next_row = _NO_ROW + break + + try: + next_row = make_row(next_row) if make_row else next_row + + if strategy: + assert next_row is not _NO_ROW + if existing_row_hash == strategy(next_row): + continue + elif row == next_row: + continue + # here, we have a row and it's different + break + except: + self._soft_close(hard=True) + raise + else: + next_row = onerow(hard_close=True) + if next_row is None: + next_row = _NO_ROW + + if next_row is not _NO_ROW: + self._soft_close(hard=True) + raise exc.MultipleResultsFound( + "Multiple rows were found when exactly one was required" + if raise_for_none + else "Multiple rows were found when one or none " + "was required" + ) + else: + next_row = _NO_ROW + # if we checked for second row then that would have + # closed us :) + self._soft_close(hard=True) + + if not scalar: + post_creational_filter = self._post_creational_filter + if post_creational_filter: + row = post_creational_filter(row) + + if scalar and make_row: + return row[0] # type: ignore + else: + return row # type: ignore + + def _iter_impl(self) -> Iterator[_R]: + return self._iterator_getter(self) + + def _next_impl(self) -> _R: 
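+        # iterator protocol step: _onerow_getter returns the _NO_ROW sentinel
+        # when the result is exhausted, which is translated here into
+        # StopIteration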
+ row = self._onerow_getter(self) + if row is _NO_ROW: + raise StopIteration() + else: + return row + + @_generative + def _column_slices(self, indexes: Sequence[_KeyIndexType]) -> Self: + real_result = ( + self._real_result + if self._real_result + else cast("Result[Any]", self) + ) + + if not real_result._source_supports_scalars or len(indexes) != 1: + self._metadata = self._metadata._reduce(indexes) + + assert self._generate_rows + + return self + + @HasMemoized.memoized_attribute + def _unique_strategy(self) -> _UniqueFilterStateType: + assert self._unique_filter_state is not None + uniques, strategy = self._unique_filter_state + + real_result = ( + self._real_result + if self._real_result is not None + else cast("Result[Any]", self) + ) + + if not strategy and self._metadata._unique_filters: + if ( + real_result._source_supports_scalars + and not self._generate_rows + ): + strategy = self._metadata._unique_filters[0] + else: + filters = self._metadata._unique_filters + if self._metadata._tuplefilter: + filters = self._metadata._tuplefilter(filters) + + strategy = operator.methodcaller("_filter_on_values", filters) + return uniques, strategy + + +class _WithKeys: + __slots__ = () + + _metadata: ResultMetaData + + # used mainly to share documentation on the keys method. + def keys(self) -> RMKeyView: + """Return an iterable view which yields the string keys that would + be represented by each :class:`_engine.Row`. + + The keys can represent the labels of the columns returned by a core + statement or the names of the orm classes returned by an orm + execution. + + The view also can be tested for key containment using the Python + ``in`` operator, which will test both for the string keys represented + in the view, as well as for alternate keys such as column objects. + + .. versionchanged:: 1.4 a key view object is returned rather than a + plain list. + + + """ + return self._metadata.keys + + +class Result(_WithKeys, ResultInternal[Row[_TP]]): + """Represent a set of database results. + + .. versionadded:: 1.4 The :class:`_engine.Result` object provides a + completely updated usage model and calling facade for SQLAlchemy + Core and SQLAlchemy ORM. In Core, it forms the basis of the + :class:`_engine.CursorResult` object which replaces the previous + :class:`_engine.ResultProxy` interface. When using the ORM, a + higher level object called :class:`_engine.ChunkedIteratorResult` + is normally used. + + .. note:: In SQLAlchemy 1.4 and above, this object is + used for ORM results returned by :meth:`_orm.Session.execute`, which can + yield instances of ORM mapped objects either individually or within + tuple-like rows. Note that the :class:`_engine.Result` object does not + deduplicate instances or rows automatically as is the case with the + legacy :class:`_orm.Query` object. For in-Python de-duplication of + instances or rows, use the :meth:`_engine.Result.unique` modifier + method. + + .. 
seealso::
+
+        :ref:`tutorial_fetching_rows` - in the :doc:`/tutorial/index`
+
+    """
+
+    __slots__ = ("_metadata", "__dict__")
+
+    _row_logging_fn: Optional[Callable[[Row[Any]], Row[Any]]] = None
+
+    _source_supports_scalars: bool = False
+
+    _yield_per: Optional[int] = None
+
+    _attributes: util.immutabledict[Any, Any] = util.immutabledict()
+
+    def __init__(self, cursor_metadata: ResultMetaData):
+        self._metadata = cursor_metadata
+
+    def __enter__(self) -> Self:
+        return self
+
+    def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
+        self.close()
+
+    def close(self) -> None:
+        """Close this :class:`_engine.Result`.
+
+        The behavior of this method is implementation specific, and is
+        not implemented by default. The method should generally release
+        the resources in use by the result object and also cause any
+        subsequent iteration or row fetching to raise
+        :class:`.ResourceClosedError`.
+
+        .. versionadded:: 1.4.27 - ``.close()`` was previously not generally
+           available for all :class:`_engine.Result` classes, instead only
+           being available on the :class:`_engine.CursorResult` returned for
+           Core statement executions. As most other result objects, namely the
+           ones used by the ORM, are proxying a :class:`_engine.CursorResult`
+           in any case, this allows the underlying cursor result to be closed
+           from the outside facade for the case when the ORM query is using
+           the ``yield_per`` execution option where it does not immediately
+           exhaust and autoclose the database cursor.
+
+        """
+        self._soft_close(hard=True)
+
+    @property
+    def _soft_closed(self) -> bool:
+        raise NotImplementedError()
+
+    @property
+    def closed(self) -> bool:
+        """Return ``True`` if this :class:`_engine.Result` reports .closed
+
+        .. versionadded:: 1.4.43
+
+        """
+        raise NotImplementedError()
+
+    @_generative
+    def yield_per(self, num: int) -> Self:
+        """Configure the row-fetching strategy to fetch ``num`` rows at a time.
+
+        This impacts the underlying behavior of the result when iterating over
+        the result object, or otherwise making use of methods such as
+        :meth:`_engine.Result.fetchone` that return one row at a time. Data
+        from the underlying cursor or other data source will be buffered up to
+        this many rows in memory, and the buffered collection will then be
+        yielded out one row at a time or as many rows as are requested. Each
+        time the buffer clears, it will be refreshed to this many rows, or to
+        as many rows as remain if fewer remain.
+
+        The :meth:`_engine.Result.yield_per` method is generally used in
+        conjunction with the
+        :paramref:`_engine.Connection.execution_options.stream_results`
+        execution option, which will allow the database dialect in use to make
+        use of a server side cursor, if the DBAPI supports a specific "server
+        side cursor" mode separate from its default mode of operation.
+
+        .. tip::
+
+            Consider using the
+            :paramref:`_engine.Connection.execution_options.yield_per`
+            execution option, which will simultaneously set
+            :paramref:`_engine.Connection.execution_options.stream_results`
+            to ensure the use of server side cursors, as well as automatically
+            invoke the :meth:`_engine.Result.yield_per` method to establish
+            a fixed row buffer size at once.
+
+            The :paramref:`_engine.Connection.execution_options.yield_per`
+            execution option is available for ORM operations, with
+            :class:`_orm.Session`-oriented use described at
+            :ref:`orm_queryguide_yield_per`. The Core-only version, which
+            works with :class:`_engine.Connection`, is new as of SQLAlchemy
+            1.4.40.
+
+        .. versionadded:: 1.4
+
+        :param num: number of rows to fetch each time the buffer is refilled.
+         If set to a value below 1, fetches all rows for the next buffer.
+
+        .. seealso::
+
+            :ref:`engine_stream_results` - describes Core behavior for
+            :meth:`_engine.Result.yield_per`
+
+            :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+
+        """
+        self._yield_per = num
+        return self
+
+    @_generative
+    def unique(self, strategy: Optional[_UniqueFilterType] = None) -> Self:
+        """Apply unique filtering to the objects returned by this
+        :class:`_engine.Result`.
+
+        When this filter is applied with no arguments, the rows or objects
+        returned will be filtered such that each row is returned uniquely. The
+        algorithm used to determine this uniqueness is by default the Python
+        hashing identity of the whole tuple. In some cases a specialized
+        per-entity hashing scheme may be used, such as when using the ORM,
+        where a scheme is applied that works against the primary key identity
+        of returned objects.
+
+        The unique filter is applied **after all other filters**, which means
+        if the columns returned have been refined using a method such as the
+        :meth:`_engine.Result.columns` or :meth:`_engine.Result.scalars`
+        method, the uniquing is applied to **only the column or columns
+        returned**. This occurs regardless of the order in which these
+        methods have been called upon the :class:`_engine.Result` object.
+
+        The unique filter also changes the calculus used for methods like
+        :meth:`_engine.Result.fetchmany` and :meth:`_engine.Result.partitions`.
+        When using :meth:`_engine.Result.unique`, these methods will continue
+        to yield the number of rows or objects requested, after uniquing
+        has been applied. However, this necessarily impacts the buffering
+        behavior of the underlying cursor or datasource, such that multiple
+        underlying calls to ``cursor.fetchmany()`` may be necessary in order
+        to accumulate enough objects to provide a unique collection
+        of the requested size.
+
+        :param strategy: a callable that will be applied to rows or objects
+         being iterated, which should return an object that represents the
+         unique value of the row. A Python ``set()`` is used to store
+         these identities. If not passed, a default uniqueness strategy
+         is used which may have been assembled by the source of this
+         :class:`_engine.Result` object.
+
+        """
+        self._unique_filter_state = (set(), strategy)
+        return self
+
+    def columns(self, *col_expressions: _KeyIndexType) -> Self:
+        r"""Establish the columns that should be returned in each row.
+
+        This method may be used to limit the columns returned as well
+        as to reorder them. The given list of expressions is normally
+        a series of integers or string key names. They may also be
+        appropriate :class:`.ColumnElement` objects which correspond to
+        a given statement construct.
+
+        .. versionchanged:: 2.0 Due to a bug in 1.4, the
+           :meth:`_engine.Result.columns` method had an incorrect behavior
+           where calling upon the method with just one index would cause the
+           :class:`_engine.Result` object to yield scalar values rather than
+           :class:`_engine.Row` objects. In version 2.0, this behavior
+           has been corrected such that calling upon
+           :meth:`_engine.Result.columns` with a single index will
+           produce a :class:`_engine.Result` object that continues
+           to yield :class:`_engine.Row` objects, which include
+           only a single column.
+ + E.g.:: + + statement = select(table.c.x, table.c.y, table.c.z) + result = connection.execute(statement) + + for z, y in result.columns('z', 'y'): + # ... + + + Example of using the column objects from the statement itself:: + + for z, y in result.columns( + statement.selected_columns.c.z, + statement.selected_columns.c.y + ): + # ... + + .. versionadded:: 1.4 + + :param \*col_expressions: indicates columns to be returned. Elements + may be integer row indexes, string column names, or appropriate + :class:`.ColumnElement` objects corresponding to a select construct. + + :return: this :class:`_engine.Result` object with the modifications + given. + + """ + return self._column_slices(col_expressions) + + @overload + def scalars(self: Result[Tuple[_T]]) -> ScalarResult[_T]: ... + + @overload + def scalars( + self: Result[Tuple[_T]], index: Literal[0] + ) -> ScalarResult[_T]: ... + + @overload + def scalars(self, index: _KeyIndexType = 0) -> ScalarResult[Any]: ... + + def scalars(self, index: _KeyIndexType = 0) -> ScalarResult[Any]: + """Return a :class:`_engine.ScalarResult` filtering object which + will return single elements rather than :class:`_row.Row` objects. + + E.g.:: + + >>> result = conn.execute(text("select int_id from table")) + >>> result.scalars().all() + [1, 2, 3] + + When results are fetched from the :class:`_engine.ScalarResult` + filtering object, the single column-row that would be returned by the + :class:`_engine.Result` is instead returned as the column's value. + + .. versionadded:: 1.4 + + :param index: integer or row key indicating the column to be fetched + from each row, defaults to ``0`` indicating the first column. + + :return: a new :class:`_engine.ScalarResult` filtering object referring + to this :class:`_engine.Result` object. + + """ + return ScalarResult(self, index) + + def _getter( + self, key: _KeyIndexType, raiseerr: bool = True + ) -> Optional[Callable[[Row[Any]], Any]]: + """return a callable that will retrieve the given key from a + :class:`_engine.Row`. + + """ + if self._source_supports_scalars: + raise NotImplementedError( + "can't use this function in 'only scalars' mode" + ) + return self._metadata._getter(key, raiseerr) + + def _tuple_getter(self, keys: Sequence[_KeyIndexType]) -> _TupleGetterType: + """return a callable that will retrieve the given keys from a + :class:`_engine.Row`. + + """ + if self._source_supports_scalars: + raise NotImplementedError( + "can't use this function in 'only scalars' mode" + ) + return self._metadata._row_as_tuple_getter(keys) + + def mappings(self) -> MappingResult: + """Apply a mappings filter to returned rows, returning an instance of + :class:`_engine.MappingResult`. + + When this filter is applied, fetching rows will return + :class:`_engine.RowMapping` objects instead of :class:`_engine.Row` + objects. + + .. versionadded:: 1.4 + + :return: a new :class:`_engine.MappingResult` filtering object + referring to this :class:`_engine.Result` object. + + """ + + return MappingResult(self) + + @property + def t(self) -> TupleResult[_TP]: + """Apply a "typed tuple" typing filter to returned rows. + + The :attr:`_engine.Result.t` attribute is a synonym for + calling the :meth:`_engine.Result.tuples` method. + + .. versionadded:: 2.0 + + """ + return self # type: ignore + + def tuples(self) -> TupleResult[_TP]: + """Apply a "typed tuple" typing filter to returned rows. 
+
+        This method returns the same :class:`_engine.Result` object
+        at runtime,
+        however is annotated as returning a :class:`_engine.TupleResult`
+        object that will indicate to :pep:`484` typing tools that plain typed
+        ``Tuple`` instances are returned rather than rows. This allows
+        tuple unpacking and ``__getitem__`` access of :class:`_engine.Row`
+        objects to be typed, for those cases where the statement invoked
+        itself included typing information.
+
+        .. versionadded:: 2.0
+
+        :return: the :class:`_engine.TupleResult` type at typing time.
+
+        .. seealso::
+
+            :attr:`_engine.Result.t` - shorter synonym
+
+            :attr:`_engine.Row._t` - :class:`_engine.Row` version
+
+        """
+
+        return self  # type: ignore
+
+    def _raw_row_iterator(self) -> Iterator[_RowData]:
+        """Return a safe iterator that yields raw row data.
+
+        This is used by the :meth:`_engine.Result.merge` method
+        to merge multiple compatible results together.
+
+        """
+        raise NotImplementedError()
+
+    def __iter__(self) -> Iterator[Row[_TP]]:
+        return self._iter_impl()
+
+    def __next__(self) -> Row[_TP]:
+        return self._next_impl()
+
+    def partitions(
+        self, size: Optional[int] = None
+    ) -> Iterator[Sequence[Row[_TP]]]:
+        """Iterate through sub-lists of rows of the size given.
+
+        Each list will be of the size given, excluding the last list to
+        be yielded, which may have a smaller number of rows. No empty
+        lists will be yielded.
+
+        The result object is automatically closed when the iterator
+        is fully consumed.
+
+        Note that the backend driver will usually buffer the entire result
+        ahead of time unless the
+        :paramref:`.Connection.execution_options.stream_results` execution
+        option is used indicating that the driver should not pre-buffer
+        results, if possible. Not all drivers support this option and
+        the option is silently ignored for drivers that do not.
+
+        When using the ORM, the :meth:`_engine.Result.partitions` method
+        is typically more effective from a memory perspective when it is
+        combined with use of the
+        :ref:`yield_per execution option <orm_queryguide_yield_per>`,
+        which instructs both the DBAPI driver to use server side cursors,
+        if available, and the ORM loading internals to only
+        build a certain number of ORM objects from a result at a time before
+        yielding them out.
+
+        .. versionadded:: 1.4
+
+        :param size: indicate the maximum number of rows to be present
+         in each list yielded. If None, makes use of the value set by
+         the :meth:`_engine.Result.yield_per` method, if it was called,
+         or the :paramref:`_engine.Connection.execution_options.yield_per`
+         execution option, which is equivalent in this regard. If
+         yield_per wasn't set, it makes use of the
+         :meth:`_engine.Result.fetchmany` default, which may be backend
+         specific and not well defined.
+
+        :return: iterator of lists
+
+        .. seealso::
+
+            :ref:`engine_stream_results`
+
+            :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+
+        """
+
+        getter = self._manyrow_getter
+
+        while True:
+            partition = getter(self, size)
+            if partition:
+                yield partition
+            else:
+                break
+
+    def fetchall(self) -> Sequence[Row[_TP]]:
+        """A synonym for the :meth:`_engine.Result.all` method."""
+
+        return self._allrows()
+
+    def fetchone(self) -> Optional[Row[_TP]]:
+        """Fetch one row.
+
+        When all rows are exhausted, returns ``None``.
+
+        This method is provided for backwards compatibility with
+        SQLAlchemy 1.x.x.
+
+        To fetch the first row of a result only, use the
+        :meth:`_engine.Result.first` method. To iterate through all
+        rows, iterate the :class:`_engine.Result` object directly.
+
+        :return: a :class:`_engine.Row` object if no filters are applied,
+         or ``None`` if no rows remain.
+
+        """
+        row = self._onerow_getter(self)
+        if row is _NO_ROW:
+            return None
+        else:
+            return row
+
+    def fetchmany(self, size: Optional[int] = None) -> Sequence[Row[_TP]]:
+        """Fetch many rows.
+
+        When all rows are exhausted, returns an empty sequence.
+
+        This method is provided for backwards compatibility with
+        SQLAlchemy 1.x.x.
+
+        To fetch rows in groups, use the :meth:`_engine.Result.partitions`
+        method.
+
+        :return: a sequence of :class:`_engine.Row` objects.
+
+        .. seealso::
+
+            :meth:`_engine.Result.partitions`
+
+        """
+
+        return self._manyrow_getter(self, size)
+
+    def all(self) -> Sequence[Row[_TP]]:
+        """Return all rows in a sequence.
+
+        Closes the result set after invocation. Subsequent invocations
+        will return an empty sequence.
+
+        .. versionadded:: 1.4
+
+        :return: a sequence of :class:`_engine.Row` objects.
+
+        .. seealso::
+
+            :ref:`engine_stream_results` - How to stream a large result set
+            without loading it completely in Python.
+
+        """
+
+        return self._allrows()
+
+    def first(self) -> Optional[Row[_TP]]:
+        """Fetch the first row or ``None`` if no row is present.
+
+        Closes the result set and discards remaining rows.
+
+        .. note:: This method returns one **row**, e.g. tuple, by default.
+           To return exactly one single scalar value, that is, the first
+           column of the first row, use the
+           :meth:`_engine.Result.scalar` method,
+           or combine :meth:`_engine.Result.scalars` and
+           :meth:`_engine.Result.first`.
+
+           Additionally, in contrast to the behavior of the legacy ORM
+           :meth:`_orm.Query.first` method, **no limit is applied** to the
+           SQL query which was invoked to produce this
+           :class:`_engine.Result`;
+           for a DBAPI driver that buffers results in memory before yielding
+           rows, all rows will be sent to the Python process and all but
+           the first row will be discarded.
+
+           .. seealso::
+
+               :ref:`migration_20_unify_select`
+
+        :return: a :class:`_engine.Row` object, or ``None``
+         if no rows remain.
+
+        .. seealso::
+
+            :meth:`_engine.Result.scalar`
+
+            :meth:`_engine.Result.one`
+
+        """
+
+        return self._only_one_row(
+            raise_for_second_row=False, raise_for_none=False, scalar=False
+        )
+
+    def one_or_none(self) -> Optional[Row[_TP]]:
+        """Return at most one result or raise an exception.
+
+        Returns ``None`` if the result has no rows.
+        Raises :class:`.MultipleResultsFound`
+        if multiple rows are returned.
+
+        .. versionadded:: 1.4
+
+        :return: The first :class:`_engine.Row` or ``None`` if no row
+         is available.
+
+        :raises: :class:`.MultipleResultsFound`
+
+        .. seealso::
+
+            :meth:`_engine.Result.first`
+
+            :meth:`_engine.Result.one`
+
+        """
+        return self._only_one_row(
+            raise_for_second_row=True, raise_for_none=False, scalar=False
+        )
+
+    @overload
+    def scalar_one(self: Result[Tuple[_T]]) -> _T: ...
+
+    @overload
+    def scalar_one(self) -> Any: ...
+
+    def scalar_one(self) -> Any:
+        """Return exactly one scalar result or raise an exception.
+
+        This is equivalent to calling :meth:`_engine.Result.scalars` and
+        then :meth:`_engine.Result.one`.
+
+        .. seealso::
+
+            :meth:`_engine.Result.one`
+
+            :meth:`_engine.Result.scalars`
+
+        """
+        return self._only_one_row(
+            raise_for_second_row=True, raise_for_none=True, scalar=True
+        )
+
+    @overload
+    def scalar_one_or_none(self: Result[Tuple[_T]]) -> Optional[_T]: ...
+
+    @overload
+    def scalar_one_or_none(self) -> Optional[Any]: ...
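+
+    # a brief usage sketch (``conn`` and ``user_table`` are illustrative
+    # placeholders, not part of this module):
+    #
+    #     maybe_name = conn.execute(
+    #         select(user_table.c.name).where(user_table.c.id == 5)
+    #     ).scalar_one_or_none()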
+ + def scalar_one_or_none(self) -> Optional[Any]: + """Return exactly one scalar result or ``None``. + + This is equivalent to calling :meth:`_engine.Result.scalars` and + then :meth:`_engine.Result.one_or_none`. + + .. seealso:: + + :meth:`_engine.Result.one_or_none` + + :meth:`_engine.Result.scalars` + + """ + return self._only_one_row( + raise_for_second_row=True, raise_for_none=False, scalar=True + ) + + def one(self) -> Row[_TP]: + """Return exactly one row or raise an exception. + + Raises :class:`.NoResultFound` if the result returns no + rows, or :class:`.MultipleResultsFound` if multiple rows + would be returned. + + .. note:: This method returns one **row**, e.g. tuple, by default. + To return exactly one single scalar value, that is, the first + column of the first row, use the + :meth:`_engine.Result.scalar_one` method, or combine + :meth:`_engine.Result.scalars` and + :meth:`_engine.Result.one`. + + .. versionadded:: 1.4 + + :return: The first :class:`_engine.Row`. + + :raises: :class:`.MultipleResultsFound`, :class:`.NoResultFound` + + .. seealso:: + + :meth:`_engine.Result.first` + + :meth:`_engine.Result.one_or_none` + + :meth:`_engine.Result.scalar_one` + + """ + return self._only_one_row( + raise_for_second_row=True, raise_for_none=True, scalar=False + ) + + @overload + def scalar(self: Result[Tuple[_T]]) -> Optional[_T]: ... + + @overload + def scalar(self) -> Any: ... + + def scalar(self) -> Any: + """Fetch the first column of the first row, and close the result set. + + Returns ``None`` if there are no rows to fetch. + + No validation is performed to test if additional rows remain. + + After calling this method, the object is fully closed, + e.g. the :meth:`_engine.CursorResult.close` + method will have been called. + + :return: a Python scalar value, or ``None`` if no rows remain. + + """ + return self._only_one_row( + raise_for_second_row=False, raise_for_none=False, scalar=True + ) + + def freeze(self) -> FrozenResult[_TP]: + """Return a callable object that will produce copies of this + :class:`_engine.Result` when invoked. + + The callable object returned is an instance of + :class:`_engine.FrozenResult`. + + This is used for result set caching. The method must be called + on the result when it has been unconsumed, and calling the method + will consume the result fully. When the :class:`_engine.FrozenResult` + is retrieved from a cache, it can be called any number of times where + it will produce a new :class:`_engine.Result` object each time + against its stored set of rows. + + .. seealso:: + + :ref:`do_orm_execute_re_executing` - example usage within the + ORM to implement a result-set cache. + + """ + + return FrozenResult(self) + + def merge(self, *others: Result[Any]) -> MergedResult[_TP]: + """Merge this :class:`_engine.Result` with other compatible result + objects. + + The object returned is an instance of :class:`_engine.MergedResult`, + which will be composed of iterators from the given result + objects. + + The new result will use the metadata from this result object. + The subsequent result objects must be against an identical + set of result / cursor metadata, otherwise the behavior is + undefined. + + """ + return MergedResult(self._metadata, (self,) + others) + + +class FilterResult(ResultInternal[_R]): + """A wrapper for a :class:`_engine.Result` that returns objects other than + :class:`_engine.Row` objects, such as dictionaries or scalar objects. 
+ + :class:`_engine.FilterResult` is the common base for additional result + APIs including :class:`_engine.MappingResult`, + :class:`_engine.ScalarResult` and :class:`_engine.AsyncResult`. + + """ + + __slots__ = ( + "_real_result", + "_post_creational_filter", + "_metadata", + "_unique_filter_state", + "__dict__", + ) + + _post_creational_filter: Optional[Callable[[Any], Any]] + + _real_result: Result[Any] + + def __enter__(self) -> Self: + return self + + def __exit__(self, type_: Any, value: Any, traceback: Any) -> None: + self._real_result.__exit__(type_, value, traceback) + + @_generative + def yield_per(self, num: int) -> Self: + """Configure the row-fetching strategy to fetch ``num`` rows at a time. + + The :meth:`_engine.FilterResult.yield_per` method is a pass through + to the :meth:`_engine.Result.yield_per` method. See that method's + documentation for usage notes. + + .. versionadded:: 1.4.40 - added :meth:`_engine.FilterResult.yield_per` + so that the method is available on all result set implementations + + .. seealso:: + + :ref:`engine_stream_results` - describes Core behavior for + :meth:`_engine.Result.yield_per` + + :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel` + + """ + self._real_result = self._real_result.yield_per(num) + return self + + def _soft_close(self, hard: bool = False) -> None: + self._real_result._soft_close(hard=hard) + + @property + def _soft_closed(self) -> bool: + return self._real_result._soft_closed + + @property + def closed(self) -> bool: + """Return ``True`` if the underlying :class:`_engine.Result` reports + closed + + .. versionadded:: 1.4.43 + + """ + return self._real_result.closed + + def close(self) -> None: + """Close this :class:`_engine.FilterResult`. + + .. versionadded:: 1.4.43 + + """ + self._real_result.close() + + @property + def _attributes(self) -> Dict[Any, Any]: + return self._real_result._attributes + + def _fetchiter_impl(self) -> Iterator[_InterimRowType[Row[Any]]]: + return self._real_result._fetchiter_impl() + + def _fetchone_impl( + self, hard_close: bool = False + ) -> Optional[_InterimRowType[Row[Any]]]: + return self._real_result._fetchone_impl(hard_close=hard_close) + + def _fetchall_impl(self) -> List[_InterimRowType[Row[Any]]]: + return self._real_result._fetchall_impl() + + def _fetchmany_impl( + self, size: Optional[int] = None + ) -> List[_InterimRowType[Row[Any]]]: + return self._real_result._fetchmany_impl(size=size) + + +class ScalarResult(FilterResult[_R]): + """A wrapper for a :class:`_engine.Result` that returns scalar values + rather than :class:`_row.Row` values. + + The :class:`_engine.ScalarResult` object is acquired by calling the + :meth:`_engine.Result.scalars` method. + + A special limitation of :class:`_engine.ScalarResult` is that it has + no ``fetchone()`` method; since the semantics of ``fetchone()`` are that + the ``None`` value indicates no more results, this is not compatible + with :class:`_engine.ScalarResult` since there is no way to distinguish + between ``None`` as a row value versus ``None`` as an indicator. Use + ``next(result)`` to receive values individually. 
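+
+    E.g., a brief sketch of iterating values individually (``conn`` here is
+    an illustrative placeholder for an active :class:`_engine.Connection`)::
+
+        result = conn.execute(text("select int_id from table")).scalars()
+
+        # next() raises StopIteration when the result is exhausted, which
+        # avoids the ambiguity of a None return value
+        first_value = next(result)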
+ + """ + + __slots__ = () + + _generate_rows = False + + _post_creational_filter: Optional[Callable[[Any], Any]] + + def __init__(self, real_result: Result[Any], index: _KeyIndexType): + self._real_result = real_result + + if real_result._source_supports_scalars: + self._metadata = real_result._metadata + self._post_creational_filter = None + else: + self._metadata = real_result._metadata._reduce([index]) + self._post_creational_filter = operator.itemgetter(0) + + self._unique_filter_state = real_result._unique_filter_state + + def unique(self, strategy: Optional[_UniqueFilterType] = None) -> Self: + """Apply unique filtering to the objects returned by this + :class:`_engine.ScalarResult`. + + See :meth:`_engine.Result.unique` for usage details. + + """ + self._unique_filter_state = (set(), strategy) + return self + + def partitions(self, size: Optional[int] = None) -> Iterator[Sequence[_R]]: + """Iterate through sub-lists of elements of the size given. + + Equivalent to :meth:`_engine.Result.partitions` except that + scalar values, rather than :class:`_engine.Row` objects, + are returned. + + """ + + getter = self._manyrow_getter + + while True: + partition = getter(self, size) + if partition: + yield partition + else: + break + + def fetchall(self) -> Sequence[_R]: + """A synonym for the :meth:`_engine.ScalarResult.all` method.""" + + return self._allrows() + + def fetchmany(self, size: Optional[int] = None) -> Sequence[_R]: + """Fetch many objects. + + Equivalent to :meth:`_engine.Result.fetchmany` except that + scalar values, rather than :class:`_engine.Row` objects, + are returned. + + """ + return self._manyrow_getter(self, size) + + def all(self) -> Sequence[_R]: + """Return all scalar values in a sequence. + + Equivalent to :meth:`_engine.Result.all` except that + scalar values, rather than :class:`_engine.Row` objects, + are returned. + + """ + return self._allrows() + + def __iter__(self) -> Iterator[_R]: + return self._iter_impl() + + def __next__(self) -> _R: + return self._next_impl() + + def first(self) -> Optional[_R]: + """Fetch the first object or ``None`` if no object is present. + + Equivalent to :meth:`_engine.Result.first` except that + scalar values, rather than :class:`_engine.Row` objects, + are returned. + + + """ + return self._only_one_row( + raise_for_second_row=False, raise_for_none=False, scalar=False + ) + + def one_or_none(self) -> Optional[_R]: + """Return at most one object or raise an exception. + + Equivalent to :meth:`_engine.Result.one_or_none` except that + scalar values, rather than :class:`_engine.Row` objects, + are returned. + + """ + return self._only_one_row( + raise_for_second_row=True, raise_for_none=False, scalar=False + ) + + def one(self) -> _R: + """Return exactly one object or raise an exception. + + Equivalent to :meth:`_engine.Result.one` except that + scalar values, rather than :class:`_engine.Row` objects, + are returned. + + """ + return self._only_one_row( + raise_for_second_row=True, raise_for_none=True, scalar=False + ) + + +class TupleResult(FilterResult[_R], util.TypingOnly): + """A :class:`_engine.Result` that's typed as returning plain + Python tuples instead of rows. + + Since :class:`_engine.Row` acts like a tuple in every way already, + this class is a typing only class, regular :class:`_engine.Result` is + still used at runtime. + + """ + + __slots__ = () + + if TYPE_CHECKING: + + def partitions( + self, size: Optional[int] = None + ) -> Iterator[Sequence[_R]]: + """Iterate through sub-lists of elements of the size given. 
+
+            Equivalent to :meth:`_engine.Result.partitions` except that
+            tuple values, rather than :class:`_engine.Row` objects,
+            are returned.
+
+            """
+            ...
+
+        def fetchone(self) -> Optional[_R]:
+            """Fetch one tuple.
+
+            Equivalent to :meth:`_engine.Result.fetchone` except that
+            tuple values, rather than :class:`_engine.Row`
+            objects, are returned.
+
+            """
+            ...
+
+        def fetchall(self) -> Sequence[_R]:
+            """A synonym for the :meth:`_engine.TupleResult.all` method."""
+            ...
+
+        def fetchmany(self, size: Optional[int] = None) -> Sequence[_R]:
+            """Fetch many objects.
+
+            Equivalent to :meth:`_engine.Result.fetchmany` except that
+            tuple values, rather than :class:`_engine.Row` objects,
+            are returned.
+
+            """
+            ...
+
+        def all(self) -> Sequence[_R]:  # noqa: A001
+            """Return all tuple values in a sequence.
+
+            Equivalent to :meth:`_engine.Result.all` except that
+            tuple values, rather than :class:`_engine.Row` objects,
+            are returned.
+
+            """
+            ...
+
+        def __iter__(self) -> Iterator[_R]: ...
+
+        def __next__(self) -> _R: ...
+
+        def first(self) -> Optional[_R]:
+            """Fetch the first object or ``None`` if no object is present.
+
+            Equivalent to :meth:`_engine.Result.first` except that
+            tuple values, rather than :class:`_engine.Row` objects,
+            are returned.
+
+
+            """
+            ...
+
+        def one_or_none(self) -> Optional[_R]:
+            """Return at most one object or raise an exception.
+
+            Equivalent to :meth:`_engine.Result.one_or_none` except that
+            tuple values, rather than :class:`_engine.Row` objects,
+            are returned.
+
+            """
+            ...
+
+        def one(self) -> _R:
+            """Return exactly one object or raise an exception.
+
+            Equivalent to :meth:`_engine.Result.one` except that
+            tuple values, rather than :class:`_engine.Row` objects,
+            are returned.
+
+            """
+            ...
+
+        @overload
+        def scalar_one(self: TupleResult[Tuple[_T]]) -> _T: ...
+
+        @overload
+        def scalar_one(self) -> Any: ...
+
+        def scalar_one(self) -> Any:
+            """Return exactly one scalar result or raise an exception.
+
+            This is equivalent to calling :meth:`_engine.Result.scalars`
+            and then :meth:`_engine.Result.one`.
+
+            .. seealso::
+
+                :meth:`_engine.Result.one`
+
+                :meth:`_engine.Result.scalars`
+
+            """
+            ...
+
+        @overload
+        def scalar_one_or_none(
+            self: TupleResult[Tuple[_T]],
+        ) -> Optional[_T]: ...
+
+        @overload
+        def scalar_one_or_none(self) -> Optional[Any]: ...
+
+        def scalar_one_or_none(self) -> Optional[Any]:
+            """Return exactly one or no scalar result.
+
+            This is equivalent to calling :meth:`_engine.Result.scalars`
+            and then :meth:`_engine.Result.one_or_none`.
+
+            .. seealso::
+
+                :meth:`_engine.Result.one_or_none`
+
+                :meth:`_engine.Result.scalars`
+
+            """
+            ...
+
+        @overload
+        def scalar(self: TupleResult[Tuple[_T]]) -> Optional[_T]: ...
+
+        @overload
+        def scalar(self) -> Any: ...
+
+        def scalar(self) -> Any:
+            """Fetch the first column of the first row, and close the result
+            set.
+
+            Returns ``None`` if there are no rows to fetch.
+
+            No validation is performed to test if additional rows remain.
+
+            After calling this method, the object is fully closed,
+            e.g. the :meth:`_engine.CursorResult.close`
+            method will have been called.
+
+            :return: a Python scalar value, or ``None`` if no rows remain.
+
+            """
+            ...
+
+
+class MappingResult(_WithKeys, FilterResult[RowMapping]):
+    """A wrapper for a :class:`_engine.Result` that returns dictionary values
+    rather than :class:`_engine.Row` values.
+
+    The :class:`_engine.MappingResult` object is acquired by calling the
+    :meth:`_engine.Result.mappings` method.
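+
+    E.g., a brief sketch (``conn`` and the column names are illustrative
+    placeholders)::
+
+        result = conn.execute(text("select id, name from user_table"))
+
+        for row_mapping in result.mappings():
+            # dictionary-style access, keyed on the string column names
+            print(row_mapping["id"], row_mapping["name"])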
+ + """ + + __slots__ = () + + _generate_rows = True + + _post_creational_filter = operator.attrgetter("_mapping") + + def __init__(self, result: Result[Any]): + self._real_result = result + self._unique_filter_state = result._unique_filter_state + self._metadata = result._metadata + if result._source_supports_scalars: + self._metadata = self._metadata._reduce([0]) + + def unique(self, strategy: Optional[_UniqueFilterType] = None) -> Self: + """Apply unique filtering to the objects returned by this + :class:`_engine.MappingResult`. + + See :meth:`_engine.Result.unique` for usage details. + + """ + self._unique_filter_state = (set(), strategy) + return self + + def columns(self, *col_expressions: _KeyIndexType) -> Self: + r"""Establish the columns that should be returned in each row.""" + return self._column_slices(col_expressions) + + def partitions( + self, size: Optional[int] = None + ) -> Iterator[Sequence[RowMapping]]: + """Iterate through sub-lists of elements of the size given. + + Equivalent to :meth:`_engine.Result.partitions` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. + + """ + + getter = self._manyrow_getter + + while True: + partition = getter(self, size) + if partition: + yield partition + else: + break + + def fetchall(self) -> Sequence[RowMapping]: + """A synonym for the :meth:`_engine.MappingResult.all` method.""" + + return self._allrows() + + def fetchone(self) -> Optional[RowMapping]: + """Fetch one object. + + Equivalent to :meth:`_engine.Result.fetchone` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. + + """ + + row = self._onerow_getter(self) + if row is _NO_ROW: + return None + else: + return row + + def fetchmany(self, size: Optional[int] = None) -> Sequence[RowMapping]: + """Fetch many objects. + + Equivalent to :meth:`_engine.Result.fetchmany` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. + + """ + + return self._manyrow_getter(self, size) + + def all(self) -> Sequence[RowMapping]: + """Return all scalar values in a sequence. + + Equivalent to :meth:`_engine.Result.all` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. + + """ + + return self._allrows() + + def __iter__(self) -> Iterator[RowMapping]: + return self._iter_impl() + + def __next__(self) -> RowMapping: + return self._next_impl() + + def first(self) -> Optional[RowMapping]: + """Fetch the first object or ``None`` if no object is present. + + Equivalent to :meth:`_engine.Result.first` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. + + + """ + return self._only_one_row( + raise_for_second_row=False, raise_for_none=False, scalar=False + ) + + def one_or_none(self) -> Optional[RowMapping]: + """Return at most one object or raise an exception. + + Equivalent to :meth:`_engine.Result.one_or_none` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. + + """ + return self._only_one_row( + raise_for_second_row=True, raise_for_none=False, scalar=False + ) + + def one(self) -> RowMapping: + """Return exactly one object or raise an exception. + + Equivalent to :meth:`_engine.Result.one` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. 
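+
+        E.g., a minimal sketch (the statement here is an illustrative
+        placeholder)::
+
+            mapping = conn.execute(
+                text("select id, name from user_table where id = 5")
+            ).mappings().one()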
+ + """ + return self._only_one_row( + raise_for_second_row=True, raise_for_none=True, scalar=False + ) + + +class FrozenResult(Generic[_TP]): + """Represents a :class:`_engine.Result` object in a "frozen" state suitable + for caching. + + The :class:`_engine.FrozenResult` object is returned from the + :meth:`_engine.Result.freeze` method of any :class:`_engine.Result` + object. + + A new iterable :class:`_engine.Result` object is generated from a fixed + set of data each time the :class:`_engine.FrozenResult` is invoked as + a callable:: + + + result = connection.execute(query) + + frozen = result.freeze() + + unfrozen_result_one = frozen() + + for row in unfrozen_result_one: + print(row) + + unfrozen_result_two = frozen() + rows = unfrozen_result_two.all() + + # ... etc + + .. versionadded:: 1.4 + + .. seealso:: + + :ref:`do_orm_execute_re_executing` - example usage within the + ORM to implement a result-set cache. + + :func:`_orm.loading.merge_frozen_result` - ORM function to merge + a frozen result back into a :class:`_orm.Session`. + + """ + + data: Sequence[Any] + + def __init__(self, result: Result[_TP]): + self.metadata = result._metadata._for_freeze() + self._source_supports_scalars = result._source_supports_scalars + self._attributes = result._attributes + + if self._source_supports_scalars: + self.data = list(result._raw_row_iterator()) + else: + self.data = result.fetchall() + + def rewrite_rows(self) -> Sequence[Sequence[Any]]: + if self._source_supports_scalars: + return [[elem] for elem in self.data] + else: + return [list(row) for row in self.data] + + def with_new_rows( + self, tuple_data: Sequence[Row[_TP]] + ) -> FrozenResult[_TP]: + fr = FrozenResult.__new__(FrozenResult) + fr.metadata = self.metadata + fr._attributes = self._attributes + fr._source_supports_scalars = self._source_supports_scalars + + if self._source_supports_scalars: + fr.data = [d[0] for d in tuple_data] + else: + fr.data = tuple_data + return fr + + def __call__(self) -> Result[_TP]: + result: IteratorResult[_TP] = IteratorResult( + self.metadata, iter(self.data) + ) + result._attributes = self._attributes + result._source_supports_scalars = self._source_supports_scalars + return result + + +class IteratorResult(Result[_TP]): + """A :class:`_engine.Result` that gets data from a Python iterator of + :class:`_engine.Row` objects or similar row-like data. + + .. versionadded:: 1.4 + + """ + + _hard_closed = False + _soft_closed = False + + def __init__( + self, + cursor_metadata: ResultMetaData, + iterator: Iterator[_InterimSupportsScalarsRowType], + raw: Optional[Result[Any]] = None, + _source_supports_scalars: bool = False, + ): + self._metadata = cursor_metadata + self.iterator = iterator + self.raw = raw + self._source_supports_scalars = _source_supports_scalars + + @property + def closed(self) -> bool: + """Return ``True`` if this :class:`_engine.IteratorResult` has + been closed + + .. 
versionadded:: 1.4.43 + + """ + return self._hard_closed + + def _soft_close(self, hard: bool = False, **kw: Any) -> None: + if hard: + self._hard_closed = True + if self.raw is not None: + self.raw._soft_close(hard=hard, **kw) + self.iterator = iter([]) + self._reset_memoizations() + self._soft_closed = True + + def _raise_hard_closed(self) -> NoReturn: + raise exc.ResourceClosedError("This result object is closed.") + + def _raw_row_iterator(self) -> Iterator[_RowData]: + return self.iterator + + def _fetchiter_impl(self) -> Iterator[_InterimSupportsScalarsRowType]: + if self._hard_closed: + self._raise_hard_closed() + return self.iterator + + def _fetchone_impl( + self, hard_close: bool = False + ) -> Optional[_InterimRowType[Row[Any]]]: + if self._hard_closed: + self._raise_hard_closed() + + row = next(self.iterator, _NO_ROW) + if row is _NO_ROW: + self._soft_close(hard=hard_close) + return None + else: + return row + + def _fetchall_impl(self) -> List[_InterimRowType[Row[Any]]]: + if self._hard_closed: + self._raise_hard_closed() + try: + return list(self.iterator) + finally: + self._soft_close() + + def _fetchmany_impl( + self, size: Optional[int] = None + ) -> List[_InterimRowType[Row[Any]]]: + if self._hard_closed: + self._raise_hard_closed() + + return list(itertools.islice(self.iterator, 0, size)) + + +def null_result() -> IteratorResult[Any]: + return IteratorResult(SimpleResultMetaData([]), iter([])) + + +class ChunkedIteratorResult(IteratorResult[_TP]): + """An :class:`_engine.IteratorResult` that works from an + iterator-producing callable. + + The given ``chunks`` argument is a function that is given a number of rows + to return in each chunk, or ``None`` for all rows. The function should + then return an un-consumed iterator of lists, each list of the requested + size. + + The function can be called at any time again, in which case it should + continue from the same result set but adjust the chunk size as given. + + .. versionadded:: 1.4 + + """ + + def __init__( + self, + cursor_metadata: ResultMetaData, + chunks: Callable[ + [Optional[int]], Iterator[Sequence[_InterimRowType[_R]]] + ], + source_supports_scalars: bool = False, + raw: Optional[Result[Any]] = None, + dynamic_yield_per: bool = False, + ): + self._metadata = cursor_metadata + self.chunks = chunks + self._source_supports_scalars = source_supports_scalars + self.raw = raw + self.iterator = itertools.chain.from_iterable(self.chunks(None)) + self.dynamic_yield_per = dynamic_yield_per + + @_generative + def yield_per(self, num: int) -> Self: + # TODO: this throws away the iterator which may be holding + # onto a chunk. the yield_per cannot be changed once any + # rows have been fetched. either find a way to enforce this, + # or we can't use itertools.chain and will instead have to + # keep track. + + self._yield_per = num + self.iterator = itertools.chain.from_iterable(self.chunks(num)) + return self + + def _soft_close(self, hard: bool = False, **kw: Any) -> None: + super()._soft_close(hard=hard, **kw) + self.chunks = lambda size: [] # type: ignore + + def _fetchmany_impl( + self, size: Optional[int] = None + ) -> List[_InterimRowType[Row[Any]]]: + if self.dynamic_yield_per: + self.iterator = itertools.chain.from_iterable(self.chunks(size)) + return super()._fetchmany_impl(size=size) + + +class MergedResult(IteratorResult[_TP]): + """A :class:`_engine.Result` that is merged from any number of + :class:`_engine.Result` objects. + + Returned by the :meth:`_engine.Result.merge` method. + + .. 
versionadded:: 1.4 + + """ + + closed = False + rowcount: Optional[int] + + def __init__( + self, cursor_metadata: ResultMetaData, results: Sequence[Result[_TP]] + ): + self._results = results + super().__init__( + cursor_metadata, + itertools.chain.from_iterable( + r._raw_row_iterator() for r in results + ), + ) + + self._unique_filter_state = results[0]._unique_filter_state + self._yield_per = results[0]._yield_per + + # going to try something w/ this in next rev + self._source_supports_scalars = results[0]._source_supports_scalars + + self._attributes = self._attributes.merge_with( + *[r._attributes for r in results] + ) + + def _soft_close(self, hard: bool = False, **kw: Any) -> None: + for r in self._results: + r._soft_close(hard=hard, **kw) + if hard: + self.closed = True diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/row.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/row.py new file mode 100644 index 00000000..bcaffee4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/row.py @@ -0,0 +1,401 @@ +# engine/row.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Define row constructs including :class:`.Row`.""" + +from __future__ import annotations + +from abc import ABC +import collections.abc as collections_abc +import operator +import typing +from typing import Any +from typing import Callable +from typing import Dict +from typing import Generic +from typing import Iterator +from typing import List +from typing import Mapping +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from ..sql import util as sql_util +from ..util import deprecated +from ..util._has_cy import HAS_CYEXTENSION + +if TYPE_CHECKING or not HAS_CYEXTENSION: + from ._py_row import BaseRow as BaseRow +else: + from sqlalchemy.cyextension.resultproxy import BaseRow as BaseRow + +if TYPE_CHECKING: + from .result import _KeyType + from .result import _ProcessorsType + from .result import RMKeyView + +_T = TypeVar("_T", bound=Any) +_TP = TypeVar("_TP", bound=Tuple[Any, ...]) + + +class Row(BaseRow, Sequence[Any], Generic[_TP]): + """Represent a single result row. + + The :class:`.Row` object represents a row of a database result. It is + typically associated in the 1.x series of SQLAlchemy with the + :class:`_engine.CursorResult` object, however is also used by the ORM for + tuple-like results as of SQLAlchemy 1.4. + + The :class:`.Row` object seeks to act as much like a Python named + tuple as possible. For mapping (i.e. dictionary) behavior on a row, + such as testing for containment of keys, refer to the :attr:`.Row._mapping` + attribute. + + .. seealso:: + + :ref:`tutorial_selecting_data` - includes examples of selecting + rows from SELECT statements. + + .. versionchanged:: 1.4 + + Renamed ``RowProxy`` to :class:`.Row`. :class:`.Row` is no longer a + "proxy" object in that it contains the final form of data within it, + and now acts mostly like a named tuple. Mapping-like functionality is + moved to the :attr:`.Row._mapping` attribute. See + :ref:`change_4710_core` for background on this change. 
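+
+    E.g., a brief sketch of the access patterns (the statement here is an
+    illustrative placeholder)::
+
+        row = conn.execute(text("select 1 as id, 'jack' as name")).first()
+
+        row[0]  # positional access, like a plain tuple
+        row.name  # attribute access, like a named tuple
+        row._mapping["name"]  # mapping-style access via ._mapping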
+ + """ + + __slots__ = () + + def __setattr__(self, name: str, value: Any) -> NoReturn: + raise AttributeError("can't set attribute") + + def __delattr__(self, name: str) -> NoReturn: + raise AttributeError("can't delete attribute") + + def _tuple(self) -> _TP: + """Return a 'tuple' form of this :class:`.Row`. + + At runtime, this method returns "self"; the :class:`.Row` object is + already a named tuple. However, at the typing level, if this + :class:`.Row` is typed, the "tuple" return type will be a :pep:`484` + ``Tuple`` datatype that contains typing information about individual + elements, supporting typed unpacking and attribute access. + + .. versionadded:: 2.0.19 - The :meth:`.Row._tuple` method supersedes + the previous :meth:`.Row.tuple` method, which is now underscored + to avoid name conflicts with column names in the same way as other + named-tuple methods on :class:`.Row`. + + .. seealso:: + + :attr:`.Row._t` - shorthand attribute notation + + :meth:`.Result.tuples` + + + """ + return self # type: ignore + + @deprecated( + "2.0.19", + "The :meth:`.Row.tuple` method is deprecated in favor of " + ":meth:`.Row._tuple`; all :class:`.Row` " + "methods and library-level attributes are intended to be underscored " + "to avoid name conflicts. Please use :meth:`Row._tuple`.", + ) + def tuple(self) -> _TP: + """Return a 'tuple' form of this :class:`.Row`. + + .. versionadded:: 2.0 + + """ + return self._tuple() + + @property + def _t(self) -> _TP: + """A synonym for :meth:`.Row._tuple`. + + .. versionadded:: 2.0.19 - The :attr:`.Row._t` attribute supersedes + the previous :attr:`.Row.t` attribute, which is now underscored + to avoid name conflicts with column names in the same way as other + named-tuple methods on :class:`.Row`. + + .. seealso:: + + :attr:`.Result.t` + """ + return self # type: ignore + + @property + @deprecated( + "2.0.19", + "The :attr:`.Row.t` attribute is deprecated in favor of " + ":attr:`.Row._t`; all :class:`.Row` " + "methods and library-level attributes are intended to be underscored " + "to avoid name conflicts. Please use :attr:`Row._t`.", + ) + def t(self) -> _TP: + """A synonym for :meth:`.Row._tuple`. + + .. versionadded:: 2.0 + + """ + return self._t + + @property + def _mapping(self) -> RowMapping: + """Return a :class:`.RowMapping` for this :class:`.Row`. + + This object provides a consistent Python mapping (i.e. dictionary) + interface for the data contained within the row. The :class:`.Row` + by itself behaves like a named tuple. + + .. seealso:: + + :attr:`.Row._fields` + + .. 
versionadded:: 1.4 + + """ + return RowMapping(self._parent, None, self._key_to_index, self._data) + + def _filter_on_values( + self, processor: Optional[_ProcessorsType] + ) -> Row[Any]: + return Row(self._parent, processor, self._key_to_index, self._data) + + if not TYPE_CHECKING: + + def _special_name_accessor(name: str) -> Any: + """Handle ambiguous names such as "count" and "index" """ + + @property + def go(self: Row) -> Any: + if self._parent._has_key(name): + return self.__getattr__(name) + else: + + def meth(*arg: Any, **kw: Any) -> Any: + return getattr(collections_abc.Sequence, name)( + self, *arg, **kw + ) + + return meth + + return go + + count = _special_name_accessor("count") + index = _special_name_accessor("index") + + def __contains__(self, key: Any) -> bool: + return key in self._data + + def _op(self, other: Any, op: Callable[[Any, Any], bool]) -> bool: + return ( + op(self._to_tuple_instance(), other._to_tuple_instance()) + if isinstance(other, Row) + else op(self._to_tuple_instance(), other) + ) + + __hash__ = BaseRow.__hash__ + + if TYPE_CHECKING: + + @overload + def __getitem__(self, index: int) -> Any: ... + + @overload + def __getitem__(self, index: slice) -> Sequence[Any]: ... + + def __getitem__(self, index: Union[int, slice]) -> Any: ... + + def __lt__(self, other: Any) -> bool: + return self._op(other, operator.lt) + + def __le__(self, other: Any) -> bool: + return self._op(other, operator.le) + + def __ge__(self, other: Any) -> bool: + return self._op(other, operator.ge) + + def __gt__(self, other: Any) -> bool: + return self._op(other, operator.gt) + + def __eq__(self, other: Any) -> bool: + return self._op(other, operator.eq) + + def __ne__(self, other: Any) -> bool: + return self._op(other, operator.ne) + + def __repr__(self) -> str: + return repr(sql_util._repr_row(self)) + + @property + def _fields(self) -> Tuple[str, ...]: + """Return a tuple of string keys as represented by this + :class:`.Row`. + + The keys can represent the labels of the columns returned by a core + statement or the names of the orm classes returned by an orm + execution. + + This attribute is analogous to the Python named tuple ``._fields`` + attribute. + + .. versionadded:: 1.4 + + .. seealso:: + + :attr:`.Row._mapping` + + """ + return tuple([k for k in self._parent.keys if k is not None]) + + def _asdict(self) -> Dict[str, Any]: + """Return a new dict which maps field names to their corresponding + values. + + This method is analogous to the Python named tuple ``._asdict()`` + method, and works by applying the ``dict()`` constructor to the + :attr:`.Row._mapping` attribute. + + .. versionadded:: 1.4 + + .. 
seealso:: + + :attr:`.Row._mapping` + + """ + return dict(self._mapping) + + +BaseRowProxy = BaseRow +RowProxy = Row + + +class ROMappingView(ABC): + __slots__ = () + + _items: Sequence[Any] + _mapping: Mapping["_KeyType", Any] + + def __init__( + self, mapping: Mapping["_KeyType", Any], items: Sequence[Any] + ): + self._mapping = mapping # type: ignore[misc] + self._items = items # type: ignore[misc] + + def __len__(self) -> int: + return len(self._items) + + def __repr__(self) -> str: + return "{0.__class__.__name__}({0._mapping!r})".format(self) + + def __iter__(self) -> Iterator[Any]: + return iter(self._items) + + def __contains__(self, item: Any) -> bool: + return item in self._items + + def __eq__(self, other: Any) -> bool: + return list(other) == list(self) + + def __ne__(self, other: Any) -> bool: + return list(other) != list(self) + + +class ROMappingKeysValuesView( + ROMappingView, typing.KeysView["_KeyType"], typing.ValuesView[Any] +): + __slots__ = ("_items",) # mapping slot is provided by KeysView + + +class ROMappingItemsView(ROMappingView, typing.ItemsView["_KeyType", Any]): + __slots__ = ("_items",) # mapping slot is provided by ItemsView + + +class RowMapping(BaseRow, typing.Mapping["_KeyType", Any]): + """A ``Mapping`` that maps column names and objects to :class:`.Row` + values. + + The :class:`.RowMapping` is available from a :class:`.Row` via the + :attr:`.Row._mapping` attribute, as well as from the iterable interface + provided by the :class:`.MappingResult` object returned by the + :meth:`_engine.Result.mappings` method. + + :class:`.RowMapping` supplies Python mapping (i.e. dictionary) access to + the contents of the row. This includes support for testing of + containment of specific keys (string column names or objects), as well + as iteration of keys, values, and items:: + + for row in result: + if 'a' in row._mapping: + print("Column 'a': %s" % row._mapping['a']) + + print("Column b: %s" % row._mapping[table.c.b]) + + + .. versionadded:: 1.4 The :class:`.RowMapping` object replaces the + mapping-like access previously provided by a database result row, + which now seeks to behave mostly like a named tuple. + + """ + + __slots__ = () + + if TYPE_CHECKING: + + def __getitem__(self, key: _KeyType) -> Any: ... + + else: + __getitem__ = BaseRow._get_by_key_impl_mapping + + def _values_impl(self) -> List[Any]: + return list(self._data) + + def __iter__(self) -> Iterator[str]: + return (k for k in self._parent.keys if k is not None) + + def __len__(self) -> int: + return len(self._data) + + def __contains__(self, key: object) -> bool: + return self._parent._has_key(key) + + def __repr__(self) -> str: + return repr(dict(self)) + + def items(self) -> ROMappingItemsView: + """Return a view of key/value tuples for the elements in the + underlying :class:`.Row`. + + """ + return ROMappingItemsView( + self, [(key, self[key]) for key in self.keys()] + ) + + def keys(self) -> RMKeyView: + """Return a view of 'keys' for string column names represented + by the underlying :class:`.Row`. + + """ + + return self._parent.keys + + def values(self) -> ROMappingKeysValuesView: + """Return a view of values for the values represented in the + underlying :class:`.Row`. 
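+
+        E.g., a brief sketch (the statement is an illustrative placeholder)::
+
+            row = conn.execute(text("select 1, 2")).first()
+            list(row._mapping.values())  # [1, 2]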
+ + """ + return ROMappingKeysValuesView(self, self._values_impl()) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/strategies.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/strategies.py new file mode 100644 index 00000000..30c331e8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/strategies.py @@ -0,0 +1,19 @@ +# engine/strategies.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Deprecated mock engine strategy used by Alembic. + + +""" + +from __future__ import annotations + +from .mock import MockConnection # noqa + + +class MockEngineStrategy: + MockConnection = MockConnection diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/url.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/url.py new file mode 100644 index 00000000..1eeb73a2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/url.py @@ -0,0 +1,910 @@ +# engine/url.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Provides the :class:`~sqlalchemy.engine.url.URL` class which encapsulates +information about a database connection specification. + +The URL object is created automatically when +:func:`~sqlalchemy.engine.create_engine` is called with a string +argument; alternatively, the URL is a public-facing construct which can +be used directly and is also accepted directly by ``create_engine()``. +""" + +from __future__ import annotations + +import collections.abc as collections_abc +import re +from typing import Any +from typing import cast +from typing import Dict +from typing import Iterable +from typing import List +from typing import Mapping +from typing import NamedTuple +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import Union +from urllib.parse import parse_qsl +from urllib.parse import quote +from urllib.parse import quote_plus +from urllib.parse import unquote + +from .interfaces import Dialect +from .. import exc +from .. import util +from ..dialects import plugins +from ..dialects import registry + + +class URL(NamedTuple): + """ + Represent the components of a URL used to connect to a database. + + URLs are typically constructed from a fully formatted URL string, where the + :func:`.make_url` function is used internally by the + :func:`_sa.create_engine` function in order to parse the URL string into + its individual components, which are then used to construct a new + :class:`.URL` object. When parsing from a formatted URL string, the parsing + format generally follows + `RFC-1738 `_, with some exceptions. + + A :class:`_engine.URL` object may also be produced directly, either by + using the :func:`.make_url` function with a fully formed URL string, or + by using the :meth:`_engine.URL.create` constructor in order + to construct a :class:`_engine.URL` programmatically given individual + fields. The resulting :class:`.URL` object may be passed directly to + :func:`_sa.create_engine` in place of a string argument, which will bypass + the usage of :func:`.make_url` within the engine's creation process. + + .. versionchanged:: 1.4 + + The :class:`_engine.URL` object is now an immutable object. 
To + create a URL, use the :func:`_engine.make_url` or + :meth:`_engine.URL.create` function / method. To modify + a :class:`_engine.URL`, use methods like + :meth:`_engine.URL.set` and + :meth:`_engine.URL.update_query_dict` to return a new + :class:`_engine.URL` object with modifications. See notes for this + change at :ref:`change_5526`. + + .. seealso:: + + :ref:`database_urls` + + :class:`_engine.URL` contains the following attributes: + + * :attr:`_engine.URL.drivername`: database backend and driver name, such as + ``postgresql+psycopg2`` + * :attr:`_engine.URL.username`: username string + * :attr:`_engine.URL.password`: password string + * :attr:`_engine.URL.host`: string hostname + * :attr:`_engine.URL.port`: integer port number + * :attr:`_engine.URL.database`: string database name + * :attr:`_engine.URL.query`: an immutable mapping representing the query + string. contains strings for keys and either strings or tuples of + strings for values. + + + """ + + drivername: str + """database backend and driver name, such as + ``postgresql+psycopg2`` + + """ + + username: Optional[str] + "username string" + + password: Optional[str] + """password, which is normally a string but may also be any + object that has a ``__str__()`` method.""" + + host: Optional[str] + """hostname or IP number. May also be a data source name for some + drivers.""" + + port: Optional[int] + """integer port number""" + + database: Optional[str] + """database name""" + + query: util.immutabledict[str, Union[Tuple[str, ...], str]] + """an immutable mapping representing the query string. contains strings + for keys and either strings or tuples of strings for values, e.g.:: + + >>> from sqlalchemy.engine import make_url + >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt") + >>> url.query + immutabledict({'alt_host': ('host1', 'host2'), 'ssl_cipher': '/path/to/crt'}) + + To create a mutable copy of this mapping, use the ``dict`` constructor:: + + mutable_query_opts = dict(url.query) + + .. seealso:: + + :attr:`_engine.URL.normalized_query` - normalizes all values into sequences + for consistent processing + + Methods for altering the contents of :attr:`_engine.URL.query`: + + :meth:`_engine.URL.update_query_dict` + + :meth:`_engine.URL.update_query_string` + + :meth:`_engine.URL.update_query_pairs` + + :meth:`_engine.URL.difference_update_query` + + """ # noqa: E501 + + @classmethod + def create( + cls, + drivername: str, + username: Optional[str] = None, + password: Optional[str] = None, + host: Optional[str] = None, + port: Optional[int] = None, + database: Optional[str] = None, + query: Mapping[str, Union[Sequence[str], str]] = util.EMPTY_DICT, + ) -> URL: + """Create a new :class:`_engine.URL` object. + + .. seealso:: + + :ref:`database_urls` + + :param drivername: the name of the database backend. This name will + correspond to a module in sqlalchemy/databases or a third party + plug-in. + :param username: The user name. + :param password: database password. Is typically a string, but may + also be an object that can be stringified with ``str()``. + + .. note:: The password string should **not** be URL encoded when + passed as an argument to :meth:`_engine.URL.create`; the string + should contain the password characters exactly as they would be + typed. + + .. note:: A password-producing object will be stringified only + **once** per :class:`_engine.Engine` object. 
For dynamic password + generation per connect, see :ref:`engines_dynamic_tokens`. + + :param host: The name of the host. + :param port: The port number. + :param database: The database name. + :param query: A dictionary of string keys to string values to be passed + to the dialect and/or the DBAPI upon connect. To specify non-string + parameters to a Python DBAPI directly, use the + :paramref:`_sa.create_engine.connect_args` parameter to + :func:`_sa.create_engine`. See also + :attr:`_engine.URL.normalized_query` for a dictionary that is + consistently string->list of string. + :return: new :class:`_engine.URL` object. + + .. versionadded:: 1.4 + + The :class:`_engine.URL` object is now an **immutable named + tuple**. In addition, the ``query`` dictionary is also immutable. + To create a URL, use the :func:`_engine.url.make_url` or + :meth:`_engine.URL.create` function/ method. To modify a + :class:`_engine.URL`, use the :meth:`_engine.URL.set` and + :meth:`_engine.URL.update_query` methods. + + """ + + return cls( + cls._assert_str(drivername, "drivername"), + cls._assert_none_str(username, "username"), + password, + cls._assert_none_str(host, "host"), + cls._assert_port(port), + cls._assert_none_str(database, "database"), + cls._str_dict(query), + ) + + @classmethod + def _assert_port(cls, port: Optional[int]) -> Optional[int]: + if port is None: + return None + try: + return int(port) + except TypeError: + raise TypeError("Port argument must be an integer or None") + + @classmethod + def _assert_str(cls, v: str, paramname: str) -> str: + if not isinstance(v, str): + raise TypeError("%s must be a string" % paramname) + return v + + @classmethod + def _assert_none_str( + cls, v: Optional[str], paramname: str + ) -> Optional[str]: + if v is None: + return v + + return cls._assert_str(v, paramname) + + @classmethod + def _str_dict( + cls, + dict_: Optional[ + Union[ + Sequence[Tuple[str, Union[Sequence[str], str]]], + Mapping[str, Union[Sequence[str], str]], + ] + ], + ) -> util.immutabledict[str, Union[Tuple[str, ...], str]]: + if dict_ is None: + return util.EMPTY_DICT + + @overload + def _assert_value( + val: str, + ) -> str: ... + + @overload + def _assert_value( + val: Sequence[str], + ) -> Union[str, Tuple[str, ...]]: ... + + def _assert_value( + val: Union[str, Sequence[str]], + ) -> Union[str, Tuple[str, ...]]: + if isinstance(val, str): + return val + elif isinstance(val, collections_abc.Sequence): + return tuple(_assert_value(elem) for elem in val) + else: + raise TypeError( + "Query dictionary values must be strings or " + "sequences of strings" + ) + + def _assert_str(v: str) -> str: + if not isinstance(v, str): + raise TypeError("Query dictionary keys must be strings") + return v + + dict_items: Iterable[Tuple[str, Union[Sequence[str], str]]] + if isinstance(dict_, collections_abc.Sequence): + dict_items = dict_ + else: + dict_items = dict_.items() + + return util.immutabledict( + { + _assert_str(key): _assert_value( + value, + ) + for key, value in dict_items + } + ) + + def set( + self, + drivername: Optional[str] = None, + username: Optional[str] = None, + password: Optional[str] = None, + host: Optional[str] = None, + port: Optional[int] = None, + database: Optional[str] = None, + query: Optional[Mapping[str, Union[Sequence[str], str]]] = None, + ) -> URL: + """return a new :class:`_engine.URL` object with modifications. + + Values are used if they are non-None. 
To set a value to ``None`` + explicitly, use the :meth:`_engine.URL._replace` method adapted + from ``namedtuple``. + + :param drivername: new drivername + :param username: new username + :param password: new password + :param host: new hostname + :param port: new port + :param query: new query parameters, passed a dict of string keys + referring to string or sequence of string values. Fully + replaces the previous list of arguments. + + :return: new :class:`_engine.URL` object. + + .. versionadded:: 1.4 + + .. seealso:: + + :meth:`_engine.URL.update_query_dict` + + """ + + kw: Dict[str, Any] = {} + if drivername is not None: + kw["drivername"] = drivername + if username is not None: + kw["username"] = username + if password is not None: + kw["password"] = password + if host is not None: + kw["host"] = host + if port is not None: + kw["port"] = port + if database is not None: + kw["database"] = database + if query is not None: + kw["query"] = query + + return self._assert_replace(**kw) + + def _assert_replace(self, **kw: Any) -> URL: + """argument checks before calling _replace()""" + + if "drivername" in kw: + self._assert_str(kw["drivername"], "drivername") + for name in "username", "host", "database": + if name in kw: + self._assert_none_str(kw[name], name) + if "port" in kw: + self._assert_port(kw["port"]) + if "query" in kw: + kw["query"] = self._str_dict(kw["query"]) + + return self._replace(**kw) + + def update_query_string( + self, query_string: str, append: bool = False + ) -> URL: + """Return a new :class:`_engine.URL` object with the :attr:`_engine.URL.query` + parameter dictionary updated by the given query string. + + E.g.:: + + >>> from sqlalchemy.engine import make_url + >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname") + >>> url = url.update_query_string("alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt") + >>> str(url) + 'postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt' + + :param query_string: a URL escaped query string, not including the + question mark. + + :param append: if True, parameters in the existing query string will + not be removed; new parameters will be in addition to those present. + If left at its default of False, keys present in the given query + parameters will replace those of the existing query string. + + .. versionadded:: 1.4 + + .. seealso:: + + :attr:`_engine.URL.query` + + :meth:`_engine.URL.update_query_dict` + + """ # noqa: E501 + return self.update_query_pairs(parse_qsl(query_string), append=append) + + def update_query_pairs( + self, + key_value_pairs: Iterable[Tuple[str, Union[str, List[str]]]], + append: bool = False, + ) -> URL: + """Return a new :class:`_engine.URL` object with the + :attr:`_engine.URL.query` + parameter dictionary updated by the given sequence of key/value pairs + + E.g.:: + + >>> from sqlalchemy.engine import make_url + >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname") + >>> url = url.update_query_pairs([("alt_host", "host1"), ("alt_host", "host2"), ("ssl_cipher", "/path/to/crt")]) + >>> str(url) + 'postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt' + + :param key_value_pairs: A sequence of tuples containing two strings + each. + + :param append: if True, parameters in the existing query string will + not be removed; new parameters will be in addition to those present. 
+ If left at its default of False, keys present in the given query + parameters will replace those of the existing query string. + + .. versionadded:: 1.4 + + .. seealso:: + + :attr:`_engine.URL.query` + + :meth:`_engine.URL.difference_update_query` + + :meth:`_engine.URL.set` + + """ # noqa: E501 + + existing_query = self.query + new_keys: Dict[str, Union[str, List[str]]] = {} + + for key, value in key_value_pairs: + if key in new_keys: + new_keys[key] = util.to_list(new_keys[key]) + cast("List[str]", new_keys[key]).append(cast(str, value)) + else: + new_keys[key] = ( + list(value) if isinstance(value, (list, tuple)) else value + ) + + new_query: Mapping[str, Union[str, Sequence[str]]] + if append: + new_query = {} + + for k in new_keys: + if k in existing_query: + new_query[k] = tuple( + util.to_list(existing_query[k]) + + util.to_list(new_keys[k]) + ) + else: + new_query[k] = new_keys[k] + + new_query.update( + { + k: existing_query[k] + for k in set(existing_query).difference(new_keys) + } + ) + else: + new_query = self.query.union( + { + k: tuple(v) if isinstance(v, list) else v + for k, v in new_keys.items() + } + ) + return self.set(query=new_query) + + def update_query_dict( + self, + query_parameters: Mapping[str, Union[str, List[str]]], + append: bool = False, + ) -> URL: + """Return a new :class:`_engine.URL` object with the + :attr:`_engine.URL.query` parameter dictionary updated by the given + dictionary. + + The dictionary typically contains string keys and string values. + In order to represent a query parameter that is expressed multiple + times, pass a sequence of string values. + + E.g.:: + + + >>> from sqlalchemy.engine import make_url + >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname") + >>> url = url.update_query_dict({"alt_host": ["host1", "host2"], "ssl_cipher": "/path/to/crt"}) + >>> str(url) + 'postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt' + + + :param query_parameters: A dictionary with string keys and values + that are either strings, or sequences of strings. + + :param append: if True, parameters in the existing query string will + not be removed; new parameters will be in addition to those present. + If left at its default of False, keys present in the given query + parameters will replace those of the existing query string. + + + .. versionadded:: 1.4 + + .. seealso:: + + :attr:`_engine.URL.query` + + :meth:`_engine.URL.update_query_string` + + :meth:`_engine.URL.update_query_pairs` + + :meth:`_engine.URL.difference_update_query` + + :meth:`_engine.URL.set` + + """ # noqa: E501 + return self.update_query_pairs(query_parameters.items(), append=append) + + def difference_update_query(self, names: Iterable[str]) -> URL: + """ + Remove the given names from the :attr:`_engine.URL.query` dictionary, + returning the new :class:`_engine.URL`. + + E.g.:: + + url = url.difference_update_query(['foo', 'bar']) + + Equivalent to using :meth:`_engine.URL.set` as follows:: + + url = url.set( + query={ + key: url.query[key] + for key in set(url.query).difference(['foo', 'bar']) + } + ) + + .. versionadded:: 1.4 + + .. 
seealso:: + + :attr:`_engine.URL.query` + + :meth:`_engine.URL.update_query_dict` + + :meth:`_engine.URL.set` + + """ + + if not set(names).intersection(self.query): + return self + + return URL( + self.drivername, + self.username, + self.password, + self.host, + self.port, + self.database, + util.immutabledict( + { + key: self.query[key] + for key in set(self.query).difference(names) + } + ), + ) + + @property + def normalized_query(self) -> Mapping[str, Sequence[str]]: + """Return the :attr:`_engine.URL.query` dictionary with values normalized + into sequences. + + As the :attr:`_engine.URL.query` dictionary may contain either + string values or sequences of string values to differentiate between + parameters that are specified multiple times in the query string, + code that needs to handle multiple parameters generically will wish + to use this attribute so that all parameters present are presented + as sequences. Inspiration is from Python's ``urllib.parse.parse_qs`` + function. E.g.:: + + + >>> from sqlalchemy.engine import make_url + >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt") + >>> url.query + immutabledict({'alt_host': ('host1', 'host2'), 'ssl_cipher': '/path/to/crt'}) + >>> url.normalized_query + immutabledict({'alt_host': ('host1', 'host2'), 'ssl_cipher': ('/path/to/crt',)}) + + """ # noqa: E501 + + return util.immutabledict( + { + k: (v,) if not isinstance(v, tuple) else v + for k, v in self.query.items() + } + ) + + @util.deprecated( + "1.4", + "The :meth:`_engine.URL.__to_string__ method is deprecated and will " + "be removed in a future release. Please use the " + ":meth:`_engine.URL.render_as_string` method.", + ) + def __to_string__(self, hide_password: bool = True) -> str: + """Render this :class:`_engine.URL` object as a string. + + :param hide_password: Defaults to True. The password is not shown + in the string unless this is set to False. + + """ + return self.render_as_string(hide_password=hide_password) + + def render_as_string(self, hide_password: bool = True) -> str: + """Render this :class:`_engine.URL` object as a string. + + This method is used when the ``__str__()`` or ``__repr__()`` + methods are used. The method directly includes additional options. + + :param hide_password: Defaults to True. The password is not shown + in the string unless this is set to False. + + """ + s = self.drivername + "://" + if self.username is not None: + s += quote(self.username, safe=" +") + if self.password is not None: + s += ":" + ( + "***" + if hide_password + else quote(str(self.password), safe=" +") + ) + s += "@" + if self.host is not None: + if ":" in self.host: + s += f"[{self.host}]" + else: + s += self.host + if self.port is not None: + s += ":" + str(self.port) + if self.database is not None: + s += "/" + self.database + if self.query: + keys = list(self.query) + keys.sort() + s += "?" + "&".join( + f"{quote_plus(k)}={quote_plus(element)}" + for k in keys + for element in util.to_list(self.query[k]) + ) + return s + + def __repr__(self) -> str: + return self.render_as_string() + + def __copy__(self) -> URL: + return self.__class__.create( + self.drivername, + self.username, + self.password, + self.host, + self.port, + self.database, + # note this is an immutabledict of str-> str / tuple of str, + # also fully immutable. 
does not require deepcopy
+            self.query,
+        )
+
+    def __deepcopy__(self, memo: Any) -> URL:
+        return self.__copy__()
+
+    def __hash__(self) -> int:
+        return hash(str(self))
+
+    def __eq__(self, other: Any) -> bool:
+        return (
+            isinstance(other, URL)
+            and self.drivername == other.drivername
+            and self.username == other.username
+            and self.password == other.password
+            and self.host == other.host
+            and self.database == other.database
+            and self.query == other.query
+            and self.port == other.port
+        )
+
+    def __ne__(self, other: Any) -> bool:
+        return not self == other
+
+    def get_backend_name(self) -> str:
+        """Return the backend name.
+
+        This is the name that corresponds to the database backend in
+        use, and is the portion of the :attr:`_engine.URL.drivername`
+        that is to the left of the plus sign.
+
+        """
+        if "+" not in self.drivername:
+            return self.drivername
+        else:
+            return self.drivername.split("+")[0]
+
+    def get_driver_name(self) -> str:
+        """Return the driver name.
+
+        This is the name that corresponds to the DBAPI driver in
+        use, and is the portion of the :attr:`_engine.URL.drivername`
+        that is to the right of the plus sign.
+
+        If the :attr:`_engine.URL.drivername` does not include a plus sign,
+        then the default :class:`_engine.Dialect` for this
+        :class:`_engine.URL` is imported in order to get the driver name.
+
+        """
+
+        if "+" not in self.drivername:
+            return self.get_dialect().driver
+        else:
+            return self.drivername.split("+")[1]
+
+    def _instantiate_plugins(
+        self, kwargs: Mapping[str, Any]
+    ) -> Tuple[URL, List[Any], Dict[str, Any]]:
+        plugin_names = util.to_list(self.query.get("plugin", ()))
+        plugin_names += kwargs.get("plugins", [])
+
+        kwargs = dict(kwargs)
+
+        loaded_plugins = [
+            plugins.load(plugin_name)(self, kwargs)
+            for plugin_name in plugin_names
+        ]
+
+        u = self.difference_update_query(["plugin", "plugins"])
+
+        for plugin in loaded_plugins:
+            new_u = plugin.update_url(u)
+            if new_u is not None:
+                u = new_u
+
+        kwargs.pop("plugins", None)
+
+        return u, loaded_plugins, kwargs
+
+    def _get_entrypoint(self) -> Type[Dialect]:
+        """Return the "entry point" dialect class.
+
+        This is normally the dialect itself except in the case when the
+        returned class implements the get_dialect_cls() method.
+
+        """
+        if "+" not in self.drivername:
+            name = self.drivername
+        else:
+            name = self.drivername.replace("+", ".")
+        cls = registry.load(name)
+        # check for legacy dialects that
+        # would return a module with 'dialect' as the
+        # actual class
+        if (
+            hasattr(cls, "dialect")
+            and isinstance(cls.dialect, type)
+            and issubclass(cls.dialect, Dialect)
+        ):
+            return cls.dialect
+        else:
+            return cast("Type[Dialect]", cls)
+
+    def get_dialect(self, _is_async: bool = False) -> Type[Dialect]:
+        """Return the SQLAlchemy :class:`_engine.Dialect` class corresponding
+        to this URL's driver name.
+
+        """
+        entrypoint = self._get_entrypoint()
+        if _is_async:
+            dialect_cls = entrypoint.get_async_dialect_cls(self)
+        else:
+            dialect_cls = entrypoint.get_dialect_cls(self)
+        return dialect_cls
+
+    def translate_connect_args(
+        self, names: Optional[List[str]] = None, **kw: Any
+    ) -> Dict[str, Any]:
+        r"""Translate url attributes into a dictionary of connection arguments.
+
+        Returns attributes of this url (`host`, `database`, `username`,
+        `password`, `port`) as a plain dictionary.  The attribute names are
+        used as the keys by default.  Unset or false attributes are omitted
+        from the final dictionary.
+
+        :param \**kw: Optional, alternate key names for url attributes.
+
+        :param names: Deprecated.  Same purpose as the keyword-based alternate
+         names, but correlates the name to the original positionally.
+        """
+
+        if names is not None:
+            util.warn_deprecated(
+                "The `URL.translate_connect_args.names` parameter is "
+                "deprecated. Please pass the "
+                "alternate names as kw arguments.",
+                "1.4",
+            )
+
+        translated = {}
+        attribute_names = ["host", "database", "username", "password", "port"]
+        for sname in attribute_names:
+            if names:
+                name = names.pop(0)
+            elif sname in kw:
+                name = kw[sname]
+            else:
+                name = sname
+            if name is not None and getattr(self, sname, False):
+                if sname == "password":
+                    translated[name] = str(getattr(self, sname))
+                else:
+                    translated[name] = getattr(self, sname)
+
+        return translated
+
+
+def make_url(name_or_url: Union[str, URL]) -> URL:
+    """Given a string, produce a new URL instance.
+
+    The format of the URL generally follows `RFC-1738
+    <https://www.ietf.org/rfc/rfc1738.txt>`_, with some exceptions, including
+    that underscores, and not dashes or periods, are accepted within the
+    "scheme" portion.
+
+    If a :class:`.URL` object is passed, it is returned as is.
+
+    .. seealso::
+
+        :ref:`database_urls`
+
+    """
+
+    if isinstance(name_or_url, str):
+        return _parse_url(name_or_url)
+    elif not isinstance(name_or_url, URL) and not hasattr(
+        name_or_url, "_sqla_is_testing_if_this_is_a_mock_object"
+    ):
+        raise exc.ArgumentError(
+            f"Expected string or URL object, got {name_or_url!r}"
+        )
+    else:
+        return name_or_url
+
+
+def _parse_url(name: str) -> URL:
+    pattern = re.compile(
+        r"""
+            (?P<name>[\w\+]+)://
+            (?:
+                (?P<username>[^:/]*)
+                (?::(?P<password>[^@]*))?
+            @)?
+            (?:
+                (?:
+                    \[(?P<ipv6host>[^/\?]+)\] |
+                    (?P<ipv4host>[^/:\?]+)
+                )?
+                (?::(?P<port>[^/\?]*))?
+            )?
+            (?:/(?P<database>[^\?]*))?
+            (?:\?(?P<query>.*))?
+            """,
+        re.X,
+    )
+
+    m = pattern.match(name)
+    if m is not None:
+        components = m.groupdict()
+        query: Optional[Dict[str, Union[str, List[str]]]]
+        if components["query"] is not None:
+            query = {}
+
+            for key, value in parse_qsl(components["query"]):
+                if key in query:
+                    query[key] = util.to_list(query[key])
+                    cast("List[str]", query[key]).append(value)
+                else:
+                    query[key] = value
+        else:
+            query = None
+        components["query"] = query
+
+        if components["username"] is not None:
+            components["username"] = unquote(components["username"])
+
+        if components["password"] is not None:
+            components["password"] = unquote(components["password"])
+
+        ipv4host = components.pop("ipv4host")
+        ipv6host = components.pop("ipv6host")
+        components["host"] = ipv4host or ipv6host
+        name = components.pop("name")
+
+        if components["port"]:
+            components["port"] = int(components["port"])
+
+        return URL.create(name, **components)  # type: ignore
+
+    else:
+        raise exc.ArgumentError(
+            "Could not parse SQLAlchemy URL from string '%s'" % name
+        )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/util.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/util.py
new file mode 100644
index 00000000..186ca4c3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/util.py
@@ -0,0 +1,167 @@
+# engine/util.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+import typing
+from typing import Any
+from typing import Callable
+from typing import Optional
+from typing import TypeVar
+
+from .. import exc
+from ..
import util +from ..util._has_cy import HAS_CYEXTENSION +from ..util.typing import Protocol +from ..util.typing import Self + +if typing.TYPE_CHECKING or not HAS_CYEXTENSION: + from ._py_util import _distill_params_20 as _distill_params_20 + from ._py_util import _distill_raw_params as _distill_raw_params +else: + from sqlalchemy.cyextension.util import ( # noqa: F401 + _distill_params_20 as _distill_params_20, + ) + from sqlalchemy.cyextension.util import ( # noqa: F401 + _distill_raw_params as _distill_raw_params, + ) + +_C = TypeVar("_C", bound=Callable[[], Any]) + + +def connection_memoize(key: str) -> Callable[[_C], _C]: + """Decorator, memoize a function in a connection.info stash. + + Only applicable to functions which take no arguments other than a + connection. The memo will be stored in ``connection.info[key]``. + """ + + @util.decorator + def decorated(fn, self, connection): # type: ignore + connection = connection.connect() + try: + return connection.info[key] + except KeyError: + connection.info[key] = val = fn(self, connection) + return val + + return decorated + + +class _TConsSubject(Protocol): + _trans_context_manager: Optional[TransactionalContext] + + +class TransactionalContext: + """Apply Python context manager behavior to transaction objects. + + Performs validation to ensure the subject of the transaction is not + used if the transaction were ended prematurely. + + """ + + __slots__ = ("_outer_trans_ctx", "_trans_subject", "__weakref__") + + _trans_subject: Optional[_TConsSubject] + + def _transaction_is_active(self) -> bool: + raise NotImplementedError() + + def _transaction_is_closed(self) -> bool: + raise NotImplementedError() + + def _rollback_can_be_called(self) -> bool: + """indicates the object is in a state that is known to be acceptable + for rollback() to be called. + + This does not necessarily mean rollback() will succeed or not raise + an error, just that there is currently no state detected that indicates + rollback() would fail or emit warnings. + + It also does not mean that there's a transaction in progress, as + it is usually safe to call rollback() even if no transaction is + present. + + .. versionadded:: 1.4.28 + + """ + raise NotImplementedError() + + def _get_subject(self) -> _TConsSubject: + raise NotImplementedError() + + def commit(self) -> None: + raise NotImplementedError() + + def rollback(self) -> None: + raise NotImplementedError() + + def close(self) -> None: + raise NotImplementedError() + + @classmethod + def _trans_ctx_check(cls, subject: _TConsSubject) -> None: + trans_context = subject._trans_context_manager + if trans_context: + if not trans_context._transaction_is_active(): + raise exc.InvalidRequestError( + "Can't operate on closed transaction inside context " + "manager. Please complete the context manager " + "before emitting further commands." + ) + + def __enter__(self) -> Self: + subject = self._get_subject() + + # none for outer transaction, may be non-None for nested + # savepoint, legacy nesting cases + trans_context = subject._trans_context_manager + self._outer_trans_ctx = trans_context + + self._trans_subject = subject + subject._trans_context_manager = self + return self + + def __exit__(self, type_: Any, value: Any, traceback: Any) -> None: + subject = getattr(self, "_trans_subject", None) + + # simplistically we could assume that + # "subject._trans_context_manager is self". However, any calling + # code that is manipulating __exit__ directly would break this + # assumption. 
alembic context manager + # is an example of partial use that just calls __exit__ and + # not __enter__ at the moment. it's safe to assume this is being done + # in the wild also + out_of_band_exit = ( + subject is None or subject._trans_context_manager is not self + ) + + if type_ is None and self._transaction_is_active(): + try: + self.commit() + except: + with util.safe_reraise(): + if self._rollback_can_be_called(): + self.rollback() + finally: + if not out_of_band_exit: + assert subject is not None + subject._trans_context_manager = self._outer_trans_ctx + self._trans_subject = self._outer_trans_ctx = None + else: + try: + if not self._transaction_is_active(): + if not self._transaction_is_closed(): + self.close() + else: + if self._rollback_can_be_called(): + self.rollback() + finally: + if not out_of_band_exit: + assert subject is not None + subject._trans_context_manager = self._outer_trans_ctx + self._trans_subject = self._outer_trans_ctx = None diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/event/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__init__.py new file mode 100644 index 00000000..9b54f07f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__init__.py @@ -0,0 +1,25 @@ +# event/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +from .api import CANCEL as CANCEL +from .api import contains as contains +from .api import listen as listen +from .api import listens_for as listens_for +from .api import NO_RETVAL as NO_RETVAL +from .api import remove as remove +from .attr import _InstanceLevelDispatch as _InstanceLevelDispatch +from .attr import RefCollection as RefCollection +from .base import _Dispatch as _Dispatch +from .base import _DispatchCommon as _DispatchCommon +from .base import dispatcher as dispatcher +from .base import Events as Events +from .legacy import _legacy_signature as _legacy_signature +from .registry import _EventKey as _EventKey +from .registry import _ListenerFnType as _ListenerFnType +from .registry import EventTarget as EventTarget diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..1139136f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/api.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/api.cpython-312.pyc new file mode 100644 index 00000000..57069620 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/api.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/attr.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/attr.cpython-312.pyc new file mode 100644 index 00000000..db4523f3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/attr.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/base.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/base.cpython-312.pyc new file mode 100644 index 
00000000..bdd3d1c8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/base.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/legacy.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/legacy.cpython-312.pyc new file mode 100644 index 00000000..e9a988de Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/legacy.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/registry.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/registry.cpython-312.pyc new file mode 100644 index 00000000..042f5fe6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/event/__pycache__/registry.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/event/api.py b/.venv/lib/python3.12/site-packages/sqlalchemy/event/api.py new file mode 100644 index 00000000..230ec698 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/event/api.py @@ -0,0 +1,225 @@ +# event/api.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Public API functions for the event system. + +""" +from __future__ import annotations + +from typing import Any +from typing import Callable + +from .base import _registrars +from .registry import _ET +from .registry import _EventKey +from .registry import _ListenerFnType +from .. import exc +from .. import util + + +CANCEL = util.symbol("CANCEL") +NO_RETVAL = util.symbol("NO_RETVAL") + + +def _event_key( + target: _ET, identifier: str, fn: _ListenerFnType +) -> _EventKey[_ET]: + for evt_cls in _registrars[identifier]: + tgt = evt_cls._accept_with(target, identifier) + if tgt is not None: + return _EventKey(target, identifier, fn, tgt) + else: + raise exc.InvalidRequestError( + "No such event '%s' for target '%s'" % (identifier, target) + ) + + +def listen( + target: Any, identifier: str, fn: Callable[..., Any], *args: Any, **kw: Any +) -> None: + """Register a listener function for the given target. + + The :func:`.listen` function is part of the primary interface for the + SQLAlchemy event system, documented at :ref:`event_toplevel`. + + e.g.:: + + from sqlalchemy import event + from sqlalchemy.schema import UniqueConstraint + + def unique_constraint_name(const, table): + const.name = "uq_%s_%s" % ( + table.name, + list(const.columns)[0].name + ) + event.listen( + UniqueConstraint, + "after_parent_attach", + unique_constraint_name) + + :param bool insert: The default behavior for event handlers is to append + the decorated user defined function to an internal list of registered + event listeners upon discovery. If a user registers a function with + ``insert=True``, SQLAlchemy will insert (prepend) the function to the + internal list upon discovery. This feature is not typically used or + recommended by the SQLAlchemy maintainers, but is provided to ensure + certain user defined functions can run before others, such as when + :ref:`Changing the sql_mode in MySQL `. + + :param bool named: When using named argument passing, the names listed in + the function argument specification will be used as keys in the + dictionary. + See :ref:`event_named_argument_styles`. + + :param bool once: Private/Internal API usage. Deprecated. 
This parameter + would provide that an event function would run only once per given + target. It does not however imply automatic de-registration of the + listener function; associating an arbitrarily high number of listeners + without explicitly removing them will cause memory to grow unbounded even + if ``once=True`` is specified. + + :param bool propagate: The ``propagate`` kwarg is available when working + with ORM instrumentation and mapping events. + See :class:`_ormevent.MapperEvents` and + :meth:`_ormevent.MapperEvents.before_mapper_configured` for examples. + + :param bool retval: This flag applies only to specific event listeners, + each of which includes documentation explaining when it should be used. + By default, no listener ever requires a return value. + However, some listeners do support special behaviors for return values, + and include in their documentation that the ``retval=True`` flag is + necessary for a return value to be processed. + + Event listener suites that make use of :paramref:`_event.listen.retval` + include :class:`_events.ConnectionEvents` and + :class:`_ormevent.AttributeEvents`. + + .. note:: + + The :func:`.listen` function cannot be called at the same time + that the target event is being run. This has implications + for thread safety, and also means an event cannot be added + from inside the listener function for itself. The list of + events to be run are present inside of a mutable collection + that can't be changed during iteration. + + Event registration and removal is not intended to be a "high + velocity" operation; it is a configurational operation. For + systems that need to quickly associate and deassociate with + events at high scale, use a mutable structure that is handled + from inside of a single listener. + + .. seealso:: + + :func:`.listens_for` + + :func:`.remove` + + """ + + _event_key(target, identifier, fn).listen(*args, **kw) + + +def listens_for( + target: Any, identifier: str, *args: Any, **kw: Any +) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + """Decorate a function as a listener for the given target + identifier. + + The :func:`.listens_for` decorator is part of the primary interface for the + SQLAlchemy event system, documented at :ref:`event_toplevel`. + + This function generally shares the same kwargs as :func:`.listen`. + + e.g.:: + + from sqlalchemy import event + from sqlalchemy.schema import UniqueConstraint + + @event.listens_for(UniqueConstraint, "after_parent_attach") + def unique_constraint_name(const, table): + const.name = "uq_%s_%s" % ( + table.name, + list(const.columns)[0].name + ) + + A given function can also be invoked for only the first invocation + of the event using the ``once`` argument:: + + @event.listens_for(Mapper, "before_configure", once=True) + def on_config(): + do_config() + + + .. warning:: The ``once`` argument does not imply automatic de-registration + of the listener function after it has been invoked a first time; a + listener entry will remain associated with the target object. + Associating an arbitrarily high number of listeners without explicitly + removing them will cause memory to grow unbounded even if ``once=True`` + is specified. + + .. seealso:: + + :func:`.listen` - general description of event listening + + """ + + def decorate(fn: Callable[..., Any]) -> Callable[..., Any]: + listen(target, identifier, fn, *args, **kw) + return fn + + return decorate + + +def remove(target: Any, identifier: str, fn: Callable[..., Any]) -> None: + """Remove an event listener. 
+ + The arguments here should match exactly those which were sent to + :func:`.listen`; all the event registration which proceeded as a result + of this call will be reverted by calling :func:`.remove` with the same + arguments. + + e.g.:: + + # if a function was registered like this... + @event.listens_for(SomeMappedClass, "before_insert", propagate=True) + def my_listener_function(*arg): + pass + + # ... it's removed like this + event.remove(SomeMappedClass, "before_insert", my_listener_function) + + Above, the listener function associated with ``SomeMappedClass`` was also + propagated to subclasses of ``SomeMappedClass``; the :func:`.remove` + function will revert all of these operations. + + .. note:: + + The :func:`.remove` function cannot be called at the same time + that the target event is being run. This has implications + for thread safety, and also means an event cannot be removed + from inside the listener function for itself. The list of + events to be run are present inside of a mutable collection + that can't be changed during iteration. + + Event registration and removal is not intended to be a "high + velocity" operation; it is a configurational operation. For + systems that need to quickly associate and deassociate with + events at high scale, use a mutable structure that is handled + from inside of a single listener. + + .. seealso:: + + :func:`.listen` + + """ + _event_key(target, identifier, fn).remove() + + +def contains(target: Any, identifier: str, fn: Callable[..., Any]) -> bool: + """Return True if the given target/ident/fn is set up to listen.""" + + return _event_key(target, identifier, fn).contains() diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/event/attr.py b/.venv/lib/python3.12/site-packages/sqlalchemy/event/attr.py new file mode 100644 index 00000000..ef2b334d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/event/attr.py @@ -0,0 +1,655 @@ +# event/attr.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Attribute implementation for _Dispatch classes. + +The various listener targets for a particular event class are represented +as attributes, which refer to collections of listeners to be fired off. +These collections can exist at the class level as well as at the instance +level. An event is fired off using code like this:: + + some_object.dispatch.first_connect(arg1, arg2) + +Above, ``some_object.dispatch`` would be an instance of ``_Dispatch`` and +``first_connect`` is typically an instance of ``_ListenerCollection`` +if event listeners are present, or ``_EmptyListener`` if none are present. + +The attribute mechanics here spend effort trying to ensure listener functions +are available with a minimum of function call overhead, that unnecessary +objects aren't created (i.e. many empty per-instance listener collections), +as well as that everything is garbage collectable when owning references are +lost. Other features such as "propagation" of listener functions across +many ``_Dispatch`` instances, "joining" of multiple ``_Dispatch`` instances, +as well as support for subclass propagation (e.g. events assigned to +``Pool`` vs. ``QueuePool``) are all implemented here. 
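+
+For orientation, the usual path from the public API into these collections
+(an editorial sketch; the target, event name and listener shown are
+hypothetical)::
+
+    from sqlalchemy import event
+
+    # event.listen() builds an _EventKey and appends it to the relevant
+    # listener collection; firing the event then iterates that collection.
+    event.listen(SomeTarget, "some_event", a_listener)
+    some_instance.dispatch.some_event(arg1, arg2)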
+ +""" +from __future__ import annotations + +import collections +from itertools import chain +import threading +from types import TracebackType +import typing +from typing import Any +from typing import cast +from typing import Collection +from typing import Deque +from typing import FrozenSet +from typing import Generic +from typing import Iterator +from typing import MutableMapping +from typing import MutableSequence +from typing import NoReturn +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TypeVar +from typing import Union +import weakref + +from . import legacy +from . import registry +from .registry import _ET +from .registry import _EventKey +from .registry import _ListenerFnType +from .. import exc +from .. import util +from ..util.concurrency import AsyncAdaptedLock +from ..util.typing import Protocol + +_T = TypeVar("_T", bound=Any) + +if typing.TYPE_CHECKING: + from .base import _Dispatch + from .base import _DispatchCommon + from .base import _HasEventsDispatch + + +class RefCollection(util.MemoizedSlots, Generic[_ET]): + __slots__ = ("ref",) + + ref: weakref.ref[RefCollection[_ET]] + + def _memoized_attr_ref(self) -> weakref.ref[RefCollection[_ET]]: + return weakref.ref(self, registry._collection_gced) + + +class _empty_collection(Collection[_T]): + def append(self, element: _T) -> None: + pass + + def appendleft(self, element: _T) -> None: + pass + + def extend(self, other: Sequence[_T]) -> None: + pass + + def remove(self, element: _T) -> None: + pass + + def __contains__(self, element: Any) -> bool: + return False + + def __iter__(self) -> Iterator[_T]: + return iter([]) + + def clear(self) -> None: + pass + + def __len__(self) -> int: + return 0 + + +_ListenerFnSequenceType = Union[Deque[_T], _empty_collection[_T]] + + +class _ClsLevelDispatch(RefCollection[_ET]): + """Class-level events on :class:`._Dispatch` classes.""" + + __slots__ = ( + "clsname", + "name", + "arg_names", + "has_kw", + "legacy_signatures", + "_clslevel", + "__weakref__", + ) + + clsname: str + name: str + arg_names: Sequence[str] + has_kw: bool + legacy_signatures: MutableSequence[legacy._LegacySignatureType] + _clslevel: MutableMapping[ + Type[_ET], _ListenerFnSequenceType[_ListenerFnType] + ] + + def __init__( + self, + parent_dispatch_cls: Type[_HasEventsDispatch[_ET]], + fn: _ListenerFnType, + ): + self.name = fn.__name__ + self.clsname = parent_dispatch_cls.__name__ + argspec = util.inspect_getfullargspec(fn) + self.arg_names = argspec.args[1:] + self.has_kw = bool(argspec.varkw) + self.legacy_signatures = list( + reversed( + sorted( + getattr(fn, "_legacy_signatures", []), key=lambda s: s[0] + ) + ) + ) + fn.__doc__ = legacy._augment_fn_docs(self, parent_dispatch_cls, fn) + + self._clslevel = weakref.WeakKeyDictionary() + + def _adjust_fn_spec( + self, fn: _ListenerFnType, named: bool + ) -> _ListenerFnType: + if named: + fn = self._wrap_fn_for_kw(fn) + if self.legacy_signatures: + try: + argspec = util.get_callable_argspec(fn, no_self=True) + except TypeError: + pass + else: + fn = legacy._wrap_fn_for_legacy(self, fn, argspec) + return fn + + def _wrap_fn_for_kw(self, fn: _ListenerFnType) -> _ListenerFnType: + def wrap_kw(*args: Any, **kw: Any) -> Any: + argdict = dict(zip(self.arg_names, args)) + argdict.update(kw) + return fn(**argdict) + + return wrap_kw + + def _do_insert_or_append( + self, event_key: _EventKey[_ET], is_append: bool + ) -> None: + target = event_key.dispatch_target + 
assert isinstance( + target, type + ), "Class-level Event targets must be classes." + if not getattr(target, "_sa_propagate_class_events", True): + raise exc.InvalidRequestError( + f"Can't assign an event directly to the {target} class" + ) + + cls: Type[_ET] + + for cls in util.walk_subclasses(target): + if cls is not target and cls not in self._clslevel: + self.update_subclass(cls) + else: + if cls not in self._clslevel: + self.update_subclass(cls) + if is_append: + self._clslevel[cls].append(event_key._listen_fn) + else: + self._clslevel[cls].appendleft(event_key._listen_fn) + registry._stored_in_collection(event_key, self) + + def insert(self, event_key: _EventKey[_ET], propagate: bool) -> None: + self._do_insert_or_append(event_key, is_append=False) + + def append(self, event_key: _EventKey[_ET], propagate: bool) -> None: + self._do_insert_or_append(event_key, is_append=True) + + def update_subclass(self, target: Type[_ET]) -> None: + if target not in self._clslevel: + if getattr(target, "_sa_propagate_class_events", True): + self._clslevel[target] = collections.deque() + else: + self._clslevel[target] = _empty_collection() + + clslevel = self._clslevel[target] + cls: Type[_ET] + for cls in target.__mro__[1:]: + if cls in self._clslevel: + clslevel.extend( + [fn for fn in self._clslevel[cls] if fn not in clslevel] + ) + + def remove(self, event_key: _EventKey[_ET]) -> None: + target = event_key.dispatch_target + cls: Type[_ET] + for cls in util.walk_subclasses(target): + if cls in self._clslevel: + self._clslevel[cls].remove(event_key._listen_fn) + registry._removed_from_collection(event_key, self) + + def clear(self) -> None: + """Clear all class level listeners""" + + to_clear: Set[_ListenerFnType] = set() + for dispatcher in self._clslevel.values(): + to_clear.update(dispatcher) + dispatcher.clear() + registry._clear(self, to_clear) + + def for_modify(self, obj: _Dispatch[_ET]) -> _ClsLevelDispatch[_ET]: + """Return an event collection which can be modified. + + For _ClsLevelDispatch at the class level of + a dispatcher, this returns self. + + """ + return self + + +class _InstanceLevelDispatch(RefCollection[_ET], Collection[_ListenerFnType]): + __slots__ = () + + parent: _ClsLevelDispatch[_ET] + + def _adjust_fn_spec( + self, fn: _ListenerFnType, named: bool + ) -> _ListenerFnType: + return self.parent._adjust_fn_spec(fn, named) + + def __contains__(self, item: Any) -> bool: + raise NotImplementedError() + + def __len__(self) -> int: + raise NotImplementedError() + + def __iter__(self) -> Iterator[_ListenerFnType]: + raise NotImplementedError() + + def __bool__(self) -> bool: + raise NotImplementedError() + + def exec_once(self, *args: Any, **kw: Any) -> None: + raise NotImplementedError() + + def exec_once_unless_exception(self, *args: Any, **kw: Any) -> None: + raise NotImplementedError() + + def _exec_w_sync_on_first_run(self, *args: Any, **kw: Any) -> None: + raise NotImplementedError() + + def __call__(self, *args: Any, **kw: Any) -> None: + raise NotImplementedError() + + def insert(self, event_key: _EventKey[_ET], propagate: bool) -> None: + raise NotImplementedError() + + def append(self, event_key: _EventKey[_ET], propagate: bool) -> None: + raise NotImplementedError() + + def remove(self, event_key: _EventKey[_ET]) -> None: + raise NotImplementedError() + + def for_modify( + self, obj: _DispatchCommon[_ET] + ) -> _InstanceLevelDispatch[_ET]: + """Return an event collection which can be modified. 
+ + For _ClsLevelDispatch at the class level of + a dispatcher, this returns self. + + """ + return self + + +class _EmptyListener(_InstanceLevelDispatch[_ET]): + """Serves as a proxy interface to the events + served by a _ClsLevelDispatch, when there are no + instance-level events present. + + Is replaced by _ListenerCollection when instance-level + events are added. + + """ + + __slots__ = "parent", "parent_listeners", "name" + + propagate: FrozenSet[_ListenerFnType] = frozenset() + listeners: Tuple[()] = () + parent: _ClsLevelDispatch[_ET] + parent_listeners: _ListenerFnSequenceType[_ListenerFnType] + name: str + + def __init__(self, parent: _ClsLevelDispatch[_ET], target_cls: Type[_ET]): + if target_cls not in parent._clslevel: + parent.update_subclass(target_cls) + self.parent = parent + self.parent_listeners = parent._clslevel[target_cls] + self.name = parent.name + + def for_modify( + self, obj: _DispatchCommon[_ET] + ) -> _ListenerCollection[_ET]: + """Return an event collection which can be modified. + + For _EmptyListener at the instance level of + a dispatcher, this generates a new + _ListenerCollection, applies it to the instance, + and returns it. + + """ + obj = cast("_Dispatch[_ET]", obj) + + assert obj._instance_cls is not None + result = _ListenerCollection(self.parent, obj._instance_cls) + if getattr(obj, self.name) is self: + setattr(obj, self.name, result) + else: + assert isinstance(getattr(obj, self.name), _JoinedListener) + return result + + def _needs_modify(self, *args: Any, **kw: Any) -> NoReturn: + raise NotImplementedError("need to call for_modify()") + + def exec_once(self, *args: Any, **kw: Any) -> NoReturn: + self._needs_modify(*args, **kw) + + def exec_once_unless_exception(self, *args: Any, **kw: Any) -> NoReturn: + self._needs_modify(*args, **kw) + + def insert(self, *args: Any, **kw: Any) -> NoReturn: + self._needs_modify(*args, **kw) + + def append(self, *args: Any, **kw: Any) -> NoReturn: + self._needs_modify(*args, **kw) + + def remove(self, *args: Any, **kw: Any) -> NoReturn: + self._needs_modify(*args, **kw) + + def clear(self, *args: Any, **kw: Any) -> NoReturn: + self._needs_modify(*args, **kw) + + def __call__(self, *args: Any, **kw: Any) -> None: + """Execute this event.""" + + for fn in self.parent_listeners: + fn(*args, **kw) + + def __contains__(self, item: Any) -> bool: + return item in self.parent_listeners + + def __len__(self) -> int: + return len(self.parent_listeners) + + def __iter__(self) -> Iterator[_ListenerFnType]: + return iter(self.parent_listeners) + + def __bool__(self) -> bool: + return bool(self.parent_listeners) + + +class _MutexProtocol(Protocol): + def __enter__(self) -> bool: ... + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: ... 
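+
+# Editorial aside: _MutexProtocol above is satisfied structurally rather
+# than by inheritance; ``threading.Lock()`` type-checks against it, which
+# is how ``_memoized_attr__exec_once_mutex`` below can return either a
+# ``threading.Lock`` or an ``AsyncAdaptedLock``.  A standalone sketch:
+#
+#     import threading
+#     lock: _MutexProtocol = threading.Lock()  # accepted structurally
+#     with lock:
+#         pass  # critical section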
+ + +class _CompoundListener(_InstanceLevelDispatch[_ET]): + __slots__ = ( + "_exec_once_mutex", + "_exec_once", + "_exec_w_sync_once", + "_is_asyncio", + ) + + _exec_once_mutex: _MutexProtocol + parent_listeners: Collection[_ListenerFnType] + listeners: Collection[_ListenerFnType] + _exec_once: bool + _exec_w_sync_once: bool + + def __init__(self, *arg: Any, **kw: Any): + super().__init__(*arg, **kw) + self._is_asyncio = False + + def _set_asyncio(self) -> None: + self._is_asyncio = True + + def _memoized_attr__exec_once_mutex(self) -> _MutexProtocol: + if self._is_asyncio: + return AsyncAdaptedLock() + else: + return threading.Lock() + + def _exec_once_impl( + self, retry_on_exception: bool, *args: Any, **kw: Any + ) -> None: + with self._exec_once_mutex: + if not self._exec_once: + try: + self(*args, **kw) + exception = False + except: + exception = True + raise + finally: + if not exception or not retry_on_exception: + self._exec_once = True + + def exec_once(self, *args: Any, **kw: Any) -> None: + """Execute this event, but only if it has not been + executed already for this collection.""" + + if not self._exec_once: + self._exec_once_impl(False, *args, **kw) + + def exec_once_unless_exception(self, *args: Any, **kw: Any) -> None: + """Execute this event, but only if it has not been + executed already for this collection, or was called + by a previous exec_once_unless_exception call and + raised an exception. + + If exec_once was already called, then this method will never run + the callable regardless of whether it raised or not. + + .. versionadded:: 1.3.8 + + """ + if not self._exec_once: + self._exec_once_impl(True, *args, **kw) + + def _exec_w_sync_on_first_run(self, *args: Any, **kw: Any) -> None: + """Execute this event, and use a mutex if it has not been + executed already for this collection, or was called + by a previous _exec_w_sync_on_first_run call and + raised an exception. + + If _exec_w_sync_on_first_run was already called and didn't raise an + exception, then a mutex is not used. + + .. versionadded:: 1.4.11 + + """ + if not self._exec_w_sync_once: + with self._exec_once_mutex: + try: + self(*args, **kw) + except: + raise + else: + self._exec_w_sync_once = True + else: + self(*args, **kw) + + def __call__(self, *args: Any, **kw: Any) -> None: + """Execute this event.""" + + for fn in self.parent_listeners: + fn(*args, **kw) + for fn in self.listeners: + fn(*args, **kw) + + def __contains__(self, item: Any) -> bool: + return item in self.parent_listeners or item in self.listeners + + def __len__(self) -> int: + return len(self.parent_listeners) + len(self.listeners) + + def __iter__(self) -> Iterator[_ListenerFnType]: + return chain(self.parent_listeners, self.listeners) + + def __bool__(self) -> bool: + return bool(self.listeners or self.parent_listeners) + + +class _ListenerCollection(_CompoundListener[_ET]): + """Instance-level attributes on instances of :class:`._Dispatch`. + + Represents a collection of listeners. + + As of 0.7.9, _ListenerCollection is only first + created via the _EmptyListener.for_modify() method. 
+ + """ + + __slots__ = ( + "parent_listeners", + "parent", + "name", + "listeners", + "propagate", + "__weakref__", + ) + + parent_listeners: Collection[_ListenerFnType] + parent: _ClsLevelDispatch[_ET] + name: str + listeners: Deque[_ListenerFnType] + propagate: Set[_ListenerFnType] + + def __init__(self, parent: _ClsLevelDispatch[_ET], target_cls: Type[_ET]): + super().__init__() + if target_cls not in parent._clslevel: + parent.update_subclass(target_cls) + self._exec_once = False + self._exec_w_sync_once = False + self.parent_listeners = parent._clslevel[target_cls] + self.parent = parent + self.name = parent.name + self.listeners = collections.deque() + self.propagate = set() + + def for_modify( + self, obj: _DispatchCommon[_ET] + ) -> _ListenerCollection[_ET]: + """Return an event collection which can be modified. + + For _ListenerCollection at the instance level of + a dispatcher, this returns self. + + """ + return self + + def _update( + self, other: _ListenerCollection[_ET], only_propagate: bool = True + ) -> None: + """Populate from the listeners in another :class:`_Dispatch` + object.""" + existing_listeners = self.listeners + existing_listener_set = set(existing_listeners) + self.propagate.update(other.propagate) + other_listeners = [ + l + for l in other.listeners + if l not in existing_listener_set + and not only_propagate + or l in self.propagate + ] + + existing_listeners.extend(other_listeners) + + if other._is_asyncio: + self._set_asyncio() + + to_associate = other.propagate.union(other_listeners) + registry._stored_in_collection_multi(self, other, to_associate) + + def insert(self, event_key: _EventKey[_ET], propagate: bool) -> None: + if event_key.prepend_to_list(self, self.listeners): + if propagate: + self.propagate.add(event_key._listen_fn) + + def append(self, event_key: _EventKey[_ET], propagate: bool) -> None: + if event_key.append_to_list(self, self.listeners): + if propagate: + self.propagate.add(event_key._listen_fn) + + def remove(self, event_key: _EventKey[_ET]) -> None: + self.listeners.remove(event_key._listen_fn) + self.propagate.discard(event_key._listen_fn) + registry._removed_from_collection(event_key, self) + + def clear(self) -> None: + registry._clear(self, self.listeners) + self.propagate.clear() + self.listeners.clear() + + +class _JoinedListener(_CompoundListener[_ET]): + __slots__ = "parent_dispatch", "name", "local", "parent_listeners" + + parent_dispatch: _DispatchCommon[_ET] + name: str + local: _InstanceLevelDispatch[_ET] + parent_listeners: Collection[_ListenerFnType] + + def __init__( + self, + parent_dispatch: _DispatchCommon[_ET], + name: str, + local: _EmptyListener[_ET], + ): + self._exec_once = False + self.parent_dispatch = parent_dispatch + self.name = name + self.local = local + self.parent_listeners = self.local + + if not typing.TYPE_CHECKING: + # first error, I don't really understand: + # Signature of "listeners" incompatible with + # supertype "_CompoundListener" [override] + # the name / return type are exactly the same + # second error is getattr_isn't typed, the cast() here + # adds too much method overhead + @property + def listeners(self) -> Collection[_ListenerFnType]: + return getattr(self.parent_dispatch, self.name) + + def _adjust_fn_spec( + self, fn: _ListenerFnType, named: bool + ) -> _ListenerFnType: + return self.local._adjust_fn_spec(fn, named) + + def for_modify(self, obj: _DispatchCommon[_ET]) -> _JoinedListener[_ET]: + self.local = self.parent_listeners = self.local.for_modify(obj) + return self + + def 
insert(self, event_key: _EventKey[_ET], propagate: bool) -> None: + self.local.insert(event_key, propagate) + + def append(self, event_key: _EventKey[_ET], propagate: bool) -> None: + self.local.append(event_key, propagate) + + def remove(self, event_key: _EventKey[_ET]) -> None: + self.local.remove(event_key) + + def clear(self) -> None: + raise NotImplementedError() diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/event/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/event/base.py new file mode 100644 index 00000000..cddfc982 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/event/base.py @@ -0,0 +1,470 @@ +# event/base.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Base implementation classes. + +The public-facing ``Events`` serves as the base class for an event interface; +its public attributes represent different kinds of events. These attributes +are mirrored onto a ``_Dispatch`` class, which serves as a container for +collections of listener functions. These collections are represented both +at the class level of a particular ``_Dispatch`` class as well as within +instances of ``_Dispatch``. + +""" +from __future__ import annotations + +import typing +from typing import Any +from typing import cast +from typing import Dict +from typing import Generic +from typing import Iterator +from typing import List +from typing import Mapping +from typing import MutableMapping +from typing import Optional +from typing import overload +from typing import Tuple +from typing import Type +from typing import Union +import weakref + +from .attr import _ClsLevelDispatch +from .attr import _EmptyListener +from .attr import _InstanceLevelDispatch +from .attr import _JoinedListener +from .registry import _ET +from .registry import _EventKey +from .. import util +from ..util.typing import Literal + +_registrars: MutableMapping[str, List[Type[_HasEventsDispatch[Any]]]] = ( + util.defaultdict(list) +) + + +def _is_event_name(name: str) -> bool: + # _sa_event prefix is special to support internal-only event names. + # most event names are just plain method names that aren't + # underscored. + + return ( + not name.startswith("_") and name != "dispatch" + ) or name.startswith("_sa_event") + + +class _UnpickleDispatch: + """Serializable callable that re-generates an instance of + :class:`_Dispatch` given a particular :class:`.Events` subclass. + + """ + + def __call__(self, _instance_cls: Type[_ET]) -> _Dispatch[_ET]: + for cls in _instance_cls.__mro__: + if "dispatch" in cls.__dict__: + return cast( + "_Dispatch[_ET]", cls.__dict__["dispatch"].dispatch + )._for_class(_instance_cls) + else: + raise AttributeError("No class with a 'dispatch' member present.") + + +class _DispatchCommon(Generic[_ET]): + __slots__ = () + + _instance_cls: Optional[Type[_ET]] + + def _join(self, other: _DispatchCommon[_ET]) -> _JoinedDispatcher[_ET]: + raise NotImplementedError() + + def __getattr__(self, name: str) -> _InstanceLevelDispatch[_ET]: + raise NotImplementedError() + + @property + def _events(self) -> Type[_HasEventsDispatch[_ET]]: + raise NotImplementedError() + + +class _Dispatch(_DispatchCommon[_ET]): + """Mirror the event listening definitions of an Events class with + listener collections. 
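+
+    As a quick orientation (an editorial sketch; ``SomeEvents`` and
+    ``some_target`` are hypothetical)::
+
+        SomeEvents.dispatch             # generated _Dispatch subclass
+        some_target.dispatch            # _Dispatch instance for the target
+        some_target.dispatch.some_event(arg1, arg2)  # invokes listeners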
+ + Classes which define a "dispatch" member will return a + non-instantiated :class:`._Dispatch` subclass when the member + is accessed at the class level. When the "dispatch" member is + accessed at the instance level of its owner, an instance + of the :class:`._Dispatch` class is returned. + + A :class:`._Dispatch` class is generated for each :class:`.Events` + class defined, by the :meth:`._HasEventsDispatch._create_dispatcher_class` + method. The original :class:`.Events` classes remain untouched. + This decouples the construction of :class:`.Events` subclasses from + the implementation used by the event internals, and allows + inspecting tools like Sphinx to work in an unsurprising + way against the public API. + + """ + + # "active_history" is an ORM case we add here. ideally a better + # system would be in place for ad-hoc attributes. + __slots__ = "_parent", "_instance_cls", "__dict__", "_empty_listeners" + + _active_history: bool + + _empty_listener_reg: MutableMapping[ + Type[_ET], Dict[str, _EmptyListener[_ET]] + ] = weakref.WeakKeyDictionary() + + _empty_listeners: Dict[str, _EmptyListener[_ET]] + + _event_names: List[str] + + _instance_cls: Optional[Type[_ET]] + + _joined_dispatch_cls: Type[_JoinedDispatcher[_ET]] + + _events: Type[_HasEventsDispatch[_ET]] + """reference back to the Events class. + + Bidirectional against _HasEventsDispatch.dispatch + + """ + + def __init__( + self, + parent: Optional[_Dispatch[_ET]], + instance_cls: Optional[Type[_ET]] = None, + ): + self._parent = parent + self._instance_cls = instance_cls + + if instance_cls: + assert parent is not None + try: + self._empty_listeners = self._empty_listener_reg[instance_cls] + except KeyError: + self._empty_listeners = self._empty_listener_reg[ + instance_cls + ] = { + ls.name: _EmptyListener(ls, instance_cls) + for ls in parent._event_descriptors + } + else: + self._empty_listeners = {} + + def __getattr__(self, name: str) -> _InstanceLevelDispatch[_ET]: + # Assign EmptyListeners as attributes on demand + # to reduce startup time for new dispatch objects. + try: + ls = self._empty_listeners[name] + except KeyError: + raise AttributeError(name) + else: + setattr(self, ls.name, ls) + return ls + + @property + def _event_descriptors(self) -> Iterator[_ClsLevelDispatch[_ET]]: + for k in self._event_names: + # Yield _ClsLevelDispatch related + # to relevant event name. + yield getattr(self, k) + + def _listen(self, event_key: _EventKey[_ET], **kw: Any) -> None: + return self._events._listen(event_key, **kw) + + def _for_class(self, instance_cls: Type[_ET]) -> _Dispatch[_ET]: + return self.__class__(self, instance_cls) + + def _for_instance(self, instance: _ET) -> _Dispatch[_ET]: + instance_cls = instance.__class__ + return self._for_class(instance_cls) + + def _join(self, other: _DispatchCommon[_ET]) -> _JoinedDispatcher[_ET]: + """Create a 'join' of this :class:`._Dispatch` and another. + + This new dispatcher will dispatch events to both + :class:`._Dispatch` objects. 
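+
+        E.g., as a sketch (``d1`` and ``d2`` stand for two dispatcher
+        instances; the event name is illustrative)::
+
+            joined = d1._join(d2)
+            joined.some_event(arg1, arg2)  # reaches listeners from both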
+ + """ + assert "_joined_dispatch_cls" in self.__class__.__dict__ + + return self._joined_dispatch_cls(self, other) + + def __reduce__(self) -> Union[str, Tuple[Any, ...]]: + return _UnpickleDispatch(), (self._instance_cls,) + + def _update( + self, other: _Dispatch[_ET], only_propagate: bool = True + ) -> None: + """Populate from the listeners in another :class:`_Dispatch` + object.""" + for ls in other._event_descriptors: + if isinstance(ls, _EmptyListener): + continue + getattr(self, ls.name).for_modify(self)._update( + ls, only_propagate=only_propagate + ) + + def _clear(self) -> None: + for ls in self._event_descriptors: + ls.for_modify(self).clear() + + +def _remove_dispatcher(cls: Type[_HasEventsDispatch[_ET]]) -> None: + for k in cls.dispatch._event_names: + _registrars[k].remove(cls) + if not _registrars[k]: + del _registrars[k] + + +class _HasEventsDispatch(Generic[_ET]): + _dispatch_target: Optional[Type[_ET]] + """class which will receive the .dispatch collection""" + + dispatch: _Dispatch[_ET] + """reference back to the _Dispatch class. + + Bidirectional against _Dispatch._events + + """ + + if typing.TYPE_CHECKING: + + def __getattr__(self, name: str) -> _InstanceLevelDispatch[_ET]: ... + + def __init_subclass__(cls) -> None: + """Intercept new Event subclasses and create associated _Dispatch + classes.""" + + cls._create_dispatcher_class(cls.__name__, cls.__bases__, cls.__dict__) + + @classmethod + def _accept_with( + cls, target: Union[_ET, Type[_ET]], identifier: str + ) -> Optional[Union[_ET, Type[_ET]]]: + raise NotImplementedError() + + @classmethod + def _listen( + cls, + event_key: _EventKey[_ET], + *, + propagate: bool = False, + insert: bool = False, + named: bool = False, + asyncio: bool = False, + ) -> None: + raise NotImplementedError() + + @staticmethod + def _set_dispatch( + klass: Type[_HasEventsDispatch[_ET]], + dispatch_cls: Type[_Dispatch[_ET]], + ) -> _Dispatch[_ET]: + # This allows an Events subclass to define additional utility + # methods made available to the target via + # "self.dispatch._events." + # @staticmethod to allow easy "super" calls while in a metaclass + # constructor. + klass.dispatch = dispatch_cls(None) + dispatch_cls._events = klass + return klass.dispatch + + @classmethod + def _create_dispatcher_class( + cls, classname: str, bases: Tuple[type, ...], dict_: Mapping[str, Any] + ) -> None: + """Create a :class:`._Dispatch` class corresponding to an + :class:`.Events` class.""" + + # there's all kinds of ways to do this, + # i.e. make a Dispatch class that shares the '_listen' method + # of the Event class, this is the straight monkeypatch. 
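+        #
+        # illustrative note: for an Events subclass named, say,
+        # "FooEvents" (a hypothetical name), the type() call below
+        # builds a "FooEventsDispatch" class whose __slots__ are
+        # exactly the detected event-method names.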
+ if hasattr(cls, "dispatch"): + dispatch_base = cls.dispatch.__class__ + else: + dispatch_base = _Dispatch + + event_names = [k for k in dict_ if _is_event_name(k)] + dispatch_cls = cast( + "Type[_Dispatch[_ET]]", + type( + "%sDispatch" % classname, + (dispatch_base,), + {"__slots__": event_names}, + ), + ) + + dispatch_cls._event_names = event_names + dispatch_inst = cls._set_dispatch(cls, dispatch_cls) + for k in dispatch_cls._event_names: + setattr(dispatch_inst, k, _ClsLevelDispatch(cls, dict_[k])) + _registrars[k].append(cls) + + for super_ in dispatch_cls.__bases__: + if issubclass(super_, _Dispatch) and super_ is not _Dispatch: + for ls in super_._events.dispatch._event_descriptors: + setattr(dispatch_inst, ls.name, ls) + dispatch_cls._event_names.append(ls.name) + + if getattr(cls, "_dispatch_target", None): + dispatch_target_cls = cls._dispatch_target + assert dispatch_target_cls is not None + if ( + hasattr(dispatch_target_cls, "__slots__") + and "_slots_dispatch" in dispatch_target_cls.__slots__ + ): + dispatch_target_cls.dispatch = slots_dispatcher(cls) + else: + dispatch_target_cls.dispatch = dispatcher(cls) + + klass = type( + "Joined%s" % dispatch_cls.__name__, + (_JoinedDispatcher,), + {"__slots__": event_names}, + ) + dispatch_cls._joined_dispatch_cls = klass + + # establish pickle capability by adding it to this module + globals()[klass.__name__] = klass + + +class _JoinedDispatcher(_DispatchCommon[_ET]): + """Represent a connection between two _Dispatch objects.""" + + __slots__ = "local", "parent", "_instance_cls" + + local: _DispatchCommon[_ET] + parent: _DispatchCommon[_ET] + _instance_cls: Optional[Type[_ET]] + + def __init__( + self, local: _DispatchCommon[_ET], parent: _DispatchCommon[_ET] + ): + self.local = local + self.parent = parent + self._instance_cls = self.local._instance_cls + + def __reduce__(self) -> Any: + return (self.__class__, (self.local, self.parent)) + + def __getattr__(self, name: str) -> _JoinedListener[_ET]: + # Assign _JoinedListeners as attributes on demand + # to reduce startup time for new dispatch objects. + ls = getattr(self.local, name) + jl = _JoinedListener(self.parent, ls.name, ls) + setattr(self, ls.name, jl) + return jl + + def _listen(self, event_key: _EventKey[_ET], **kw: Any) -> None: + return self.parent._listen(event_key, **kw) + + @property + def _events(self) -> Type[_HasEventsDispatch[_ET]]: + return self.parent._events + + +class Events(_HasEventsDispatch[_ET]): + """Define event listening functions for a particular target type.""" + + @classmethod + def _accept_with( + cls, target: Union[_ET, Type[_ET]], identifier: str + ) -> Optional[Union[_ET, Type[_ET]]]: + def dispatch_is(*types: Type[Any]) -> bool: + return all(isinstance(target.dispatch, t) for t in types) + + def dispatch_parent_is(t: Type[Any]) -> bool: + return isinstance( + cast("_JoinedDispatcher[_ET]", target.dispatch).parent, t + ) + + # Mapper, ClassManager, Session override this to + # also accept classes, scoped_sessions, sessionmakers, etc. 
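+        #
+        # descriptive note: a target is accepted when its .dispatch is
+        # (a) an instance of this class' dispatch class, (b) the
+        # dispatch class itself (class-level access), or (c) a
+        # _JoinedDispatcher whose parent dispatch matches.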
+ if hasattr(target, "dispatch"): + if ( + dispatch_is(cls.dispatch.__class__) + or dispatch_is(type, cls.dispatch.__class__) + or ( + dispatch_is(_JoinedDispatcher) + and dispatch_parent_is(cls.dispatch.__class__) + ) + ): + return target + + return None + + @classmethod + def _listen( + cls, + event_key: _EventKey[_ET], + *, + propagate: bool = False, + insert: bool = False, + named: bool = False, + asyncio: bool = False, + ) -> None: + event_key.base_listen( + propagate=propagate, insert=insert, named=named, asyncio=asyncio + ) + + @classmethod + def _remove(cls, event_key: _EventKey[_ET]) -> None: + event_key.remove() + + @classmethod + def _clear(cls) -> None: + cls.dispatch._clear() + + +class dispatcher(Generic[_ET]): + """Descriptor used by target classes to + deliver the _Dispatch class at the class level + and produce new _Dispatch instances for target + instances. + + """ + + def __init__(self, events: Type[_HasEventsDispatch[_ET]]): + self.dispatch = events.dispatch + self.events = events + + @overload + def __get__( + self, obj: Literal[None], cls: Type[Any] + ) -> Type[_Dispatch[_ET]]: ... + + @overload + def __get__(self, obj: Any, cls: Type[Any]) -> _DispatchCommon[_ET]: ... + + def __get__(self, obj: Any, cls: Type[Any]) -> Any: + if obj is None: + return self.dispatch + + disp = self.dispatch._for_instance(obj) + try: + obj.__dict__["dispatch"] = disp + except AttributeError as ae: + raise TypeError( + "target %r doesn't have __dict__, should it be " + "defining _slots_dispatch?" % (obj,) + ) from ae + return disp + + +class slots_dispatcher(dispatcher[_ET]): + def __get__(self, obj: Any, cls: Type[Any]) -> Any: + if obj is None: + return self.dispatch + + if hasattr(obj, "_slots_dispatch"): + return obj._slots_dispatch + + disp = self.dispatch._for_instance(obj) + obj._slots_dispatch = disp + return disp diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/event/legacy.py b/.venv/lib/python3.12/site-packages/sqlalchemy/event/legacy.py new file mode 100644 index 00000000..57e561c3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/event/legacy.py @@ -0,0 +1,246 @@ +# event/legacy.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Routines to handle adaption of legacy call signatures, +generation of deprecation notes and docstrings. + +""" +from __future__ import annotations + +import typing +from typing import Any +from typing import Callable +from typing import List +from typing import Optional +from typing import Tuple +from typing import Type + +from .registry import _ET +from .registry import _ListenerFnType +from .. import util +from ..util.compat import FullArgSpec + +if typing.TYPE_CHECKING: + from .attr import _ClsLevelDispatch + from .base import _HasEventsDispatch + + +_LegacySignatureType = Tuple[str, List[str], Optional[Callable[..., Any]]] + + +def _legacy_signature( + since: str, + argnames: List[str], + converter: Optional[Callable[..., Any]] = None, +) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + """legacy sig decorator + + + :param since: string version for deprecation warning + :param argnames: list of strings, which is *all* arguments that the legacy + version accepted, including arguments that are still there + :param converter: lambda that will accept tuple of this full arg signature + and return tuple of new arg signature. 
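+
+    An illustrative sketch (the event and argument names here are
+    hypothetical)::
+
+        @_legacy_signature(
+            "1.4",
+            ["conn", "result"],
+            lambda conn, result: (conn, result, None),
+        )
+        def after_execute(self, conn, result, extra_arg):
+            ...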
+ + """ + + def leg(fn: Callable[..., Any]) -> Callable[..., Any]: + if not hasattr(fn, "_legacy_signatures"): + fn._legacy_signatures = [] # type: ignore[attr-defined] + fn._legacy_signatures.append((since, argnames, converter)) # type: ignore[attr-defined] # noqa: E501 + return fn + + return leg + + +def _wrap_fn_for_legacy( + dispatch_collection: _ClsLevelDispatch[_ET], + fn: _ListenerFnType, + argspec: FullArgSpec, +) -> _ListenerFnType: + for since, argnames, conv in dispatch_collection.legacy_signatures: + if argnames[-1] == "**kw": + has_kw = True + argnames = argnames[0:-1] + else: + has_kw = False + + if len(argnames) == len(argspec.args) and has_kw is bool( + argspec.varkw + ): + formatted_def = "def %s(%s%s)" % ( + dispatch_collection.name, + ", ".join(dispatch_collection.arg_names), + ", **kw" if has_kw else "", + ) + warning_txt = ( + 'The argument signature for the "%s.%s" event listener ' + "has changed as of version %s, and conversion for " + "the old argument signature will be removed in a " + 'future release. The new signature is "%s"' + % ( + dispatch_collection.clsname, + dispatch_collection.name, + since, + formatted_def, + ) + ) + + if conv is not None: + assert not has_kw + + def wrap_leg(*args: Any, **kw: Any) -> Any: + util.warn_deprecated(warning_txt, version=since) + assert conv is not None + return fn(*conv(*args)) + + else: + + def wrap_leg(*args: Any, **kw: Any) -> Any: + util.warn_deprecated(warning_txt, version=since) + argdict = dict(zip(dispatch_collection.arg_names, args)) + args_from_dict = [argdict[name] for name in argnames] + if has_kw: + return fn(*args_from_dict, **kw) + else: + return fn(*args_from_dict) + + return wrap_leg + else: + return fn + + +def _indent(text: str, indent: str) -> str: + return "\n".join(indent + line for line in text.split("\n")) + + +def _standard_listen_example( + dispatch_collection: _ClsLevelDispatch[_ET], + sample_target: Any, + fn: _ListenerFnType, +) -> str: + example_kw_arg = _indent( + "\n".join( + "%(arg)s = kw['%(arg)s']" % {"arg": arg} + for arg in dispatch_collection.arg_names[0:2] + ), + " ", + ) + if dispatch_collection.legacy_signatures: + current_since = max( + since + for since, args, conv in dispatch_collection.legacy_signatures + ) + else: + current_since = None + text = ( + "from sqlalchemy import event\n\n\n" + "@event.listens_for(%(sample_target)s, '%(event_name)s')\n" + "def receive_%(event_name)s(" + "%(named_event_arguments)s%(has_kw_arguments)s):\n" + " \"listen for the '%(event_name)s' event\"\n" + "\n # ... (event handling logic) ...\n" + ) + + text %= { + "current_since": ( + " (arguments as of %s)" % current_since if current_since else "" + ), + "event_name": fn.__name__, + "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "", + "named_event_arguments": ", ".join(dispatch_collection.arg_names), + "example_kw_arg": example_kw_arg, + "sample_target": sample_target, + } + return text + + +def _legacy_listen_examples( + dispatch_collection: _ClsLevelDispatch[_ET], + sample_target: str, + fn: _ListenerFnType, +) -> str: + text = "" + for since, args, conv in dispatch_collection.legacy_signatures: + text += ( + "\n# DEPRECATED calling style (pre-%(since)s, " + "will be removed in a future release)\n" + "@event.listens_for(%(sample_target)s, '%(event_name)s')\n" + "def receive_%(event_name)s(" + "%(named_event_arguments)s%(has_kw_arguments)s):\n" + " \"listen for the '%(event_name)s' event\"\n" + "\n # ... 
(event handling logic) ...\n" + % { + "since": since, + "event_name": fn.__name__, + "has_kw_arguments": ( + " **kw" if dispatch_collection.has_kw else "" + ), + "named_event_arguments": ", ".join(args), + "sample_target": sample_target, + } + ) + return text + + +def _version_signature_changes( + parent_dispatch_cls: Type[_HasEventsDispatch[_ET]], + dispatch_collection: _ClsLevelDispatch[_ET], +) -> str: + since, args, conv = dispatch_collection.legacy_signatures[0] + return ( + "\n.. versionchanged:: %(since)s\n" + " The :meth:`.%(clsname)s.%(event_name)s` event now accepts the \n" + " arguments %(named_event_arguments)s%(has_kw_arguments)s.\n" + " Support for listener functions which accept the previous \n" + ' argument signature(s) listed above as "deprecated" will be \n' + " removed in a future release." + % { + "since": since, + "clsname": parent_dispatch_cls.__name__, + "event_name": dispatch_collection.name, + "named_event_arguments": ", ".join( + ":paramref:`.%(clsname)s.%(event_name)s.%(param_name)s`" + % { + "clsname": parent_dispatch_cls.__name__, + "event_name": dispatch_collection.name, + "param_name": param_name, + } + for param_name in dispatch_collection.arg_names + ), + "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "", + } + ) + + +def _augment_fn_docs( + dispatch_collection: _ClsLevelDispatch[_ET], + parent_dispatch_cls: Type[_HasEventsDispatch[_ET]], + fn: _ListenerFnType, +) -> str: + header = ( + ".. container:: event_signatures\n\n" + " Example argument forms::\n" + "\n" + ) + + sample_target = getattr(parent_dispatch_cls, "_target_class_doc", "obj") + text = header + _indent( + _standard_listen_example(dispatch_collection, sample_target, fn), + " " * 8, + ) + if dispatch_collection.legacy_signatures: + text += _indent( + _legacy_listen_examples(dispatch_collection, sample_target, fn), + " " * 8, + ) + + text += _version_signature_changes( + parent_dispatch_cls, dispatch_collection + ) + + return util.inject_docstring_text(fn.__doc__, text, 1) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/event/registry.py b/.venv/lib/python3.12/site-packages/sqlalchemy/event/registry.py new file mode 100644 index 00000000..773620f8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/event/registry.py @@ -0,0 +1,386 @@ +# event/registry.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Provides managed registration services on behalf of :func:`.listen` +arguments. + +By "managed registration", we mean that event listening functions and +other objects can be added to various collections in such a way that their +membership in all those collections can be revoked at once, based on +an equivalent :class:`._EventKey`. + +""" +from __future__ import annotations + +import collections +import types +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import Deque +from typing import Dict +from typing import Generic +from typing import Iterable +from typing import Optional +from typing import Tuple +from typing import TypeVar +from typing import Union +import weakref + +from .. import exc +from .. 
import util + +if typing.TYPE_CHECKING: + from .attr import RefCollection + from .base import dispatcher + +_ListenerFnType = Callable[..., Any] +_ListenerFnKeyType = Union[int, Tuple[int, int]] +_EventKeyTupleType = Tuple[int, str, _ListenerFnKeyType] + + +_ET = TypeVar("_ET", bound="EventTarget") + + +class EventTarget: + """represents an event target, that is, something we can listen on + either with that target as a class or as an instance. + + Examples include: Connection, Mapper, Table, Session, + InstrumentedAttribute, Engine, Pool, Dialect. + + """ + + __slots__ = () + + dispatch: dispatcher[Any] + + +_RefCollectionToListenerType = Dict[ + "weakref.ref[RefCollection[Any]]", + "weakref.ref[_ListenerFnType]", +] + +_key_to_collection: Dict[_EventKeyTupleType, _RefCollectionToListenerType] = ( + collections.defaultdict(dict) +) +""" +Given an original listen() argument, can locate all +listener collections and the listener fn contained + +(target, identifier, fn) -> { + ref(listenercollection) -> ref(listener_fn) + ref(listenercollection) -> ref(listener_fn) + ref(listenercollection) -> ref(listener_fn) + } +""" + +_ListenerToEventKeyType = Dict[ + "weakref.ref[_ListenerFnType]", + _EventKeyTupleType, +] +_collection_to_key: Dict[ + weakref.ref[RefCollection[Any]], + _ListenerToEventKeyType, +] = collections.defaultdict(dict) +""" +Given a _ListenerCollection or _ClsLevelListener, can locate +all the original listen() arguments and the listener fn contained + +ref(listenercollection) -> { + ref(listener_fn) -> (target, identifier, fn), + ref(listener_fn) -> (target, identifier, fn), + ref(listener_fn) -> (target, identifier, fn), + } +""" + + +def _collection_gced(ref: weakref.ref[Any]) -> None: + # defaultdict, so can't get a KeyError + if not _collection_to_key or ref not in _collection_to_key: + return + + ref = cast("weakref.ref[RefCollection[EventTarget]]", ref) + + listener_to_key = _collection_to_key.pop(ref) + for key in listener_to_key.values(): + if key in _key_to_collection: + # defaultdict, so can't get a KeyError + dispatch_reg = _key_to_collection[key] + dispatch_reg.pop(ref) + if not dispatch_reg: + _key_to_collection.pop(key) + + +def _stored_in_collection( + event_key: _EventKey[_ET], owner: RefCollection[_ET] +) -> bool: + key = event_key._key + + dispatch_reg = _key_to_collection[key] + + owner_ref = owner.ref + listen_ref = weakref.ref(event_key._listen_fn) + + if owner_ref in dispatch_reg: + return False + + dispatch_reg[owner_ref] = listen_ref + + listener_to_key = _collection_to_key[owner_ref] + listener_to_key[listen_ref] = key + + return True + + +def _removed_from_collection( + event_key: _EventKey[_ET], owner: RefCollection[_ET] +) -> None: + key = event_key._key + + dispatch_reg = _key_to_collection[key] + + listen_ref = weakref.ref(event_key._listen_fn) + + owner_ref = owner.ref + dispatch_reg.pop(owner_ref, None) + if not dispatch_reg: + del _key_to_collection[key] + + if owner_ref in _collection_to_key: + listener_to_key = _collection_to_key[owner_ref] + listener_to_key.pop(listen_ref) + + +def _stored_in_collection_multi( + newowner: RefCollection[_ET], + oldowner: RefCollection[_ET], + elements: Iterable[_ListenerFnType], +) -> None: + if not elements: + return + + oldowner_ref = oldowner.ref + newowner_ref = newowner.ref + + old_listener_to_key = _collection_to_key[oldowner_ref] + new_listener_to_key = _collection_to_key[newowner_ref] + + for listen_fn in elements: + listen_ref = weakref.ref(listen_fn) + try: + key = old_listener_to_key[listen_ref] + 
except KeyError: + # can occur during interpreter shutdown. + # see #6740 + continue + + try: + dispatch_reg = _key_to_collection[key] + except KeyError: + continue + + if newowner_ref in dispatch_reg: + assert dispatch_reg[newowner_ref] == listen_ref + else: + dispatch_reg[newowner_ref] = listen_ref + + new_listener_to_key[listen_ref] = key + + +def _clear( + owner: RefCollection[_ET], + elements: Iterable[_ListenerFnType], +) -> None: + if not elements: + return + + owner_ref = owner.ref + listener_to_key = _collection_to_key[owner_ref] + for listen_fn in elements: + listen_ref = weakref.ref(listen_fn) + key = listener_to_key[listen_ref] + dispatch_reg = _key_to_collection[key] + dispatch_reg.pop(owner_ref, None) + + if not dispatch_reg: + del _key_to_collection[key] + + +class _EventKey(Generic[_ET]): + """Represent :func:`.listen` arguments.""" + + __slots__ = ( + "target", + "identifier", + "fn", + "fn_key", + "fn_wrap", + "dispatch_target", + ) + + target: _ET + identifier: str + fn: _ListenerFnType + fn_key: _ListenerFnKeyType + dispatch_target: Any + _fn_wrap: Optional[_ListenerFnType] + + def __init__( + self, + target: _ET, + identifier: str, + fn: _ListenerFnType, + dispatch_target: Any, + _fn_wrap: Optional[_ListenerFnType] = None, + ): + self.target = target + self.identifier = identifier + self.fn = fn + if isinstance(fn, types.MethodType): + self.fn_key = id(fn.__func__), id(fn.__self__) + else: + self.fn_key = id(fn) + self.fn_wrap = _fn_wrap + self.dispatch_target = dispatch_target + + @property + def _key(self) -> _EventKeyTupleType: + return (id(self.target), self.identifier, self.fn_key) + + def with_wrapper(self, fn_wrap: _ListenerFnType) -> _EventKey[_ET]: + if fn_wrap is self._listen_fn: + return self + else: + return _EventKey( + self.target, + self.identifier, + self.fn, + self.dispatch_target, + _fn_wrap=fn_wrap, + ) + + def with_dispatch_target(self, dispatch_target: Any) -> _EventKey[_ET]: + if dispatch_target is self.dispatch_target: + return self + else: + return _EventKey( + self.target, + self.identifier, + self.fn, + dispatch_target, + _fn_wrap=self.fn_wrap, + ) + + def listen(self, *args: Any, **kw: Any) -> None: + once = kw.pop("once", False) + once_unless_exception = kw.pop("_once_unless_exception", False) + named = kw.pop("named", False) + + target, identifier, fn = ( + self.dispatch_target, + self.identifier, + self._listen_fn, + ) + + dispatch_collection = getattr(target.dispatch, identifier) + + adjusted_fn = dispatch_collection._adjust_fn_spec(fn, named) + + self = self.with_wrapper(adjusted_fn) + + stub_function = getattr( + self.dispatch_target.dispatch._events, self.identifier + ) + if hasattr(stub_function, "_sa_warn"): + stub_function._sa_warn() + + if once or once_unless_exception: + self.with_wrapper( + util.only_once( + self._listen_fn, retry_on_exception=once_unless_exception + ) + ).listen(*args, **kw) + else: + self.dispatch_target.dispatch._listen(self, *args, **kw) + + def remove(self) -> None: + key = self._key + + if key not in _key_to_collection: + raise exc.InvalidRequestError( + "No listeners found for event %s / %r / %s " + % (self.target, self.identifier, self.fn) + ) + + dispatch_reg = _key_to_collection.pop(key) + + for collection_ref, listener_ref in dispatch_reg.items(): + collection = collection_ref() + listener_fn = listener_ref() + if collection is not None and listener_fn is not None: + collection.remove(self.with_wrapper(listener_fn)) + + def contains(self) -> bool: + """Return True if this event key is registered to 
listen.""" + return self._key in _key_to_collection + + def base_listen( + self, + propagate: bool = False, + insert: bool = False, + named: bool = False, + retval: Optional[bool] = None, + asyncio: bool = False, + ) -> None: + target, identifier = self.dispatch_target, self.identifier + + dispatch_collection = getattr(target.dispatch, identifier) + + for_modify = dispatch_collection.for_modify(target.dispatch) + if asyncio: + for_modify._set_asyncio() + + if insert: + for_modify.insert(self, propagate) + else: + for_modify.append(self, propagate) + + @property + def _listen_fn(self) -> _ListenerFnType: + return self.fn_wrap or self.fn + + def append_to_list( + self, + owner: RefCollection[_ET], + list_: Deque[_ListenerFnType], + ) -> bool: + if _stored_in_collection(self, owner): + list_.append(self._listen_fn) + return True + else: + return False + + def remove_from_list( + self, + owner: RefCollection[_ET], + list_: Deque[_ListenerFnType], + ) -> None: + _removed_from_collection(self, owner) + list_.remove(self._listen_fn) + + def prepend_to_list( + self, + owner: RefCollection[_ET], + list_: Deque[_ListenerFnType], + ) -> bool: + if _stored_in_collection(self, owner): + list_.appendleft(self._listen_fn) + return True + else: + return False diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/events.py b/.venv/lib/python3.12/site-packages/sqlalchemy/events.py new file mode 100644 index 00000000..8c3bf01c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/events.py @@ -0,0 +1,17 @@ +# events.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Core event interfaces.""" + +from __future__ import annotations + +from .engine.events import ConnectionEvents +from .engine.events import DialectEvents +from .pool import PoolResetState +from .pool.events import PoolEvents +from .sql.base import SchemaEventTarget +from .sql.events import DDLEvents diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/exc.py b/.venv/lib/python3.12/site-packages/sqlalchemy/exc.py new file mode 100644 index 00000000..7d7eff36 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/exc.py @@ -0,0 +1,830 @@ +# exc.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Exceptions used with SQLAlchemy. + +The base exception class is :exc:`.SQLAlchemyError`. Exceptions which are +raised as a result of DBAPI exceptions are all subclasses of +:exc:`.DBAPIError`. 
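+
+Application code typically catches the generic base classes; for
+example (``conn``, ``stmt`` and ``log`` below are placeholder names)::
+
+    from sqlalchemy import exc
+
+    try:
+        conn.execute(stmt)
+    except exc.DBAPIError as err:
+        log.error("database error: %s", err)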
+ +""" +from __future__ import annotations + +import typing +from typing import Any +from typing import List +from typing import Optional +from typing import overload +from typing import Tuple +from typing import Type +from typing import Union + +from .util import compat +from .util import preloaded as _preloaded + +if typing.TYPE_CHECKING: + from .engine.interfaces import _AnyExecuteParams + from .engine.interfaces import Dialect + from .sql.compiler import Compiled + from .sql.compiler import TypeCompiler + from .sql.elements import ClauseElement + +if typing.TYPE_CHECKING: + _version_token: str +else: + # set by __init__.py + _version_token = None + + +class HasDescriptionCode: + """helper which adds 'code' as an attribute and '_code_str' as a method""" + + code: Optional[str] = None + + def __init__(self, *arg: Any, **kw: Any): + code = kw.pop("code", None) + if code is not None: + self.code = code + super().__init__(*arg, **kw) + + _what_are_we = "error" + + def _code_str(self) -> str: + if not self.code: + return "" + else: + return ( + f"(Background on this {self._what_are_we} at: " + f"https://sqlalche.me/e/{_version_token}/{self.code})" + ) + + def __str__(self) -> str: + message = super().__str__() + if self.code: + message = "%s %s" % (message, self._code_str()) + return message + + +class SQLAlchemyError(HasDescriptionCode, Exception): + """Generic error class.""" + + def _message(self) -> str: + # rules: + # + # 1. single arg string will usually be a unicode + # object, but since __str__() must return unicode, check for + # bytestring just in case + # + # 2. for multiple self.args, this is not a case in current + # SQLAlchemy though this is happening in at least one known external + # library, call str() which does a repr(). + # + text: str + + if len(self.args) == 1: + arg_text = self.args[0] + + if isinstance(arg_text, bytes): + text = compat.decode_backslashreplace(arg_text, "utf-8") + # This is for when the argument is not a string of any sort. + # Otherwise, converting this exception to string would fail for + # non-string arguments. + else: + text = str(arg_text) + + return text + else: + # this is not a normal case within SQLAlchemy but is here for + # compatibility with Exception.args - the str() comes out as + # a repr() of the tuple + return str(self.args) + + def _sql_message(self) -> str: + message = self._message() + + if self.code: + message = "%s %s" % (message, self._code_str()) + + return message + + def __str__(self) -> str: + return self._sql_message() + + +class ArgumentError(SQLAlchemyError): + """Raised when an invalid or conflicting function argument is supplied. + + This error generally corresponds to construction time state errors. + + """ + + +class DuplicateColumnError(ArgumentError): + """a Column is being added to a Table that would replace another + Column, without appropriate parameters to allow this in place. + + .. versionadded:: 2.0.0b4 + + """ + + +class ObjectNotExecutableError(ArgumentError): + """Raised when an object is passed to .execute() that can't be + executed as SQL. 
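+
+    A typical trigger, as an illustration::
+
+        conn.execute("SELECT 1")  # plain strings are not executable;
+                                  # wrap with text() instead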
+ + """ + + def __init__(self, target: Any): + super().__init__("Not an executable object: %r" % target) + self.target = target + + def __reduce__(self) -> Union[str, Tuple[Any, ...]]: + return self.__class__, (self.target,) + + +class NoSuchModuleError(ArgumentError): + """Raised when a dynamically-loaded module (usually a database dialect) + of a particular name cannot be located.""" + + +class NoForeignKeysError(ArgumentError): + """Raised when no foreign keys can be located between two selectables + during a join.""" + + +class AmbiguousForeignKeysError(ArgumentError): + """Raised when more than one foreign key matching can be located + between two selectables during a join.""" + + +class ConstraintColumnNotFoundError(ArgumentError): + """raised when a constraint refers to a string column name that + is not present in the table being constrained. + + .. versionadded:: 2.0 + + """ + + +class CircularDependencyError(SQLAlchemyError): + """Raised by topological sorts when a circular dependency is detected. + + There are two scenarios where this error occurs: + + * In a Session flush operation, if two objects are mutually dependent + on each other, they can not be inserted or deleted via INSERT or + DELETE statements alone; an UPDATE will be needed to post-associate + or pre-deassociate one of the foreign key constrained values. + The ``post_update`` flag described at :ref:`post_update` can resolve + this cycle. + * In a :attr:`_schema.MetaData.sorted_tables` operation, two + :class:`_schema.ForeignKey` + or :class:`_schema.ForeignKeyConstraint` objects mutually refer to each + other. Apply the ``use_alter=True`` flag to one or both, + see :ref:`use_alter`. + + """ + + def __init__( + self, + message: str, + cycles: Any, + edges: Any, + msg: Optional[str] = None, + code: Optional[str] = None, + ): + if msg is None: + message += " (%s)" % ", ".join(repr(s) for s in cycles) + else: + message = msg + SQLAlchemyError.__init__(self, message, code=code) + self.cycles = cycles + self.edges = edges + + def __reduce__(self) -> Union[str, Tuple[Any, ...]]: + return ( + self.__class__, + (None, self.cycles, self.edges, self.args[0]), + {"code": self.code} if self.code is not None else {}, + ) + + +class CompileError(SQLAlchemyError): + """Raised when an error occurs during SQL compilation""" + + +class UnsupportedCompilationError(CompileError): + """Raised when an operation is not supported by the given compiler. + + .. seealso:: + + :ref:`faq_sql_expression_string` + + :ref:`error_l7de` + """ + + code = "l7de" + + def __init__( + self, + compiler: Union[Compiled, TypeCompiler], + element_type: Type[ClauseElement], + message: Optional[str] = None, + ): + super().__init__( + "Compiler %r can't render element of type %s%s" + % (compiler, element_type, ": %s" % message if message else "") + ) + self.compiler = compiler + self.element_type = element_type + self.message = message + + def __reduce__(self) -> Union[str, Tuple[Any, ...]]: + return self.__class__, (self.compiler, self.element_type, self.message) + + +class IdentifierError(SQLAlchemyError): + """Raised when a schema name is beyond the max character limit""" + + +class DisconnectionError(SQLAlchemyError): + """A disconnect is detected on a raw DB-API connection. + + This error is raised and consumed internally by a connection pool. 
It can + be raised by the :meth:`_events.PoolEvents.checkout` + event so that the host pool + forces a retry; the exception will be caught three times in a row before + the pool gives up and raises :class:`~sqlalchemy.exc.InvalidRequestError` + regarding the connection attempt. + + """ + + invalidate_pool: bool = False + + +class InvalidatePoolError(DisconnectionError): + """Raised when the connection pool should invalidate all stale connections. + + A subclass of :class:`_exc.DisconnectionError` that indicates that the + disconnect situation encountered on the connection probably means the + entire pool should be invalidated, as the database has been restarted. + + This exception will be handled otherwise the same way as + :class:`_exc.DisconnectionError`, allowing three attempts to reconnect + before giving up. + + .. versionadded:: 1.2 + + """ + + invalidate_pool: bool = True + + +class TimeoutError(SQLAlchemyError): # noqa + """Raised when a connection pool times out on getting a connection.""" + + +class InvalidRequestError(SQLAlchemyError): + """SQLAlchemy was asked to do something it can't do. + + This error generally corresponds to runtime state errors. + + """ + + +class IllegalStateChangeError(InvalidRequestError): + """An object that tracks state encountered an illegal state change + of some kind. + + .. versionadded:: 2.0 + + """ + + +class NoInspectionAvailable(InvalidRequestError): + """A subject passed to :func:`sqlalchemy.inspection.inspect` produced + no context for inspection.""" + + +class PendingRollbackError(InvalidRequestError): + """A transaction has failed and needs to be rolled back before + continuing. + + .. versionadded:: 1.4 + + """ + + +class ResourceClosedError(InvalidRequestError): + """An operation was requested from a connection, cursor, or other + object that's in a closed state.""" + + +class NoSuchColumnError(InvalidRequestError, KeyError): + """A nonexistent column is requested from a ``Row``.""" + + +class NoResultFound(InvalidRequestError): + """A database result was required but none was found. + + + .. versionchanged:: 1.4 This exception is now part of the + ``sqlalchemy.exc`` module in Core, moved from the ORM. The symbol + remains importable from ``sqlalchemy.orm.exc``. + + + """ + + +class MultipleResultsFound(InvalidRequestError): + """A single database result was required but more than one were found. + + .. versionchanged:: 1.4 This exception is now part of the + ``sqlalchemy.exc`` module in Core, moved from the ORM. The symbol + remains importable from ``sqlalchemy.orm.exc``. + + + """ + + +class NoReferenceError(InvalidRequestError): + """Raised by ``ForeignKey`` to indicate a reference cannot be resolved.""" + + table_name: str + + +class AwaitRequired(InvalidRequestError): + """Error raised by the async greenlet spawn if no async operation + was awaited when it required one. + + """ + + code = "xd1r" + + +class MissingGreenlet(InvalidRequestError): + r"""Error raised by the async greenlet await\_ if called while not inside + the greenlet spawn context. + + """ + + code = "xd2s" + + +class NoReferencedTableError(NoReferenceError): + """Raised by ``ForeignKey`` when the referred ``Table`` cannot be + located. 
+ + """ + + def __init__(self, message: str, tname: str): + NoReferenceError.__init__(self, message) + self.table_name = tname + + def __reduce__(self) -> Union[str, Tuple[Any, ...]]: + return self.__class__, (self.args[0], self.table_name) + + +class NoReferencedColumnError(NoReferenceError): + """Raised by ``ForeignKey`` when the referred ``Column`` cannot be + located. + + """ + + def __init__(self, message: str, tname: str, cname: str): + NoReferenceError.__init__(self, message) + self.table_name = tname + self.column_name = cname + + def __reduce__(self) -> Union[str, Tuple[Any, ...]]: + return ( + self.__class__, + (self.args[0], self.table_name, self.column_name), + ) + + +class NoSuchTableError(InvalidRequestError): + """Table does not exist or is not visible to a connection.""" + + +class UnreflectableTableError(InvalidRequestError): + """Table exists but can't be reflected for some reason. + + .. versionadded:: 1.2 + + """ + + +class UnboundExecutionError(InvalidRequestError): + """SQL was attempted without a database connection to execute it on.""" + + +class DontWrapMixin: + """A mixin class which, when applied to a user-defined Exception class, + will not be wrapped inside of :exc:`.StatementError` if the error is + emitted within the process of executing a statement. + + E.g.:: + + from sqlalchemy.exc import DontWrapMixin + + class MyCustomException(Exception, DontWrapMixin): + pass + + class MySpecialType(TypeDecorator): + impl = String + + def process_bind_param(self, value, dialect): + if value == 'invalid': + raise MyCustomException("invalid!") + + """ + + +class StatementError(SQLAlchemyError): + """An error occurred during execution of a SQL statement. + + :class:`StatementError` wraps the exception raised + during execution, and features :attr:`.statement` + and :attr:`.params` attributes which supply context regarding + the specifics of the statement which had an issue. + + The wrapped exception object is available in + the :attr:`.orig` attribute. + + """ + + statement: Optional[str] = None + """The string SQL statement being invoked when this exception occurred.""" + + params: Optional[_AnyExecuteParams] = None + """The parameter list being used when this exception occurred.""" + + orig: Optional[BaseException] = None + """The original exception that was thrown. + + """ + + ismulti: Optional[bool] = None + """multi parameter passed to repr_params(). 
None is meaningful.""" + + connection_invalidated: bool = False + + def __init__( + self, + message: str, + statement: Optional[str], + params: Optional[_AnyExecuteParams], + orig: Optional[BaseException], + hide_parameters: bool = False, + code: Optional[str] = None, + ismulti: Optional[bool] = None, + ): + SQLAlchemyError.__init__(self, message, code=code) + self.statement = statement + self.params = params + self.orig = orig + self.ismulti = ismulti + self.hide_parameters = hide_parameters + self.detail: List[str] = [] + + def add_detail(self, msg: str) -> None: + self.detail.append(msg) + + def __reduce__(self) -> Union[str, Tuple[Any, ...]]: + return ( + self.__class__, + ( + self.args[0], + self.statement, + self.params, + self.orig, + self.hide_parameters, + self.__dict__.get("code"), + self.ismulti, + ), + {"detail": self.detail}, + ) + + @_preloaded.preload_module("sqlalchemy.sql.util") + def _sql_message(self) -> str: + util = _preloaded.sql_util + + details = [self._message()] + if self.statement: + stmt_detail = "[SQL: %s]" % self.statement + details.append(stmt_detail) + if self.params: + if self.hide_parameters: + details.append( + "[SQL parameters hidden due to hide_parameters=True]" + ) + else: + params_repr = util._repr_params( + self.params, 10, ismulti=self.ismulti + ) + details.append("[parameters: %r]" % params_repr) + code_str = self._code_str() + if code_str: + details.append(code_str) + return "\n".join(["(%s)" % det for det in self.detail] + details) + + +class DBAPIError(StatementError): + """Raised when the execution of a database operation fails. + + Wraps exceptions raised by the DB-API underlying the + database operation. Driver-specific implementations of the standard + DB-API exception types are wrapped by matching sub-types of SQLAlchemy's + :class:`DBAPIError` when possible. DB-API's ``Error`` type maps to + :class:`DBAPIError` in SQLAlchemy, otherwise the names are identical. Note + that there is no guarantee that different DB-API implementations will + raise the same exception type for any given error condition. + + :class:`DBAPIError` features :attr:`~.StatementError.statement` + and :attr:`~.StatementError.params` attributes which supply context + regarding the specifics of the statement which had an issue, for the + typical case when the error was raised within the context of + emitting a SQL statement. + + The wrapped exception object is available in the + :attr:`~.StatementError.orig` attribute. Its type and properties are + DB-API implementation specific. + + """ + + code = "dbapi" + + @overload + @classmethod + def instance( + cls, + statement: Optional[str], + params: Optional[_AnyExecuteParams], + orig: Exception, + dbapi_base_err: Type[Exception], + hide_parameters: bool = False, + connection_invalidated: bool = False, + dialect: Optional[Dialect] = None, + ismulti: Optional[bool] = None, + ) -> StatementError: ... + + @overload + @classmethod + def instance( + cls, + statement: Optional[str], + params: Optional[_AnyExecuteParams], + orig: DontWrapMixin, + dbapi_base_err: Type[Exception], + hide_parameters: bool = False, + connection_invalidated: bool = False, + dialect: Optional[Dialect] = None, + ismulti: Optional[bool] = None, + ) -> DontWrapMixin: ... 
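+
+    # descriptive note on the overloads: DontWrapMixin errors, and
+    # BaseExceptions that are not Exception subclasses (e.g.
+    # KeyboardInterrupt), are returned unwrapped by the implementation;
+    # all other errors come back wrapped as StatementError / DBAPIError.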
+ + @overload + @classmethod + def instance( + cls, + statement: Optional[str], + params: Optional[_AnyExecuteParams], + orig: BaseException, + dbapi_base_err: Type[Exception], + hide_parameters: bool = False, + connection_invalidated: bool = False, + dialect: Optional[Dialect] = None, + ismulti: Optional[bool] = None, + ) -> BaseException: ... + + @classmethod + def instance( + cls, + statement: Optional[str], + params: Optional[_AnyExecuteParams], + orig: Union[BaseException, DontWrapMixin], + dbapi_base_err: Type[Exception], + hide_parameters: bool = False, + connection_invalidated: bool = False, + dialect: Optional[Dialect] = None, + ismulti: Optional[bool] = None, + ) -> Union[BaseException, DontWrapMixin]: + # Don't ever wrap these, just return them directly as if + # DBAPIError didn't exist. + if ( + isinstance(orig, BaseException) and not isinstance(orig, Exception) + ) or isinstance(orig, DontWrapMixin): + return orig + + if orig is not None: + # not a DBAPI error, statement is present. + # raise a StatementError + if isinstance(orig, SQLAlchemyError) and statement: + return StatementError( + "(%s.%s) %s" + % ( + orig.__class__.__module__, + orig.__class__.__name__, + orig.args[0], + ), + statement, + params, + orig, + hide_parameters=hide_parameters, + code=orig.code, + ismulti=ismulti, + ) + elif not isinstance(orig, dbapi_base_err) and statement: + return StatementError( + "(%s.%s) %s" + % ( + orig.__class__.__module__, + orig.__class__.__name__, + orig, + ), + statement, + params, + orig, + hide_parameters=hide_parameters, + ismulti=ismulti, + ) + + glob = globals() + for super_ in orig.__class__.__mro__: + name = super_.__name__ + if dialect: + name = dialect.dbapi_exception_translation_map.get( + name, name + ) + if name in glob and issubclass(glob[name], DBAPIError): + cls = glob[name] + break + + return cls( + statement, + params, + orig, + connection_invalidated=connection_invalidated, + hide_parameters=hide_parameters, + code=cls.code, + ismulti=ismulti, + ) + + def __reduce__(self) -> Union[str, Tuple[Any, ...]]: + return ( + self.__class__, + ( + self.statement, + self.params, + self.orig, + self.hide_parameters, + self.connection_invalidated, + self.__dict__.get("code"), + self.ismulti, + ), + {"detail": self.detail}, + ) + + def __init__( + self, + statement: Optional[str], + params: Optional[_AnyExecuteParams], + orig: BaseException, + hide_parameters: bool = False, + connection_invalidated: bool = False, + code: Optional[str] = None, + ismulti: Optional[bool] = None, + ): + try: + text = str(orig) + except Exception as e: + text = "Error in str() of DB-API-generated exception: " + str(e) + StatementError.__init__( + self, + "(%s.%s) %s" + % (orig.__class__.__module__, orig.__class__.__name__, text), + statement, + params, + orig, + hide_parameters, + code=code, + ismulti=ismulti, + ) + self.connection_invalidated = connection_invalidated + + +class InterfaceError(DBAPIError): + """Wraps a DB-API InterfaceError.""" + + code = "rvf5" + + +class DatabaseError(DBAPIError): + """Wraps a DB-API DatabaseError.""" + + code = "4xp6" + + +class DataError(DatabaseError): + """Wraps a DB-API DataError.""" + + code = "9h9h" + + +class OperationalError(DatabaseError): + """Wraps a DB-API OperationalError.""" + + code = "e3q8" + + +class IntegrityError(DatabaseError): + """Wraps a DB-API IntegrityError.""" + + code = "gkpj" + + +class InternalError(DatabaseError): + """Wraps a DB-API InternalError.""" + + code = "2j85" + + +class ProgrammingError(DatabaseError): + """Wraps a 
DB-API ProgrammingError.""" + + code = "f405" + + +class NotSupportedError(DatabaseError): + """Wraps a DB-API NotSupportedError.""" + + code = "tw8g" + + +# Warnings + + +class SATestSuiteWarning(Warning): + """warning for a condition detected during tests that is non-fatal + + Currently outside of SAWarning so that we can work around tools like + Alembic doing the wrong thing with warnings. + + """ + + +class SADeprecationWarning(HasDescriptionCode, DeprecationWarning): + """Issued for usage of deprecated APIs.""" + + deprecated_since: Optional[str] = None + "Indicates the version that started raising this deprecation warning" + + +class Base20DeprecationWarning(SADeprecationWarning): + """Issued for usage of APIs specifically deprecated or legacy in + SQLAlchemy 2.0. + + .. seealso:: + + :ref:`error_b8d9`. + + :ref:`deprecation_20_mode` + + """ + + deprecated_since: Optional[str] = "1.4" + "Indicates the version that started raising this deprecation warning" + + def __str__(self) -> str: + return ( + super().__str__() + + " (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9)" + ) + + +class LegacyAPIWarning(Base20DeprecationWarning): + """indicates an API that is in 'legacy' status, a long term deprecation.""" + + +class MovedIn20Warning(Base20DeprecationWarning): + """Subtype of RemovedIn20Warning to indicate an API that moved only.""" + + +class SAPendingDeprecationWarning(PendingDeprecationWarning): + """A similar warning as :class:`_exc.SADeprecationWarning`, this warning + is not used in modern versions of SQLAlchemy. + + """ + + deprecated_since: Optional[str] = None + "Indicates the version that started raising this deprecation warning" + + +class SAWarning(HasDescriptionCode, RuntimeWarning): + """Issued at runtime.""" + + _what_are_we = "warning" diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__init__.py new file mode 100644 index 00000000..f03ed945 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__init__.py @@ -0,0 +1,11 @@ +# ext/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from .. 
import util as _sa_util + + +_sa_util.preloaded.import_prefix("sqlalchemy.ext") diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..df7651bd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/associationproxy.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/associationproxy.cpython-312.pyc new file mode 100644 index 00000000..d41830ec Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/associationproxy.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/automap.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/automap.cpython-312.pyc new file mode 100644 index 00000000..4a4a13e7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/automap.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/baked.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/baked.cpython-312.pyc new file mode 100644 index 00000000..88fd07ad Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/baked.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/compiler.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/compiler.cpython-312.pyc new file mode 100644 index 00000000..0ab7e67b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/compiler.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/horizontal_shard.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/horizontal_shard.cpython-312.pyc new file mode 100644 index 00000000..9f00e411 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/horizontal_shard.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/hybrid.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/hybrid.cpython-312.pyc new file mode 100644 index 00000000..7cf1e298 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/hybrid.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/indexable.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/indexable.cpython-312.pyc new file mode 100644 index 00000000..64fd8972 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/indexable.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/instrumentation.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/instrumentation.cpython-312.pyc new file mode 100644 index 00000000..cc29eb81 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/instrumentation.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/mutable.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/mutable.cpython-312.pyc new file 
mode 100644 index 00000000..c6e32e94 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/mutable.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/orderinglist.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/orderinglist.cpython-312.pyc new file mode 100644 index 00000000..5d7dadd3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/orderinglist.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/serializer.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/serializer.cpython-312.pyc new file mode 100644 index 00000000..93b9e077 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/__pycache__/serializer.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/associationproxy.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/associationproxy.py new file mode 100644 index 00000000..80e6fdac --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/associationproxy.py @@ -0,0 +1,2005 @@ +# ext/associationproxy.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Contain the ``AssociationProxy`` class. + +The ``AssociationProxy`` is a Python property object which provides +transparent proxied access to the endpoint of an association object. + +See the example ``examples/association/proxied_association.py``. + +""" +from __future__ import annotations + +import operator +import typing +from typing import AbstractSet +from typing import Any +from typing import Callable +from typing import cast +from typing import Collection +from typing import Dict +from typing import Generic +from typing import ItemsView +from typing import Iterable +from typing import Iterator +from typing import KeysView +from typing import List +from typing import Mapping +from typing import MutableMapping +from typing import MutableSequence +from typing import MutableSet +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Set +from typing import Tuple +from typing import Type +from typing import TypeVar +from typing import Union +from typing import ValuesView + +from .. import ColumnElement +from .. import exc +from .. import inspect +from .. import orm +from .. 
import util +from ..orm import collections +from ..orm import InspectionAttrExtensionType +from ..orm import interfaces +from ..orm import ORMDescriptor +from ..orm.base import SQLORMOperations +from ..orm.interfaces import _AttributeOptions +from ..orm.interfaces import _DCAttributeOptions +from ..orm.interfaces import _DEFAULT_ATTRIBUTE_OPTIONS +from ..sql import operators +from ..sql import or_ +from ..sql.base import _NoArg +from ..util.typing import Literal +from ..util.typing import Protocol +from ..util.typing import Self +from ..util.typing import SupportsIndex +from ..util.typing import SupportsKeysAndGetItem + +if typing.TYPE_CHECKING: + from ..orm.interfaces import MapperProperty + from ..orm.interfaces import PropComparator + from ..orm.mapper import Mapper + from ..sql._typing import _ColumnExpressionArgument + from ..sql._typing import _InfoType + + +_T = TypeVar("_T", bound=Any) +_T_co = TypeVar("_T_co", bound=Any, covariant=True) +_T_con = TypeVar("_T_con", bound=Any, contravariant=True) +_S = TypeVar("_S", bound=Any) +_KT = TypeVar("_KT", bound=Any) +_VT = TypeVar("_VT", bound=Any) + + +def association_proxy( + target_collection: str, + attr: str, + *, + creator: Optional[_CreatorProtocol] = None, + getset_factory: Optional[_GetSetFactoryProtocol] = None, + proxy_factory: Optional[_ProxyFactoryProtocol] = None, + proxy_bulk_set: Optional[_ProxyBulkSetProtocol] = None, + info: Optional[_InfoType] = None, + cascade_scalar_deletes: bool = False, + create_on_none_assignment: bool = False, + init: Union[_NoArg, bool] = _NoArg.NO_ARG, + repr: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + default: Optional[Any] = _NoArg.NO_ARG, + default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, + compare: Union[_NoArg, bool] = _NoArg.NO_ARG, + kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, +) -> AssociationProxy[Any]: + r"""Return a Python property implementing a view of a target + attribute which references an attribute on members of the + target. + + The returned value is an instance of :class:`.AssociationProxy`. + + Implements a Python property representing a relationship as a collection + of simpler values, or a scalar value. The proxied property will mimic + the collection type of the target (list, dict or set), or, in the case of + a one to one relationship, a simple scalar value. + + :param target_collection: Name of the attribute that is the immediate + target. This attribute is typically mapped by + :func:`~sqlalchemy.orm.relationship` to link to a target collection, but + can also be a many-to-one or non-scalar relationship. + + :param attr: Attribute on the associated instance or instances that + are available on instances of the target object. + + :param creator: optional. + + Defines custom behavior when new items are added to the proxied + collection. + + By default, adding new items to the collection will trigger a + construction of an instance of the target object, passing the given + item as a positional argument to the target constructor. For cases + where this isn't sufficient, :paramref:`.association_proxy.creator` + can supply a callable that will construct the object in the + appropriate way, given the item that was passed. + + For list- and set- oriented collections, a single argument is + passed to the callable. For dictionary oriented collections, two + arguments are passed, corresponding to the key and value. + + The :paramref:`.association_proxy.creator` callable is also invoked + for scalar (i.e. many-to-one, one-to-one) relationships. 
If the + current value of the target relationship attribute is ``None``, the + callable is used to construct a new object. If an object value already + exists, the given attribute value is populated onto that object. + + .. seealso:: + + :ref:`associationproxy_creator` + + :param cascade_scalar_deletes: when True, indicates that setting + the proxied value to ``None``, or deleting it via ``del``, should + also remove the source object. Only applies to scalar attributes. + Normally, removing the proxied target will not remove the proxy + source, as this object may have other state that is still to be + kept. + + .. versionadded:: 1.3 + + .. seealso:: + + :ref:`cascade_scalar_deletes` - complete usage example + + :param create_on_none_assignment: when True, indicates that setting + the proxied value to ``None`` should **create** the source object + if it does not exist, using the creator. Only applies to scalar + attributes. This is mutually exclusive + vs. the :paramref:`.assocation_proxy.cascade_scalar_deletes`. + + .. versionadded:: 2.0.18 + + :param init: Specific to :ref:`orm_declarative_native_dataclasses`, + specifies if the mapped attribute should be part of the ``__init__()`` + method as generated by the dataclass process. + + .. versionadded:: 2.0.0b4 + + :param repr: Specific to :ref:`orm_declarative_native_dataclasses`, + specifies if the attribute established by this :class:`.AssociationProxy` + should be part of the ``__repr__()`` method as generated by the dataclass + process. + + .. versionadded:: 2.0.0b4 + + :param default_factory: Specific to + :ref:`orm_declarative_native_dataclasses`, specifies a default-value + generation function that will take place as part of the ``__init__()`` + method as generated by the dataclass process. + + .. versionadded:: 2.0.0b4 + + :param compare: Specific to + :ref:`orm_declarative_native_dataclasses`, indicates if this field + should be included in comparison operations when generating the + ``__eq__()`` and ``__ne__()`` methods for the mapped class. + + .. versionadded:: 2.0.0b4 + + :param kw_only: Specific to :ref:`orm_declarative_native_dataclasses`, + indicates if this field should be marked as keyword-only when generating + the ``__init__()`` method as generated by the dataclass process. + + .. versionadded:: 2.0.0b4 + + :param info: optional, will be assigned to + :attr:`.AssociationProxy.info` if present. + + + The following additional parameters involve injection of custom behaviors + within the :class:`.AssociationProxy` object and are for advanced use + only: + + :param getset_factory: Optional. Proxied attribute access is + automatically handled by routines that get and set values based on + the `attr` argument for this proxy. + + If you would like to customize this behavior, you may supply a + `getset_factory` callable that produces a tuple of `getter` and + `setter` functions. The factory is called with two arguments, the + abstract type of the underlying collection and this proxy instance. + + :param proxy_factory: Optional. The type of collection to emulate is + determined by sniffing the target collection. If your collection + type can't be determined by duck typing or you'd like to use a + different collection implementation, you may supply a factory + function to produce those collections. Only applicable to + non-scalar relationships. + + :param proxy_bulk_set: Optional, use with proxy_factory. 
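+
+    A minimal usage sketch (``User``, ``kws`` and ``Keyword`` are
+    hypothetical mapped names)::
+
+        class User(Base):
+            # ...
+            kws = relationship("Keyword")
+            keywords = association_proxy("kws", "keyword")
+
+        user.keywords.append("cheese inspector")
+        # roughly equivalent to:
+        # user.kws.append(Keyword("cheese inspector"))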
+ + + """ + return AssociationProxy( + target_collection, + attr, + creator=creator, + getset_factory=getset_factory, + proxy_factory=proxy_factory, + proxy_bulk_set=proxy_bulk_set, + info=info, + cascade_scalar_deletes=cascade_scalar_deletes, + create_on_none_assignment=create_on_none_assignment, + attribute_options=_AttributeOptions( + init, repr, default, default_factory, compare, kw_only + ), + ) + + +class AssociationProxyExtensionType(InspectionAttrExtensionType): + ASSOCIATION_PROXY = "ASSOCIATION_PROXY" + """Symbol indicating an :class:`.InspectionAttr` that's + of type :class:`.AssociationProxy`. + + Is assigned to the :attr:`.InspectionAttr.extension_type` + attribute. + + """ + + +class _GetterProtocol(Protocol[_T_co]): + def __call__(self, instance: Any) -> _T_co: ... + + +# mypy 0.990 we are no longer allowed to make this Protocol[_T_con] +class _SetterProtocol(Protocol): ... + + +class _PlainSetterProtocol(_SetterProtocol, Protocol[_T_con]): + def __call__(self, instance: Any, value: _T_con) -> None: ... + + +class _DictSetterProtocol(_SetterProtocol, Protocol[_T_con]): + def __call__(self, instance: Any, key: Any, value: _T_con) -> None: ... + + +# mypy 0.990 we are no longer allowed to make this Protocol[_T_con] +class _CreatorProtocol(Protocol): ... + + +class _PlainCreatorProtocol(_CreatorProtocol, Protocol[_T_con]): + def __call__(self, value: _T_con) -> Any: ... + + +class _KeyCreatorProtocol(_CreatorProtocol, Protocol[_T_con]): + def __call__(self, key: Any, value: Optional[_T_con]) -> Any: ... + + +class _LazyCollectionProtocol(Protocol[_T]): + def __call__( + self, + ) -> Union[ + MutableSet[_T], MutableMapping[Any, _T], MutableSequence[_T] + ]: ... + + +class _GetSetFactoryProtocol(Protocol): + def __call__( + self, + collection_class: Optional[Type[Any]], + assoc_instance: AssociationProxyInstance[Any], + ) -> Tuple[_GetterProtocol[Any], _SetterProtocol]: ... + + +class _ProxyFactoryProtocol(Protocol): + def __call__( + self, + lazy_collection: _LazyCollectionProtocol[Any], + creator: _CreatorProtocol, + value_attr: str, + parent: AssociationProxyInstance[Any], + ) -> Any: ... + + +class _ProxyBulkSetProtocol(Protocol): + def __call__( + self, proxy: _AssociationCollection[Any], collection: Iterable[Any] + ) -> None: ... + + +class _AssociationProxyProtocol(Protocol[_T]): + """describes the interface of :class:`.AssociationProxy` + without including descriptor methods in the interface.""" + + creator: Optional[_CreatorProtocol] + key: str + target_collection: str + value_attr: str + cascade_scalar_deletes: bool + create_on_none_assignment: bool + getset_factory: Optional[_GetSetFactoryProtocol] + proxy_factory: Optional[_ProxyFactoryProtocol] + proxy_bulk_set: Optional[_ProxyBulkSetProtocol] + + @util.ro_memoized_property + def info(self) -> _InfoType: ... + + def for_class( + self, class_: Type[Any], obj: Optional[object] = None + ) -> AssociationProxyInstance[_T]: ... + + def _default_getset( + self, collection_class: Any + ) -> Tuple[_GetterProtocol[Any], _SetterProtocol]: ... 
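+
+
+# A minimal, self-contained usage sketch of the ``association_proxy()``
+# construct documented above.  The ``User`` / ``Keyword`` mapping and the
+# in-memory SQLite engine are illustrative assumptions, not part of the
+# upstream API; nothing in this module invokes this function.
+def _association_proxy_usage_sketch() -> None:
+    from sqlalchemy import create_engine
+    from sqlalchemy import ForeignKey
+    from sqlalchemy import select
+    from sqlalchemy.orm import DeclarativeBase
+    from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm import mapped_column
+    from sqlalchemy.orm import relationship
+    from sqlalchemy.orm import Session
+
+    class Base(DeclarativeBase):
+        pass
+
+    class Keyword(Base):
+        __tablename__ = "keyword"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        user_id: Mapped[int] = mapped_column(ForeignKey("user_account.id"))
+        word: Mapped[str]
+
+        def __init__(self, word: str):
+            # a single positional argument, so the default "creator"
+            # (the class itself) is sufficient
+            self.word = word
+
+    class User(Base):
+        __tablename__ = "user_account"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        kws: Mapped[List[Keyword]] = relationship()
+
+        # view the 'word' attribute across the 'kws' collection as a
+        # plain list of strings
+        keywords = association_proxy("kws", "word")
+
+    engine = create_engine("sqlite://")
+    Base.metadata.create_all(engine)
+    with Session(engine) as session:
+        user = User()
+        user.keywords.append("editing")  # creates Keyword("editing")
+        session.add(user)
+        session.commit()
+        assert session.scalars(select(User)).one().keywords == ["editing"]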
+ + +class AssociationProxy( + interfaces.InspectionAttrInfo, + ORMDescriptor[_T], + _DCAttributeOptions, + _AssociationProxyProtocol[_T], +): + """A descriptor that presents a read/write view of an object attribute.""" + + is_attribute = True + extension_type = AssociationProxyExtensionType.ASSOCIATION_PROXY + + def __init__( + self, + target_collection: str, + attr: str, + *, + creator: Optional[_CreatorProtocol] = None, + getset_factory: Optional[_GetSetFactoryProtocol] = None, + proxy_factory: Optional[_ProxyFactoryProtocol] = None, + proxy_bulk_set: Optional[_ProxyBulkSetProtocol] = None, + info: Optional[_InfoType] = None, + cascade_scalar_deletes: bool = False, + create_on_none_assignment: bool = False, + attribute_options: Optional[_AttributeOptions] = None, + ): + """Construct a new :class:`.AssociationProxy`. + + The :class:`.AssociationProxy` object is typically constructed using + the :func:`.association_proxy` constructor function. See the + description of :func:`.association_proxy` for a description of all + parameters. + + + """ + self.target_collection = target_collection + self.value_attr = attr + self.creator = creator + self.getset_factory = getset_factory + self.proxy_factory = proxy_factory + self.proxy_bulk_set = proxy_bulk_set + + if cascade_scalar_deletes and create_on_none_assignment: + raise exc.ArgumentError( + "The cascade_scalar_deletes and create_on_none_assignment " + "parameters are mutually exclusive." + ) + self.cascade_scalar_deletes = cascade_scalar_deletes + self.create_on_none_assignment = create_on_none_assignment + + self.key = "_%s_%s_%s" % ( + type(self).__name__, + target_collection, + id(self), + ) + if info: + self.info = info # type: ignore + + if ( + attribute_options + and attribute_options != _DEFAULT_ATTRIBUTE_OPTIONS + ): + self._has_dataclass_arguments = True + self._attribute_options = attribute_options + else: + self._has_dataclass_arguments = False + self._attribute_options = _DEFAULT_ATTRIBUTE_OPTIONS + + @overload + def __get__( + self, instance: Literal[None], owner: Literal[None] + ) -> Self: ... + + @overload + def __get__( + self, instance: Literal[None], owner: Any + ) -> AssociationProxyInstance[_T]: ... + + @overload + def __get__(self, instance: object, owner: Any) -> _T: ... + + def __get__( + self, instance: object, owner: Any + ) -> Union[AssociationProxyInstance[_T], _T, AssociationProxy[_T]]: + if owner is None: + return self + inst = self._as_instance(owner, instance) + if inst: + return inst.get(instance) + + assert instance is None + + return self + + def __set__(self, instance: object, values: _T) -> None: + class_ = type(instance) + self._as_instance(class_, instance).set(instance, values) + + def __delete__(self, instance: object) -> None: + class_ = type(instance) + self._as_instance(class_, instance).delete(instance) + + def for_class( + self, class_: Type[Any], obj: Optional[object] = None + ) -> AssociationProxyInstance[_T]: + r"""Return the internal state local to a specific mapped class. + + E.g., given a class ``User``:: + + class User(Base): + # ... + + keywords = association_proxy('kws', 'keyword') + + If we access this :class:`.AssociationProxy` from + :attr:`_orm.Mapper.all_orm_descriptors`, and we want to view the + target class for this proxy as mapped by ``User``:: + + inspect(User).all_orm_descriptors["keywords"].for_class(User).target_class + + This returns an instance of :class:`.AssociationProxyInstance` that + is specific to the ``User`` class. 
The :class:`.AssociationProxy` + object remains agnostic of its parent class. + + :param class\_: the class that we are returning state for. + + :param obj: optional, an instance of the class that is required + if the attribute refers to a polymorphic target, e.g. where we have + to look at the type of the actual destination object to get the + complete path. + + .. versionadded:: 1.3 - :class:`.AssociationProxy` no longer stores + any state specific to a particular parent class; the state is now + stored in per-class :class:`.AssociationProxyInstance` objects. + + + """ + return self._as_instance(class_, obj) + + def _as_instance( + self, class_: Any, obj: Any + ) -> AssociationProxyInstance[_T]: + try: + inst = class_.__dict__[self.key + "_inst"] + except KeyError: + inst = None + + # avoid exception context + if inst is None: + owner = self._calc_owner(class_) + if owner is not None: + inst = AssociationProxyInstance.for_proxy(self, owner, obj) + setattr(class_, self.key + "_inst", inst) + else: + inst = None + + if inst is not None and not inst._is_canonical: + # the AssociationProxyInstance can't be generalized + # since the proxied attribute is not on the targeted + # class, only on subclasses of it, which might be + # different. only return for the specific + # object's current value + return inst._non_canonical_get_for_object(obj) # type: ignore + else: + return inst # type: ignore # TODO + + def _calc_owner(self, target_cls: Any) -> Any: + # we might be getting invoked for a subclass + # that is not mapped yet, in some declarative situations. + # save until we are mapped + try: + insp = inspect(target_cls) + except exc.NoInspectionAvailable: + # can't find a mapper, don't set owner. if we are a not-yet-mapped + # subclass, we can also scan through __mro__ to find a mapped + # class, but instead just wait for us to be called again against a + # mapped class normally. + return None + else: + return insp.mapper.class_manager.class_ + + def _default_getset( + self, collection_class: Any + ) -> Tuple[_GetterProtocol[Any], _SetterProtocol]: + attr = self.value_attr + _getter = operator.attrgetter(attr) + + def getter(instance: Any) -> Optional[Any]: + return _getter(instance) if instance is not None else None + + if collection_class is dict: + + def dict_setter(instance: Any, k: Any, value: Any) -> None: + setattr(instance, attr, value) + + return getter, dict_setter + + else: + + def plain_setter(o: Any, v: Any) -> None: + setattr(o, attr, v) + + return getter, plain_setter + + def __repr__(self) -> str: + return "AssociationProxy(%r, %r)" % ( + self.target_collection, + self.value_attr, + ) + + +# the pep-673 Self type does not work in Mypy for a "hybrid" +# style method that returns type or Self, so for one specific case +# we still need to use the pre-pep-673 workaround. +_Self = TypeVar("_Self", bound="AssociationProxyInstance[Any]") + + +class AssociationProxyInstance(SQLORMOperations[_T]): + """A per-class object that serves class- and object-specific results. + + This is used by :class:`.AssociationProxy` when it is invoked + in terms of a specific class or instance of a class, i.e. when it is + used as a regular Python descriptor. + + When referring to the :class:`.AssociationProxy` as a normal Python + descriptor, the :class:`.AssociationProxyInstance` is the object that + actually serves the information. 
Under normal circumstances, its presence
+    is transparent::
+
+        >>> User.keywords.scalar
+        False
+
+    In the special case that the :class:`.AssociationProxy` object is being
+    accessed directly, in order to get an explicit handle to the
+    :class:`.AssociationProxyInstance`, use the
+    :meth:`.AssociationProxy.for_class` method::
+
+        proxy_state = inspect(User).all_orm_descriptors["keywords"].for_class(User)
+
+        # view if proxy object is scalar or not
+        >>> proxy_state.scalar
+        False
+
+    .. versionadded:: 1.3
+
+    """  # noqa
+
+    collection_class: Optional[Type[Any]]
+    parent: _AssociationProxyProtocol[_T]
+
+    def __init__(
+        self,
+        parent: _AssociationProxyProtocol[_T],
+        owning_class: Type[Any],
+        target_class: Type[Any],
+        value_attr: str,
+    ):
+        self.parent = parent
+        self.key = parent.key
+        self.owning_class = owning_class
+        self.target_collection = parent.target_collection
+        self.collection_class = None
+        self.target_class = target_class
+        self.value_attr = value_attr
+
+    target_class: Type[Any]
+    """The intermediary class handled by this
+    :class:`.AssociationProxyInstance`.
+
+    Intercepted append/set/assignment events will result
+    in the generation of new instances of this class.
+
+    """
+
+    @classmethod
+    def for_proxy(
+        cls,
+        parent: AssociationProxy[_T],
+        owning_class: Type[Any],
+        parent_instance: Any,
+    ) -> AssociationProxyInstance[_T]:
+        target_collection = parent.target_collection
+        value_attr = parent.value_attr
+        prop = cast(
+            "orm.RelationshipProperty[_T]",
+            orm.class_mapper(owning_class).get_property(target_collection),
+        )
+
+        # this was never asserted before but this should be made clear.
+        if not isinstance(prop, orm.RelationshipProperty):
+            raise NotImplementedError(
+                "association proxy to a non-relationship "
+                "intermediary is not supported"
+            ) from None
+
+        target_class = prop.mapper.class_
+
+        try:
+            target_assoc = cast(
+                "AssociationProxyInstance[_T]",
+                cls._cls_unwrap_target_assoc_proxy(target_class, value_attr),
+            )
+        except AttributeError:
+            # the proxied attribute doesn't exist on the target class;
+            # return an "ambiguous" instance that will work on a per-object
+            # basis
+            return AmbiguousAssociationProxyInstance(
+                parent, owning_class, target_class, value_attr
+            )
+        except Exception as err:
+            raise exc.InvalidRequestError(
+                f"Association proxy received an unexpected error when "
+                f"trying to retrieve attribute "
+                f'"{target_class.__name__}.{parent.value_attr}" from '
+                f'class "{target_class.__name__}": {err}'
+            ) from err
+        else:
+            return cls._construct_for_assoc(
+                target_assoc, parent, owning_class, target_class, value_attr
+            )
+
+    @classmethod
+    def _construct_for_assoc(
+        cls,
+        target_assoc: Optional[AssociationProxyInstance[_T]],
+        parent: _AssociationProxyProtocol[_T],
+        owning_class: Type[Any],
+        target_class: Type[Any],
+        value_attr: str,
+    ) -> AssociationProxyInstance[_T]:
+        if target_assoc is not None:
+            return ObjectAssociationProxyInstance(
+                parent, owning_class, target_class, value_attr
+            )
+
+        attr = getattr(target_class, value_attr)
+        if not hasattr(attr, "_is_internal_proxy"):
+            return AmbiguousAssociationProxyInstance(
+                parent, owning_class, target_class, value_attr
+            )
+        is_object = attr._impl_uses_objects
+        if is_object:
+            return ObjectAssociationProxyInstance(
+                parent, owning_class, target_class, value_attr
+            )
+        else:
+            return ColumnAssociationProxyInstance(
+                parent, owning_class, target_class, value_attr
+            )
+
+    def _get_property(self) -> MapperProperty[Any]:
+        return
orm.class_mapper(self.owning_class).get_property( + self.target_collection + ) + + @property + def _comparator(self) -> PropComparator[Any]: + return getattr( # type: ignore + self.owning_class, self.target_collection + ).comparator + + def __clause_element__(self) -> NoReturn: + raise NotImplementedError( + "The association proxy can't be used as a plain column " + "expression; it only works inside of a comparison expression" + ) + + @classmethod + def _cls_unwrap_target_assoc_proxy( + cls, target_class: Any, value_attr: str + ) -> Optional[AssociationProxyInstance[_T]]: + attr = getattr(target_class, value_attr) + assert not isinstance(attr, AssociationProxy) + if isinstance(attr, AssociationProxyInstance): + return attr + return None + + @util.memoized_property + def _unwrap_target_assoc_proxy( + self, + ) -> Optional[AssociationProxyInstance[_T]]: + return self._cls_unwrap_target_assoc_proxy( + self.target_class, self.value_attr + ) + + @property + def remote_attr(self) -> SQLORMOperations[_T]: + """The 'remote' class attribute referenced by this + :class:`.AssociationProxyInstance`. + + .. seealso:: + + :attr:`.AssociationProxyInstance.attr` + + :attr:`.AssociationProxyInstance.local_attr` + + """ + return cast( + "SQLORMOperations[_T]", getattr(self.target_class, self.value_attr) + ) + + @property + def local_attr(self) -> SQLORMOperations[Any]: + """The 'local' class attribute referenced by this + :class:`.AssociationProxyInstance`. + + .. seealso:: + + :attr:`.AssociationProxyInstance.attr` + + :attr:`.AssociationProxyInstance.remote_attr` + + """ + return cast( + "SQLORMOperations[Any]", + getattr(self.owning_class, self.target_collection), + ) + + @property + def attr(self) -> Tuple[SQLORMOperations[Any], SQLORMOperations[_T]]: + """Return a tuple of ``(local_attr, remote_attr)``. + + This attribute was originally intended to facilitate using the + :meth:`_query.Query.join` method to join across the two relationships + at once, however this makes use of a deprecated calling style. + + To use :meth:`_sql.select.join` or :meth:`_orm.Query.join` with + an association proxy, the current method is to make use of the + :attr:`.AssociationProxyInstance.local_attr` and + :attr:`.AssociationProxyInstance.remote_attr` attributes separately:: + + stmt = ( + select(Parent). + join(Parent.proxied.local_attr). + join(Parent.proxied.remote_attr) + ) + + A future release may seek to provide a more succinct join pattern + for association proxy attributes. + + .. 
seealso:: + + :attr:`.AssociationProxyInstance.local_attr` + + :attr:`.AssociationProxyInstance.remote_attr` + + """ + return (self.local_attr, self.remote_attr) + + @util.memoized_property + def scalar(self) -> bool: + """Return ``True`` if this :class:`.AssociationProxyInstance` + proxies a scalar relationship on the local side.""" + + scalar = not self._get_property().uselist + if scalar: + self._initialize_scalar_accessors() + return scalar + + @util.memoized_property + def _value_is_scalar(self) -> bool: + return ( + not self._get_property() + .mapper.get_property(self.value_attr) + .uselist + ) + + @property + def _target_is_object(self) -> bool: + raise NotImplementedError() + + _scalar_get: _GetterProtocol[_T] + _scalar_set: _PlainSetterProtocol[_T] + + def _initialize_scalar_accessors(self) -> None: + if self.parent.getset_factory: + get, set_ = self.parent.getset_factory(None, self) + else: + get, set_ = self.parent._default_getset(None) + self._scalar_get, self._scalar_set = get, cast( + "_PlainSetterProtocol[_T]", set_ + ) + + def _default_getset( + self, collection_class: Any + ) -> Tuple[_GetterProtocol[Any], _SetterProtocol]: + attr = self.value_attr + _getter = operator.attrgetter(attr) + + def getter(instance: Any) -> Optional[_T]: + return _getter(instance) if instance is not None else None + + if collection_class is dict: + + def dict_setter(instance: Any, k: Any, value: _T) -> None: + setattr(instance, attr, value) + + return getter, dict_setter + else: + + def plain_setter(o: Any, v: _T) -> None: + setattr(o, attr, v) + + return getter, plain_setter + + @util.ro_non_memoized_property + def info(self) -> _InfoType: + return self.parent.info + + @overload + def get(self: _Self, obj: Literal[None]) -> _Self: ... + + @overload + def get(self, obj: Any) -> _T: ... + + def get( + self, obj: Any + ) -> Union[Optional[_T], AssociationProxyInstance[_T]]: + if obj is None: + return self + + proxy: _T + + if self.scalar: + target = getattr(obj, self.target_collection) + return self._scalar_get(target) + else: + try: + # If the owning instance is reborn (orm session resurrect, + # etc.), refresh the proxy cache. 
+ creator_id, self_id, proxy = cast( + "Tuple[int, int, _T]", getattr(obj, self.key) + ) + except AttributeError: + pass + else: + if id(obj) == creator_id and id(self) == self_id: + assert self.collection_class is not None + return proxy + + self.collection_class, proxy = self._new( + _lazy_collection(obj, self.target_collection) + ) + setattr(obj, self.key, (id(obj), id(self), proxy)) + return proxy + + def set(self, obj: Any, values: _T) -> None: + if self.scalar: + creator = cast( + "_PlainCreatorProtocol[_T]", + ( + self.parent.creator + if self.parent.creator + else self.target_class + ), + ) + target = getattr(obj, self.target_collection) + if target is None: + if ( + values is None + and not self.parent.create_on_none_assignment + ): + return + setattr(obj, self.target_collection, creator(values)) + else: + self._scalar_set(target, values) + if values is None and self.parent.cascade_scalar_deletes: + setattr(obj, self.target_collection, None) + else: + proxy = self.get(obj) + assert self.collection_class is not None + if proxy is not values: + proxy._bulk_replace(self, values) + + def delete(self, obj: Any) -> None: + if self.owning_class is None: + self._calc_owner(obj, None) + + if self.scalar: + target = getattr(obj, self.target_collection) + if target is not None: + delattr(target, self.value_attr) + delattr(obj, self.target_collection) + + def _new( + self, lazy_collection: _LazyCollectionProtocol[_T] + ) -> Tuple[Type[Any], _T]: + creator = ( + self.parent.creator + if self.parent.creator is not None + else cast("_CreatorProtocol", self.target_class) + ) + collection_class = util.duck_type_collection(lazy_collection()) + + if collection_class is None: + raise exc.InvalidRequestError( + f"lazy collection factory did not return a " + f"valid collection type, got {collection_class}" + ) + if self.parent.proxy_factory: + return ( + collection_class, + self.parent.proxy_factory( + lazy_collection, creator, self.value_attr, self + ), + ) + + if self.parent.getset_factory: + getter, setter = self.parent.getset_factory(collection_class, self) + else: + getter, setter = self.parent._default_getset(collection_class) + + if collection_class is list: + return ( + collection_class, + cast( + _T, + _AssociationList( + lazy_collection, creator, getter, setter, self + ), + ), + ) + elif collection_class is dict: + return ( + collection_class, + cast( + _T, + _AssociationDict( + lazy_collection, creator, getter, setter, self + ), + ), + ) + elif collection_class is set: + return ( + collection_class, + cast( + _T, + _AssociationSet( + lazy_collection, creator, getter, setter, self + ), + ), + ) + else: + raise exc.ArgumentError( + "could not guess which interface to use for " + 'collection_class "%s" backing "%s"; specify a ' + "proxy_factory and proxy_bulk_set manually" + % (self.collection_class, self.target_collection) + ) + + def _set( + self, proxy: _AssociationCollection[Any], values: Iterable[Any] + ) -> None: + if self.parent.proxy_bulk_set: + self.parent.proxy_bulk_set(proxy, values) + elif self.collection_class is list: + cast("_AssociationList[Any]", proxy).extend(values) + elif self.collection_class is dict: + cast("_AssociationDict[Any, Any]", proxy).update(values) + elif self.collection_class is set: + cast("_AssociationSet[Any]", proxy).update(values) + else: + raise exc.ArgumentError( + "no proxy_bulk_set supplied for custom " + "collection_class implementation" + ) + + def _inflate(self, proxy: _AssociationCollection[Any]) -> None: + creator = ( + self.parent.creator + and 
self.parent.creator + or cast(_CreatorProtocol, self.target_class) + ) + + if self.parent.getset_factory: + getter, setter = self.parent.getset_factory( + self.collection_class, self + ) + else: + getter, setter = self.parent._default_getset(self.collection_class) + + proxy.creator = creator + proxy.getter = getter + proxy.setter = setter + + def _criterion_exists( + self, + criterion: Optional[_ColumnExpressionArgument[bool]] = None, + **kwargs: Any, + ) -> ColumnElement[bool]: + is_has = kwargs.pop("is_has", None) + + target_assoc = self._unwrap_target_assoc_proxy + if target_assoc is not None: + inner = target_assoc._criterion_exists( + criterion=criterion, **kwargs + ) + return self._comparator._criterion_exists(inner) + + if self._target_is_object: + attr = getattr(self.target_class, self.value_attr) + value_expr = attr.comparator._criterion_exists(criterion, **kwargs) + else: + if kwargs: + raise exc.ArgumentError( + "Can't apply keyword arguments to column-targeted " + "association proxy; use ==" + ) + elif is_has and criterion is not None: + raise exc.ArgumentError( + "Non-empty has() not allowed for " + "column-targeted association proxy; use ==" + ) + + value_expr = criterion + + return self._comparator._criterion_exists(value_expr) + + def any( + self, + criterion: Optional[_ColumnExpressionArgument[bool]] = None, + **kwargs: Any, + ) -> ColumnElement[bool]: + """Produce a proxied 'any' expression using EXISTS. + + This expression will be a composed product + using the :meth:`.Relationship.Comparator.any` + and/or :meth:`.Relationship.Comparator.has` + operators of the underlying proxied attributes. + + """ + if self._unwrap_target_assoc_proxy is None and ( + self.scalar + and (not self._target_is_object or self._value_is_scalar) + ): + raise exc.InvalidRequestError( + "'any()' not implemented for scalar attributes. Use has()." + ) + return self._criterion_exists( + criterion=criterion, is_has=False, **kwargs + ) + + def has( + self, + criterion: Optional[_ColumnExpressionArgument[bool]] = None, + **kwargs: Any, + ) -> ColumnElement[bool]: + """Produce a proxied 'has' expression using EXISTS. + + This expression will be a composed product + using the :meth:`.Relationship.Comparator.any` + and/or :meth:`.Relationship.Comparator.has` + operators of the underlying proxied attributes. + + """ + if self._unwrap_target_assoc_proxy is None and ( + not self.scalar + or (self._target_is_object and not self._value_is_scalar) + ): + raise exc.InvalidRequestError( + "'has()' not implemented for collections. Use any()." + ) + return self._criterion_exists( + criterion=criterion, is_has=True, **kwargs + ) + + def __repr__(self) -> str: + return "%s(%r)" % (self.__class__.__name__, self.parent) + + +class AmbiguousAssociationProxyInstance(AssociationProxyInstance[_T]): + """an :class:`.AssociationProxyInstance` where we cannot determine + the type of target object. 
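+
+    This typically occurs when the proxied attribute is not directly
+    mapped on the target class itself, but only on certain of its
+    subclasses, so the appropriate behavior can only be determined
+    per-instance from the type of the related object actually present.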
+ """ + + _is_canonical = False + + def _ambiguous(self) -> NoReturn: + raise AttributeError( + "Association proxy %s.%s refers to an attribute '%s' that is not " + "directly mapped on class %s; therefore this operation cannot " + "proceed since we don't know what type of object is referred " + "towards" + % ( + self.owning_class.__name__, + self.target_collection, + self.value_attr, + self.target_class, + ) + ) + + def get(self, obj: Any) -> Any: + if obj is None: + return self + else: + return super().get(obj) + + def __eq__(self, obj: object) -> NoReturn: + self._ambiguous() + + def __ne__(self, obj: object) -> NoReturn: + self._ambiguous() + + def any( + self, + criterion: Optional[_ColumnExpressionArgument[bool]] = None, + **kwargs: Any, + ) -> NoReturn: + self._ambiguous() + + def has( + self, + criterion: Optional[_ColumnExpressionArgument[bool]] = None, + **kwargs: Any, + ) -> NoReturn: + self._ambiguous() + + @util.memoized_property + def _lookup_cache(self) -> Dict[Type[Any], AssociationProxyInstance[_T]]: + # mapping of ->AssociationProxyInstance. + # e.g. proxy is A-> A.b -> B -> B.b_attr, but B.b_attr doesn't exist; + # only B1(B) and B2(B) have "b_attr", keys in here would be B1, B2 + return {} + + def _non_canonical_get_for_object( + self, parent_instance: Any + ) -> AssociationProxyInstance[_T]: + if parent_instance is not None: + actual_obj = getattr(parent_instance, self.target_collection) + if actual_obj is not None: + try: + insp = inspect(actual_obj) + except exc.NoInspectionAvailable: + pass + else: + mapper = insp.mapper + instance_class = mapper.class_ + if instance_class not in self._lookup_cache: + self._populate_cache(instance_class, mapper) + + try: + return self._lookup_cache[instance_class] + except KeyError: + pass + + # no object or ambiguous object given, so return "self", which + # is a proxy with generally only instance-level functionality + return self + + def _populate_cache( + self, instance_class: Any, mapper: Mapper[Any] + ) -> None: + prop = orm.class_mapper(self.owning_class).get_property( + self.target_collection + ) + + if mapper.isa(prop.mapper): + target_class = instance_class + try: + target_assoc = self._cls_unwrap_target_assoc_proxy( + target_class, self.value_attr + ) + except AttributeError: + pass + else: + self._lookup_cache[instance_class] = self._construct_for_assoc( + cast("AssociationProxyInstance[_T]", target_assoc), + self.parent, + self.owning_class, + target_class, + self.value_attr, + ) + + +class ObjectAssociationProxyInstance(AssociationProxyInstance[_T]): + """an :class:`.AssociationProxyInstance` that has an object as a target.""" + + _target_is_object: bool = True + _is_canonical = True + + def contains(self, other: Any, **kw: Any) -> ColumnElement[bool]: + """Produce a proxied 'contains' expression using EXISTS. + + This expression will be a composed product + using the :meth:`.Relationship.Comparator.any`, + :meth:`.Relationship.Comparator.has`, + and/or :meth:`.Relationship.Comparator.contains` + operators of the underlying proxied attributes. 
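+
+        For example, given a hypothetical ``User.keywords`` proxy whose
+        endpoint is a mapped ``Keyword`` object (names assumed for
+        illustration only), a membership filter might look like::
+
+            stmt = select(User).where(User.keywords.contains(some_keyword))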
+ """ + + target_assoc = self._unwrap_target_assoc_proxy + if target_assoc is not None: + return self._comparator._criterion_exists( + target_assoc.contains(other) + if not target_assoc.scalar + else target_assoc == other + ) + elif ( + self._target_is_object + and self.scalar + and not self._value_is_scalar + ): + return self._comparator.has( + getattr(self.target_class, self.value_attr).contains(other) + ) + elif self._target_is_object and self.scalar and self._value_is_scalar: + raise exc.InvalidRequestError( + "contains() doesn't apply to a scalar object endpoint; use ==" + ) + else: + return self._comparator._criterion_exists( + **{self.value_attr: other} + ) + + def __eq__(self, obj: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 + # note the has() here will fail for collections; eq_() + # is only allowed with a scalar. + if obj is None: + return or_( + self._comparator.has(**{self.value_attr: obj}), + self._comparator == None, + ) + else: + return self._comparator.has(**{self.value_attr: obj}) + + def __ne__(self, obj: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 + # note the has() here will fail for collections; eq_() + # is only allowed with a scalar. + return self._comparator.has( + getattr(self.target_class, self.value_attr) != obj + ) + + +class ColumnAssociationProxyInstance(AssociationProxyInstance[_T]): + """an :class:`.AssociationProxyInstance` that has a database column as a + target. + """ + + _target_is_object: bool = False + _is_canonical = True + + def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 + # special case "is None" to check for no related row as well + expr = self._criterion_exists( + self.remote_attr.operate(operators.eq, other) + ) + if other is None: + return or_(expr, self._comparator == None) + else: + return expr + + def operate( + self, op: operators.OperatorType, *other: Any, **kwargs: Any + ) -> ColumnElement[Any]: + return self._criterion_exists( + self.remote_attr.operate(op, *other, **kwargs) + ) + + +class _lazy_collection(_LazyCollectionProtocol[_T]): + def __init__(self, obj: Any, target: str): + self.parent = obj + self.target = target + + def __call__( + self, + ) -> Union[MutableSet[_T], MutableMapping[Any, _T], MutableSequence[_T]]: + return getattr(self.parent, self.target) # type: ignore[no-any-return] + + def __getstate__(self) -> Any: + return {"obj": self.parent, "target": self.target} + + def __setstate__(self, state: Any) -> None: + self.parent = state["obj"] + self.target = state["target"] + + +_IT = TypeVar("_IT", bound="Any") +"""instance type - this is the type of object inside a collection. + +this is not the same as the _T of AssociationProxy and +AssociationProxyInstance itself, which will often refer to the +collection[_IT] type. + +""" + + +class _AssociationCollection(Generic[_IT]): + getter: _GetterProtocol[_IT] + """A function. Given an associated object, return the 'value'.""" + + creator: _CreatorProtocol + """ + A function that creates new target entities. Given one parameter: + value. This assertion is assumed:: + + obj = creator(somevalue) + assert getter(obj) == somevalue + """ + + parent: AssociationProxyInstance[_IT] + setter: _SetterProtocol + """A function. Given an associated object and a value, store that + value on the object. 
+    """
+
+    lazy_collection: _LazyCollectionProtocol[_IT]
+    """A callable returning a list-based collection of entities (usually an
+    object attribute managed by a SQLAlchemy relationship())"""
+
+    def __init__(
+        self,
+        lazy_collection: _LazyCollectionProtocol[_IT],
+        creator: _CreatorProtocol,
+        getter: _GetterProtocol[_IT],
+        setter: _SetterProtocol,
+        parent: AssociationProxyInstance[_IT],
+    ):
+        """Constructs an _AssociationCollection.
+
+        This will always be a subclass of either _AssociationList,
+        _AssociationSet, or _AssociationDict.
+
+        """
+        self.lazy_collection = lazy_collection
+        self.creator = creator
+        self.getter = getter
+        self.setter = setter
+        self.parent = parent
+
+    if typing.TYPE_CHECKING:
+        col: Collection[_IT]
+    else:
+        col = property(lambda self: self.lazy_collection())
+
+    def __len__(self) -> int:
+        return len(self.col)
+
+    def __bool__(self) -> bool:
+        return bool(self.col)
+
+    def __getstate__(self) -> Any:
+        return {"parent": self.parent, "lazy_collection": self.lazy_collection}
+
+    def __setstate__(self, state: Any) -> None:
+        self.parent = state["parent"]
+        self.lazy_collection = state["lazy_collection"]
+        self.parent._inflate(self)
+
+    def clear(self) -> None:
+        raise NotImplementedError()
+
+
+class _AssociationSingleItem(_AssociationCollection[_T]):
+    setter: _PlainSetterProtocol[_T]
+    creator: _PlainCreatorProtocol[_T]
+
+    def _create(self, value: _T) -> Any:
+        return self.creator(value)
+
+    def _get(self, object_: Any) -> _T:
+        return self.getter(object_)
+
+    def _bulk_replace(
+        self, assoc_proxy: AssociationProxyInstance[Any], values: Iterable[_IT]
+    ) -> None:
+        self.clear()
+        assoc_proxy._set(self, values)
+
+
+class _AssociationList(_AssociationSingleItem[_T], MutableSequence[_T]):
+    """Generic, converting, list-to-list proxy."""
+
+    col: MutableSequence[_T]
+
+    def _set(self, object_: Any, value: _T) -> None:
+        self.setter(object_, value)
+
+    @overload
+    def __getitem__(self, index: int) -> _T: ...
+
+    @overload
+    def __getitem__(self, index: slice) -> MutableSequence[_T]: ...
+
+    def __getitem__(
+        self, index: Union[int, slice]
+    ) -> Union[_T, MutableSequence[_T]]:
+        if not isinstance(index, slice):
+            return self._get(self.col[index])
+        else:
+            return [self._get(member) for member in self.col[index]]
+
+    @overload
+    def __setitem__(self, index: int, value: _T) -> None: ...
+
+    @overload
+    def __setitem__(self, index: slice, value: Iterable[_T]) -> None: ...
+
+    def __setitem__(
+        self, index: Union[int, slice], value: Union[_T, Iterable[_T]]
+    ) -> None:
+        if not isinstance(index, slice):
+            self._set(self.col[index], cast("_T", value))
+        else:
+            if index.stop is None:
+                stop = len(self)
+            elif index.stop < 0:
+                stop = len(self) + index.stop
+            else:
+                stop = index.stop
+            step = index.step or 1
+
+            start = index.start or 0
+            rng = list(range(start, stop, step))
+
+            # materialize the incoming values once, so that one-shot
+            # iterators aren't exhausted before the assignment below
+            sized_value = list(value)
+
+            if step == 1:
+                for i in rng:
+                    del self[start]
+                i = start
+                for item in sized_value:
+                    self.insert(i, item)
+                    i += 1
+            else:
+                if len(sized_value) != len(rng):
+                    raise ValueError(
+                        "attempt to assign sequence of size %s to "
+                        "extended slice of size %s"
+                        % (len(sized_value), len(rng))
+                    )
+                for i, item in zip(rng, sized_value):
+                    self._set(self.col[i], item)
+
+    @overload
+    def __delitem__(self, index: int) -> None: ...
+
+    @overload
+    def __delitem__(self, index: slice) -> None: ...
+ + def __delitem__(self, index: Union[slice, int]) -> None: + del self.col[index] + + def __contains__(self, value: object) -> bool: + for member in self.col: + # testlib.pragma exempt:__eq__ + if self._get(member) == value: + return True + return False + + def __iter__(self) -> Iterator[_T]: + """Iterate over proxied values. + + For the actual domain objects, iterate over .col instead or + just use the underlying collection directly from its property + on the parent. + """ + + for member in self.col: + yield self._get(member) + return + + def append(self, value: _T) -> None: + col = self.col + item = self._create(value) + col.append(item) + + def count(self, value: Any) -> int: + count = 0 + for v in self: + if v == value: + count += 1 + return count + + def extend(self, values: Iterable[_T]) -> None: + for v in values: + self.append(v) + + def insert(self, index: int, value: _T) -> None: + self.col[index:index] = [self._create(value)] + + def pop(self, index: int = -1) -> _T: + return self.getter(self.col.pop(index)) + + def remove(self, value: _T) -> None: + for i, val in enumerate(self): + if val == value: + del self.col[i] + return + raise ValueError("value not in list") + + def reverse(self) -> NoReturn: + """Not supported, use reversed(mylist)""" + + raise NotImplementedError() + + def sort(self) -> NoReturn: + """Not supported, use sorted(mylist)""" + + raise NotImplementedError() + + def clear(self) -> None: + del self.col[0 : len(self.col)] + + def __eq__(self, other: object) -> bool: + return list(self) == other + + def __ne__(self, other: object) -> bool: + return list(self) != other + + def __lt__(self, other: List[_T]) -> bool: + return list(self) < other + + def __le__(self, other: List[_T]) -> bool: + return list(self) <= other + + def __gt__(self, other: List[_T]) -> bool: + return list(self) > other + + def __ge__(self, other: List[_T]) -> bool: + return list(self) >= other + + def __add__(self, other: List[_T]) -> List[_T]: + try: + other = list(other) + except TypeError: + return NotImplemented + return list(self) + other + + def __radd__(self, other: List[_T]) -> List[_T]: + try: + other = list(other) + except TypeError: + return NotImplemented + return other + list(self) + + def __mul__(self, n: SupportsIndex) -> List[_T]: + if not isinstance(n, int): + return NotImplemented + return list(self) * n + + def __rmul__(self, n: SupportsIndex) -> List[_T]: + if not isinstance(n, int): + return NotImplemented + return n * list(self) + + def __iadd__(self, iterable: Iterable[_T]) -> Self: + self.extend(iterable) + return self + + def __imul__(self, n: SupportsIndex) -> Self: + # unlike a regular list *=, proxied __imul__ will generate unique + # backing objects for each copy. *= on proxied lists is a bit of + # a stretch anyhow, and this interpretation of the __imul__ contract + # is more plausibly useful than copying the backing objects. + if not isinstance(n, int): + raise NotImplementedError() + if n == 0: + self.clear() + elif n > 1: + self.extend(list(self) * (n - 1)) + return self + + if typing.TYPE_CHECKING: + # TODO: no idea how to do this without separate "stub" + def index( + self, value: Any, start: int = ..., stop: int = ... + ) -> int: ... 
+ + else: + + def index(self, value: Any, *arg) -> int: + ls = list(self) + return ls.index(value, *arg) + + def copy(self) -> List[_T]: + return list(self) + + def __repr__(self) -> str: + return repr(list(self)) + + def __hash__(self) -> NoReturn: + raise TypeError("%s objects are unhashable" % type(self).__name__) + + if not typing.TYPE_CHECKING: + for func_name, func in list(locals().items()): + if ( + callable(func) + and func.__name__ == func_name + and not func.__doc__ + and hasattr(list, func_name) + ): + func.__doc__ = getattr(list, func_name).__doc__ + del func_name, func + + +class _AssociationDict(_AssociationCollection[_VT], MutableMapping[_KT, _VT]): + """Generic, converting, dict-to-dict proxy.""" + + setter: _DictSetterProtocol[_VT] + creator: _KeyCreatorProtocol[_VT] + col: MutableMapping[_KT, Optional[_VT]] + + def _create(self, key: _KT, value: Optional[_VT]) -> Any: + return self.creator(key, value) + + def _get(self, object_: Any) -> _VT: + return self.getter(object_) + + def _set(self, object_: Any, key: _KT, value: _VT) -> None: + return self.setter(object_, key, value) + + def __getitem__(self, key: _KT) -> _VT: + return self._get(self.col[key]) + + def __setitem__(self, key: _KT, value: _VT) -> None: + if key in self.col: + self._set(self.col[key], key, value) + else: + self.col[key] = self._create(key, value) + + def __delitem__(self, key: _KT) -> None: + del self.col[key] + + def __contains__(self, key: object) -> bool: + return key in self.col + + def __iter__(self) -> Iterator[_KT]: + return iter(self.col.keys()) + + def clear(self) -> None: + self.col.clear() + + def __eq__(self, other: object) -> bool: + return dict(self) == other + + def __ne__(self, other: object) -> bool: + return dict(self) != other + + def __repr__(self) -> str: + return repr(dict(self)) + + @overload + def get(self, __key: _KT) -> Optional[_VT]: ... + + @overload + def get(self, __key: _KT, default: Union[_VT, _T]) -> Union[_VT, _T]: ... + + def get( + self, key: _KT, default: Optional[Union[_VT, _T]] = None + ) -> Union[_VT, _T, None]: + try: + return self[key] + except KeyError: + return default + + def setdefault(self, key: _KT, default: Optional[_VT] = None) -> _VT: + # TODO: again, no idea how to create an actual MutableMapping. + # default must allow None, return type can't include None, + # the stub explicitly allows for default of None with a cryptic message + # "This overload should be allowed only if the value type is + # compatible with None.". + if key not in self.col: + self.col[key] = self._create(key, default) + return default # type: ignore + else: + return self[key] + + def keys(self) -> KeysView[_KT]: + return self.col.keys() + + def items(self) -> ItemsView[_KT, _VT]: + return ItemsView(self) + + def values(self) -> ValuesView[_VT]: + return ValuesView(self) + + @overload + def pop(self, __key: _KT) -> _VT: ... + + @overload + def pop( + self, __key: _KT, default: Union[_VT, _T] = ... + ) -> Union[_VT, _T]: ... + + def pop(self, __key: _KT, *arg: Any, **kw: Any) -> Union[_VT, _T]: + member = self.col.pop(__key, *arg, **kw) + return self._get(member) + + def popitem(self) -> Tuple[_KT, _VT]: + item = self.col.popitem() + return (item[0], self._get(item[1])) + + @overload + def update( + self, __m: SupportsKeysAndGetItem[_KT, _VT], **kwargs: _VT + ) -> None: ... + + @overload + def update( + self, __m: Iterable[tuple[_KT, _VT]], **kwargs: _VT + ) -> None: ... + + @overload + def update(self, **kwargs: _VT) -> None: ... 
+ + def update(self, *a: Any, **kw: Any) -> None: + up: Dict[_KT, _VT] = {} + up.update(*a, **kw) + + for key, value in up.items(): + self[key] = value + + def _bulk_replace( + self, + assoc_proxy: AssociationProxyInstance[Any], + values: Mapping[_KT, _VT], + ) -> None: + existing = set(self) + constants = existing.intersection(values or ()) + additions = set(values or ()).difference(constants) + removals = existing.difference(constants) + + for key, member in values.items() or (): + if key in additions: + self[key] = member + elif key in constants: + self[key] = member + + for key in removals: + del self[key] + + def copy(self) -> Dict[_KT, _VT]: + return dict(self.items()) + + def __hash__(self) -> NoReturn: + raise TypeError("%s objects are unhashable" % type(self).__name__) + + if not typing.TYPE_CHECKING: + for func_name, func in list(locals().items()): + if ( + callable(func) + and func.__name__ == func_name + and not func.__doc__ + and hasattr(dict, func_name) + ): + func.__doc__ = getattr(dict, func_name).__doc__ + del func_name, func + + +class _AssociationSet(_AssociationSingleItem[_T], MutableSet[_T]): + """Generic, converting, set-to-set proxy.""" + + col: MutableSet[_T] + + def __len__(self) -> int: + return len(self.col) + + def __bool__(self) -> bool: + if self.col: + return True + else: + return False + + def __contains__(self, __o: object) -> bool: + for member in self.col: + if self._get(member) == __o: + return True + return False + + def __iter__(self) -> Iterator[_T]: + """Iterate over proxied values. + + For the actual domain objects, iterate over .col instead or just use + the underlying collection directly from its property on the parent. + + """ + for member in self.col: + yield self._get(member) + return + + def add(self, __element: _T) -> None: + if __element not in self: + self.col.add(self._create(__element)) + + # for discard and remove, choosing a more expensive check strategy rather + # than call self.creator() + def discard(self, __element: _T) -> None: + for member in self.col: + if self._get(member) == __element: + self.col.discard(member) + break + + def remove(self, __element: _T) -> None: + for member in self.col: + if self._get(member) == __element: + self.col.discard(member) + return + raise KeyError(__element) + + def pop(self) -> _T: + if not self.col: + raise KeyError("pop from an empty set") + member = self.col.pop() + return self._get(member) + + def update(self, *s: Iterable[_T]) -> None: + for iterable in s: + for value in iterable: + self.add(value) + + def _bulk_replace(self, assoc_proxy: Any, values: Iterable[_T]) -> None: + existing = set(self) + constants = existing.intersection(values or ()) + additions = set(values or ()).difference(constants) + removals = existing.difference(constants) + + appender = self.add + remover = self.remove + + for member in values or (): + if member in additions: + appender(member) + elif member in constants: + appender(member) + + for member in removals: + remover(member) + + def __ior__( # type: ignore + self, other: AbstractSet[_S] + ) -> MutableSet[Union[_T, _S]]: + if not collections._set_binops_check_strict(self, other): + raise NotImplementedError() + for value in other: + self.add(value) + return self + + def _set(self) -> Set[_T]: + return set(iter(self)) + + def union(self, *s: Iterable[_S]) -> MutableSet[Union[_T, _S]]: + return set(self).union(*s) + + def __or__(self, __s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: + return self.union(__s) + + def difference(self, *s: Iterable[Any]) -> 
MutableSet[_T]: + return set(self).difference(*s) + + def __sub__(self, s: AbstractSet[Any]) -> MutableSet[_T]: + return self.difference(s) + + def difference_update(self, *s: Iterable[Any]) -> None: + for other in s: + for value in other: + self.discard(value) + + def __isub__(self, s: AbstractSet[Any]) -> Self: + if not collections._set_binops_check_strict(self, s): + raise NotImplementedError() + for value in s: + self.discard(value) + return self + + def intersection(self, *s: Iterable[Any]) -> MutableSet[_T]: + return set(self).intersection(*s) + + def __and__(self, s: AbstractSet[Any]) -> MutableSet[_T]: + return self.intersection(s) + + def intersection_update(self, *s: Iterable[Any]) -> None: + for other in s: + want, have = self.intersection(other), set(self) + + remove, add = have - want, want - have + + for value in remove: + self.remove(value) + for value in add: + self.add(value) + + def __iand__(self, s: AbstractSet[Any]) -> Self: + if not collections._set_binops_check_strict(self, s): + raise NotImplementedError() + want = self.intersection(s) + have: Set[_T] = set(self) + + remove, add = have - want, want - have + + for value in remove: + self.remove(value) + for value in add: + self.add(value) + return self + + def symmetric_difference(self, __s: Iterable[_T]) -> MutableSet[_T]: + return set(self).symmetric_difference(__s) + + def __xor__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: + return self.symmetric_difference(s) + + def symmetric_difference_update(self, other: Iterable[Any]) -> None: + want, have = self.symmetric_difference(other), set(self) + + remove, add = have - want, want - have + + for value in remove: + self.remove(value) + for value in add: + self.add(value) + + def __ixor__(self, other: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: # type: ignore # noqa: E501 + if not collections._set_binops_check_strict(self, other): + raise NotImplementedError() + + self.symmetric_difference_update(other) + return self + + def issubset(self, __s: Iterable[Any]) -> bool: + return set(self).issubset(__s) + + def issuperset(self, __s: Iterable[Any]) -> bool: + return set(self).issuperset(__s) + + def clear(self) -> None: + self.col.clear() + + def copy(self) -> AbstractSet[_T]: + return set(self) + + def __eq__(self, other: object) -> bool: + return set(self) == other + + def __ne__(self, other: object) -> bool: + return set(self) != other + + def __lt__(self, other: AbstractSet[Any]) -> bool: + return set(self) < other + + def __le__(self, other: AbstractSet[Any]) -> bool: + return set(self) <= other + + def __gt__(self, other: AbstractSet[Any]) -> bool: + return set(self) > other + + def __ge__(self, other: AbstractSet[Any]) -> bool: + return set(self) >= other + + def __repr__(self) -> str: + return repr(set(self)) + + def __hash__(self) -> NoReturn: + raise TypeError("%s objects are unhashable" % type(self).__name__) + + if not typing.TYPE_CHECKING: + for func_name, func in list(locals().items()): + if ( + callable(func) + and func.__name__ == func_name + and not func.__doc__ + and hasattr(set, func_name) + ): + func.__doc__ = getattr(set, func_name).__doc__ + del func_name, func diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__init__.py new file mode 100644 index 00000000..78c707b2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__init__.py @@ -0,0 +1,25 @@ +# ext/asyncio/__init__.py +# Copyright (C) 2020-2024 the SQLAlchemy authors 
and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from .engine import async_engine_from_config as async_engine_from_config +from .engine import AsyncConnection as AsyncConnection +from .engine import AsyncEngine as AsyncEngine +from .engine import AsyncTransaction as AsyncTransaction +from .engine import create_async_engine as create_async_engine +from .engine import create_async_pool_from_url as create_async_pool_from_url +from .result import AsyncMappingResult as AsyncMappingResult +from .result import AsyncResult as AsyncResult +from .result import AsyncScalarResult as AsyncScalarResult +from .result import AsyncTupleResult as AsyncTupleResult +from .scoping import async_scoped_session as async_scoped_session +from .session import async_object_session as async_object_session +from .session import async_session as async_session +from .session import async_sessionmaker as async_sessionmaker +from .session import AsyncAttrs as AsyncAttrs +from .session import AsyncSession as AsyncSession +from .session import AsyncSessionTransaction as AsyncSessionTransaction +from .session import close_all_sessions as close_all_sessions diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..17ab485b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/base.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/base.cpython-312.pyc new file mode 100644 index 00000000..ec1d7712 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/base.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/engine.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/engine.cpython-312.pyc new file mode 100644 index 00000000..f78e7166 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/engine.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/exc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/exc.cpython-312.pyc new file mode 100644 index 00000000..7e3bb9d3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/exc.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/result.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/result.cpython-312.pyc new file mode 100644 index 00000000..929b035f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/result.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-312.pyc new file mode 100644 index 00000000..eb8be3b4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/session.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/session.cpython-312.pyc new file mode 100644 index 00000000..5edbe0a4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/__pycache__/session.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/base.py new file mode 100644 index 00000000..9899364d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/base.py @@ -0,0 +1,279 @@ +# ext/asyncio/base.py +# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import abc +import functools +from typing import Any +from typing import AsyncGenerator +from typing import AsyncIterator +from typing import Awaitable +from typing import Callable +from typing import ClassVar +from typing import Dict +from typing import Generator +from typing import Generic +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Tuple +from typing import TypeVar +import weakref + +from . import exc as async_exc +from ... import util +from ...util.typing import Literal +from ...util.typing import Self + +_T = TypeVar("_T", bound=Any) +_T_co = TypeVar("_T_co", bound=Any, covariant=True) + + +_PT = TypeVar("_PT", bound=Any) + + +class ReversibleProxy(Generic[_PT]): + _proxy_objects: ClassVar[ + Dict[weakref.ref[Any], weakref.ref[ReversibleProxy[Any]]] + ] = {} + __slots__ = ("__weakref__",) + + @overload + def _assign_proxied(self, target: _PT) -> _PT: ... + + @overload + def _assign_proxied(self, target: None) -> None: ... + + def _assign_proxied(self, target: Optional[_PT]) -> Optional[_PT]: + if target is not None: + target_ref: weakref.ref[_PT] = weakref.ref( + target, ReversibleProxy._target_gced + ) + proxy_ref = weakref.ref( + self, + functools.partial(ReversibleProxy._target_gced, target_ref), + ) + ReversibleProxy._proxy_objects[target_ref] = proxy_ref + + return target + + @classmethod + def _target_gced( + cls, + ref: weakref.ref[_PT], + proxy_ref: Optional[weakref.ref[Self]] = None, # noqa: U100 + ) -> None: + cls._proxy_objects.pop(ref, None) + + @classmethod + def _regenerate_proxy_for_target(cls, target: _PT) -> Self: + raise NotImplementedError() + + @overload + @classmethod + def _retrieve_proxy_for_target( + cls, + target: _PT, + regenerate: Literal[True] = ..., + ) -> Self: ... + + @overload + @classmethod + def _retrieve_proxy_for_target( + cls, target: _PT, regenerate: bool = True + ) -> Optional[Self]: ... 
+
+    @classmethod
+    def _retrieve_proxy_for_target(
+        cls, target: _PT, regenerate: bool = True
+    ) -> Optional[Self]:
+        try:
+            proxy_ref = cls._proxy_objects[weakref.ref(target)]
+        except KeyError:
+            pass
+        else:
+            proxy = proxy_ref()
+            if proxy is not None:
+                return proxy  # type: ignore
+
+        if regenerate:
+            return cls._regenerate_proxy_for_target(target)
+        else:
+            return None
+
+
+class StartableContext(Awaitable[_T_co], abc.ABC):
+    __slots__ = ()
+
+    @abc.abstractmethod
+    async def start(self, is_ctxmanager: bool = False) -> _T_co:
+        raise NotImplementedError()
+
+    def __await__(self) -> Generator[Any, Any, _T_co]:
+        return self.start().__await__()
+
+    async def __aenter__(self) -> _T_co:
+        return await self.start(is_ctxmanager=True)
+
+    @abc.abstractmethod
+    async def __aexit__(
+        self, type_: Any, value: Any, traceback: Any
+    ) -> Optional[bool]:
+        pass
+
+    def _raise_for_not_started(self) -> NoReturn:
+        raise async_exc.AsyncContextNotStarted(
+            "%s context has not been started and object has not been awaited."
+            % (self.__class__.__name__)
+        )
+
+
+class GeneratorStartableContext(StartableContext[_T_co]):
+    __slots__ = ("gen",)
+
+    gen: AsyncGenerator[_T_co, Any]
+
+    def __init__(
+        self,
+        func: Callable[..., AsyncIterator[_T_co]],
+        args: Tuple[Any, ...],
+        kwds: Dict[str, Any],
+    ):
+        self.gen = func(*args, **kwds)  # type: ignore
+
+    async def start(self, is_ctxmanager: bool = False) -> _T_co:
+        try:
+            start_value = await util.anext_(self.gen)
+        except StopAsyncIteration:
+            raise RuntimeError("generator didn't yield") from None
+
+        # if not a context manager, then interrupt the generator, don't
+        # let it complete.   this step is technically not needed, as the
+        # generator will close in any case at gc time.  not clear if having
+        # this here is a good idea or not (though it helps for clarity IMO)
+        if not is_ctxmanager:
+            await self.gen.aclose()
+
+        return start_value
+
+    async def __aexit__(
+        self, typ: Any, value: Any, traceback: Any
+    ) -> Optional[bool]:
+        # vendored from contextlib.py
+        if typ is None:
+            try:
+                await util.anext_(self.gen)
+            except StopAsyncIteration:
+                return False
+            else:
+                raise RuntimeError("generator didn't stop")
+        else:
+            if value is None:
+                # Need to force instantiation so we can reliably
+                # tell if we get the same exception back
+                value = typ()
+            try:
+                await self.gen.athrow(value)
+            except StopAsyncIteration as exc:
+                # Suppress StopIteration *unless* it's the same exception that
+                # was passed to throw().  This prevents a StopIteration
+                # raised inside the "with" statement from being suppressed.
+                return exc is not value
+            except RuntimeError as exc:
+                # Don't re-raise the passed in exception. (issue27122)
+                if exc is value:
+                    return False
+                # Avoid suppressing if a Stop(Async)Iteration exception
+                # was passed to athrow() and later wrapped into a RuntimeError
+                # (see PEP 479 for sync generators; async generators also
+                # have this behavior).  But do this only if the exception
+                # wrapped by the RuntimeError is actually Stop(Async)Iteration
+                # (see issue29692).
+                if (
+                    isinstance(value, (StopIteration, StopAsyncIteration))
+                    and exc.__cause__ is value
+                ):
+                    return False
+                raise
+            except BaseException as exc:
+                # only re-raise if it's *not* the exception that was
+                # passed to throw(), because __exit__() must not raise
+                # an exception unless __exit__() itself failed.  But throw()
+                # has to raise the exception to signal propagation, so this
+                # fixes the impedance mismatch between the throw() protocol
+                # and the __exit__() protocol.
+                if exc is not value:
+                    raise
+                return False
+            raise RuntimeError("generator didn't stop after athrow()")
+
+
+def asyncstartablecontext(
+    func: Callable[..., AsyncIterator[_T_co]]
+) -> Callable[..., GeneratorStartableContext[_T_co]]:
+    """@asyncstartablecontext decorator.
+
+    The decorated function can be called either as ``async with fn()``, **or**
+    ``await fn()``.  This is decidedly different from what
+    ``@contextlib.asynccontextmanager`` supports, and the usage pattern
+    is different as well.
+
+    Typical usage::
+
+        @asyncstartablecontext
+        async def some_async_generator():
+            <setup>
+            try:
+                yield <value>
+            except GeneratorExit:
+                # return value was awaited, no context manager is present
+                # and caller will .close() the resource explicitly
+                pass
+            else:
+                <context manager cleanup>
+
+    Above, ``GeneratorExit`` is caught if the function was used as an
+    ``await``.  In this case, it's essential that the cleanup does **not**
+    occur, so there should not be a ``finally`` block.
+
+    If ``GeneratorExit`` is not invoked, this means we're in ``__aexit__``
+    and we were invoked as a context manager, and cleanup should proceed.
+
+    """
+
+    @functools.wraps(func)
+    def helper(*args: Any, **kwds: Any) -> GeneratorStartableContext[_T_co]:
+        return GeneratorStartableContext(func, args, kwds)
+
+    return helper
+
+
+class ProxyComparable(ReversibleProxy[_PT]):
+    __slots__ = ()
+
+    @util.ro_non_memoized_property
+    def _proxied(self) -> _PT:
+        raise NotImplementedError()
+
+    def __hash__(self) -> int:
+        return id(self)
+
+    def __eq__(self, other: Any) -> bool:
+        return (
+            isinstance(other, self.__class__)
+            and self._proxied == other._proxied
+        )
+
+    def __ne__(self, other: Any) -> bool:
+        return (
+            not isinstance(other, self.__class__)
+            or self._proxied != other._proxied
+        )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/engine.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/engine.py
new file mode 100644
index 00000000..8fc8e96d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/engine.py
@@ -0,0 +1,1466 @@
+# ext/asyncio/engine.py
+# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+from __future__ import annotations
+
+import asyncio
+import contextlib
+from typing import Any
+from typing import AsyncIterator
+from typing import Callable
+from typing import Dict
+from typing import Generator
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import exc as async_exc
+from .base import asyncstartablecontext
+from .base import GeneratorStartableContext
+from .base import ProxyComparable
+from .base import StartableContext
+from .result import _ensure_sync_result
+from .result import AsyncResult
+from .result import AsyncScalarResult
+from ... import exc
+from ... import inspection
+from ... 
import util +from ...engine import Connection +from ...engine import create_engine as _create_engine +from ...engine import create_pool_from_url as _create_pool_from_url +from ...engine import Engine +from ...engine.base import NestedTransaction +from ...engine.base import Transaction +from ...exc import ArgumentError +from ...util.concurrency import greenlet_spawn +from ...util.typing import Concatenate +from ...util.typing import ParamSpec + +if TYPE_CHECKING: + from ...engine.cursor import CursorResult + from ...engine.interfaces import _CoreAnyExecuteParams + from ...engine.interfaces import _CoreSingleExecuteParams + from ...engine.interfaces import _DBAPIAnyExecuteParams + from ...engine.interfaces import _ExecuteOptions + from ...engine.interfaces import CompiledCacheType + from ...engine.interfaces import CoreExecuteOptionsParameter + from ...engine.interfaces import Dialect + from ...engine.interfaces import IsolationLevel + from ...engine.interfaces import SchemaTranslateMapType + from ...engine.result import ScalarResult + from ...engine.url import URL + from ...pool import Pool + from ...pool import PoolProxiedConnection + from ...sql._typing import _InfoType + from ...sql.base import Executable + from ...sql.selectable import TypedReturnsRows + +_P = ParamSpec("_P") +_T = TypeVar("_T", bound=Any) + + +def create_async_engine(url: Union[str, URL], **kw: Any) -> AsyncEngine: + """Create a new async engine instance. + + Arguments passed to :func:`_asyncio.create_async_engine` are mostly + identical to those passed to the :func:`_sa.create_engine` function. + The specified dialect must be an asyncio-compatible dialect + such as :ref:`dialect-postgresql-asyncpg`. + + .. versionadded:: 1.4 + + :param async_creator: an async callable which returns a driver-level + asyncio connection. If given, the function should take no arguments, + and return a new asyncio connection from the underlying asyncio + database driver; the connection will be wrapped in the appropriate + structures to be used with the :class:`.AsyncEngine`. Note that the + parameters specified in the URL are not applied here, and the creator + function should use its own connection parameters. + + This parameter is the asyncio equivalent of the + :paramref:`_sa.create_engine.creator` parameter of the + :func:`_sa.create_engine` function. + + .. versionadded:: 2.0.16 + + """ + + if kw.get("server_side_cursors", False): + raise async_exc.AsyncMethodRequired( + "Can't set server_side_cursors for async engine globally; " + "use the connection.stream() method for an async " + "streaming result set" + ) + kw["_is_async"] = True + async_creator = kw.pop("async_creator", None) + if async_creator: + if kw.get("creator", None): + raise ArgumentError( + "Can only specify one of 'async_creator' or 'creator', " + "not both." + ) + + def creator() -> Any: + # note that to send adapted arguments like + # prepared_statement_cache_size, user would use + # "creator" and emulate this form here + return sync_engine.dialect.dbapi.connect( # type: ignore + async_creator_fn=async_creator + ) + + kw["creator"] = creator + sync_engine = _create_engine(url, **kw) + return AsyncEngine(sync_engine) + + +def async_engine_from_config( + configuration: Dict[str, Any], prefix: str = "sqlalchemy.", **kwargs: Any +) -> AsyncEngine: + """Create a new AsyncEngine instance using a configuration dictionary. 
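+
+    For example (an illustrative sketch; the URL and ``echo`` values shown
+    are assumptions for the example only)::
+
+        config = {
+            "sqlalchemy.url": "postgresql+asyncpg://user:pass@host/dbname",
+            "sqlalchemy.echo": "true",
+        }
+        engine = async_engine_from_config(config)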
+ + This function is analogous to the :func:`_sa.engine_from_config` function + in SQLAlchemy Core, except that the requested dialect must be an + asyncio-compatible dialect such as :ref:`dialect-postgresql-asyncpg`. + The argument signature of the function is identical to that + of :func:`_sa.engine_from_config`. + + .. versionadded:: 1.4.29 + + """ + options = { + key[len(prefix) :]: value + for key, value in configuration.items() + if key.startswith(prefix) + } + options["_coerce_config"] = True + options.update(kwargs) + url = options.pop("url") + return create_async_engine(url, **options) + + +def create_async_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool: + """Create a new async engine instance. + + Arguments passed to :func:`_asyncio.create_async_pool_from_url` are mostly + identical to those passed to the :func:`_sa.create_pool_from_url` function. + The specified dialect must be an asyncio-compatible dialect + such as :ref:`dialect-postgresql-asyncpg`. + + .. versionadded:: 2.0.10 + + """ + kwargs["_is_async"] = True + return _create_pool_from_url(url, **kwargs) + + +class AsyncConnectable: + __slots__ = "_slots_dispatch", "__weakref__" + + @classmethod + def _no_async_engine_events(cls) -> NoReturn: + raise NotImplementedError( + "asynchronous events are not implemented at this time. Apply " + "synchronous listeners to the AsyncEngine.sync_engine or " + "AsyncConnection.sync_connection attributes." + ) + + +@util.create_proxy_methods( + Connection, + ":class:`_engine.Connection`", + ":class:`_asyncio.AsyncConnection`", + classmethods=[], + methods=[], + attributes=[ + "closed", + "invalidated", + "dialect", + "default_isolation_level", + ], +) +class AsyncConnection( + ProxyComparable[Connection], + StartableContext["AsyncConnection"], + AsyncConnectable, +): + """An asyncio proxy for a :class:`_engine.Connection`. + + :class:`_asyncio.AsyncConnection` is acquired using the + :meth:`_asyncio.AsyncEngine.connect` + method of :class:`_asyncio.AsyncEngine`:: + + from sqlalchemy.ext.asyncio import create_async_engine + engine = create_async_engine("postgresql+asyncpg://user:pass@host/dbname") + + async with engine.connect() as conn: + result = await conn.execute(select(table)) + + .. versionadded:: 1.4 + + """ # noqa + + # AsyncConnection is a thin proxy; no state should be added here + # that is not retrievable from the "sync" engine / connection, e.g. + # current transaction, info, etc. It should be possible to + # create a new AsyncConnection that matches this one given only the + # "sync" elements. + __slots__ = ( + "engine", + "sync_engine", + "sync_connection", + ) + + def __init__( + self, + async_engine: AsyncEngine, + sync_connection: Optional[Connection] = None, + ): + self.engine = async_engine + self.sync_engine = async_engine.sync_engine + self.sync_connection = self._assign_proxied(sync_connection) + + sync_connection: Optional[Connection] + """Reference to the sync-style :class:`_engine.Connection` this + :class:`_asyncio.AsyncConnection` proxies requests towards. + + This instance can be used as an event target. + + .. seealso:: + + :ref:`asyncio_events` + + """ + + sync_engine: Engine + """Reference to the sync-style :class:`_engine.Engine` this + :class:`_asyncio.AsyncConnection` is associated with via its underlying + :class:`_engine.Connection`. + + This instance can be used as an event target. + + .. 
seealso:: + + :ref:`asyncio_events` + + """ + + @classmethod + def _regenerate_proxy_for_target( + cls, target: Connection + ) -> AsyncConnection: + return AsyncConnection( + AsyncEngine._retrieve_proxy_for_target(target.engine), target + ) + + async def start( + self, is_ctxmanager: bool = False # noqa: U100 + ) -> AsyncConnection: + """Start this :class:`_asyncio.AsyncConnection` object's context + outside of using a Python ``with:`` block. + + """ + if self.sync_connection: + raise exc.InvalidRequestError("connection is already started") + self.sync_connection = self._assign_proxied( + await greenlet_spawn(self.sync_engine.connect) + ) + return self + + @property + def connection(self) -> NoReturn: + """Not implemented for async; call + :meth:`_asyncio.AsyncConnection.get_raw_connection`. + """ + raise exc.InvalidRequestError( + "AsyncConnection.connection accessor is not implemented as the " + "attribute may need to reconnect on an invalidated connection. " + "Use the get_raw_connection() method." + ) + + async def get_raw_connection(self) -> PoolProxiedConnection: + """Return the pooled DBAPI-level connection in use by this + :class:`_asyncio.AsyncConnection`. + + This is a SQLAlchemy connection-pool proxied connection + which then has the attribute + :attr:`_pool._ConnectionFairy.driver_connection` that refers to the + actual driver connection. Its + :attr:`_pool._ConnectionFairy.dbapi_connection` refers instead + to an :class:`_engine.AdaptedConnection` instance that + adapts the driver connection to the DBAPI protocol. + + """ + + return await greenlet_spawn(getattr, self._proxied, "connection") + + @util.ro_non_memoized_property + def info(self) -> _InfoType: + """Return the :attr:`_engine.Connection.info` dictionary of the + underlying :class:`_engine.Connection`. + + This dictionary is freely writable for user-defined state to be + associated with the database connection. + + This attribute is only available if the :class:`.AsyncConnection` is + currently connected. If the :attr:`.AsyncConnection.closed` attribute + is ``True``, then accessing this attribute will raise + :class:`.ResourceClosedError`. + + .. versionadded:: 1.4.0b2 + + """ + return self._proxied.info + + @util.ro_non_memoized_property + def _proxied(self) -> Connection: + if not self.sync_connection: + self._raise_for_not_started() + return self.sync_connection + + def begin(self) -> AsyncTransaction: + """Begin a transaction prior to autobegin occurring.""" + assert self._proxied + return AsyncTransaction(self) + + def begin_nested(self) -> AsyncTransaction: + """Begin a nested transaction and return a transaction handle.""" + assert self._proxied + return AsyncTransaction(self, nested=True) + + async def invalidate( + self, exception: Optional[BaseException] = None + ) -> None: + """Invalidate the underlying DBAPI connection associated with + this :class:`_engine.Connection`. + + See the method :meth:`_engine.Connection.invalidate` for full + detail on this method. + + """ + + return await greenlet_spawn( + self._proxied.invalidate, exception=exception + ) + + async def get_isolation_level(self) -> IsolationLevel: + return await greenlet_spawn(self._proxied.get_isolation_level) + + def in_transaction(self) -> bool: + """Return True if a transaction is in progress.""" + + return self._proxied.in_transaction() + + def in_nested_transaction(self) -> bool: + """Return True if a transaction is in progress. + + .. 
versionadded:: 1.4.0b2 + + """ + return self._proxied.in_nested_transaction() + + def get_transaction(self) -> Optional[AsyncTransaction]: + """Return an :class:`.AsyncTransaction` representing the current + transaction, if any. + + This makes use of the underlying synchronous connection's + :meth:`_engine.Connection.get_transaction` method to get the current + :class:`_engine.Transaction`, which is then proxied in a new + :class:`.AsyncTransaction` object. + + .. versionadded:: 1.4.0b2 + + """ + + trans = self._proxied.get_transaction() + if trans is not None: + return AsyncTransaction._retrieve_proxy_for_target(trans) + else: + return None + + def get_nested_transaction(self) -> Optional[AsyncTransaction]: + """Return an :class:`.AsyncTransaction` representing the current + nested (savepoint) transaction, if any. + + This makes use of the underlying synchronous connection's + :meth:`_engine.Connection.get_nested_transaction` method to get the + current :class:`_engine.Transaction`, which is then proxied in a new + :class:`.AsyncTransaction` object. + + .. versionadded:: 1.4.0b2 + + """ + + trans = self._proxied.get_nested_transaction() + if trans is not None: + return AsyncTransaction._retrieve_proxy_for_target(trans) + else: + return None + + @overload + async def execution_options( + self, + *, + compiled_cache: Optional[CompiledCacheType] = ..., + logging_token: str = ..., + isolation_level: IsolationLevel = ..., + no_parameters: bool = False, + stream_results: bool = False, + max_row_buffer: int = ..., + yield_per: int = ..., + insertmanyvalues_page_size: int = ..., + schema_translate_map: Optional[SchemaTranslateMapType] = ..., + preserve_rowcount: bool = False, + **opt: Any, + ) -> AsyncConnection: ... + + @overload + async def execution_options(self, **opt: Any) -> AsyncConnection: ... + + async def execution_options(self, **opt: Any) -> AsyncConnection: + r"""Set non-SQL options for the connection which take effect + during execution. + + This returns this :class:`_asyncio.AsyncConnection` object with + the new options added. + + See :meth:`_engine.Connection.execution_options` for full details + on this method. + + """ + + conn = self._proxied + c2 = await greenlet_spawn(conn.execution_options, **opt) + assert c2 is conn + return self + + async def commit(self) -> None: + """Commit the transaction that is currently in progress. + + This method commits the current transaction if one has been started. + If no transaction was started, the method has no effect, assuming + the connection is in a non-invalidated state. + + A transaction is begun on a :class:`_engine.Connection` automatically + whenever a statement is first executed, or when the + :meth:`_engine.Connection.begin` method is called. + + """ + await greenlet_spawn(self._proxied.commit) + + async def rollback(self) -> None: + """Roll back the transaction that is currently in progress. + + This method rolls back the current transaction if one has been started. + If no transaction was started, the method has no effect. If a + transaction was started and the connection is in an invalidated state, + the transaction is cleared using this method. + + A transaction is begun on a :class:`_engine.Connection` automatically + whenever a statement is first executed, or when the + :meth:`_engine.Connection.begin` method is called. + + + """ + await greenlet_spawn(self._proxied.rollback) + + async def close(self) -> None: + """Close this :class:`_asyncio.AsyncConnection`. 
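+
+        E.g. (an illustrative sketch of non-context-manager use, where an
+        explicit ``close()`` is required; ``text`` here is
+        ``sqlalchemy.text``)::
+
+            conn = await engine.connect()
+            try:
+                await conn.execute(text("select 1"))
+            finally:
+                await conn.close()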
+
+        This has the effect of also rolling back the transaction if one
+        is in place.
+
+        """
+        await greenlet_spawn(self._proxied.close)
+
+    async def aclose(self) -> None:
+        """A synonym for :meth:`_asyncio.AsyncConnection.close`.
+
+        The :meth:`_asyncio.AsyncConnection.aclose` name is specifically
+        to support the Python standard library ``@contextlib.aclosing``
+        context manager function.
+
+        .. versionadded:: 2.0.20
+
+        """
+        await self.close()
+
+    async def exec_driver_sql(
+        self,
+        statement: str,
+        parameters: Optional[_DBAPIAnyExecuteParams] = None,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> CursorResult[Any]:
+        r"""Execute a driver-level SQL string and return a buffered
+        :class:`_engine.Result`.
+
+        """
+
+        result = await greenlet_spawn(
+            self._proxied.exec_driver_sql,
+            statement,
+            parameters,
+            execution_options,
+            _require_await=True,
+        )
+
+        return await _ensure_sync_result(result, self.exec_driver_sql)
+
+    @overload
+    def stream(
+        self,
+        statement: TypedReturnsRows[_T],
+        parameters: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> GeneratorStartableContext[AsyncResult[_T]]: ...
+
+    @overload
+    def stream(
+        self,
+        statement: Executable,
+        parameters: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> GeneratorStartableContext[AsyncResult[Any]]: ...
+
+    @asyncstartablecontext
+    async def stream(
+        self,
+        statement: Executable,
+        parameters: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> AsyncIterator[AsyncResult[Any]]:
+        """Execute a statement and return an awaitable yielding a
+        :class:`_asyncio.AsyncResult` object.
+
+        E.g.::
+
+            result = await conn.stream(stmt)
+            async for row in result:
+                print(f"{row}")
+
+        The :meth:`.AsyncConnection.stream`
+        method supports optional context manager use against the
+        :class:`.AsyncResult` object, as in::
+
+            async with conn.stream(stmt) as result:
+                async for row in result:
+                    print(f"{row}")
+
+        In the above pattern, the :meth:`.AsyncResult.close` method is
+        invoked unconditionally, even if the iterator is interrupted by an
+        exception throw.  Context manager use remains optional, however,
+        and the function may be called in either an ``async with fn():`` or
+        ``await fn()`` style.
+
+        .. versionadded:: 2.0.0b3 added context manager support
+
+        :return: an awaitable object that will yield an
+         :class:`_asyncio.AsyncResult` object.
+
+        .. seealso::
+
+            :meth:`.AsyncConnection.stream_scalars`
+
+        """
+        if not self.dialect.supports_server_side_cursors:
+            raise exc.InvalidRequestError(
+                "Can't use `stream` or `stream_scalars` with the current "
+                "dialect since it does not support server-side cursors."
+            )
+
+        result = await greenlet_spawn(
+            self._proxied.execute,
+            statement,
+            parameters,
+            execution_options=util.EMPTY_DICT.merge_with(
+                execution_options, {"stream_results": True}
+            ),
+            _require_await=True,
+        )
+        assert result.context._is_server_side
+        ar = AsyncResult(result)
+        try:
+            yield ar
+        except GeneratorExit:
+            pass
+        else:
+            task = asyncio.create_task(ar.close())
+            await asyncio.shield(task)
+
+    @overload
+    async def execute(
+        self,
+        statement: TypedReturnsRows[_T],
+        parameters: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> CursorResult[_T]: ...
+ + @overload + async def execute( + self, + statement: Executable, + parameters: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> CursorResult[Any]: ... + + async def execute( + self, + statement: Executable, + parameters: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> CursorResult[Any]: + r"""Executes a SQL statement construct and return a buffered + :class:`_engine.Result`. + + :param object: The statement to be executed. This is always + an object that is in both the :class:`_expression.ClauseElement` and + :class:`_expression.Executable` hierarchies, including: + + * :class:`_expression.Select` + * :class:`_expression.Insert`, :class:`_expression.Update`, + :class:`_expression.Delete` + * :class:`_expression.TextClause` and + :class:`_expression.TextualSelect` + * :class:`_schema.DDL` and objects which inherit from + :class:`_schema.ExecutableDDLElement` + + :param parameters: parameters which will be bound into the statement. + This may be either a dictionary of parameter names to values, + or a mutable sequence (e.g. a list) of dictionaries. When a + list of dictionaries is passed, the underlying statement execution + will make use of the DBAPI ``cursor.executemany()`` method. + When a single dictionary is passed, the DBAPI ``cursor.execute()`` + method will be used. + + :param execution_options: optional dictionary of execution options, + which will be associated with the statement execution. This + dictionary can provide a subset of the options that are accepted + by :meth:`_engine.Connection.execution_options`. + + :return: a :class:`_engine.Result` object. + + """ + result = await greenlet_spawn( + self._proxied.execute, + statement, + parameters, + execution_options=execution_options, + _require_await=True, + ) + return await _ensure_sync_result(result, self.execute) + + @overload + async def scalar( + self, + statement: TypedReturnsRows[Tuple[_T]], + parameters: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> Optional[_T]: ... + + @overload + async def scalar( + self, + statement: Executable, + parameters: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> Any: ... + + async def scalar( + self, + statement: Executable, + parameters: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> Any: + r"""Executes a SQL statement construct and returns a scalar object. + + This method is shorthand for invoking the + :meth:`_engine.Result.scalar` method after invoking the + :meth:`_engine.Connection.execute` method. Parameters are equivalent. + + :return: a scalar Python value representing the first column of the + first row returned. + + """ + result = await self.execute( + statement, parameters, execution_options=execution_options + ) + return result.scalar() + + @overload + async def scalars( + self, + statement: TypedReturnsRows[Tuple[_T]], + parameters: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> ScalarResult[_T]: ... + + @overload + async def scalars( + self, + statement: Executable, + parameters: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> ScalarResult[Any]: ... 
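+
+    # Editor's note (illustrative): per the implementation below,
+    # ``scalars()`` is shorthand for calling ``.scalars()`` on the result
+    # of ``execute()``, e.g.
+    #
+    #     ids = (await conn.scalars(select(table.c.id))).all()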
+ + async def scalars( + self, + statement: Executable, + parameters: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> ScalarResult[Any]: + r"""Executes a SQL statement construct and returns a scalar objects. + + This method is shorthand for invoking the + :meth:`_engine.Result.scalars` method after invoking the + :meth:`_engine.Connection.execute` method. Parameters are equivalent. + + :return: a :class:`_engine.ScalarResult` object. + + .. versionadded:: 1.4.24 + + """ + result = await self.execute( + statement, parameters, execution_options=execution_options + ) + return result.scalars() + + @overload + def stream_scalars( + self, + statement: TypedReturnsRows[Tuple[_T]], + parameters: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> GeneratorStartableContext[AsyncScalarResult[_T]]: ... + + @overload + def stream_scalars( + self, + statement: Executable, + parameters: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> GeneratorStartableContext[AsyncScalarResult[Any]]: ... + + @asyncstartablecontext + async def stream_scalars( + self, + statement: Executable, + parameters: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> AsyncIterator[AsyncScalarResult[Any]]: + r"""Execute a statement and return an awaitable yielding a + :class:`_asyncio.AsyncScalarResult` object. + + E.g.:: + + result = await conn.stream_scalars(stmt) + async for scalar in result: + print(f"{scalar}") + + This method is shorthand for invoking the + :meth:`_engine.AsyncResult.scalars` method after invoking the + :meth:`_engine.Connection.stream` method. Parameters are equivalent. + + The :meth:`.AsyncConnection.stream_scalars` + method supports optional context manager use against the + :class:`.AsyncScalarResult` object, as in:: + + async with conn.stream_scalars(stmt) as result: + async for scalar in result: + print(f"{scalar}") + + In the above pattern, the :meth:`.AsyncScalarResult.close` method is + invoked unconditionally, even if the iterator is interrupted by an + exception throw. Context manager use remains optional, however, + and the function may be called in either an ``async with fn():`` or + ``await fn()`` style. + + .. versionadded:: 2.0.0b3 added context manager support + + :return: an awaitable object that will yield an + :class:`_asyncio.AsyncScalarResult` object. + + .. versionadded:: 1.4.24 + + .. seealso:: + + :meth:`.AsyncConnection.stream` + + """ + + async with self.stream( + statement, parameters, execution_options=execution_options + ) as result: + yield result.scalars() + + async def run_sync( + self, + fn: Callable[Concatenate[Connection, _P], _T], + *arg: _P.args, + **kw: _P.kwargs, + ) -> _T: + """Invoke the given synchronous (i.e. not async) callable, + passing a synchronous-style :class:`_engine.Connection` as the first + argument. + + This method allows traditional synchronous SQLAlchemy functions to + run within the context of an asyncio application. 
+
+        E.g.::
+
+            def do_something_with_core(conn: Connection, arg1: int, arg2: str) -> str:
+                '''A synchronous function that does not require awaiting
+
+                :param conn: a Core SQLAlchemy Connection, used synchronously
+
+                :return: an optional return value is supported
+
+                '''
+                conn.execute(
+                    some_table.insert().values(int_col=arg1, str_col=arg2)
+                )
+                return "success"
+
+
+            async def do_something_async(async_engine: AsyncEngine) -> None:
+                '''An async function that uses awaiting'''
+
+                async with async_engine.begin() as async_conn:
+                    # run do_something_with_core() with a sync-style
+                    # Connection, proxied into an awaitable
+                    return_code = await async_conn.run_sync(do_something_with_core, 5, "strval")
+                    print(return_code)
+
+        This method maintains the asyncio event loop all the way through
+        to the database connection by running the given callable in a
+        specially instrumented greenlet.
+
+        The most rudimentary use of :meth:`.AsyncConnection.run_sync` is to
+        invoke methods such as :meth:`_schema.MetaData.create_all`, given
+        an :class:`.AsyncConnection` that needs to be provided to
+        :meth:`_schema.MetaData.create_all` as a :class:`_engine.Connection`
+        object::
+
+            # run metadata.create_all(conn) with a sync-style Connection,
+            # proxied into an awaitable
+            async with async_engine.begin() as conn:
+                await conn.run_sync(metadata.create_all)
+
+        .. note::
+
+            The provided callable is invoked inline within the asyncio event
+            loop, and will block on traditional IO calls.  IO within this
+            callable should only call into SQLAlchemy's asyncio database
+            APIs which will be properly adapted to the greenlet context.
+
+        .. seealso::
+
+            :meth:`.AsyncSession.run_sync`
+
+            :ref:`session_run_sync`
+
+        """  # noqa: E501
+
+        return await greenlet_spawn(
+            fn, self._proxied, *arg, _require_await=False, **kw
+        )
+
+    def __await__(self) -> Generator[Any, None, AsyncConnection]:
+        return self.start().__await__()
+
+    async def __aexit__(self, type_: Any, value: Any, traceback: Any) -> None:
+        task = asyncio.create_task(self.close())
+        await asyncio.shield(task)
+
+    # START PROXY METHODS AsyncConnection
+
+    # code within this block is **programmatically,
+    # statically generated** by tools/generate_proxy_methods.py
+
+    @property
+    def closed(self) -> Any:
+        r"""Return True if this connection is closed.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_engine.Connection` class
+            on behalf of the :class:`_asyncio.AsyncConnection` class.
+
+        """  # noqa: E501
+
+        return self._proxied.closed
+
+    @property
+    def invalidated(self) -> Any:
+        r"""Return True if this connection was invalidated.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_engine.Connection` class
+            on behalf of the :class:`_asyncio.AsyncConnection` class.
+
+        This does not indicate whether or not the connection was
+        invalidated at the pool level, however.
+
+        """  # noqa: E501
+
+        return self._proxied.invalidated
+
+    @property
+    def dialect(self) -> Dialect:
+        r"""Proxy for the :attr:`_engine.Connection.dialect` attribute
+        on behalf of the :class:`_asyncio.AsyncConnection` class.
+
+        """  # noqa: E501
+
+        return self._proxied.dialect
+
+    @dialect.setter
+    def dialect(self, attr: Dialect) -> None:
+        self._proxied.dialect = attr
+
+    @property
+    def default_isolation_level(self) -> Any:
+        r"""The initial-connection time isolation level associated with the
+        :class:`_engine.Dialect` in use.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_engine.Connection` class
+            on behalf of the :class:`_asyncio.AsyncConnection` class. 
+ + This value is independent of the + :paramref:`.Connection.execution_options.isolation_level` and + :paramref:`.Engine.execution_options.isolation_level` execution + options, and is determined by the :class:`_engine.Dialect` when the + first connection is created, by performing a SQL query against the + database for the current isolation level before any additional commands + have been emitted. + + Calling this accessor does not invoke any new SQL queries. + + .. seealso:: + + :meth:`_engine.Connection.get_isolation_level` + - view current actual isolation level + + :paramref:`_sa.create_engine.isolation_level` + - set per :class:`_engine.Engine` isolation level + + :paramref:`.Connection.execution_options.isolation_level` + - set per :class:`_engine.Connection` isolation level + + + """ # noqa: E501 + + return self._proxied.default_isolation_level + + # END PROXY METHODS AsyncConnection + + +@util.create_proxy_methods( + Engine, + ":class:`_engine.Engine`", + ":class:`_asyncio.AsyncEngine`", + classmethods=[], + methods=[ + "clear_compiled_cache", + "update_execution_options", + "get_execution_options", + ], + attributes=["url", "pool", "dialect", "engine", "name", "driver", "echo"], +) +class AsyncEngine(ProxyComparable[Engine], AsyncConnectable): + """An asyncio proxy for a :class:`_engine.Engine`. + + :class:`_asyncio.AsyncEngine` is acquired using the + :func:`_asyncio.create_async_engine` function:: + + from sqlalchemy.ext.asyncio import create_async_engine + engine = create_async_engine("postgresql+asyncpg://user:pass@host/dbname") + + .. versionadded:: 1.4 + + """ # noqa + + # AsyncEngine is a thin proxy; no state should be added here + # that is not retrievable from the "sync" engine / connection, e.g. + # current transaction, info, etc. It should be possible to + # create a new AsyncEngine that matches this one given only the + # "sync" elements. + __slots__ = "sync_engine" + + _connection_cls: Type[AsyncConnection] = AsyncConnection + + sync_engine: Engine + """Reference to the sync-style :class:`_engine.Engine` this + :class:`_asyncio.AsyncEngine` proxies requests towards. + + This instance can be used as an event target. + + .. seealso:: + + :ref:`asyncio_events` + """ + + def __init__(self, sync_engine: Engine): + if not sync_engine.dialect.is_async: + raise exc.InvalidRequestError( + "The asyncio extension requires an async driver to be used. " + f"The loaded {sync_engine.dialect.driver!r} is not async." + ) + self.sync_engine = self._assign_proxied(sync_engine) + + @util.ro_non_memoized_property + def _proxied(self) -> Engine: + return self.sync_engine + + @classmethod + def _regenerate_proxy_for_target(cls, target: Engine) -> AsyncEngine: + return AsyncEngine(target) + + @contextlib.asynccontextmanager + async def begin(self) -> AsyncIterator[AsyncConnection]: + """Return a context manager which when entered will deliver an + :class:`_asyncio.AsyncConnection` with an + :class:`_asyncio.AsyncTransaction` established. + + E.g.:: + + async with async_engine.begin() as conn: + await conn.execute( + text("insert into table (x, y, z) values (1, 2, 3)") + ) + await conn.execute(text("my_special_procedure(5)")) + + + """ + conn = self.connect() + + async with conn: + async with conn.begin(): + yield conn + + def connect(self) -> AsyncConnection: + """Return an :class:`_asyncio.AsyncConnection` object. 
+ + The :class:`_asyncio.AsyncConnection` will procure a database + connection from the underlying connection pool when it is entered + as an async context manager:: + + async with async_engine.connect() as conn: + result = await conn.execute(select(user_table)) + + The :class:`_asyncio.AsyncConnection` may also be started outside of a + context manager by invoking its :meth:`_asyncio.AsyncConnection.start` + method. + + """ + + return self._connection_cls(self) + + async def raw_connection(self) -> PoolProxiedConnection: + """Return a "raw" DBAPI connection from the connection pool. + + .. seealso:: + + :ref:`dbapi_connections` + + """ + return await greenlet_spawn(self.sync_engine.raw_connection) + + @overload + def execution_options( + self, + *, + compiled_cache: Optional[CompiledCacheType] = ..., + logging_token: str = ..., + isolation_level: IsolationLevel = ..., + insertmanyvalues_page_size: int = ..., + schema_translate_map: Optional[SchemaTranslateMapType] = ..., + **opt: Any, + ) -> AsyncEngine: ... + + @overload + def execution_options(self, **opt: Any) -> AsyncEngine: ... + + def execution_options(self, **opt: Any) -> AsyncEngine: + """Return a new :class:`_asyncio.AsyncEngine` that will provide + :class:`_asyncio.AsyncConnection` objects with the given execution + options. + + Proxied from :meth:`_engine.Engine.execution_options`. See that + method for details. + + """ + + return AsyncEngine(self.sync_engine.execution_options(**opt)) + + async def dispose(self, close: bool = True) -> None: + """Dispose of the connection pool used by this + :class:`_asyncio.AsyncEngine`. + + :param close: if left at its default of ``True``, has the + effect of fully closing all **currently checked in** + database connections. Connections that are still checked out + will **not** be closed, however they will no longer be associated + with this :class:`_engine.Engine`, + so when they are closed individually, eventually the + :class:`_pool.Pool` which they are associated with will + be garbage collected and they will be closed out fully, if + not already closed on checkin. + + If set to ``False``, the previous connection pool is de-referenced, + and otherwise not touched in any way. + + .. seealso:: + + :meth:`_engine.Engine.dispose` + + """ + + await greenlet_spawn(self.sync_engine.dispose, close=close) + + # START PROXY METHODS AsyncEngine + + # code within this block is **programmatically, + # statically generated** by tools/generate_proxy_methods.py + + def clear_compiled_cache(self) -> None: + r"""Clear the compiled cache associated with the dialect. + + .. container:: class_bases + + Proxied for the :class:`_engine.Engine` class on + behalf of the :class:`_asyncio.AsyncEngine` class. + + This applies **only** to the built-in cache that is established + via the :paramref:`_engine.create_engine.query_cache_size` parameter. + It will not impact any dictionary caches that were passed via the + :paramref:`.Connection.execution_options.compiled_cache` parameter. + + .. versionadded:: 1.4 + + + """ # noqa: E501 + + return self._proxied.clear_compiled_cache() + + def update_execution_options(self, **opt: Any) -> None: + r"""Update the default execution_options dictionary + of this :class:`_engine.Engine`. + + .. container:: class_bases + + Proxied for the :class:`_engine.Engine` class on + behalf of the :class:`_asyncio.AsyncEngine` class. + + The given keys/values in \**opt are added to the + default execution options that will be used for + all connections. 
The initial contents of this dictionary + can be sent via the ``execution_options`` parameter + to :func:`_sa.create_engine`. + + .. seealso:: + + :meth:`_engine.Connection.execution_options` + + :meth:`_engine.Engine.execution_options` + + + """ # noqa: E501 + + return self._proxied.update_execution_options(**opt) + + def get_execution_options(self) -> _ExecuteOptions: + r"""Get the non-SQL options which will take effect during execution. + + .. container:: class_bases + + Proxied for the :class:`_engine.Engine` class on + behalf of the :class:`_asyncio.AsyncEngine` class. + + .. versionadded: 1.3 + + .. seealso:: + + :meth:`_engine.Engine.execution_options` + + """ # noqa: E501 + + return self._proxied.get_execution_options() + + @property + def url(self) -> URL: + r"""Proxy for the :attr:`_engine.Engine.url` attribute + on behalf of the :class:`_asyncio.AsyncEngine` class. + + """ # noqa: E501 + + return self._proxied.url + + @url.setter + def url(self, attr: URL) -> None: + self._proxied.url = attr + + @property + def pool(self) -> Pool: + r"""Proxy for the :attr:`_engine.Engine.pool` attribute + on behalf of the :class:`_asyncio.AsyncEngine` class. + + """ # noqa: E501 + + return self._proxied.pool + + @pool.setter + def pool(self, attr: Pool) -> None: + self._proxied.pool = attr + + @property + def dialect(self) -> Dialect: + r"""Proxy for the :attr:`_engine.Engine.dialect` attribute + on behalf of the :class:`_asyncio.AsyncEngine` class. + + """ # noqa: E501 + + return self._proxied.dialect + + @dialect.setter + def dialect(self, attr: Dialect) -> None: + self._proxied.dialect = attr + + @property + def engine(self) -> Any: + r"""Returns this :class:`.Engine`. + + .. container:: class_bases + + Proxied for the :class:`_engine.Engine` class + on behalf of the :class:`_asyncio.AsyncEngine` class. + + Used for legacy schemes that accept :class:`.Connection` / + :class:`.Engine` objects within the same variable. + + + """ # noqa: E501 + + return self._proxied.engine + + @property + def name(self) -> Any: + r"""String name of the :class:`~sqlalchemy.engine.interfaces.Dialect` + in use by this :class:`Engine`. + + .. container:: class_bases + + Proxied for the :class:`_engine.Engine` class + on behalf of the :class:`_asyncio.AsyncEngine` class. + + + """ # noqa: E501 + + return self._proxied.name + + @property + def driver(self) -> Any: + r"""Driver name of the :class:`~sqlalchemy.engine.interfaces.Dialect` + in use by this :class:`Engine`. + + .. container:: class_bases + + Proxied for the :class:`_engine.Engine` class + on behalf of the :class:`_asyncio.AsyncEngine` class. + + + """ # noqa: E501 + + return self._proxied.driver + + @property + def echo(self) -> Any: + r"""When ``True``, enable log output for this element. + + .. container:: class_bases + + Proxied for the :class:`_engine.Engine` class + on behalf of the :class:`_asyncio.AsyncEngine` class. + + This has the effect of setting the Python logging level for the namespace + of this element's class and object reference. A value of boolean ``True`` + indicates that the loglevel ``logging.INFO`` will be set for the logger, + whereas the string value ``debug`` will set the loglevel to + ``logging.DEBUG``. 
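+
+        E.g. (illustrative)::
+
+            engine.echo = True      # SQL statements logged at logging.INFO
+            engine.echo = "debug"   # statements and result rows at DEBUG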
+ + """ # noqa: E501 + + return self._proxied.echo + + @echo.setter + def echo(self, attr: Any) -> None: + self._proxied.echo = attr + + # END PROXY METHODS AsyncEngine + + +class AsyncTransaction( + ProxyComparable[Transaction], StartableContext["AsyncTransaction"] +): + """An asyncio proxy for a :class:`_engine.Transaction`.""" + + __slots__ = ("connection", "sync_transaction", "nested") + + sync_transaction: Optional[Transaction] + connection: AsyncConnection + nested: bool + + def __init__(self, connection: AsyncConnection, nested: bool = False): + self.connection = connection + self.sync_transaction = None + self.nested = nested + + @classmethod + def _regenerate_proxy_for_target( + cls, target: Transaction + ) -> AsyncTransaction: + sync_connection = target.connection + sync_transaction = target + nested = isinstance(target, NestedTransaction) + + async_connection = AsyncConnection._retrieve_proxy_for_target( + sync_connection + ) + assert async_connection is not None + + obj = cls.__new__(cls) + obj.connection = async_connection + obj.sync_transaction = obj._assign_proxied(sync_transaction) + obj.nested = nested + return obj + + @util.ro_non_memoized_property + def _proxied(self) -> Transaction: + if not self.sync_transaction: + self._raise_for_not_started() + return self.sync_transaction + + @property + def is_valid(self) -> bool: + return self._proxied.is_valid + + @property + def is_active(self) -> bool: + return self._proxied.is_active + + async def close(self) -> None: + """Close this :class:`.AsyncTransaction`. + + If this transaction is the base transaction in a begin/commit + nesting, the transaction will rollback(). Otherwise, the + method returns. + + This is used to cancel a Transaction without affecting the scope of + an enclosing transaction. + + """ + await greenlet_spawn(self._proxied.close) + + async def rollback(self) -> None: + """Roll back this :class:`.AsyncTransaction`.""" + await greenlet_spawn(self._proxied.rollback) + + async def commit(self) -> None: + """Commit this :class:`.AsyncTransaction`.""" + + await greenlet_spawn(self._proxied.commit) + + async def start(self, is_ctxmanager: bool = False) -> AsyncTransaction: + """Start this :class:`_asyncio.AsyncTransaction` object's context + outside of using a Python ``with:`` block. + + """ + + self.sync_transaction = self._assign_proxied( + await greenlet_spawn( + self.connection._proxied.begin_nested + if self.nested + else self.connection._proxied.begin + ) + ) + if is_ctxmanager: + self.sync_transaction.__enter__() + return self + + async def __aexit__(self, type_: Any, value: Any, traceback: Any) -> None: + await greenlet_spawn(self._proxied.__exit__, type_, value, traceback) + + +@overload +def _get_sync_engine_or_connection(async_engine: AsyncEngine) -> Engine: ... + + +@overload +def _get_sync_engine_or_connection( + async_engine: AsyncConnection, +) -> Connection: ... + + +def _get_sync_engine_or_connection( + async_engine: Union[AsyncEngine, AsyncConnection] +) -> Union[Engine, Connection]: + if isinstance(async_engine, AsyncConnection): + return async_engine._proxied + + try: + return async_engine.sync_engine + except AttributeError as e: + raise exc.ArgumentError( + "AsyncEngine expected, got %r" % async_engine + ) from e + + +@inspection._inspects(AsyncConnection) +def _no_insp_for_async_conn_yet( + subject: AsyncConnection, # noqa: U100 +) -> NoReturn: + raise exc.NoInspectionAvailable( + "Inspection on an AsyncConnection is currently not supported. 
" + "Please use ``run_sync`` to pass a callable where it's possible " + "to call ``inspect`` on the passed connection.", + code="xd3s", + ) + + +@inspection._inspects(AsyncEngine) +def _no_insp_for_async_engine_xyet( + subject: AsyncEngine, # noqa: U100 +) -> NoReturn: + raise exc.NoInspectionAvailable( + "Inspection on an AsyncEngine is currently not supported. " + "Please obtain a connection then use ``conn.run_sync`` to pass a " + "callable where it's possible to call ``inspect`` on the " + "passed connection.", + code="xd3s", + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/exc.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/exc.py new file mode 100644 index 00000000..1cf6f363 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/exc.py @@ -0,0 +1,21 @@ +# ext/asyncio/exc.py +# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from ... import exc + + +class AsyncMethodRequired(exc.InvalidRequestError): + """an API can't be used because its result would not be + compatible with async""" + + +class AsyncContextNotStarted(exc.InvalidRequestError): + """a startable context manager has not been started.""" + + +class AsyncContextAlreadyStarted(exc.InvalidRequestError): + """a startable context manager is already started.""" diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/result.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/result.py new file mode 100644 index 00000000..7dcbe328 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/result.py @@ -0,0 +1,961 @@ +# ext/asyncio/result.py +# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from __future__ import annotations + +import operator +from typing import Any +from typing import AsyncIterator +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar + +from . import exc as async_exc +from ... import util +from ...engine import Result +from ...engine.result import _NO_ROW +from ...engine.result import _R +from ...engine.result import _WithKeys +from ...engine.result import FilterResult +from ...engine.result import FrozenResult +from ...engine.result import ResultMetaData +from ...engine.row import Row +from ...engine.row import RowMapping +from ...sql.base import _generative +from ...util.concurrency import greenlet_spawn +from ...util.typing import Literal +from ...util.typing import Self + +if TYPE_CHECKING: + from ...engine import CursorResult + from ...engine.result import _KeyIndexType + from ...engine.result import _UniqueFilterType + +_T = TypeVar("_T", bound=Any) +_TP = TypeVar("_TP", bound=Tuple[Any, ...]) + + +class AsyncCommon(FilterResult[_R]): + __slots__ = () + + _real_result: Result[Any] + _metadata: ResultMetaData + + async def close(self) -> None: # type: ignore[override] + """Close this result.""" + + await greenlet_spawn(self._real_result.close) + + @property + def closed(self) -> bool: + """proxies the .closed attribute of the underlying result object, + if any, else raises ``AttributeError``. + + .. 
versionadded:: 2.0.0b3 + + """ + return self._real_result.closed + + +class AsyncResult(_WithKeys, AsyncCommon[Row[_TP]]): + """An asyncio wrapper around a :class:`_result.Result` object. + + The :class:`_asyncio.AsyncResult` only applies to statement executions that + use a server-side cursor. It is returned only from the + :meth:`_asyncio.AsyncConnection.stream` and + :meth:`_asyncio.AsyncSession.stream` methods. + + .. note:: As is the case with :class:`_engine.Result`, this object is + used for ORM results returned by :meth:`_asyncio.AsyncSession.execute`, + which can yield instances of ORM mapped objects either individually or + within tuple-like rows. Note that these result objects do not + deduplicate instances or rows automatically as is the case with the + legacy :class:`_orm.Query` object. For in-Python de-duplication of + instances or rows, use the :meth:`_asyncio.AsyncResult.unique` modifier + method. + + .. versionadded:: 1.4 + + """ + + __slots__ = () + + _real_result: Result[_TP] + + def __init__(self, real_result: Result[_TP]): + self._real_result = real_result + + self._metadata = real_result._metadata + self._unique_filter_state = real_result._unique_filter_state + self._post_creational_filter = None + + # BaseCursorResult pre-generates the "_row_getter". Use that + # if available rather than building a second one + if "_row_getter" in real_result.__dict__: + self._set_memoized_attribute( + "_row_getter", real_result.__dict__["_row_getter"] + ) + + @property + def t(self) -> AsyncTupleResult[_TP]: + """Apply a "typed tuple" typing filter to returned rows. + + The :attr:`_asyncio.AsyncResult.t` attribute is a synonym for + calling the :meth:`_asyncio.AsyncResult.tuples` method. + + .. versionadded:: 2.0 + + """ + return self # type: ignore + + def tuples(self) -> AsyncTupleResult[_TP]: + """Apply a "typed tuple" typing filter to returned rows. + + This method returns the same :class:`_asyncio.AsyncResult` object + at runtime, + however annotates as returning a :class:`_asyncio.AsyncTupleResult` + object that will indicate to :pep:`484` typing tools that plain typed + ``Tuple`` instances are returned rather than rows. This allows + tuple unpacking and ``__getitem__`` access of :class:`_engine.Row` + objects to by typed, for those cases where the statement invoked + itself included typing information. + + .. versionadded:: 2.0 + + :return: the :class:`_result.AsyncTupleResult` type at typing time. + + .. seealso:: + + :attr:`_asyncio.AsyncResult.t` - shorter synonym + + :attr:`_engine.Row.t` - :class:`_engine.Row` version + + """ + + return self # type: ignore + + @_generative + def unique(self, strategy: Optional[_UniqueFilterType] = None) -> Self: + """Apply unique filtering to the objects returned by this + :class:`_asyncio.AsyncResult`. + + Refer to :meth:`_engine.Result.unique` in the synchronous + SQLAlchemy API for a complete behavioral description. + + """ + self._unique_filter_state = (set(), strategy) + return self + + def columns(self, *col_expressions: _KeyIndexType) -> Self: + r"""Establish the columns that should be returned in each row. + + Refer to :meth:`_engine.Result.columns` in the synchronous + SQLAlchemy API for a complete behavioral description. + + """ + return self._column_slices(col_expressions) + + async def partitions( + self, size: Optional[int] = None + ) -> AsyncIterator[Sequence[Row[_TP]]]: + """Iterate through sub-lists of rows of the size given. 
+ + An async iterator is returned:: + + async def scroll_results(connection): + result = await connection.stream(select(users_table)) + + async for partition in result.partitions(100): + print("list of rows: %s" % partition) + + Refer to :meth:`_engine.Result.partitions` in the synchronous + SQLAlchemy API for a complete behavioral description. + + """ + + getter = self._manyrow_getter + + while True: + partition = await greenlet_spawn(getter, self, size) + if partition: + yield partition + else: + break + + async def fetchall(self) -> Sequence[Row[_TP]]: + """A synonym for the :meth:`_asyncio.AsyncResult.all` method. + + .. versionadded:: 2.0 + + """ + + return await greenlet_spawn(self._allrows) + + async def fetchone(self) -> Optional[Row[_TP]]: + """Fetch one row. + + When all rows are exhausted, returns None. + + This method is provided for backwards compatibility with + SQLAlchemy 1.x.x. + + To fetch the first row of a result only, use the + :meth:`_asyncio.AsyncResult.first` method. To iterate through all + rows, iterate the :class:`_asyncio.AsyncResult` object directly. + + :return: a :class:`_engine.Row` object if no filters are applied, + or ``None`` if no rows remain. + + """ + row = await greenlet_spawn(self._onerow_getter, self) + if row is _NO_ROW: + return None + else: + return row + + async def fetchmany( + self, size: Optional[int] = None + ) -> Sequence[Row[_TP]]: + """Fetch many rows. + + When all rows are exhausted, returns an empty list. + + This method is provided for backwards compatibility with + SQLAlchemy 1.x.x. + + To fetch rows in groups, use the + :meth:`._asyncio.AsyncResult.partitions` method. + + :return: a list of :class:`_engine.Row` objects. + + .. seealso:: + + :meth:`_asyncio.AsyncResult.partitions` + + """ + + return await greenlet_spawn(self._manyrow_getter, self, size) + + async def all(self) -> Sequence[Row[_TP]]: + """Return all rows in a list. + + Closes the result set after invocation. Subsequent invocations + will return an empty list. + + :return: a list of :class:`_engine.Row` objects. + + """ + + return await greenlet_spawn(self._allrows) + + def __aiter__(self) -> AsyncResult[_TP]: + return self + + async def __anext__(self) -> Row[_TP]: + row = await greenlet_spawn(self._onerow_getter, self) + if row is _NO_ROW: + raise StopAsyncIteration() + else: + return row + + async def first(self) -> Optional[Row[_TP]]: + """Fetch the first row or ``None`` if no row is present. + + Closes the result set and discards remaining rows. + + .. note:: This method returns one **row**, e.g. tuple, by default. + To return exactly one single scalar value, that is, the first + column of the first row, use the + :meth:`_asyncio.AsyncResult.scalar` method, + or combine :meth:`_asyncio.AsyncResult.scalars` and + :meth:`_asyncio.AsyncResult.first`. + + Additionally, in contrast to the behavior of the legacy ORM + :meth:`_orm.Query.first` method, **no limit is applied** to the + SQL query which was invoked to produce this + :class:`_asyncio.AsyncResult`; + for a DBAPI driver that buffers results in memory before yielding + rows, all rows will be sent to the Python process and all but + the first row will be discarded. + + .. seealso:: + + :ref:`migration_20_unify_select` + + :return: a :class:`_engine.Row` object, or None + if no rows remain. + + .. 
seealso:: + + :meth:`_asyncio.AsyncResult.scalar` + + :meth:`_asyncio.AsyncResult.one` + + """ + return await greenlet_spawn(self._only_one_row, False, False, False) + + async def one_or_none(self) -> Optional[Row[_TP]]: + """Return at most one result or raise an exception. + + Returns ``None`` if the result has no rows. + Raises :class:`.MultipleResultsFound` + if multiple rows are returned. + + .. versionadded:: 1.4 + + :return: The first :class:`_engine.Row` or ``None`` if no row + is available. + + :raises: :class:`.MultipleResultsFound` + + .. seealso:: + + :meth:`_asyncio.AsyncResult.first` + + :meth:`_asyncio.AsyncResult.one` + + """ + return await greenlet_spawn(self._only_one_row, True, False, False) + + @overload + async def scalar_one(self: AsyncResult[Tuple[_T]]) -> _T: ... + + @overload + async def scalar_one(self) -> Any: ... + + async def scalar_one(self) -> Any: + """Return exactly one scalar result or raise an exception. + + This is equivalent to calling :meth:`_asyncio.AsyncResult.scalars` and + then :meth:`_asyncio.AsyncResult.one`. + + .. seealso:: + + :meth:`_asyncio.AsyncResult.one` + + :meth:`_asyncio.AsyncResult.scalars` + + """ + return await greenlet_spawn(self._only_one_row, True, True, True) + + @overload + async def scalar_one_or_none( + self: AsyncResult[Tuple[_T]], + ) -> Optional[_T]: ... + + @overload + async def scalar_one_or_none(self) -> Optional[Any]: ... + + async def scalar_one_or_none(self) -> Optional[Any]: + """Return exactly one scalar result or ``None``. + + This is equivalent to calling :meth:`_asyncio.AsyncResult.scalars` and + then :meth:`_asyncio.AsyncResult.one_or_none`. + + .. seealso:: + + :meth:`_asyncio.AsyncResult.one_or_none` + + :meth:`_asyncio.AsyncResult.scalars` + + """ + return await greenlet_spawn(self._only_one_row, True, False, True) + + async def one(self) -> Row[_TP]: + """Return exactly one row or raise an exception. + + Raises :class:`.NoResultFound` if the result returns no + rows, or :class:`.MultipleResultsFound` if multiple rows + would be returned. + + .. note:: This method returns one **row**, e.g. tuple, by default. + To return exactly one single scalar value, that is, the first + column of the first row, use the + :meth:`_asyncio.AsyncResult.scalar_one` method, or combine + :meth:`_asyncio.AsyncResult.scalars` and + :meth:`_asyncio.AsyncResult.one`. + + .. versionadded:: 1.4 + + :return: The first :class:`_engine.Row`. + + :raises: :class:`.MultipleResultsFound`, :class:`.NoResultFound` + + .. seealso:: + + :meth:`_asyncio.AsyncResult.first` + + :meth:`_asyncio.AsyncResult.one_or_none` + + :meth:`_asyncio.AsyncResult.scalar_one` + + """ + return await greenlet_spawn(self._only_one_row, True, True, False) + + @overload + async def scalar(self: AsyncResult[Tuple[_T]]) -> Optional[_T]: ... + + @overload + async def scalar(self) -> Any: ... + + async def scalar(self) -> Any: + """Fetch the first column of the first row, and close the result set. + + Returns ``None`` if there are no rows to fetch. + + No validation is performed to test if additional rows remain. + + After calling this method, the object is fully closed, + e.g. the :meth:`_engine.CursorResult.close` + method will have been called. + + :return: a Python scalar value, or ``None`` if no rows remain. + + """ + return await greenlet_spawn(self._only_one_row, False, False, True) + + async def freeze(self) -> FrozenResult[_TP]: + """Return a callable object that will produce copies of this + :class:`_asyncio.AsyncResult` when invoked. 
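+
+        E.g. (a sketch)::
+
+            frozen = await result.freeze()  # fully consumes this result
+            r1 = frozen()                   # new Result over the stored rows
+            r2 = frozen()                   # may be called repeatedly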
+ + The callable object returned is an instance of + :class:`_engine.FrozenResult`. + + This is used for result set caching. The method must be called + on the result when it has been unconsumed, and calling the method + will consume the result fully. When the :class:`_engine.FrozenResult` + is retrieved from a cache, it can be called any number of times where + it will produce a new :class:`_engine.Result` object each time + against its stored set of rows. + + .. seealso:: + + :ref:`do_orm_execute_re_executing` - example usage within the + ORM to implement a result-set cache. + + """ + + return await greenlet_spawn(FrozenResult, self) + + @overload + def scalars( + self: AsyncResult[Tuple[_T]], index: Literal[0] + ) -> AsyncScalarResult[_T]: ... + + @overload + def scalars(self: AsyncResult[Tuple[_T]]) -> AsyncScalarResult[_T]: ... + + @overload + def scalars(self, index: _KeyIndexType = 0) -> AsyncScalarResult[Any]: ... + + def scalars(self, index: _KeyIndexType = 0) -> AsyncScalarResult[Any]: + """Return an :class:`_asyncio.AsyncScalarResult` filtering object which + will return single elements rather than :class:`_row.Row` objects. + + Refer to :meth:`_result.Result.scalars` in the synchronous + SQLAlchemy API for a complete behavioral description. + + :param index: integer or row key indicating the column to be fetched + from each row, defaults to ``0`` indicating the first column. + + :return: a new :class:`_asyncio.AsyncScalarResult` filtering object + referring to this :class:`_asyncio.AsyncResult` object. + + """ + return AsyncScalarResult(self._real_result, index) + + def mappings(self) -> AsyncMappingResult: + """Apply a mappings filter to returned rows, returning an instance of + :class:`_asyncio.AsyncMappingResult`. + + When this filter is applied, fetching rows will return + :class:`_engine.RowMapping` objects instead of :class:`_engine.Row` + objects. + + :return: a new :class:`_asyncio.AsyncMappingResult` filtering object + referring to the underlying :class:`_result.Result` object. + + """ + + return AsyncMappingResult(self._real_result) + + +class AsyncScalarResult(AsyncCommon[_R]): + """A wrapper for a :class:`_asyncio.AsyncResult` that returns scalar values + rather than :class:`_row.Row` values. + + The :class:`_asyncio.AsyncScalarResult` object is acquired by calling the + :meth:`_asyncio.AsyncResult.scalars` method. + + Refer to the :class:`_result.ScalarResult` object in the synchronous + SQLAlchemy API for a complete behavioral description. + + .. versionadded:: 1.4 + + """ + + __slots__ = () + + _generate_rows = False + + def __init__(self, real_result: Result[Any], index: _KeyIndexType): + self._real_result = real_result + + if real_result._source_supports_scalars: + self._metadata = real_result._metadata + self._post_creational_filter = None + else: + self._metadata = real_result._metadata._reduce([index]) + self._post_creational_filter = operator.itemgetter(0) + + self._unique_filter_state = real_result._unique_filter_state + + def unique( + self, + strategy: Optional[_UniqueFilterType] = None, + ) -> Self: + """Apply unique filtering to the objects returned by this + :class:`_asyncio.AsyncScalarResult`. + + See :meth:`_asyncio.AsyncResult.unique` for usage details. + + """ + self._unique_filter_state = (set(), strategy) + return self + + async def partitions( + self, size: Optional[int] = None + ) -> AsyncIterator[Sequence[_R]]: + """Iterate through sub-lists of elements of the size given. 
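+
+        For illustration only, a minimal sketch (assuming an
+        :class:`_asyncio.AsyncScalarResult` named ``scalar_result`` and a
+        hypothetical ``process()`` callable)::
+
+            async for partition in scalar_result.partitions(100):
+                # each partition is a list of scalar values,
+                # not Row objects
+                process(partition)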
+ + Equivalent to :meth:`_asyncio.AsyncResult.partitions` except that + scalar values, rather than :class:`_engine.Row` objects, + are returned. + + """ + + getter = self._manyrow_getter + + while True: + partition = await greenlet_spawn(getter, self, size) + if partition: + yield partition + else: + break + + async def fetchall(self) -> Sequence[_R]: + """A synonym for the :meth:`_asyncio.AsyncScalarResult.all` method.""" + + return await greenlet_spawn(self._allrows) + + async def fetchmany(self, size: Optional[int] = None) -> Sequence[_R]: + """Fetch many objects. + + Equivalent to :meth:`_asyncio.AsyncResult.fetchmany` except that + scalar values, rather than :class:`_engine.Row` objects, + are returned. + + """ + return await greenlet_spawn(self._manyrow_getter, self, size) + + async def all(self) -> Sequence[_R]: + """Return all scalar values in a list. + + Equivalent to :meth:`_asyncio.AsyncResult.all` except that + scalar values, rather than :class:`_engine.Row` objects, + are returned. + + """ + return await greenlet_spawn(self._allrows) + + def __aiter__(self) -> AsyncScalarResult[_R]: + return self + + async def __anext__(self) -> _R: + row = await greenlet_spawn(self._onerow_getter, self) + if row is _NO_ROW: + raise StopAsyncIteration() + else: + return row + + async def first(self) -> Optional[_R]: + """Fetch the first object or ``None`` if no object is present. + + Equivalent to :meth:`_asyncio.AsyncResult.first` except that + scalar values, rather than :class:`_engine.Row` objects, + are returned. + + """ + return await greenlet_spawn(self._only_one_row, False, False, False) + + async def one_or_none(self) -> Optional[_R]: + """Return at most one object or raise an exception. + + Equivalent to :meth:`_asyncio.AsyncResult.one_or_none` except that + scalar values, rather than :class:`_engine.Row` objects, + are returned. + + """ + return await greenlet_spawn(self._only_one_row, True, False, False) + + async def one(self) -> _R: + """Return exactly one object or raise an exception. + + Equivalent to :meth:`_asyncio.AsyncResult.one` except that + scalar values, rather than :class:`_engine.Row` objects, + are returned. + + """ + return await greenlet_spawn(self._only_one_row, True, True, False) + + +class AsyncMappingResult(_WithKeys, AsyncCommon[RowMapping]): + """A wrapper for a :class:`_asyncio.AsyncResult` that returns dictionary + values rather than :class:`_engine.Row` values. + + The :class:`_asyncio.AsyncMappingResult` object is acquired by calling the + :meth:`_asyncio.AsyncResult.mappings` method. + + Refer to the :class:`_result.MappingResult` object in the synchronous + SQLAlchemy API for a complete behavioral description. + + .. versionadded:: 1.4 + + """ + + __slots__ = () + + _generate_rows = True + + _post_creational_filter = operator.attrgetter("_mapping") + + def __init__(self, result: Result[Any]): + self._real_result = result + self._unique_filter_state = result._unique_filter_state + self._metadata = result._metadata + if result._source_supports_scalars: + self._metadata = self._metadata._reduce([0]) + + def unique( + self, + strategy: Optional[_UniqueFilterType] = None, + ) -> Self: + """Apply unique filtering to the objects returned by this + :class:`_asyncio.AsyncMappingResult`. + + See :meth:`_asyncio.AsyncResult.unique` for usage details. 
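+
+        For illustration only, a minimal sketch (assuming an
+        :class:`_asyncio.AsyncMappingResult` named ``mapping_result``)::
+
+            unique_mappings = await mapping_result.unique().all()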
+ + """ + self._unique_filter_state = (set(), strategy) + return self + + def columns(self, *col_expressions: _KeyIndexType) -> Self: + r"""Establish the columns that should be returned in each row.""" + return self._column_slices(col_expressions) + + async def partitions( + self, size: Optional[int] = None + ) -> AsyncIterator[Sequence[RowMapping]]: + """Iterate through sub-lists of elements of the size given. + + Equivalent to :meth:`_asyncio.AsyncResult.partitions` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. + + """ + + getter = self._manyrow_getter + + while True: + partition = await greenlet_spawn(getter, self, size) + if partition: + yield partition + else: + break + + async def fetchall(self) -> Sequence[RowMapping]: + """A synonym for the :meth:`_asyncio.AsyncMappingResult.all` method.""" + + return await greenlet_spawn(self._allrows) + + async def fetchone(self) -> Optional[RowMapping]: + """Fetch one object. + + Equivalent to :meth:`_asyncio.AsyncResult.fetchone` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. + + """ + + row = await greenlet_spawn(self._onerow_getter, self) + if row is _NO_ROW: + return None + else: + return row + + async def fetchmany( + self, size: Optional[int] = None + ) -> Sequence[RowMapping]: + """Fetch many rows. + + Equivalent to :meth:`_asyncio.AsyncResult.fetchmany` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. + + """ + + return await greenlet_spawn(self._manyrow_getter, self, size) + + async def all(self) -> Sequence[RowMapping]: + """Return all rows in a list. + + Equivalent to :meth:`_asyncio.AsyncResult.all` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. + + """ + + return await greenlet_spawn(self._allrows) + + def __aiter__(self) -> AsyncMappingResult: + return self + + async def __anext__(self) -> RowMapping: + row = await greenlet_spawn(self._onerow_getter, self) + if row is _NO_ROW: + raise StopAsyncIteration() + else: + return row + + async def first(self) -> Optional[RowMapping]: + """Fetch the first object or ``None`` if no object is present. + + Equivalent to :meth:`_asyncio.AsyncResult.first` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. + + """ + return await greenlet_spawn(self._only_one_row, False, False, False) + + async def one_or_none(self) -> Optional[RowMapping]: + """Return at most one object or raise an exception. + + Equivalent to :meth:`_asyncio.AsyncResult.one_or_none` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. + + """ + return await greenlet_spawn(self._only_one_row, True, False, False) + + async def one(self) -> RowMapping: + """Return exactly one object or raise an exception. + + Equivalent to :meth:`_asyncio.AsyncResult.one` except that + :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` + objects, are returned. + + """ + return await greenlet_spawn(self._only_one_row, True, True, False) + + +class AsyncTupleResult(AsyncCommon[_R], util.TypingOnly): + """A :class:`_asyncio.AsyncResult` that's typed as returning plain + Python tuples instead of rows. + + Since :class:`_engine.Row` acts like a tuple in every way already, + this class is a typing only class, regular :class:`_asyncio.AsyncResult` is + still used at runtime. 
+ + """ + + __slots__ = () + + if TYPE_CHECKING: + + async def partitions( + self, size: Optional[int] = None + ) -> AsyncIterator[Sequence[_R]]: + """Iterate through sub-lists of elements of the size given. + + Equivalent to :meth:`_result.Result.partitions` except that + tuple values, rather than :class:`_engine.Row` objects, + are returned. + + """ + ... + + async def fetchone(self) -> Optional[_R]: + """Fetch one tuple. + + Equivalent to :meth:`_result.Result.fetchone` except that + tuple values, rather than :class:`_engine.Row` + objects, are returned. + + """ + ... + + async def fetchall(self) -> Sequence[_R]: + """A synonym for the :meth:`_engine.ScalarResult.all` method.""" + ... + + async def fetchmany(self, size: Optional[int] = None) -> Sequence[_R]: + """Fetch many objects. + + Equivalent to :meth:`_result.Result.fetchmany` except that + tuple values, rather than :class:`_engine.Row` objects, + are returned. + + """ + ... + + async def all(self) -> Sequence[_R]: # noqa: A001 + """Return all scalar values in a list. + + Equivalent to :meth:`_result.Result.all` except that + tuple values, rather than :class:`_engine.Row` objects, + are returned. + + """ + ... + + async def __aiter__(self) -> AsyncIterator[_R]: ... + + async def __anext__(self) -> _R: ... + + async def first(self) -> Optional[_R]: + """Fetch the first object or ``None`` if no object is present. + + Equivalent to :meth:`_result.Result.first` except that + tuple values, rather than :class:`_engine.Row` objects, + are returned. + + + """ + ... + + async def one_or_none(self) -> Optional[_R]: + """Return at most one object or raise an exception. + + Equivalent to :meth:`_result.Result.one_or_none` except that + tuple values, rather than :class:`_engine.Row` objects, + are returned. + + """ + ... + + async def one(self) -> _R: + """Return exactly one object or raise an exception. + + Equivalent to :meth:`_result.Result.one` except that + tuple values, rather than :class:`_engine.Row` objects, + are returned. + + """ + ... + + @overload + async def scalar_one(self: AsyncTupleResult[Tuple[_T]]) -> _T: ... + + @overload + async def scalar_one(self) -> Any: ... + + async def scalar_one(self) -> Any: + """Return exactly one scalar result or raise an exception. + + This is equivalent to calling :meth:`_engine.Result.scalars` + and then :meth:`_engine.Result.one`. + + .. seealso:: + + :meth:`_engine.Result.one` + + :meth:`_engine.Result.scalars` + + """ + ... + + @overload + async def scalar_one_or_none( + self: AsyncTupleResult[Tuple[_T]], + ) -> Optional[_T]: ... + + @overload + async def scalar_one_or_none(self) -> Optional[Any]: ... + + async def scalar_one_or_none(self) -> Optional[Any]: + """Return exactly one or no scalar result. + + This is equivalent to calling :meth:`_engine.Result.scalars` + and then :meth:`_engine.Result.one_or_none`. + + .. seealso:: + + :meth:`_engine.Result.one_or_none` + + :meth:`_engine.Result.scalars` + + """ + ... + + @overload + async def scalar( + self: AsyncTupleResult[Tuple[_T]], + ) -> Optional[_T]: ... + + @overload + async def scalar(self) -> Any: ... + + async def scalar(self) -> Any: + """Fetch the first column of the first row, and close the result + set. + + Returns ``None`` if there are no rows to fetch. + + No validation is performed to test if additional rows remain. + + After calling this method, the object is fully closed, + e.g. the :meth:`_engine.CursorResult.close` + method will have been called. + + :return: a Python scalar value , or ``None`` if no rows remain. 
+ + """ + ... + + +_RT = TypeVar("_RT", bound="Result[Any]") + + +async def _ensure_sync_result(result: _RT, calling_method: Any) -> _RT: + cursor_result: CursorResult[Any] + + try: + is_cursor = result._is_cursor + except AttributeError: + # legacy execute(DefaultGenerator) case + return result + + if not is_cursor: + cursor_result = getattr(result, "raw", None) # type: ignore + else: + cursor_result = result # type: ignore + if cursor_result and cursor_result.context._is_server_side: + await greenlet_spawn(cursor_result.close) + raise async_exc.AsyncMethodRequired( + "Can't use the %s.%s() method with a " + "server-side cursor. " + "Use the %s.stream() method for an async " + "streaming result set." + % ( + calling_method.__self__.__class__.__name__, + calling_method.__name__, + calling_method.__self__.__class__.__name__, + ) + ) + return result diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/scoping.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/scoping.py new file mode 100644 index 00000000..162f34ea --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/scoping.py @@ -0,0 +1,1614 @@ +# ext/asyncio/scoping.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +from typing import Any +from typing import Callable +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from .session import _AS +from .session import async_sessionmaker +from .session import AsyncSession +from ... import exc as sa_exc +from ... 
import util +from ...orm.session import Session +from ...util import create_proxy_methods +from ...util import ScopedRegistry +from ...util import warn +from ...util import warn_deprecated + +if TYPE_CHECKING: + from .engine import AsyncConnection + from .result import AsyncResult + from .result import AsyncScalarResult + from .session import AsyncSessionTransaction + from ...engine import Connection + from ...engine import CursorResult + from ...engine import Engine + from ...engine import Result + from ...engine import Row + from ...engine import RowMapping + from ...engine.interfaces import _CoreAnyExecuteParams + from ...engine.interfaces import CoreExecuteOptionsParameter + from ...engine.result import ScalarResult + from ...orm._typing import _IdentityKeyType + from ...orm._typing import _O + from ...orm._typing import OrmExecuteOptionsParameter + from ...orm.interfaces import ORMOption + from ...orm.session import _BindArguments + from ...orm.session import _EntityBindKey + from ...orm.session import _PKIdentityArgument + from ...orm.session import _SessionBind + from ...sql.base import Executable + from ...sql.dml import UpdateBase + from ...sql.elements import ClauseElement + from ...sql.selectable import ForUpdateParameter + from ...sql.selectable import TypedReturnsRows + +_T = TypeVar("_T", bound=Any) + + +@create_proxy_methods( + AsyncSession, + ":class:`_asyncio.AsyncSession`", + ":class:`_asyncio.scoping.async_scoped_session`", + classmethods=["close_all", "object_session", "identity_key"], + methods=[ + "__contains__", + "__iter__", + "aclose", + "add", + "add_all", + "begin", + "begin_nested", + "close", + "reset", + "commit", + "connection", + "delete", + "execute", + "expire", + "expire_all", + "expunge", + "expunge_all", + "flush", + "get_bind", + "is_modified", + "invalidate", + "merge", + "refresh", + "rollback", + "scalar", + "scalars", + "get", + "get_one", + "stream", + "stream_scalars", + ], + attributes=[ + "bind", + "dirty", + "deleted", + "new", + "identity_map", + "is_active", + "autoflush", + "no_autoflush", + "info", + ], + use_intermediate_variable=["get"], +) +class async_scoped_session(Generic[_AS]): + """Provides scoped management of :class:`.AsyncSession` objects. + + See the section :ref:`asyncio_scoped_session` for usage details. + + .. versionadded:: 1.4.19 + + + """ + + _support_async = True + + session_factory: async_sessionmaker[_AS] + """The `session_factory` provided to `__init__` is stored in this + attribute and may be accessed at a later time. This can be useful when + a new non-scoped :class:`.AsyncSession` is needed.""" + + registry: ScopedRegistry[_AS] + + def __init__( + self, + session_factory: async_sessionmaker[_AS], + scopefunc: Callable[[], Any], + ): + """Construct a new :class:`_asyncio.async_scoped_session`. + + :param session_factory: a factory to create new :class:`_asyncio.AsyncSession` + instances. This is usually, but not necessarily, an instance + of :class:`_asyncio.async_sessionmaker`. + + :param scopefunc: function which defines + the current scope. A function such as ``asyncio.current_task`` + may be useful here. + + """ # noqa: E501 + + self.session_factory = session_factory + self.registry = ScopedRegistry(session_factory, scopefunc) + + @property + def _proxied(self) -> _AS: + return self.registry() + + def __call__(self, **kw: Any) -> _AS: + r"""Return the current :class:`.AsyncSession`, creating it + using the :attr:`.scoped_session.session_factory` if not present. 
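+
+        For illustration only, a minimal sketch (assuming an
+        :class:`_asyncio.async_sessionmaker` named
+        ``async_session_factory``)::
+
+            from asyncio import current_task
+
+            AsyncScopedSession = async_scoped_session(
+                async_session_factory,
+                scopefunc=current_task,
+            )
+
+            # invoking the registry returns the task-local AsyncSession
+            session = AsyncScopedSession()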
+
+        :param \**kw: Keyword arguments will be passed to the
+          :attr:`.scoped_session.session_factory` callable, if an existing
+          :class:`.AsyncSession` is not present.  If the
+          :class:`.AsyncSession` is present
+          and keyword arguments have been passed,
+          :exc:`~sqlalchemy.exc.InvalidRequestError` is raised.
+
+        """
+        if kw:
+            if self.registry.has():
+                raise sa_exc.InvalidRequestError(
+                    "Scoped session is already present; "
+                    "no new arguments may be specified."
+                )
+            else:
+                sess = self.session_factory(**kw)
+                self.registry.set(sess)
+        else:
+            sess = self.registry()
+            if not self._support_async and sess._is_asyncio:
+                warn_deprecated(
+                    "Using `scoped_session` with asyncio is deprecated and "
+                    "will raise an error in a future version. "
+                    "Please use `async_scoped_session` instead.",
+                    "1.4.23",
+                )
+        return sess
+
+    def configure(self, **kwargs: Any) -> None:
+        """Reconfigure the :class:`.sessionmaker` used by this
+        :class:`.scoped_session`.
+
+        See :meth:`.sessionmaker.configure`.
+
+        """
+
+        if self.registry.has():
+            warn(
+                "At least one scoped session is already present. "
+                "configure() cannot affect sessions that have "
+                "already been created."
+            )
+
+        self.session_factory.configure(**kwargs)
+
+    async def remove(self) -> None:
+        """Dispose of the current :class:`.AsyncSession`, if present.
+
+        Unlike the :meth:`.scoped_session.remove` method, this method awaits
+        the :meth:`.AsyncSession.close` method of the current session before
+        clearing the registry.
+
+        """
+
+        if self.registry.has():
+            await self.registry().close()
+        self.registry.clear()
+
+    # START PROXY METHODS async_scoped_session
+
+    # code within this block is **programmatically,
+    # statically generated** by tools/generate_proxy_methods.py
+
+    def __contains__(self, instance: object) -> bool:
+        r"""Return True if the instance is associated with this session.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class on
+            behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_asyncio.AsyncSession` class.
+
+        The instance may be pending or persistent within the Session for a
+        result of True.
+
+
+
+        """  # noqa: E501
+
+        return self._proxied.__contains__(instance)
+
+    def __iter__(self) -> Iterator[object]:
+        r"""Iterate over all pending or persistent instances within this
+        Session.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class on
+            behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_asyncio.AsyncSession` class.
+
+
+
+        """  # noqa: E501
+
+        return self._proxied.__iter__()
+
+    async def aclose(self) -> None:
+        r"""A synonym for :meth:`_asyncio.AsyncSession.close`.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class on
+            behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+        The :meth:`_asyncio.AsyncSession.aclose` name exists specifically
+        to support the Python standard library ``@contextlib.aclosing``
+        context manager function.
+
+        .. versionadded:: 2.0.20
+
+
+        """  # noqa: E501
+
+        return await self._proxied.aclose()
+
+    def add(self, instance: object, _warn: bool = True) -> None:
+        r"""Place an object into this :class:`_orm.Session`.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class on
+            behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+ + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + Objects that are in the :term:`transient` state when passed to the + :meth:`_orm.Session.add` method will move to the + :term:`pending` state, until the next flush, at which point they + will move to the :term:`persistent` state. + + Objects that are in the :term:`detached` state when passed to the + :meth:`_orm.Session.add` method will move to the :term:`persistent` + state directly. + + If the transaction used by the :class:`_orm.Session` is rolled back, + objects which were transient when they were passed to + :meth:`_orm.Session.add` will be moved back to the + :term:`transient` state, and will no longer be present within this + :class:`_orm.Session`. + + .. seealso:: + + :meth:`_orm.Session.add_all` + + :ref:`session_adding` - at :ref:`session_basics` + + + + """ # noqa: E501 + + return self._proxied.add(instance, _warn=_warn) + + def add_all(self, instances: Iterable[object]) -> None: + r"""Add the given collection of instances to this :class:`_orm.Session`. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + See the documentation for :meth:`_orm.Session.add` for a general + behavioral description. + + .. seealso:: + + :meth:`_orm.Session.add` + + :ref:`session_adding` - at :ref:`session_basics` + + + + """ # noqa: E501 + + return self._proxied.add_all(instances) + + def begin(self) -> AsyncSessionTransaction: + r"""Return an :class:`_asyncio.AsyncSessionTransaction` object. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + The underlying :class:`_orm.Session` will perform the + "begin" action when the :class:`_asyncio.AsyncSessionTransaction` + object is entered:: + + async with async_session.begin(): + # .. ORM transaction is begun + + Note that database IO will not normally occur when the session-level + transaction is begun, as database transactions begin on an + on-demand basis. However, the begin block is async to accommodate + for a :meth:`_orm.SessionEvents.after_transaction_create` + event hook that may perform IO. + + For a general description of ORM begin, see + :meth:`_orm.Session.begin`. + + + """ # noqa: E501 + + return self._proxied.begin() + + def begin_nested(self) -> AsyncSessionTransaction: + r"""Return an :class:`_asyncio.AsyncSessionTransaction` object + which will begin a "nested" transaction, e.g. SAVEPOINT. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + Behavior is the same as that of :meth:`_asyncio.AsyncSession.begin`. + + For a general description of ORM begin nested, see + :meth:`_orm.Session.begin_nested`. + + .. seealso:: + + :ref:`aiosqlite_serializable` - special workarounds required + with the SQLite asyncio driver in order for SAVEPOINT to work + correctly. + + + """ # noqa: E501 + + return self._proxied.begin_nested() + + async def close(self) -> None: + r"""Close out the transactional resources and ORM objects used by this + :class:`_asyncio.AsyncSession`. + + .. 
container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. seealso:: + + :meth:`_orm.Session.close` - main documentation for + "close" + + :ref:`session_closing` - detail on the semantics of + :meth:`_asyncio.AsyncSession.close` and + :meth:`_asyncio.AsyncSession.reset`. + + + """ # noqa: E501 + + return await self._proxied.close() + + async def reset(self) -> None: + r"""Close out the transactional resources and ORM objects used by this + :class:`_orm.Session`, resetting the session to its initial state. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. versionadded:: 2.0.22 + + .. seealso:: + + :meth:`_orm.Session.reset` - main documentation for + "reset" + + :ref:`session_closing` - detail on the semantics of + :meth:`_asyncio.AsyncSession.close` and + :meth:`_asyncio.AsyncSession.reset`. + + + """ # noqa: E501 + + return await self._proxied.reset() + + async def commit(self) -> None: + r"""Commit the current transaction in progress. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. seealso:: + + :meth:`_orm.Session.commit` - main documentation for + "commit" + + """ # noqa: E501 + + return await self._proxied.commit() + + async def connection( + self, + bind_arguments: Optional[_BindArguments] = None, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + **kw: Any, + ) -> AsyncConnection: + r"""Return a :class:`_asyncio.AsyncConnection` object corresponding to + this :class:`.Session` object's transactional state. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + This method may also be used to establish execution options for the + database connection used by the current transaction. + + .. versionadded:: 1.4.24 Added \**kw arguments which are passed + through to the underlying :meth:`_orm.Session.connection` method. + + .. seealso:: + + :meth:`_orm.Session.connection` - main documentation for + "connection" + + + """ # noqa: E501 + + return await self._proxied.connection( + bind_arguments=bind_arguments, + execution_options=execution_options, + **kw, + ) + + async def delete(self, instance: object) -> None: + r"""Mark an instance as deleted. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + The database delete operation occurs upon ``flush()``. + + As this operation may need to cascade along unloaded relationships, + it is awaitable to allow for those queries to take place. + + .. seealso:: + + :meth:`_orm.Session.delete` - main documentation for delete + + + """ # noqa: E501 + + return await self._proxied.delete(instance) + + @overload + async def execute( + self, + statement: TypedReturnsRows[_T], + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> Result[_T]: ... 
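+
+    # NOTE: the overloads above and below are a sketch of how the typed
+    # signatures narrow the return type: statements typed with
+    # TypedReturnsRows resolve to Result[_T], DML statements resolve to
+    # CursorResult[Any], and any other Executable falls back to
+    # Result[Any].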
+ + @overload + async def execute( + self, + statement: UpdateBase, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> CursorResult[Any]: ... + + @overload + async def execute( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> Result[Any]: ... + + async def execute( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Result[Any]: + r"""Execute a statement and return a buffered + :class:`_engine.Result` object. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. seealso:: + + :meth:`_orm.Session.execute` - main documentation for execute + + + """ # noqa: E501 + + return await self._proxied.execute( + statement, + params=params, + execution_options=execution_options, + bind_arguments=bind_arguments, + **kw, + ) + + def expire( + self, instance: object, attribute_names: Optional[Iterable[str]] = None + ) -> None: + r"""Expire the attributes on an instance. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + Marks the attributes of an instance as out of date. When an expired + attribute is next accessed, a query will be issued to the + :class:`.Session` object's current transactional context in order to + load all expired attributes for the given instance. Note that + a highly isolated transaction will return the same values as were + previously read in that same transaction, regardless of changes + in database state outside of that transaction. + + To expire all objects in the :class:`.Session` simultaneously, + use :meth:`Session.expire_all`. + + The :class:`.Session` object's default behavior is to + expire all state whenever the :meth:`Session.rollback` + or :meth:`Session.commit` methods are called, so that new + state can be loaded for the new transaction. For this reason, + calling :meth:`Session.expire` only makes sense for the specific + case that a non-ORM SQL statement was emitted in the current + transaction. + + :param instance: The instance to be refreshed. + :param attribute_names: optional list of string attribute names + indicating a subset of attributes to be expired. + + .. seealso:: + + :ref:`session_expire` - introductory material + + :meth:`.Session.expire` + + :meth:`.Session.refresh` + + :meth:`_orm.Query.populate_existing` + + + + """ # noqa: E501 + + return self._proxied.expire(instance, attribute_names=attribute_names) + + def expire_all(self) -> None: + r"""Expires all persistent instances within this Session. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. 
container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_asyncio.AsyncSession` class.
+
+        When any attribute on a persistent instance is next accessed,
+        a query will be issued using the
+        :class:`.Session` object's current transactional context in order to
+        load all expired attributes for the given instance.  Note that
+        a highly isolated transaction will return the same values as were
+        previously read in that same transaction, regardless of changes
+        in database state outside of that transaction.
+
+        To expire individual objects and individual attributes
+        on those objects, use :meth:`Session.expire`.
+
+        The :class:`.Session` object's default behavior is to
+        expire all state whenever the :meth:`Session.rollback`
+        or :meth:`Session.commit` methods are called, so that new
+        state can be loaded for the new transaction.  For this reason,
+        calling :meth:`Session.expire_all` is not usually needed,
+        assuming the transaction is isolated.
+
+        .. seealso::
+
+            :ref:`session_expire` - introductory material
+
+            :meth:`.Session.expire`
+
+            :meth:`.Session.refresh`
+
+            :meth:`_orm.Query.populate_existing`
+
+
+
+        """  # noqa: E501
+
+        return self._proxied.expire_all()
+
+    def expunge(self, instance: object) -> None:
+        r"""Remove the `instance` from this ``Session``.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class on
+            behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_asyncio.AsyncSession` class.
+
+        This will free all internal references to the instance.  Cascading
+        will be applied according to the *expunge* cascade rule.
+
+
+
+        """  # noqa: E501
+
+        return self._proxied.expunge(instance)
+
+    def expunge_all(self) -> None:
+        r"""Remove all object instances from this ``Session``.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class on
+            behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_asyncio.AsyncSession` class.
+
+        This is equivalent to calling ``expunge(obj)`` on all objects in this
+        ``Session``.
+
+
+
+        """  # noqa: E501
+
+        return self._proxied.expunge_all()
+
+    async def flush(self, objects: Optional[Sequence[Any]] = None) -> None:
+        r"""Flush all the object changes to the database.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class on
+            behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+        .. seealso::
+
+            :meth:`_orm.Session.flush` - main documentation for flush
+
+
+        """  # noqa: E501
+
+        return await self._proxied.flush(objects=objects)
+
+    def get_bind(
+        self,
+        mapper: Optional[_EntityBindKey[_O]] = None,
+        clause: Optional[ClauseElement] = None,
+        bind: Optional[_SessionBind] = None,
+        **kw: Any,
+    ) -> Union[Engine, Connection]:
+        r"""Return a "bind" to which the synchronous proxied :class:`_orm.Session`
+        is bound.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class on
+            behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+        Unlike the :meth:`_orm.Session.get_bind` method, this method is
+        currently **not** used by this :class:`.AsyncSession` in any way
+        in order to resolve engines for requests.
+
+        .. 
note:: + + This method proxies directly to the :meth:`_orm.Session.get_bind` + method, however is currently **not** useful as an override target, + in contrast to that of the :meth:`_orm.Session.get_bind` method. + The example below illustrates how to implement custom + :meth:`_orm.Session.get_bind` schemes that work with + :class:`.AsyncSession` and :class:`.AsyncEngine`. + + The pattern introduced at :ref:`session_custom_partitioning` + illustrates how to apply a custom bind-lookup scheme to a + :class:`_orm.Session` given a set of :class:`_engine.Engine` objects. + To apply a corresponding :meth:`_orm.Session.get_bind` implementation + for use with a :class:`.AsyncSession` and :class:`.AsyncEngine` + objects, continue to subclass :class:`_orm.Session` and apply it to + :class:`.AsyncSession` using + :paramref:`.AsyncSession.sync_session_class`. The inner method must + continue to return :class:`_engine.Engine` instances, which can be + acquired from a :class:`_asyncio.AsyncEngine` using the + :attr:`_asyncio.AsyncEngine.sync_engine` attribute:: + + # using example from "Custom Vertical Partitioning" + + + import random + + from sqlalchemy.ext.asyncio import AsyncSession + from sqlalchemy.ext.asyncio import create_async_engine + from sqlalchemy.ext.asyncio import async_sessionmaker + from sqlalchemy.orm import Session + + # construct async engines w/ async drivers + engines = { + 'leader':create_async_engine("sqlite+aiosqlite:///leader.db"), + 'other':create_async_engine("sqlite+aiosqlite:///other.db"), + 'follower1':create_async_engine("sqlite+aiosqlite:///follower1.db"), + 'follower2':create_async_engine("sqlite+aiosqlite:///follower2.db"), + } + + class RoutingSession(Session): + def get_bind(self, mapper=None, clause=None, **kw): + # within get_bind(), return sync engines + if mapper and issubclass(mapper.class_, MyOtherClass): + return engines['other'].sync_engine + elif self._flushing or isinstance(clause, (Update, Delete)): + return engines['leader'].sync_engine + else: + return engines[ + random.choice(['follower1','follower2']) + ].sync_engine + + # apply to AsyncSession using sync_session_class + AsyncSessionMaker = async_sessionmaker( + sync_session_class=RoutingSession + ) + + The :meth:`_orm.Session.get_bind` method is called in a non-asyncio, + implicitly non-blocking context in the same manner as ORM event hooks + and functions that are invoked via :meth:`.AsyncSession.run_sync`, so + routines that wish to run SQL commands inside of + :meth:`_orm.Session.get_bind` can continue to do so using + blocking-style code, which will be translated to implicitly async calls + at the point of invoking IO on the database drivers. + + + """ # noqa: E501 + + return self._proxied.get_bind( + mapper=mapper, clause=clause, bind=bind, **kw + ) + + def is_modified( + self, instance: object, include_collections: bool = True + ) -> bool: + r"""Return ``True`` if the given instance has locally + modified attributes. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + This method retrieves the history for each instrumented + attribute on the instance and performs a comparison of the current + value to its previously flushed or committed value, if any. 
+ + It is in effect a more expensive and accurate + version of checking for the given instance in the + :attr:`.Session.dirty` collection; a full test for + each attribute's net "dirty" status is performed. + + E.g.:: + + return session.is_modified(someobject) + + A few caveats to this method apply: + + * Instances present in the :attr:`.Session.dirty` collection may + report ``False`` when tested with this method. This is because + the object may have received change events via attribute mutation, + thus placing it in :attr:`.Session.dirty`, but ultimately the state + is the same as that loaded from the database, resulting in no net + change here. + * Scalar attributes may not have recorded the previously set + value when a new value was applied, if the attribute was not loaded, + or was expired, at the time the new value was received - in these + cases, the attribute is assumed to have a change, even if there is + ultimately no net change against its database value. SQLAlchemy in + most cases does not need the "old" value when a set event occurs, so + it skips the expense of a SQL call if the old value isn't present, + based on the assumption that an UPDATE of the scalar value is + usually needed, and in those few cases where it isn't, is less + expensive on average than issuing a defensive SELECT. + + The "old" value is fetched unconditionally upon set only if the + attribute container has the ``active_history`` flag set to ``True``. + This flag is set typically for primary key attributes and scalar + object references that are not a simple many-to-one. To set this + flag for any arbitrary mapped column, use the ``active_history`` + argument with :func:`.column_property`. + + :param instance: mapped instance to be tested for pending changes. + :param include_collections: Indicates if multivalued collections + should be included in the operation. Setting this to ``False`` is a + way to detect only local-column based properties (i.e. scalar columns + or many-to-one foreign keys) that would result in an UPDATE for this + instance upon flush. + + + + """ # noqa: E501 + + return self._proxied.is_modified( + instance, include_collections=include_collections + ) + + async def invalidate(self) -> None: + r"""Close this Session, using connection invalidation. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + For a complete description, see :meth:`_orm.Session.invalidate`. + + """ # noqa: E501 + + return await self._proxied.invalidate() + + async def merge( + self, + instance: _O, + *, + load: bool = True, + options: Optional[Sequence[ORMOption]] = None, + ) -> _O: + r"""Copy the state of a given instance into a corresponding instance + within this :class:`_asyncio.AsyncSession`. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. seealso:: + + :meth:`_orm.Session.merge` - main documentation for merge + + + """ # noqa: E501 + + return await self._proxied.merge(instance, load=load, options=options) + + async def refresh( + self, + instance: object, + attribute_names: Optional[Iterable[str]] = None, + with_for_update: ForUpdateParameter = None, + ) -> None: + r"""Expire and refresh the attributes on the given instance. + + .. 
container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + A query will be issued to the database and all attributes will be + refreshed with their current database value. + + This is the async version of the :meth:`_orm.Session.refresh` method. + See that method for a complete description of all options. + + .. seealso:: + + :meth:`_orm.Session.refresh` - main documentation for refresh + + + """ # noqa: E501 + + return await self._proxied.refresh( + instance, + attribute_names=attribute_names, + with_for_update=with_for_update, + ) + + async def rollback(self) -> None: + r"""Rollback the current transaction in progress. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. seealso:: + + :meth:`_orm.Session.rollback` - main documentation for + "rollback" + + """ # noqa: E501 + + return await self._proxied.rollback() + + @overload + async def scalar( + self, + statement: TypedReturnsRows[Tuple[_T]], + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Optional[_T]: ... + + @overload + async def scalar( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Any: ... + + async def scalar( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Any: + r"""Execute a statement and return a scalar result. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. seealso:: + + :meth:`_orm.Session.scalar` - main documentation for scalar + + + """ # noqa: E501 + + return await self._proxied.scalar( + statement, + params=params, + execution_options=execution_options, + bind_arguments=bind_arguments, + **kw, + ) + + @overload + async def scalars( + self, + statement: TypedReturnsRows[Tuple[_T]], + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> ScalarResult[_T]: ... + + @overload + async def scalars( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> ScalarResult[Any]: ... + + async def scalars( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> ScalarResult[Any]: + r"""Execute a statement and return scalar results. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + :return: a :class:`_result.ScalarResult` object + + .. versionadded:: 1.4.24 Added :meth:`_asyncio.AsyncSession.scalars` + + .. 
versionadded:: 1.4.26 Added
+           :meth:`_asyncio.async_scoped_session.scalars`
+
+        .. seealso::
+
+            :meth:`_orm.Session.scalars` - main documentation for scalars
+
+            :meth:`_asyncio.AsyncSession.stream_scalars` - streaming version
+
+
+        """  # noqa: E501
+
+        return await self._proxied.scalars(
+            statement,
+            params=params,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+            **kw,
+        )
+
+    async def get(
+        self,
+        entity: _EntityBindKey[_O],
+        ident: _PKIdentityArgument,
+        *,
+        options: Optional[Sequence[ORMOption]] = None,
+        populate_existing: bool = False,
+        with_for_update: ForUpdateParameter = None,
+        identity_token: Optional[Any] = None,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+    ) -> Union[_O, None]:
+        r"""Return an instance based on the given primary key identifier,
+        or ``None`` if not found.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class on
+            behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+        .. seealso::
+
+            :meth:`_orm.Session.get` - main documentation for get
+
+
+
+        """  # noqa: E501
+
+        result = await self._proxied.get(
+            entity,
+            ident,
+            options=options,
+            populate_existing=populate_existing,
+            with_for_update=with_for_update,
+            identity_token=identity_token,
+            execution_options=execution_options,
+        )
+        return result
+
+    async def get_one(
+        self,
+        entity: _EntityBindKey[_O],
+        ident: _PKIdentityArgument,
+        *,
+        options: Optional[Sequence[ORMOption]] = None,
+        populate_existing: bool = False,
+        with_for_update: ForUpdateParameter = None,
+        identity_token: Optional[Any] = None,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+    ) -> _O:
+        r"""Return an instance based on the given primary key identifier,
+        or raise an exception if not found.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class on
+            behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+        Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects
+        no rows.
+
+        .. versionadded:: 2.0.22
+
+        .. seealso::
+
+            :meth:`_orm.Session.get_one` - main documentation for get_one
+
+
+        """  # noqa: E501
+
+        return await self._proxied.get_one(
+            entity,
+            ident,
+            options=options,
+            populate_existing=populate_existing,
+            with_for_update=with_for_update,
+            identity_token=identity_token,
+            execution_options=execution_options,
+        )
+
+    @overload
+    async def stream(
+        self,
+        statement: TypedReturnsRows[_T],
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> AsyncResult[_T]: ...
+
+    @overload
+    async def stream(
+        self,
+        statement: Executable,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> AsyncResult[Any]: ...
+
+    async def stream(
+        self,
+        statement: Executable,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> AsyncResult[Any]:
+        r"""Execute a statement and return a streaming
+        :class:`_asyncio.AsyncResult` object.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class on
+            behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
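+
+        For illustration only, a minimal sketch (assuming an ORM entity
+        ``User``, with ``session`` being this scoped session, which proxies
+        the call to the task-local :class:`_asyncio.AsyncSession`)::
+
+            result = await session.stream(select(User))
+
+            # server-side streaming iteration, one ORM object at a time
+            async for user in result.scalars():
+                ...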
+ + + """ # noqa: E501 + + return await self._proxied.stream( + statement, + params=params, + execution_options=execution_options, + bind_arguments=bind_arguments, + **kw, + ) + + @overload + async def stream_scalars( + self, + statement: TypedReturnsRows[Tuple[_T]], + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> AsyncScalarResult[_T]: ... + + @overload + async def stream_scalars( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> AsyncScalarResult[Any]: ... + + async def stream_scalars( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> AsyncScalarResult[Any]: + r"""Execute a statement and return a stream of scalar results. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + :return: an :class:`_asyncio.AsyncScalarResult` object + + .. versionadded:: 1.4.24 + + .. seealso:: + + :meth:`_orm.Session.scalars` - main documentation for scalars + + :meth:`_asyncio.AsyncSession.scalars` - non streaming version + + + """ # noqa: E501 + + return await self._proxied.stream_scalars( + statement, + params=params, + execution_options=execution_options, + bind_arguments=bind_arguments, + **kw, + ) + + @property + def bind(self) -> Any: + r"""Proxy for the :attr:`_asyncio.AsyncSession.bind` attribute + on behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + """ # noqa: E501 + + return self._proxied.bind + + @bind.setter + def bind(self, attr: Any) -> None: + self._proxied.bind = attr + + @property + def dirty(self) -> Any: + r"""The set of all persistent instances considered dirty. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class + on behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_asyncio.AsyncSession` class. + + E.g.:: + + some_mapped_object in session.dirty + + Instances are considered dirty when they were modified but not + deleted. + + Note that this 'dirty' calculation is 'optimistic'; most + attribute-setting or collection modification operations will + mark an instance as 'dirty' and place it in this set, even if + there is no net change to the attribute's value. At flush + time, the value of each attribute is compared to its + previously saved value, and if there's no net change, no SQL + operation will occur (this is a more expensive operation so + it's only done at flush time). + + To check if an instance has actionable net changes to its + attributes, use the :meth:`.Session.is_modified` method. + + + + """ # noqa: E501 + + return self._proxied.dirty + + @property + def deleted(self) -> Any: + r"""The set of all instances marked as 'deleted' within this ``Session`` + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class + on behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. 
container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class
+            on behalf of the :class:`_asyncio.AsyncSession` class.
+
+
+        """  # noqa: E501
+
+        return self._proxied.deleted
+
+    @property
+    def new(self) -> Any:
+        r"""The set of all instances marked as 'new' within this ``Session``.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class
+            on behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class
+            on behalf of the :class:`_asyncio.AsyncSession` class.
+
+
+        """  # noqa: E501
+
+        return self._proxied.new
+
+    @property
+    def identity_map(self) -> Any:
+        r"""Proxy for the :attr:`_orm.Session.identity_map` attribute
+        on behalf of the :class:`_asyncio.AsyncSession` class.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class
+            on behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+
+        """  # noqa: E501
+
+        return self._proxied.identity_map
+
+    @identity_map.setter
+    def identity_map(self, attr: Any) -> None:
+        self._proxied.identity_map = attr
+
+    @property
+    def is_active(self) -> Any:
+        r"""True if this :class:`.Session` is not in "partial rollback" state.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class
+            on behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class
+            on behalf of the :class:`_asyncio.AsyncSession` class.
+
+        .. versionchanged:: 1.4 The :class:`_orm.Session` no longer begins
+           a new transaction immediately, so this attribute will be False
+           when the :class:`_orm.Session` is first instantiated.
+
+        "partial rollback" state typically indicates that the flush process
+        of the :class:`_orm.Session` has failed, and that the
+        :meth:`_orm.Session.rollback` method must be emitted in order to
+        fully roll back the transaction.
+
+        If this :class:`_orm.Session` is not in a transaction at all, the
+        :class:`_orm.Session` will autobegin when it is first used, so in this
+        case :attr:`_orm.Session.is_active` will return True.
+
+        Otherwise, if this :class:`_orm.Session` is within a transaction,
+        and that transaction has not been rolled back internally, the
+        :attr:`_orm.Session.is_active` will also return True.
+
+        .. seealso::
+
+            :ref:`faq_session_rollback`
+
+            :meth:`_orm.Session.in_transaction`
+
+
+
+        """  # noqa: E501
+
+        return self._proxied.is_active
+
+    @property
+    def autoflush(self) -> Any:
+        r"""Proxy for the :attr:`_orm.Session.autoflush` attribute
+        on behalf of the :class:`_asyncio.AsyncSession` class.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class
+            on behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+
+        """  # noqa: E501
+
+        return self._proxied.autoflush
+
+    @autoflush.setter
+    def autoflush(self, attr: Any) -> None:
+        self._proxied.autoflush = attr
+
+    @property
+    def no_autoflush(self) -> Any:
+        r"""Return a context manager that disables autoflush.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_asyncio.AsyncSession` class
+            on behalf of the :class:`_asyncio.scoping.async_scoped_session` class.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class
+            on behalf of the :class:`_asyncio.AsyncSession` class.
+ + e.g.:: + + with session.no_autoflush: + + some_object = SomeClass() + session.add(some_object) + # won't autoflush + some_object.related_thing = session.query(SomeRelated).first() + + Operations that proceed within the ``with:`` block + will not be subject to flushes occurring upon query + access. This is useful when initializing a series + of objects which involve existing database queries, + where the uncompleted object should not yet be flushed. + + + + """ # noqa: E501 + + return self._proxied.no_autoflush + + @property + def info(self) -> Any: + r"""A user-modifiable dictionary. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class + on behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_asyncio.AsyncSession` class. + + The initial value of this dictionary can be populated using the + ``info`` argument to the :class:`.Session` constructor or + :class:`.sessionmaker` constructor or factory methods. The dictionary + here is always local to this :class:`.Session` and can be modified + independently of all other :class:`.Session` objects. + + + + """ # noqa: E501 + + return self._proxied.info + + @classmethod + async def close_all(cls) -> None: + r"""Close all :class:`_asyncio.AsyncSession` sessions. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. deprecated:: 2.0 The :meth:`.AsyncSession.close_all` method is deprecated and will be removed in a future release. Please refer to :func:`_asyncio.close_all_sessions`. + + """ # noqa: E501 + + return await AsyncSession.close_all() + + @classmethod + def object_session(cls, instance: object) -> Optional[Session]: + r"""Return the :class:`.Session` to which an object belongs. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + This is an alias of :func:`.object_session`. + + + + """ # noqa: E501 + + return AsyncSession.object_session(instance) + + @classmethod + def identity_key( + cls, + class_: Optional[Type[Any]] = None, + ident: Union[Any, Tuple[Any, ...]] = None, + *, + instance: Optional[Any] = None, + row: Optional[Union[Row[Any], RowMapping]] = None, + identity_token: Optional[Any] = None, + ) -> _IdentityKeyType[Any]: + r"""Return an identity key. + + .. container:: class_bases + + Proxied for the :class:`_asyncio.AsyncSession` class on + behalf of the :class:`_asyncio.scoping.async_scoped_session` class. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + This is an alias of :func:`.util.identity_key`. 
+ + + + """ # noqa: E501 + + return AsyncSession.identity_key( + class_=class_, + ident=ident, + instance=instance, + row=row, + identity_token=identity_token, + ) + + # END PROXY METHODS async_scoped_session diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/session.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/session.py new file mode 100644 index 00000000..473a8c17 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/session.py @@ -0,0 +1,1936 @@ +# ext/asyncio/session.py +# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from __future__ import annotations + +import asyncio +from typing import Any +from typing import Awaitable +from typing import Callable +from typing import cast +from typing import Dict +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import engine +from .base import ReversibleProxy +from .base import StartableContext +from .result import _ensure_sync_result +from .result import AsyncResult +from .result import AsyncScalarResult +from ... import util +from ...orm import close_all_sessions as _sync_close_all_sessions +from ...orm import object_session +from ...orm import Session +from ...orm import SessionTransaction +from ...orm import state as _instance_state +from ...util.concurrency import greenlet_spawn +from ...util.typing import Concatenate +from ...util.typing import ParamSpec + + +if TYPE_CHECKING: + from .engine import AsyncConnection + from .engine import AsyncEngine + from ...engine import Connection + from ...engine import CursorResult + from ...engine import Engine + from ...engine import Result + from ...engine import Row + from ...engine import RowMapping + from ...engine import ScalarResult + from ...engine.interfaces import _CoreAnyExecuteParams + from ...engine.interfaces import CoreExecuteOptionsParameter + from ...event import dispatcher + from ...orm._typing import _IdentityKeyType + from ...orm._typing import _O + from ...orm._typing import OrmExecuteOptionsParameter + from ...orm.identity import IdentityMap + from ...orm.interfaces import ORMOption + from ...orm.session import _BindArguments + from ...orm.session import _EntityBindKey + from ...orm.session import _PKIdentityArgument + from ...orm.session import _SessionBind + from ...orm.session import _SessionBindKey + from ...sql._typing import _InfoType + from ...sql.base import Executable + from ...sql.dml import UpdateBase + from ...sql.elements import ClauseElement + from ...sql.selectable import ForUpdateParameter + from ...sql.selectable import TypedReturnsRows + +_AsyncSessionBind = Union["AsyncEngine", "AsyncConnection"] + +_P = ParamSpec("_P") +_T = TypeVar("_T", bound=Any) + + +_EXECUTE_OPTIONS = util.immutabledict({"prebuffer_rows": True}) +_STREAM_OPTIONS = util.immutabledict({"stream_results": True}) + + +class AsyncAttrs: + """Mixin class which provides an awaitable accessor for all attributes. 
+ + E.g.:: + + from __future__ import annotations + + from typing import List + + from sqlalchemy import ForeignKey + from sqlalchemy import func + from sqlalchemy.ext.asyncio import AsyncAttrs + from sqlalchemy.orm import DeclarativeBase + from sqlalchemy.orm import Mapped + from sqlalchemy.orm import mapped_column + from sqlalchemy.orm import relationship + + + class Base(AsyncAttrs, DeclarativeBase): + pass + + + class A(Base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[str] + bs: Mapped[List[B]] = relationship() + + + class B(Base): + __tablename__ = "b" + id: Mapped[int] = mapped_column(primary_key=True) + a_id: Mapped[int] = mapped_column(ForeignKey("a.id")) + data: Mapped[str] + + In the above example, the :class:`_asyncio.AsyncAttrs` mixin is applied to + the declarative ``Base`` class where it takes effect for all subclasses. + This mixin adds a single new attribute + :attr:`_asyncio.AsyncAttrs.awaitable_attrs` to all classes, which will + yield the value of any attribute as an awaitable. This allows attributes + which may be subject to lazy loading or deferred / unexpiry loading to be + accessed such that IO can still be emitted:: + + a1 = (await async_session.scalars(select(A).where(A.id == 5))).one() + + # use the lazy loader on ``a1.bs`` via the ``.awaitable_attrs`` + # interface, so that it may be awaited + for b1 in await a1.awaitable_attrs.bs: + print(b1) + + The :attr:`_asyncio.AsyncAttrs.awaitable_attrs` performs a call against the + attribute that is approximately equivalent to using the + :meth:`_asyncio.AsyncSession.run_sync` method, e.g.:: + + for b1 in await async_session.run_sync(lambda sess: a1.bs): + print(b1) + + .. versionadded:: 2.0.13 + + .. seealso:: + + :ref:`asyncio_orm_avoid_lazyloads` + + """ + + class _AsyncAttrGetitem: + __slots__ = "_instance" + + def __init__(self, _instance: Any): + self._instance = _instance + + def __getattr__(self, name: str) -> Awaitable[Any]: + return greenlet_spawn(getattr, self._instance, name) + + @property + def awaitable_attrs(self) -> AsyncAttrs._AsyncAttrGetitem: + """provide a namespace of all attributes on this object wrapped + as awaitables. + + e.g.:: + + + a1 = (await async_session.scalars(select(A).where(A.id == 5))).one() + + some_attribute = await a1.awaitable_attrs.some_deferred_attribute + some_collection = await a1.awaitable_attrs.some_collection + + """ # noqa: E501 + + return AsyncAttrs._AsyncAttrGetitem(self) + + +@util.create_proxy_methods( + Session, + ":class:`_orm.Session`", + ":class:`_asyncio.AsyncSession`", + classmethods=["object_session", "identity_key"], + methods=[ + "__contains__", + "__iter__", + "add", + "add_all", + "expire", + "expire_all", + "expunge", + "expunge_all", + "is_modified", + "in_transaction", + "in_nested_transaction", + ], + attributes=[ + "dirty", + "deleted", + "new", + "identity_map", + "is_active", + "autoflush", + "no_autoflush", + "info", + ], +) +class AsyncSession(ReversibleProxy[Session]): + """Asyncio version of :class:`_orm.Session`. + + The :class:`_asyncio.AsyncSession` is a proxy for a traditional + :class:`_orm.Session` instance. + + The :class:`_asyncio.AsyncSession` is **not safe for use in concurrent + tasks**. See :ref:`session_faq_threadsafe` for background. + + .. versionadded:: 1.4 + + To use an :class:`_asyncio.AsyncSession` with custom :class:`_orm.Session` + implementations, see the + :paramref:`_asyncio.AsyncSession.sync_session_class` parameter.
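As a minimal illustrative sketch of that parameter (nothing here is prescribed by this module: ``AuditedSession`` is a hypothetical subclass, and the SQLite URL assumes the ``aiosqlite`` driver is installed)::

    from sqlalchemy.ext.asyncio import AsyncSession
    from sqlalchemy.ext.asyncio import create_async_engine
    from sqlalchemy.orm import Session


    class AuditedSession(Session):
        # hypothetical Session subclass; real code might add
        # logging or auditing hooks here
        pass


    engine = create_async_engine("sqlite+aiosqlite:///example.db")

    # this AsyncSession proxies an AuditedSession rather than a
    # plain Session
    async_session = AsyncSession(engine, sync_session_class=AuditedSession)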
+ + + """ + + _is_asyncio = True + + dispatch: dispatcher[Session] + + def __init__( + self, + bind: Optional[_AsyncSessionBind] = None, + *, + binds: Optional[Dict[_SessionBindKey, _AsyncSessionBind]] = None, + sync_session_class: Optional[Type[Session]] = None, + **kw: Any, + ): + r"""Construct a new :class:`_asyncio.AsyncSession`. + + All parameters other than ``sync_session_class`` are passed to the + ``sync_session_class`` callable directly to instantiate a new + :class:`_orm.Session`. Refer to :meth:`_orm.Session.__init__` for + parameter documentation. + + :param sync_session_class: + A :class:`_orm.Session` subclass or other callable which will be used + to construct the :class:`_orm.Session` which will be proxied. This + parameter may be used to provide custom :class:`_orm.Session` + subclasses. Defaults to the + :attr:`_asyncio.AsyncSession.sync_session_class` class-level + attribute. + + .. versionadded:: 1.4.24 + + """ + sync_bind = sync_binds = None + + if bind: + self.bind = bind + sync_bind = engine._get_sync_engine_or_connection(bind) + + if binds: + self.binds = binds + sync_binds = { + key: engine._get_sync_engine_or_connection(b) + for key, b in binds.items() + } + + if sync_session_class: + self.sync_session_class = sync_session_class + + self.sync_session = self._proxied = self._assign_proxied( + self.sync_session_class(bind=sync_bind, binds=sync_binds, **kw) + ) + + sync_session_class: Type[Session] = Session + """The class or callable that provides the + underlying :class:`_orm.Session` instance for a particular + :class:`_asyncio.AsyncSession`. + + At the class level, this attribute is the default value for the + :paramref:`_asyncio.AsyncSession.sync_session_class` parameter. Custom + subclasses of :class:`_asyncio.AsyncSession` can override this. + + At the instance level, this attribute indicates the current class or + callable that was used to provide the :class:`_orm.Session` instance for + this :class:`_asyncio.AsyncSession` instance. + + .. versionadded:: 1.4.24 + + """ + + sync_session: Session + """Reference to the underlying :class:`_orm.Session` this + :class:`_asyncio.AsyncSession` proxies requests towards. + + This instance can be used as an event target. + + .. seealso:: + + :ref:`asyncio_events` + + """ + + @classmethod + def _no_async_engine_events(cls) -> NoReturn: + raise NotImplementedError( + "asynchronous events are not implemented at this time. Apply " + "synchronous listeners to the AsyncSession.sync_session." + ) + + async def refresh( + self, + instance: object, + attribute_names: Optional[Iterable[str]] = None, + with_for_update: ForUpdateParameter = None, + ) -> None: + """Expire and refresh the attributes on the given instance. + + A query will be issued to the database and all attributes will be + refreshed with their current database value. + + This is the async version of the :meth:`_orm.Session.refresh` method. + See that method for a complete description of all options. + + .. seealso:: + + :meth:`_orm.Session.refresh` - main documentation for refresh + + """ + + await greenlet_spawn( + self.sync_session.refresh, + instance, + attribute_names=attribute_names, + with_for_update=with_for_update, + ) + + async def run_sync( + self, + fn: Callable[Concatenate[Session, _P], _T], + *arg: _P.args, + **kw: _P.kwargs, + ) -> _T: + """Invoke the given synchronous (i.e. not async) callable, + passing a synchronous-style :class:`_orm.Session` as the first + argument. 
+ + This method allows traditional synchronous SQLAlchemy functions to + run within the context of an asyncio application. + + E.g.:: + + def some_business_method(session: Session, param: str) -> str: + '''A synchronous function that does not require awaiting + + :param session: a SQLAlchemy Session, used synchronously + + :return: an optional return value is supported + + ''' + session.add(MyObject(param=param)) + session.flush() + return "success" + + + async def do_something_async(async_engine: AsyncEngine) -> None: + '''an async function that uses awaiting''' + + async with AsyncSession(async_engine) as async_session: + # run some_business_method() with a sync-style + # Session, proxied into an awaitable + return_code = await async_session.run_sync(some_business_method, param="param1") + print(return_code) + + This method maintains the asyncio event loop all the way through + to the database connection by running the given callable in a + specially instrumented greenlet. + + .. tip:: + + The provided callable is invoked inline within the asyncio event + loop, and will block on traditional IO calls. IO within this + callable should only call into SQLAlchemy's asyncio database + APIs which will be properly adapted to the greenlet context. + + .. seealso:: + + :class:`.AsyncAttrs` - a mixin for ORM mapped classes that provides + a similar feature more succinctly on a per-attribute basis + + :meth:`.AsyncConnection.run_sync` + + :ref:`session_run_sync` + """ # noqa: E501 + + return await greenlet_spawn( + fn, self.sync_session, *arg, _require_await=False, **kw + ) + + @overload + async def execute( + self, + statement: TypedReturnsRows[_T], + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> Result[_T]: ... + + @overload + async def execute( + self, + statement: UpdateBase, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> CursorResult[Any]: ... + + @overload + async def execute( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> Result[Any]: ... + + async def execute( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Result[Any]: + """Execute a statement and return a buffered + :class:`_engine.Result` object. + + ..
seealso:: + + :meth:`_orm.Session.execute` - main documentation for execute + + """ + + if execution_options: + execution_options = util.immutabledict(execution_options).union( + _EXECUTE_OPTIONS + ) + else: + execution_options = _EXECUTE_OPTIONS + + result = await greenlet_spawn( + self.sync_session.execute, + statement, + params=params, + execution_options=execution_options, + bind_arguments=bind_arguments, + **kw, + ) + return await _ensure_sync_result(result, self.execute) + + @overload + async def scalar( + self, + statement: TypedReturnsRows[Tuple[_T]], + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Optional[_T]: ... + + @overload + async def scalar( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Any: ... + + async def scalar( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Any: + """Execute a statement and return a scalar result. + + .. seealso:: + + :meth:`_orm.Session.scalar` - main documentation for scalar + + """ + + if execution_options: + execution_options = util.immutabledict(execution_options).union( + _EXECUTE_OPTIONS + ) + else: + execution_options = _EXECUTE_OPTIONS + + return await greenlet_spawn( + self.sync_session.scalar, + statement, + params=params, + execution_options=execution_options, + bind_arguments=bind_arguments, + **kw, + ) + + @overload + async def scalars( + self, + statement: TypedReturnsRows[Tuple[_T]], + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> ScalarResult[_T]: ... + + @overload + async def scalars( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> ScalarResult[Any]: ... + + async def scalars( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> ScalarResult[Any]: + """Execute a statement and return scalar results. + + :return: a :class:`_result.ScalarResult` object + + .. versionadded:: 1.4.24 Added :meth:`_asyncio.AsyncSession.scalars` + + .. versionadded:: 1.4.26 Added + :meth:`_asyncio.async_scoped_session.scalars` + + .. 
seealso:: + + :meth:`_orm.Session.scalars` - main documentation for scalars + + :meth:`_asyncio.AsyncSession.stream_scalars` - streaming version + + """ + + result = await self.execute( + statement, + params=params, + execution_options=execution_options, + bind_arguments=bind_arguments, + **kw, + ) + return result.scalars() + + async def get( + self, + entity: _EntityBindKey[_O], + ident: _PKIdentityArgument, + *, + options: Optional[Sequence[ORMOption]] = None, + populate_existing: bool = False, + with_for_update: ForUpdateParameter = None, + identity_token: Optional[Any] = None, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + ) -> Union[_O, None]: + """Return an instance based on the given primary key identifier, + or ``None`` if not found. + + .. seealso:: + + :meth:`_orm.Session.get` - main documentation for get + + + """ + + return await greenlet_spawn( + cast("Callable[..., _O]", self.sync_session.get), + entity, + ident, + options=options, + populate_existing=populate_existing, + with_for_update=with_for_update, + identity_token=identity_token, + execution_options=execution_options, + ) + + async def get_one( + self, + entity: _EntityBindKey[_O], + ident: _PKIdentityArgument, + *, + options: Optional[Sequence[ORMOption]] = None, + populate_existing: bool = False, + with_for_update: ForUpdateParameter = None, + identity_token: Optional[Any] = None, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + ) -> _O: + """Return an instance based on the given primary key identifier, + or raise an exception if not found. + + Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects + no rows. + + .. versionadded:: 2.0.22 + + .. seealso:: + + :meth:`_orm.Session.get_one` - main documentation for get_one + + """ + + return await greenlet_spawn( + cast("Callable[..., _O]", self.sync_session.get_one), + entity, + ident, + options=options, + populate_existing=populate_existing, + with_for_update=with_for_update, + identity_token=identity_token, + execution_options=execution_options, + ) + + @overload + async def stream( + self, + statement: TypedReturnsRows[_T], + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> AsyncResult[_T]: ... + + @overload + async def stream( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> AsyncResult[Any]: ... + + async def stream( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> AsyncResult[Any]: + """Execute a statement and return a streaming + :class:`_asyncio.AsyncResult` object.
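As a brief usage sketch, assuming a hypothetical mapped class ``User`` and a DBAPI driver that supports server side cursors::

    from sqlalchemy import select

    async def print_users(session: AsyncSession) -> None:
        # rows are fetched incrementally from a server side cursor,
        # rather than pre-buffered as with execute()
        result = await session.stream(select(User))
        async for row in result:
            print(row)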
+ + """ + + if execution_options: + execution_options = util.immutabledict(execution_options).union( + _STREAM_OPTIONS + ) + else: + execution_options = _STREAM_OPTIONS + + result = await greenlet_spawn( + self.sync_session.execute, + statement, + params=params, + execution_options=execution_options, + bind_arguments=bind_arguments, + **kw, + ) + return AsyncResult(result) + + @overload + async def stream_scalars( + self, + statement: TypedReturnsRows[Tuple[_T]], + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> AsyncScalarResult[_T]: ... + + @overload + async def stream_scalars( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> AsyncScalarResult[Any]: ... + + async def stream_scalars( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> AsyncScalarResult[Any]: + """Execute a statement and return a stream of scalar results. + + :return: an :class:`_asyncio.AsyncScalarResult` object + + .. versionadded:: 1.4.24 + + .. seealso:: + + :meth:`_orm.Session.scalars` - main documentation for scalars + + :meth:`_asyncio.AsyncSession.scalars` - non streaming version + + """ + + result = await self.stream( + statement, + params=params, + execution_options=execution_options, + bind_arguments=bind_arguments, + **kw, + ) + return result.scalars() + + async def delete(self, instance: object) -> None: + """Mark an instance as deleted. + + The database delete operation occurs upon ``flush()``. + + As this operation may need to cascade along unloaded relationships, + it is awaitable to allow for those queries to take place. + + .. seealso:: + + :meth:`_orm.Session.delete` - main documentation for delete + + """ + await greenlet_spawn(self.sync_session.delete, instance) + + async def merge( + self, + instance: _O, + *, + load: bool = True, + options: Optional[Sequence[ORMOption]] = None, + ) -> _O: + """Copy the state of a given instance into a corresponding instance + within this :class:`_asyncio.AsyncSession`. + + .. seealso:: + + :meth:`_orm.Session.merge` - main documentation for merge + + """ + return await greenlet_spawn( + self.sync_session.merge, instance, load=load, options=options + ) + + async def flush(self, objects: Optional[Sequence[Any]] = None) -> None: + """Flush all the object changes to the database. + + .. seealso:: + + :meth:`_orm.Session.flush` - main documentation for flush + + """ + await greenlet_spawn(self.sync_session.flush, objects=objects) + + def get_transaction(self) -> Optional[AsyncSessionTransaction]: + """Return the current root transaction in progress, if any. + + :return: an :class:`_asyncio.AsyncSessionTransaction` object, or + ``None``. + + .. versionadded:: 1.4.18 + + """ + trans = self.sync_session.get_transaction() + if trans is not None: + return AsyncSessionTransaction._retrieve_proxy_for_target(trans) + else: + return None + + def get_nested_transaction(self) -> Optional[AsyncSessionTransaction]: + """Return the current nested transaction in progress, if any. + + :return: an :class:`_asyncio.AsyncSessionTransaction` object, or + ``None``. + + .. 
versionadded:: 1.4.18 + + """ + + trans = self.sync_session.get_nested_transaction() + if trans is not None: + return AsyncSessionTransaction._retrieve_proxy_for_target(trans) + else: + return None + + def get_bind( + self, + mapper: Optional[_EntityBindKey[_O]] = None, + clause: Optional[ClauseElement] = None, + bind: Optional[_SessionBind] = None, + **kw: Any, + ) -> Union[Engine, Connection]: + """Return a "bind" to which the synchronous proxied :class:`_orm.Session` + is bound. + + Unlike the :meth:`_orm.Session.get_bind` method, this method is + currently **not** used by this :class:`.AsyncSession` in any way + in order to resolve engines for requests. + + .. note:: + + This method proxies directly to the :meth:`_orm.Session.get_bind` + method, however is currently **not** useful as an override target, + in contrast to that of the :meth:`_orm.Session.get_bind` method. + The example below illustrates how to implement custom + :meth:`_orm.Session.get_bind` schemes that work with + :class:`.AsyncSession` and :class:`.AsyncEngine`. + + The pattern introduced at :ref:`session_custom_partitioning` + illustrates how to apply a custom bind-lookup scheme to a + :class:`_orm.Session` given a set of :class:`_engine.Engine` objects. + To apply a corresponding :meth:`_orm.Session.get_bind` implementation + for use with a :class:`.AsyncSession` and :class:`.AsyncEngine` + objects, continue to subclass :class:`_orm.Session` and apply it to + :class:`.AsyncSession` using + :paramref:`.AsyncSession.sync_session_class`. The inner method must + continue to return :class:`_engine.Engine` instances, which can be + acquired from a :class:`_asyncio.AsyncEngine` using the + :attr:`_asyncio.AsyncEngine.sync_engine` attribute:: + + # using example from "Custom Vertical Partitioning" + + + import random + + from sqlalchemy.ext.asyncio import AsyncSession + from sqlalchemy.ext.asyncio import create_async_engine + from sqlalchemy.ext.asyncio import async_sessionmaker + from sqlalchemy.orm import Session + + # construct async engines w/ async drivers + engines = { + 'leader':create_async_engine("sqlite+aiosqlite:///leader.db"), + 'other':create_async_engine("sqlite+aiosqlite:///other.db"), + 'follower1':create_async_engine("sqlite+aiosqlite:///follower1.db"), + 'follower2':create_async_engine("sqlite+aiosqlite:///follower2.db"), + } + + class RoutingSession(Session): + def get_bind(self, mapper=None, clause=None, **kw): + # within get_bind(), return sync engines + if mapper and issubclass(mapper.class_, MyOtherClass): + return engines['other'].sync_engine + elif self._flushing or isinstance(clause, (Update, Delete)): + return engines['leader'].sync_engine + else: + return engines[ + random.choice(['follower1','follower2']) + ].sync_engine + + # apply to AsyncSession using sync_session_class + AsyncSessionMaker = async_sessionmaker( + sync_session_class=RoutingSession + ) + + The :meth:`_orm.Session.get_bind` method is called in a non-asyncio, + implicitly non-blocking context in the same manner as ORM event hooks + and functions that are invoked via :meth:`.AsyncSession.run_sync`, so + routines that wish to run SQL commands inside of + :meth:`_orm.Session.get_bind` can continue to do so using + blocking-style code, which will be translated to implicitly async calls + at the point of invoking IO on the database drivers. 
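As a further sketch only, a caller could then use the ``AsyncSessionMaker`` factory from the example above, with ``MyOtherClass`` being the mapped class already referenced there::

    from sqlalchemy import select


    async def read_other_objects() -> list:
        async with AsyncSessionMaker() as session:
            # RoutingSession.get_bind() routes this SELECT to
            # engines['other'].sync_engine
            result = await session.execute(select(MyOtherClass))
            return list(result.scalars())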
+ + """ # noqa: E501 + + return self.sync_session.get_bind( + mapper=mapper, clause=clause, bind=bind, **kw + ) + + async def connection( + self, + bind_arguments: Optional[_BindArguments] = None, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + **kw: Any, + ) -> AsyncConnection: + r"""Return a :class:`_asyncio.AsyncConnection` object corresponding to + this :class:`.Session` object's transactional state. + + This method may also be used to establish execution options for the + database connection used by the current transaction. + + .. versionadded:: 1.4.24 Added \**kw arguments which are passed + through to the underlying :meth:`_orm.Session.connection` method. + + .. seealso:: + + :meth:`_orm.Session.connection` - main documentation for + "connection" + + """ + + sync_connection = await greenlet_spawn( + self.sync_session.connection, + bind_arguments=bind_arguments, + execution_options=execution_options, + **kw, + ) + return engine.AsyncConnection._retrieve_proxy_for_target( + sync_connection + ) + + def begin(self) -> AsyncSessionTransaction: + """Return an :class:`_asyncio.AsyncSessionTransaction` object. + + The underlying :class:`_orm.Session` will perform the + "begin" action when the :class:`_asyncio.AsyncSessionTransaction` + object is entered:: + + async with async_session.begin(): + # .. ORM transaction is begun + + Note that database IO will not normally occur when the session-level + transaction is begun, as database transactions begin on an + on-demand basis. However, the begin block is async to accommodate + for a :meth:`_orm.SessionEvents.after_transaction_create` + event hook that may perform IO. + + For a general description of ORM begin, see + :meth:`_orm.Session.begin`. + + """ + + return AsyncSessionTransaction(self) + + def begin_nested(self) -> AsyncSessionTransaction: + """Return an :class:`_asyncio.AsyncSessionTransaction` object + which will begin a "nested" transaction, e.g. SAVEPOINT. + + Behavior is the same as that of :meth:`_asyncio.AsyncSession.begin`. + + For a general description of ORM begin nested, see + :meth:`_orm.Session.begin_nested`. + + .. seealso:: + + :ref:`aiosqlite_serializable` - special workarounds required + with the SQLite asyncio driver in order for SAVEPOINT to work + correctly. + + """ + + return AsyncSessionTransaction(self, nested=True) + + async def rollback(self) -> None: + """Rollback the current transaction in progress. + + .. seealso:: + + :meth:`_orm.Session.rollback` - main documentation for + "rollback" + """ + await greenlet_spawn(self.sync_session.rollback) + + async def commit(self) -> None: + """Commit the current transaction in progress. + + .. seealso:: + + :meth:`_orm.Session.commit` - main documentation for + "commit" + """ + await greenlet_spawn(self.sync_session.commit) + + async def close(self) -> None: + """Close out the transactional resources and ORM objects used by this + :class:`_asyncio.AsyncSession`. + + .. seealso:: + + :meth:`_orm.Session.close` - main documentation for + "close" + + :ref:`session_closing` - detail on the semantics of + :meth:`_asyncio.AsyncSession.close` and + :meth:`_asyncio.AsyncSession.reset`. + + """ + await greenlet_spawn(self.sync_session.close) + + async def reset(self) -> None: + """Close out the transactional resources and ORM objects used by this + :class:`_orm.Session`, resetting the session to its initial state. + + .. versionadded:: 2.0.22 + + .. 
seealso:: + + :meth:`_orm.Session.reset` - main documentation for + "reset" + + :ref:`session_closing` - detail on the semantics of + :meth:`_asyncio.AsyncSession.close` and + :meth:`_asyncio.AsyncSession.reset`. + + """ + await greenlet_spawn(self.sync_session.reset) + + async def aclose(self) -> None: + """A synonym for :meth:`_asyncio.AsyncSession.close`. + + The :meth:`_asyncio.AsyncSession.aclose` name is specifically + to support the Python standard library ``@contextlib.aclosing`` + context manager function. + + .. versionadded:: 2.0.20 + + """ + await self.close() + + async def invalidate(self) -> None: + """Close this Session, using connection invalidation. + + For a complete description, see :meth:`_orm.Session.invalidate`. + """ + await greenlet_spawn(self.sync_session.invalidate) + + @classmethod + @util.deprecated( + "2.0", + "The :meth:`.AsyncSession.close_all` method is deprecated and will be " + "removed in a future release. Please refer to " + ":func:`_asyncio.close_all_sessions`.", + ) + async def close_all(cls) -> None: + """Close all :class:`_asyncio.AsyncSession` sessions.""" + await close_all_sessions() + + async def __aenter__(self: _AS) -> _AS: + return self + + async def __aexit__(self, type_: Any, value: Any, traceback: Any) -> None: + task = asyncio.create_task(self.close()) + await asyncio.shield(task) + + def _maker_context_manager(self: _AS) -> _AsyncSessionContextManager[_AS]: + return _AsyncSessionContextManager(self) + + # START PROXY METHODS AsyncSession + + # code within this block is **programmatically, + # statically generated** by tools/generate_proxy_methods.py + + def __contains__(self, instance: object) -> bool: + r"""Return True if the instance is associated with this session. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + The instance may be pending or persistent within the Session for a + result of True. + + + """ # noqa: E501 + + return self._proxied.__contains__(instance) + + def __iter__(self) -> Iterator[object]: + r"""Iterate over all pending or persistent instances within this + Session. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + + """ # noqa: E501 + + return self._proxied.__iter__() + + def add(self, instance: object, _warn: bool = True) -> None: + r"""Place an object into this :class:`_orm.Session`. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + Objects that are in the :term:`transient` state when passed to the + :meth:`_orm.Session.add` method will move to the + :term:`pending` state, until the next flush, at which point they + will move to the :term:`persistent` state. + + Objects that are in the :term:`detached` state when passed to the + :meth:`_orm.Session.add` method will move to the :term:`persistent` + state directly. + + If the transaction used by the :class:`_orm.Session` is rolled back, + objects which were transient when they were passed to + :meth:`_orm.Session.add` will be moved back to the + :term:`transient` state, and will no longer be present within this + :class:`_orm.Session`. + + .. 
seealso:: + + :meth:`_orm.Session.add_all` + + :ref:`session_adding` - at :ref:`session_basics` + + + """ # noqa: E501 + + return self._proxied.add(instance, _warn=_warn) + + def add_all(self, instances: Iterable[object]) -> None: + r"""Add the given collection of instances to this :class:`_orm.Session`. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + See the documentation for :meth:`_orm.Session.add` for a general + behavioral description. + + .. seealso:: + + :meth:`_orm.Session.add` + + :ref:`session_adding` - at :ref:`session_basics` + + + """ # noqa: E501 + + return self._proxied.add_all(instances) + + def expire( + self, instance: object, attribute_names: Optional[Iterable[str]] = None + ) -> None: + r"""Expire the attributes on an instance. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + Marks the attributes of an instance as out of date. When an expired + attribute is next accessed, a query will be issued to the + :class:`.Session` object's current transactional context in order to + load all expired attributes for the given instance. Note that + a highly isolated transaction will return the same values as were + previously read in that same transaction, regardless of changes + in database state outside of that transaction. + + To expire all objects in the :class:`.Session` simultaneously, + use :meth:`Session.expire_all`. + + The :class:`.Session` object's default behavior is to + expire all state whenever the :meth:`Session.rollback` + or :meth:`Session.commit` methods are called, so that new + state can be loaded for the new transaction. For this reason, + calling :meth:`Session.expire` only makes sense for the specific + case that a non-ORM SQL statement was emitted in the current + transaction. + + :param instance: The instance to be refreshed. + :param attribute_names: optional list of string attribute names + indicating a subset of attributes to be expired. + + .. seealso:: + + :ref:`session_expire` - introductory material + + :meth:`.Session.expire` + + :meth:`.Session.refresh` + + :meth:`_orm.Query.populate_existing` + + + """ # noqa: E501 + + return self._proxied.expire(instance, attribute_names=attribute_names) + + def expire_all(self) -> None: + r"""Expires all persistent instances within this Session. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + When any attributes on a persistent instance is next accessed, + a query will be issued using the + :class:`.Session` object's current transactional context in order to + load all expired attributes for the given instance. Note that + a highly isolated transaction will return the same values as were + previously read in that same transaction, regardless of changes + in database state outside of that transaction. + + To expire individual objects and individual attributes + on those objects, use :meth:`Session.expire`. + + The :class:`.Session` object's default behavior is to + expire all state whenever the :meth:`Session.rollback` + or :meth:`Session.commit` methods are called, so that new + state can be loaded for the new transaction. For this reason, + calling :meth:`Session.expire_all` is not usually needed, + assuming the transaction is isolated. + + .. 
seealso:: + + :ref:`session_expire` - introductory material + + :meth:`.Session.expire` + + :meth:`.Session.refresh` + + :meth:`_orm.Query.populate_existing` + + + """ # noqa: E501 + + return self._proxied.expire_all() + + def expunge(self, instance: object) -> None: + r"""Remove the `instance` from this ``Session``. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + This will free all internal references to the instance. Cascading + will be applied according to the *expunge* cascade rule. + + + """ # noqa: E501 + + return self._proxied.expunge(instance) + + def expunge_all(self) -> None: + r"""Remove all object instances from this ``Session``. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + This is equivalent to calling ``expunge(obj)`` on all objects in this + ``Session``. + + + """ # noqa: E501 + + return self._proxied.expunge_all() + + def is_modified( + self, instance: object, include_collections: bool = True + ) -> bool: + r"""Return ``True`` if the given instance has locally + modified attributes. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + This method retrieves the history for each instrumented + attribute on the instance and performs a comparison of the current + value to its previously flushed or committed value, if any. + + It is in effect a more expensive and accurate + version of checking for the given instance in the + :attr:`.Session.dirty` collection; a full test for + each attribute's net "dirty" status is performed. + + E.g.:: + + return session.is_modified(someobject) + + A few caveats to this method apply: + + * Instances present in the :attr:`.Session.dirty` collection may + report ``False`` when tested with this method. This is because + the object may have received change events via attribute mutation, + thus placing it in :attr:`.Session.dirty`, but ultimately the state + is the same as that loaded from the database, resulting in no net + change here. + * Scalar attributes may not have recorded the previously set + value when a new value was applied, if the attribute was not loaded, + or was expired, at the time the new value was received - in these + cases, the attribute is assumed to have a change, even if there is + ultimately no net change against its database value. SQLAlchemy in + most cases does not need the "old" value when a set event occurs, so + it skips the expense of a SQL call if the old value isn't present, + based on the assumption that an UPDATE of the scalar value is + usually needed, and in those few cases where it isn't, is less + expensive on average than issuing a defensive SELECT. + + The "old" value is fetched unconditionally upon set only if the + attribute container has the ``active_history`` flag set to ``True``. + This flag is set typically for primary key attributes and scalar + object references that are not a simple many-to-one. To set this + flag for any arbitrary mapped column, use the ``active_history`` + argument with :func:`.column_property`. + + :param instance: mapped instance to be tested for pending changes. + :param include_collections: Indicates if multivalued collections + should be included in the operation. Setting this to ``False`` is a + way to detect only local-column based properties (i.e. 
scalar columns + or many-to-one foreign keys) that would result in an UPDATE for this + instance upon flush. + + + """ # noqa: E501 + + return self._proxied.is_modified( + instance, include_collections=include_collections + ) + + def in_transaction(self) -> bool: + r"""Return True if this :class:`_orm.Session` has begun a transaction. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + .. versionadded:: 1.4 + + .. seealso:: + + :attr:`_orm.Session.is_active` + + + + """ # noqa: E501 + + return self._proxied.in_transaction() + + def in_nested_transaction(self) -> bool: + r"""Return True if this :class:`_orm.Session` has begun a nested + transaction, e.g. SAVEPOINT. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + .. versionadded:: 1.4 + + + """ # noqa: E501 + + return self._proxied.in_nested_transaction() + + @property + def dirty(self) -> Any: + r"""The set of all persistent instances considered dirty. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_asyncio.AsyncSession` class. + + E.g.:: + + some_mapped_object in session.dirty + + Instances are considered dirty when they were modified but not + deleted. + + Note that this 'dirty' calculation is 'optimistic'; most + attribute-setting or collection modification operations will + mark an instance as 'dirty' and place it in this set, even if + there is no net change to the attribute's value. At flush + time, the value of each attribute is compared to its + previously saved value, and if there's no net change, no SQL + operation will occur (this is a more expensive operation so + it's only done at flush time). + + To check if an instance has actionable net changes to its + attributes, use the :meth:`.Session.is_modified` method. + + + """ # noqa: E501 + + return self._proxied.dirty + + @property + def deleted(self) -> Any: + r"""The set of all instances marked as 'deleted' within this ``Session`` + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_asyncio.AsyncSession` class. + + """ # noqa: E501 + + return self._proxied.deleted + + @property + def new(self) -> Any: + r"""The set of all instances marked as 'new' within this ``Session``. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_asyncio.AsyncSession` class. + + """ # noqa: E501 + + return self._proxied.new + + @property + def identity_map(self) -> IdentityMap: + r"""Proxy for the :attr:`_orm.Session.identity_map` attribute + on behalf of the :class:`_asyncio.AsyncSession` class. + + """ # noqa: E501 + + return self._proxied.identity_map + + @identity_map.setter + def identity_map(self, attr: IdentityMap) -> None: + self._proxied.identity_map = attr + + @property + def is_active(self) -> Any: + r"""True if this :class:`.Session` is not in "partial rollback" state. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_asyncio.AsyncSession` class. + + .. versionchanged:: 1.4 The :class:`_orm.Session` no longer begins + a new transaction immediately, so this attribute will be False + when the :class:`_orm.Session` is first instantiated.
+ + "partial rollback" state typically indicates that the flush process + of the :class:`_orm.Session` has failed, and that the + :meth:`_orm.Session.rollback` method must be emitted in order to + fully roll back the transaction. + + If this :class:`_orm.Session` is not in a transaction at all, the + :class:`_orm.Session` will autobegin when it is first used, so in this + case :attr:`_orm.Session.is_active` will return True. + + Otherwise, if this :class:`_orm.Session` is within a transaction, + and that transaction has not been rolled back internally, the + :attr:`_orm.Session.is_active` will also return True. + + .. seealso:: + + :ref:`faq_session_rollback` + + :meth:`_orm.Session.in_transaction` + + + """ # noqa: E501 + + return self._proxied.is_active + + @property + def autoflush(self) -> bool: + r"""Proxy for the :attr:`_orm.Session.autoflush` attribute + on behalf of the :class:`_asyncio.AsyncSession` class. + + """ # noqa: E501 + + return self._proxied.autoflush + + @autoflush.setter + def autoflush(self, attr: bool) -> None: + self._proxied.autoflush = attr + + @property + def no_autoflush(self) -> Any: + r"""Return a context manager that disables autoflush. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_asyncio.AsyncSession` class. + + e.g.:: + + with session.no_autoflush: + + some_object = SomeClass() + session.add(some_object) + # won't autoflush + some_object.related_thing = session.query(SomeRelated).first() + + Operations that proceed within the ``with:`` block + will not be subject to flushes occurring upon query + access. This is useful when initializing a series + of objects which involve existing database queries, + where the uncompleted object should not yet be flushed. + + + """ # noqa: E501 + + return self._proxied.no_autoflush + + @property + def info(self) -> Any: + r"""A user-modifiable dictionary. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_asyncio.AsyncSession` class. + + The initial value of this dictionary can be populated using the + ``info`` argument to the :class:`.Session` constructor or + :class:`.sessionmaker` constructor or factory methods. The dictionary + here is always local to this :class:`.Session` and can be modified + independently of all other :class:`.Session` objects. + + + """ # noqa: E501 + + return self._proxied.info + + @classmethod + def object_session(cls, instance: object) -> Optional[Session]: + r"""Return the :class:`.Session` to which an object belongs. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + This is an alias of :func:`.object_session`. + + + """ # noqa: E501 + + return Session.object_session(instance) + + @classmethod + def identity_key( + cls, + class_: Optional[Type[Any]] = None, + ident: Union[Any, Tuple[Any, ...]] = None, + *, + instance: Optional[Any] = None, + row: Optional[Union[Row[Any], RowMapping]] = None, + identity_token: Optional[Any] = None, + ) -> _IdentityKeyType[Any]: + r"""Return an identity key. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_asyncio.AsyncSession` class. + + This is an alias of :func:`.util.identity_key`. 
+ + + """ # noqa: E501 + + return Session.identity_key( + class_=class_, + ident=ident, + instance=instance, + row=row, + identity_token=identity_token, + ) + + # END PROXY METHODS AsyncSession + + +_AS = TypeVar("_AS", bound="AsyncSession") + + +class async_sessionmaker(Generic[_AS]): + """A configurable :class:`.AsyncSession` factory. + + The :class:`.async_sessionmaker` factory works in the same way as the + :class:`.sessionmaker` factory, to generate new :class:`.AsyncSession` + objects when called, creating them given + the configurational arguments established here. + + e.g.:: + + from sqlalchemy.ext.asyncio import create_async_engine + from sqlalchemy.ext.asyncio import AsyncSession + from sqlalchemy.ext.asyncio import async_sessionmaker + + async def run_some_sql(async_session: async_sessionmaker[AsyncSession]) -> None: + async with async_session() as session: + session.add(SomeObject(data="object")) + session.add(SomeOtherObject(name="other object")) + await session.commit() + + async def main() -> None: + # an AsyncEngine, which the AsyncSession will use for connection + # resources + engine = create_async_engine('postgresql+asyncpg://scott:tiger@localhost/') + + # create a reusable factory for new AsyncSession instances + async_session = async_sessionmaker(engine) + + await run_some_sql(async_session) + + await engine.dispose() + + The :class:`.async_sessionmaker` is useful so that different parts + of a program can create new :class:`.AsyncSession` objects with a + fixed configuration established up front. Note that :class:`.AsyncSession` + objects may also be instantiated directly when not using + :class:`.async_sessionmaker`. + + .. versionadded:: 2.0 :class:`.async_sessionmaker` provides a + :class:`.sessionmaker` class that's dedicated to the + :class:`.AsyncSession` object, including pep-484 typing support. + + .. seealso:: + + :ref:`asyncio_orm` - shows example use + + :class:`.sessionmaker` - general overview of the + :class:`.sessionmaker` architecture + + + :ref:`session_getting` - introductory text on creating + sessions using :class:`.sessionmaker`. + + """ # noqa E501 + + class_: Type[_AS] + + @overload + def __init__( + self, + bind: Optional[_AsyncSessionBind] = ..., + *, + class_: Type[_AS], + autoflush: bool = ..., + expire_on_commit: bool = ..., + info: Optional[_InfoType] = ..., + **kw: Any, + ): ... + + @overload + def __init__( + self: "async_sessionmaker[AsyncSession]", + bind: Optional[_AsyncSessionBind] = ..., + *, + autoflush: bool = ..., + expire_on_commit: bool = ..., + info: Optional[_InfoType] = ..., + **kw: Any, + ): ... + + def __init__( + self, + bind: Optional[_AsyncSessionBind] = None, + *, + class_: Type[_AS] = AsyncSession, # type: ignore + autoflush: bool = True, + expire_on_commit: bool = True, + info: Optional[_InfoType] = None, + **kw: Any, + ): + r"""Construct a new :class:`.async_sessionmaker`. + + All arguments here except for ``class_`` correspond to arguments + accepted by :class:`.Session` directly. See the + :meth:`.AsyncSession.__init__` docstring for more details on + parameters. + + + """ + kw["bind"] = bind + kw["autoflush"] = autoflush + kw["expire_on_commit"] = expire_on_commit + if info is not None: + kw["info"] = info + self.kw = kw + self.class_ = class_ + + def begin(self) -> _AsyncSessionContextManager[_AS]: + """Produce a context manager that both provides a new + :class:`_orm.AsyncSession` as well as a transaction that commits. 
+ + + e.g.:: + + async def main(): + Session = async_sessionmaker(some_engine) + + async with Session.begin() as session: + session.add(some_object) + + # commits transaction, closes session + + + """ + + session = self() + return session._maker_context_manager() + + def __call__(self, **local_kw: Any) -> _AS: + """Produce a new :class:`.AsyncSession` object using the configuration + established in this :class:`.async_sessionmaker`. + + In Python, the ``__call__`` method is invoked on an object when + it is "called" in the same way as a function:: + + AsyncSession = async_sessionmaker(async_engine, expire_on_commit=False) + session = AsyncSession() # invokes sessionmaker.__call__() + + """ # noqa E501 + for k, v in self.kw.items(): + if k == "info" and "info" in local_kw: + d = v.copy() + d.update(local_kw["info"]) + local_kw["info"] = d + else: + local_kw.setdefault(k, v) + return self.class_(**local_kw) + + def configure(self, **new_kw: Any) -> None: + """(Re)configure the arguments for this async_sessionmaker. + + e.g.:: + + AsyncSession = async_sessionmaker(some_engine) + + AsyncSession.configure(bind=create_async_engine('sqlite+aiosqlite://')) + """ # noqa E501 + + self.kw.update(new_kw) + + def __repr__(self) -> str: + return "%s(class_=%r, %s)" % ( + self.__class__.__name__, + self.class_.__name__, + ", ".join("%s=%r" % (k, v) for k, v in self.kw.items()), + ) + + +class _AsyncSessionContextManager(Generic[_AS]): + __slots__ = ("async_session", "trans") + + async_session: _AS + trans: AsyncSessionTransaction + + def __init__(self, async_session: _AS): + self.async_session = async_session + + async def __aenter__(self) -> _AS: + self.trans = self.async_session.begin() + await self.trans.__aenter__() + return self.async_session + + async def __aexit__(self, type_: Any, value: Any, traceback: Any) -> None: + async def go() -> None: + await self.trans.__aexit__(type_, value, traceback) + await self.async_session.__aexit__(type_, value, traceback) + + task = asyncio.create_task(go()) + await asyncio.shield(task) + + +class AsyncSessionTransaction( + ReversibleProxy[SessionTransaction], + StartableContext["AsyncSessionTransaction"], +): + """A wrapper for the ORM :class:`_orm.SessionTransaction` object. + + This object is provided so that a transaction-holding object + for the :meth:`_asyncio.AsyncSession.begin` may be returned. + + The object supports both explicit calls to + :meth:`_asyncio.AsyncSessionTransaction.commit` and + :meth:`_asyncio.AsyncSessionTransaction.rollback`, as well as use as an + async context manager. + + + .. 
versionadded:: 1.4 + + """ + + __slots__ = ("session", "sync_transaction", "nested") + + session: AsyncSession + sync_transaction: Optional[SessionTransaction] + + def __init__(self, session: AsyncSession, nested: bool = False): + self.session = session + self.nested = nested + self.sync_transaction = None + + @property + def is_active(self) -> bool: + return ( + self._sync_transaction() is not None + and self._sync_transaction().is_active + ) + + def _sync_transaction(self) -> SessionTransaction: + if not self.sync_transaction: + self._raise_for_not_started() + return self.sync_transaction + + async def rollback(self) -> None: + """Roll back this :class:`_asyncio.AsyncTransaction`.""" + await greenlet_spawn(self._sync_transaction().rollback) + + async def commit(self) -> None: + """Commit this :class:`_asyncio.AsyncTransaction`.""" + + await greenlet_spawn(self._sync_transaction().commit) + + async def start( + self, is_ctxmanager: bool = False + ) -> AsyncSessionTransaction: + self.sync_transaction = self._assign_proxied( + await greenlet_spawn( + self.session.sync_session.begin_nested # type: ignore + if self.nested + else self.session.sync_session.begin + ) + ) + if is_ctxmanager: + self.sync_transaction.__enter__() + return self + + async def __aexit__(self, type_: Any, value: Any, traceback: Any) -> None: + await greenlet_spawn( + self._sync_transaction().__exit__, type_, value, traceback + ) + + +def async_object_session(instance: object) -> Optional[AsyncSession]: + """Return the :class:`_asyncio.AsyncSession` to which the given instance + belongs. + + This function makes use of the sync-API function + :class:`_orm.object_session` to retrieve the :class:`_orm.Session` which + refers to the given instance, and from there links it to the original + :class:`_asyncio.AsyncSession`. + + If the :class:`_asyncio.AsyncSession` has been garbage collected, the + return value is ``None``. + + This functionality is also available from the + :attr:`_orm.InstanceState.async_session` accessor. + + :param instance: an ORM mapped instance + :return: an :class:`_asyncio.AsyncSession` object, or ``None``. + + .. versionadded:: 1.4.18 + + """ + + session = object_session(instance) + if session is not None: + return async_session(session) + else: + return None + + +def async_session(session: Session) -> Optional[AsyncSession]: + """Return the :class:`_asyncio.AsyncSession` which is proxying the given + :class:`_orm.Session` object, if any. + + :param session: a :class:`_orm.Session` instance. + :return: a :class:`_asyncio.AsyncSession` instance, or ``None``. + + .. versionadded:: 1.4.18 + + """ + return AsyncSession._retrieve_proxy_for_target(session, regenerate=False) + + +async def close_all_sessions() -> None: + """Close all :class:`_asyncio.AsyncSession` sessions. + + .. versionadded:: 2.0.23 + + .. 
seealso::

+        :func:`.session.close_all_sessions`
+
+    """
+    await greenlet_spawn(_sync_close_all_sessions)
+
+
+_instance_state._async_provider = async_session  # type: ignore
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/automap.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/automap.py
new file mode 100644
index 00000000..70b0fe62
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/automap.py
@@ -0,0 +1,1691 @@
+# ext/automap.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php

+r"""Define an extension to the :mod:`sqlalchemy.ext.declarative` system
+which automatically generates mapped classes and relationships from a database
+schema, typically though not necessarily one which is reflected.
+
+It is hoped that the :class:`.AutomapBase` system provides a quick
+and modernized solution to the problem that the very famous
+`SQLSoup <https://sqlsoup.readthedocs.io/en/latest/>`_
+also tries to solve, that of generating a quick and rudimentary object
+model from an existing database on the fly. By addressing the issue strictly
+at the mapper configuration level, and integrating fully with existing
+Declarative class techniques, :class:`.AutomapBase` seeks to provide
+a well-integrated approach to the issue of expediently auto-generating ad-hoc
+mappings.
+
+.. tip:: The :ref:`automap_toplevel` extension is geared towards a
+    "zero declaration" approach, where a complete ORM model including classes
+    and pre-named relationships can be generated on the fly from a database
+    schema. For applications that still want to use explicit class declarations
+    including explicit relationship definitions in conjunction with reflection
+    of tables, the :class:`.DeferredReflection` class, described at
+    :ref:`orm_declarative_reflected_deferred_reflection`, is a better choice.
+
+.. _automap_basic_use:
+
+Basic Use
+=========
+
+The simplest usage is to reflect an existing database into a new model.
+We create a new :class:`.AutomapBase` class in a similar manner to how
+we create a declarative base class, using :func:`.automap_base`.
+We then call :meth:`.AutomapBase.prepare` on the resulting base class,
+asking it to reflect the schema and produce mappings::
+
+    from sqlalchemy.ext.automap import automap_base
+    from sqlalchemy.orm import Session
+    from sqlalchemy import create_engine
+
+    Base = automap_base()
+
+    # engine, suppose it has two tables 'user' and 'address' set up
+    engine = create_engine("sqlite:///mydatabase.db")
+
+    # reflect the tables
+    Base.prepare(autoload_with=engine)
+
+    # mapped classes are now created with names by default
+    # matching that of the table name.
+    User = Base.classes.user
+    Address = Base.classes.address
+
+    session = Session(engine)
+
+    # rudimentary relationships are produced
+    session.add(Address(email_address="foo@bar.com", user=User(name="foo")))
+    session.commit()
+
+    # collection-based relationships are by default named
+    # "<classname>_collection"
+    u1 = session.query(User).first()
+    print(u1.address_collection)
+
+Above, calling :meth:`.AutomapBase.prepare` while passing along the
+:paramref:`.AutomapBase.prepare.autoload_with` parameter indicates that the
+:meth:`_schema.MetaData.reflect`
+method will be called on this declarative base
+class's :class:`_schema.MetaData` collection; then, each **viable**
+:class:`_schema.Table` within the :class:`_schema.MetaData`
+will get a new mapped class
+generated automatically. 
The :class:`_schema.ForeignKeyConstraint` +objects which +link the various tables together will be used to produce new, bidirectional +:func:`_orm.relationship` objects between classes. +The classes and relationships +follow along a default naming scheme that we can customize. At this point, +our basic mapping consisting of related ``User`` and ``Address`` classes is +ready to use in the traditional way. + +.. note:: By **viable**, we mean that for a table to be mapped, it must + specify a primary key. Additionally, if the table is detected as being + a pure association table between two other tables, it will not be directly + mapped and will instead be configured as a many-to-many table between + the mappings for the two referring tables. + +Generating Mappings from an Existing MetaData +============================================= + +We can pass a pre-declared :class:`_schema.MetaData` object to +:func:`.automap_base`. +This object can be constructed in any way, including programmatically, from +a serialized file, or from itself being reflected using +:meth:`_schema.MetaData.reflect`. +Below we illustrate a combination of reflection and +explicit table declaration:: + + from sqlalchemy import create_engine, MetaData, Table, Column, ForeignKey + from sqlalchemy.ext.automap import automap_base + + engine = create_engine("sqlite:///mydatabase.db") + + # produce our own MetaData object + metadata = MetaData() + + # we can reflect it ourselves from a database, using options + # such as 'only' to limit what tables we look at... + metadata.reflect(engine, only=["user", "address"]) + + # ... or just define our own Table objects with it (or combine both) + Table( + "user_order", + metadata, + Column("id", Integer, primary_key=True), + Column("user_id", ForeignKey("user.id")), + ) + + # we can then produce a set of mappings from this MetaData. + Base = automap_base(metadata=metadata) + + # calling prepare() just sets up mapped classes and relationships. + Base.prepare() + + # mapped classes are ready + User = Base.classes.user + Address = Base.classes.address + Order = Base.classes.user_order + +.. _automap_by_module: + +Generating Mappings from Multiple Schemas +========================================= + +The :meth:`.AutomapBase.prepare` method when used with reflection may reflect +tables from one schema at a time at most, using the +:paramref:`.AutomapBase.prepare.schema` parameter to indicate the name of a +schema to be reflected from. In order to populate the :class:`.AutomapBase` +with tables from multiple schemas, :meth:`.AutomapBase.prepare` may be invoked +multiple times, each time passing a different name to the +:paramref:`.AutomapBase.prepare.schema` parameter. The +:meth:`.AutomapBase.prepare` method keeps an internal list of +:class:`_schema.Table` objects that have already been mapped, and will add new +mappings only for those :class:`_schema.Table` objects that are new since the +last time :meth:`.AutomapBase.prepare` was run:: + + e = create_engine("postgresql://scott:tiger@localhost/test") + + Base.metadata.create_all(e) + + Base = automap_base() + + Base.prepare(e) + Base.prepare(e, schema="test_schema") + Base.prepare(e, schema="test_schema_2") + +.. versionadded:: 2.0 The :meth:`.AutomapBase.prepare` method may be called + any number of times; only newly added tables will be mapped + on each run. Previously in version 1.4 and earlier, multiple calls would + cause errors as it would attempt to re-map an already mapped class. 
+    The previous workaround approach of invoking
+    :meth:`_schema.MetaData.reflect` directly remains available as well.
+
+Automapping same-named tables across multiple schemas
+-----------------------------------------------------
+
+For the common case where multiple schemas may have same-named tables and
+therefore would generate same-named classes, conflicts can be resolved either
+through use of the :paramref:`.AutomapBase.prepare.classname_for_table` hook to
+apply different classnames on a per-schema basis, or by using the
+:paramref:`.AutomapBase.prepare.modulename_for_table` hook, which allows
+disambiguation of same-named classes by changing their effective ``__module__``
+attribute. In the example below, this hook is used to create a ``__module__``
+attribute for all classes that is of the form ``mymodule.<schemaname>``, where
+the schema name ``default`` is used if no schema is present::
+
+    e = create_engine("postgresql://scott:tiger@localhost/test")
+
+    Base.metadata.create_all(e)
+
+
+    def module_name_for_table(cls, tablename, table):
+        if table.schema is not None:
+            return f"mymodule.{table.schema}"
+        else:
+            return "mymodule.default"
+
+
+    Base = automap_base()
+
+    Base.prepare(e, modulename_for_table=module_name_for_table)
+    Base.prepare(e, schema="test_schema", modulename_for_table=module_name_for_table)
+    Base.prepare(e, schema="test_schema_2", modulename_for_table=module_name_for_table)
+
+The same-named classes are organized into a hierarchical collection available
+at :attr:`.AutomapBase.by_module`. This collection is traversed using the
+dot-separated name of a particular package/module down into the desired
+class name.
+
+.. note:: When using the :paramref:`.AutomapBase.prepare.modulename_for_table`
+   hook to return a new ``__module__`` that is not ``None``, the class is
+   **not** placed into the :attr:`.AutomapBase.classes` collection; only
+   classes that were not given an explicit modulename are placed here, as the
+   collection cannot represent same-named classes individually.
+
+In the example above, if the database contained a table named ``accounts`` in
+all three of the default schema, the ``test_schema`` schema, and the
+``test_schema_2`` schema, three separate classes will be available as::
+
+    Base.by_module.mymodule.default.accounts
+    Base.by_module.mymodule.test_schema.accounts
+    Base.by_module.mymodule.test_schema_2.accounts
+
+The default module namespace generated for all :class:`.AutomapBase` classes is
+``sqlalchemy.ext.automap``. If no
+:paramref:`.AutomapBase.prepare.modulename_for_table` hook is used, the
+contents of :attr:`.AutomapBase.by_module` will be entirely within the
+``sqlalchemy.ext.automap`` namespace (e.g.
+``MyBase.by_module.sqlalchemy.ext.automap.<classname>``), which would contain
+the same series of classes as what would be seen in
+:attr:`.AutomapBase.classes`. Therefore it's generally only necessary to use
+:attr:`.AutomapBase.by_module` when explicit ``__module__`` conventions are
+present.
+
+.. versionadded:: 2.0
+
+    Added the :attr:`.AutomapBase.by_module` collection, which stores
+    classes within a named hierarchy based on dot-separated module names,
+    as well as the :paramref:`.AutomapBase.prepare.modulename_for_table`
+    parameter which allows for custom ``__module__`` schemes for automapped
+    classes.
+
+
+
+Specifying Classes Explicitly
+=============================
+
+.. tip:: If explicit classes are expected to be prominent in an application,
+    consider using :class:`.DeferredReflection` instead. 
+
+The :mod:`.sqlalchemy.ext.automap` extension allows classes to be defined
+explicitly, in a way similar to that of the :class:`.DeferredReflection` class.
+Classes that extend from :class:`.AutomapBase` act like regular declarative
+classes, but are not immediately mapped after their construction, and are
+instead mapped when we call :meth:`.AutomapBase.prepare`. The
+:meth:`.AutomapBase.prepare` method will make use of the classes we've
+established, based on the table name we use. If our schema contains tables
+``user`` and ``address``, we can define one or both of the classes to be used::
+
+    from sqlalchemy import Column, String, create_engine
+    from sqlalchemy.ext.automap import automap_base
+    from sqlalchemy.orm import Session, relationship
+
+    # automap base
+    Base = automap_base()
+
+
+    # pre-declare User for the 'user' table
+    class User(Base):
+        __tablename__ = "user"
+
+        # override schema elements like Columns
+        user_name = Column("name", String)
+
+        # override relationships too, if desired.
+        # we must use the same name that automap would use for the
+        # relationship, and also must refer to the class name that automap will
+        # generate for "address"
+        address_collection = relationship("address", collection_class=set)
+
+
+    # reflect
+    engine = create_engine("sqlite:///mydatabase.db")
+    Base.prepare(autoload_with=engine)
+
+    # we still have Address generated from the tablename "address",
+    # but User is the same as Base.classes.User now
+
+    Address = Base.classes.address
+
+    session = Session(engine)
+
+    u1 = session.query(User).first()
+    print(u1.address_collection)
+
+    # the backref is still there:
+    a1 = session.query(Address).first()
+    print(a1.user)
+
+Above, one of the more intricate details is that we illustrated overriding
+one of the :func:`_orm.relationship` objects that automap would have created.
+To do this, we needed to make sure the names match up with what automap
+would normally generate, in that the relationship name would be
+``User.address_collection`` and the name of the class referred to, from
+automap's perspective, is called ``address``, even though we are referring to
+it as ``Address`` within our usage of this class.
+
+Overriding Naming Schemes
+=========================
+
+:mod:`.sqlalchemy.ext.automap` is tasked with producing mapped classes and
+relationship names based on a schema, which means it has decision points in how
+these names are determined. These three decision points are provided using
+functions which can be passed to the :meth:`.AutomapBase.prepare` method, and
+are known as :func:`.classname_for_table`,
+:func:`.name_for_scalar_relationship`,
+and :func:`.name_for_collection_relationship`. Any or all of these
+functions may be provided, as in the example below, where we use a "camel case"
+scheme for class names and a "pluralizer" for collection names using the
+`Inflect <https://pypi.org/project/inflect>`_ package::
+
+    import re
+    import inflect
+
+
+    def camelize_classname(base, tablename, table):
+        "Produce a 'camelized' class name, e.g."
+        "'words_and_underscores' -> 'WordsAndUnderscores'"
+
+        return str(
+            tablename[0].upper()
+            + re.sub(
+                r"_([a-z])",
+                lambda m: m.group(1).upper(),
+                tablename[1:],
+            )
+        )
+
+
+    _pluralizer = inflect.engine()
+
+
+    def pluralize_collection(base, local_cls, referred_cls, constraint):
+        "Produce an 'uncamelized', 'pluralized' class name, e.g."
+        "'SomeTerm' -> 'some_terms'"
+
+        referred_name = referred_cls.__name__
+        uncamelized = re.sub(
+            r"[A-Z]",
+            lambda m: "_%s" % m.group(0).lower(),
+            referred_name,
+        )[1:]
+        pluralized = _pluralizer.plural(uncamelized)
+        return pluralized
+
+
+    from sqlalchemy import create_engine
+    from sqlalchemy.ext.automap import automap_base
+
+    Base = automap_base()
+
+    engine = create_engine("sqlite:///mydatabase.db")
+
+    Base.prepare(
+        autoload_with=engine,
+        classname_for_table=camelize_classname,
+        name_for_collection_relationship=pluralize_collection,
+    )
+
+From the above mapping, we would now have classes ``User`` and ``Address``,
+where the collection from ``User`` to ``Address`` is called
+``User.addresses``::
+
+    User, Address = Base.classes.User, Base.classes.Address
+
+    u1 = User(addresses=[Address(email="foo@bar.com")])
+
+Relationship Detection
+======================
+
+The vast majority of what automap accomplishes is the generation of
+:func:`_orm.relationship` structures based on foreign keys. The mechanism
+by which this works for many-to-one and one-to-many relationships is as
+follows:
+
+1. A given :class:`_schema.Table`, known to be mapped to a particular class,
+   is examined for :class:`_schema.ForeignKeyConstraint` objects.
+
+2. From each :class:`_schema.ForeignKeyConstraint`, the remote
+   :class:`_schema.Table`
+   object present is matched up to the class to which it is to be mapped,
+   if any, else it is skipped.
+
+3. As the :class:`_schema.ForeignKeyConstraint`
+   we are examining corresponds to a
+   reference from the immediate mapped class, the relationship will be set up
+   as a many-to-one referring to the referred class; a corresponding
+   one-to-many backref will be created on the referred class referring
+   to this class.
+
+4. If any of the columns that are part of the
+   :class:`_schema.ForeignKeyConstraint`
+   are not nullable (e.g. ``nullable=False``), a
+   :paramref:`_orm.relationship.cascade` keyword argument
+   of ``all, delete-orphan`` will be added to the keyword arguments to
+   be passed to the relationship or backref. If the
+   :class:`_schema.ForeignKeyConstraint` reports that
+   :paramref:`_schema.ForeignKeyConstraint.ondelete`
+   is set to ``CASCADE`` for a not-null set of columns, or to ``SET NULL``
+   for a nullable set, the :paramref:`_orm.relationship.passive_deletes`
+   flag is set to ``True`` in the set of relationship keyword arguments.
+   Note that not all backends support reflection of ON DELETE.
+
+5. The names of the relationships are determined using the
+   :paramref:`.AutomapBase.prepare.name_for_scalar_relationship` and
+   :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
+   callable functions. It is important to note that the default relationship
+   naming derives the name from the **actual class name**. If you've
+   given a particular class an explicit name by declaring it, or specified an
+   alternate class naming scheme, that's the name from which the relationship
+   name will be derived.
+
+6. The classes are inspected for an existing mapped property matching these
+   names. If one is detected on one side, but none on the other side,
+   :class:`.AutomapBase` attempts to create a relationship on the missing side,
+   then uses the :paramref:`_orm.relationship.back_populates`
+   parameter in order to
+   point the new relationship to the other side.
+
+7. 
In the usual case where no relationship is on either side, + :meth:`.AutomapBase.prepare` produces a :func:`_orm.relationship` on the + "many-to-one" side and matches it to the other using the + :paramref:`_orm.relationship.backref` parameter. + +8. Production of the :func:`_orm.relationship` and optionally the + :func:`.backref` + is handed off to the :paramref:`.AutomapBase.prepare.generate_relationship` + function, which can be supplied by the end-user in order to augment + the arguments passed to :func:`_orm.relationship` or :func:`.backref` or to + make use of custom implementations of these functions. + +Custom Relationship Arguments +----------------------------- + +The :paramref:`.AutomapBase.prepare.generate_relationship` hook can be used +to add parameters to relationships. For most cases, we can make use of the +existing :func:`.automap.generate_relationship` function to return +the object, after augmenting the given keyword dictionary with our own +arguments. + +Below is an illustration of how to send +:paramref:`_orm.relationship.cascade` and +:paramref:`_orm.relationship.passive_deletes` +options along to all one-to-many relationships:: + + from sqlalchemy.ext.automap import generate_relationship + from sqlalchemy.orm import interfaces + + + def _gen_relationship( + base, direction, return_fn, attrname, local_cls, referred_cls, **kw + ): + if direction is interfaces.ONETOMANY: + kw["cascade"] = "all, delete-orphan" + kw["passive_deletes"] = True + # make use of the built-in function to actually return + # the result. + return generate_relationship( + base, direction, return_fn, attrname, local_cls, referred_cls, **kw + ) + + + from sqlalchemy.ext.automap import automap_base + from sqlalchemy import create_engine + + # automap base + Base = automap_base() + + engine = create_engine("sqlite:///mydatabase.db") + Base.prepare(autoload_with=engine, generate_relationship=_gen_relationship) + +Many-to-Many relationships +-------------------------- + +:mod:`.sqlalchemy.ext.automap` will generate many-to-many relationships, e.g. +those which contain a ``secondary`` argument. The process for producing these +is as follows: + +1. A given :class:`_schema.Table` is examined for + :class:`_schema.ForeignKeyConstraint` + objects, before any mapped class has been assigned to it. + +2. If the table contains two and exactly two + :class:`_schema.ForeignKeyConstraint` + objects, and all columns within this table are members of these two + :class:`_schema.ForeignKeyConstraint` objects, the table is assumed to be a + "secondary" table, and will **not be mapped directly**. + +3. The two (or one, for self-referential) external tables to which the + :class:`_schema.Table` + refers to are matched to the classes to which they will be + mapped, if any. + +4. If mapped classes for both sides are located, a many-to-many bi-directional + :func:`_orm.relationship` / :func:`.backref` + pair is created between the two + classes. + +5. The override logic for many-to-many works the same as that of one-to-many/ + many-to-one; the :func:`.generate_relationship` function is called upon + to generate the structures and existing attributes will be maintained. + +Relationships with Inheritance +------------------------------ + +:mod:`.sqlalchemy.ext.automap` will not generate any relationships between +two classes that are in an inheritance relationship. 
That is, with two +classes given as follows:: + + class Employee(Base): + __tablename__ = "employee" + id = Column(Integer, primary_key=True) + type = Column(String(50)) + __mapper_args__ = { + "polymorphic_identity": "employee", + "polymorphic_on": type, + } + + + class Engineer(Employee): + __tablename__ = "engineer" + id = Column(Integer, ForeignKey("employee.id"), primary_key=True) + __mapper_args__ = { + "polymorphic_identity": "engineer", + } + +The foreign key from ``Engineer`` to ``Employee`` is used not for a +relationship, but to establish joined inheritance between the two classes. + +Note that this means automap will not generate *any* relationships +for foreign keys that link from a subclass to a superclass. If a mapping +has actual relationships from subclass to superclass as well, those +need to be explicit. Below, as we have two separate foreign keys +from ``Engineer`` to ``Employee``, we need to set up both the relationship +we want as well as the ``inherit_condition``, as these are not things +SQLAlchemy can guess:: + + class Employee(Base): + __tablename__ = "employee" + id = Column(Integer, primary_key=True) + type = Column(String(50)) + + __mapper_args__ = { + "polymorphic_identity": "employee", + "polymorphic_on": type, + } + + + class Engineer(Employee): + __tablename__ = "engineer" + id = Column(Integer, ForeignKey("employee.id"), primary_key=True) + favorite_employee_id = Column(Integer, ForeignKey("employee.id")) + + favorite_employee = relationship(Employee, foreign_keys=favorite_employee_id) + + __mapper_args__ = { + "polymorphic_identity": "engineer", + "inherit_condition": id == Employee.id, + } + +Handling Simple Naming Conflicts +-------------------------------- + +In the case of naming conflicts during mapping, override any of +:func:`.classname_for_table`, :func:`.name_for_scalar_relationship`, +and :func:`.name_for_collection_relationship` as needed. For example, if +automap is attempting to name a many-to-one relationship the same as an +existing column, an alternate convention can be conditionally selected. Given +a schema: + +.. sourcecode:: sql + + CREATE TABLE table_a ( + id INTEGER PRIMARY KEY + ); + + CREATE TABLE table_b ( + id INTEGER PRIMARY KEY, + table_a INTEGER, + FOREIGN KEY(table_a) REFERENCES table_a(id) + ); + +The above schema will first automap the ``table_a`` table as a class named +``table_a``; it will then automap a relationship onto the class for ``table_b`` +with the same name as this related class, e.g. ``table_a``. This +relationship name conflicts with the mapping column ``table_b.table_a``, +and will emit an error on mapping. + +We can resolve this conflict by using an underscore as follows:: + + def name_for_scalar_relationship(base, local_cls, referred_cls, constraint): + name = referred_cls.__name__.lower() + local_table = local_cls.__table__ + if name in local_table.columns: + newname = name + "_" + warnings.warn("Already detected name %s present. using %s" % (name, newname)) + return newname + return name + + + Base.prepare( + autoload_with=engine, + name_for_scalar_relationship=name_for_scalar_relationship, + ) + +Alternatively, we can change the name on the column side. 
The columns
+that are mapped can be modified using the technique described at
+:ref:`mapper_column_distinct_names`, by assigning the column explicitly
+to a new name::
+
+    Base = automap_base()
+
+
+    class TableB(Base):
+        __tablename__ = "table_b"
+        _table_a = Column("table_a", ForeignKey("table_a.id"))
+
+
+    Base.prepare(autoload_with=engine)
+
+Using Automap with Explicit Declarations
+========================================
+
+As noted previously, automap has no dependency on reflection, and can make
+use of any collection of :class:`_schema.Table` objects within a
+:class:`_schema.MetaData`
+collection. From this, it follows that automap can also be used to
+generate missing relationships given an otherwise complete model that fully
+defines table metadata::
+
+    from sqlalchemy.ext.automap import automap_base
+    from sqlalchemy import Column, Integer, String, ForeignKey
+
+    Base = automap_base()
+
+
+    class User(Base):
+        __tablename__ = "user"
+
+        id = Column(Integer, primary_key=True)
+        name = Column(String)
+
+
+    class Address(Base):
+        __tablename__ = "address"
+
+        id = Column(Integer, primary_key=True)
+        email = Column(String)
+        user_id = Column(ForeignKey("user.id"))
+
+
+    # produce relationships
+    Base.prepare()
+
+    # mapping is complete, with "address_collection" and
+    # "user" relationships
+    a1 = Address(email="u1")
+    a2 = Address(email="u2")
+    u1 = User(address_collection=[a1, a2])
+    assert a1.user is u1
+
+Above, given mostly complete ``User`` and ``Address`` mappings, the
+:class:`_schema.ForeignKey` which we defined on ``Address.user_id`` allowed a
+bidirectional relationship pair ``Address.user`` and
+``User.address_collection`` to be generated on the mapped classes.
+
+Note that when subclassing :class:`.AutomapBase`,
+the :meth:`.AutomapBase.prepare` method is required; if not called, the classes
+we've declared are in an un-mapped state.
+
+
+.. _automap_intercepting_columns:
+
+Intercepting Column Definitions
+===============================
+
+The :class:`_schema.MetaData` and :class:`_schema.Table` objects support an
+event hook :meth:`_events.DDLEvents.column_reflect` that may be used to intercept
+the information reflected about a database column before the :class:`_schema.Column`
+object is constructed. For example, if we wanted to map columns using a
+naming convention such as ``"attr_<columnname>"``, the event could
+be applied as::
+
+    @event.listens_for(Base.metadata, "column_reflect")
+    def column_reflect(inspector, table, column_info):
+        # set column.key = "attr_<lower_case_name>"
+        column_info["key"] = "attr_%s" % column_info["name"].lower()
+
+
+    # run reflection
+    Base.prepare(autoload_with=engine)
+
+.. versionadded:: 1.4.0b2 the :meth:`_events.DDLEvents.column_reflect` event
+   may be applied to a :class:`_schema.MetaData` object.
+
+.. seealso::
+
+    :meth:`_events.DDLEvents.column_reflect`
+
+    :ref:`mapper_automated_reflection_schemes` - in the ORM mapping documentation
+
+
+"""  # noqa
+from __future__ import annotations
+
+import dataclasses
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import ClassVar
+from typing import Dict
+from typing import List
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from .. 
import util +from ..orm import backref +from ..orm import declarative_base as _declarative_base +from ..orm import exc as orm_exc +from ..orm import interfaces +from ..orm import relationship +from ..orm.decl_base import _DeferredMapperConfig +from ..orm.mapper import _CONFIGURE_MUTEX +from ..schema import ForeignKeyConstraint +from ..sql import and_ +from ..util import Properties +from ..util.typing import Protocol + +if TYPE_CHECKING: + from ..engine.base import Engine + from ..orm.base import RelationshipDirection + from ..orm.relationships import ORMBackrefArgument + from ..orm.relationships import Relationship + from ..sql.schema import Column + from ..sql.schema import MetaData + from ..sql.schema import Table + from ..util import immutabledict + + +_KT = TypeVar("_KT", bound=Any) +_VT = TypeVar("_VT", bound=Any) + + +class PythonNameForTableType(Protocol): + def __call__( + self, base: Type[Any], tablename: str, table: Table + ) -> str: ... + + +def classname_for_table( + base: Type[Any], + tablename: str, + table: Table, +) -> str: + """Return the class name that should be used, given the name + of a table. + + The default implementation is:: + + return str(tablename) + + Alternate implementations can be specified using the + :paramref:`.AutomapBase.prepare.classname_for_table` + parameter. + + :param base: the :class:`.AutomapBase` class doing the prepare. + + :param tablename: string name of the :class:`_schema.Table`. + + :param table: the :class:`_schema.Table` object itself. + + :return: a string class name. + + .. note:: + + In Python 2, the string used for the class name **must** be a + non-Unicode object, e.g. a ``str()`` object. The ``.name`` attribute + of :class:`_schema.Table` is typically a Python unicode subclass, + so the + ``str()`` function should be applied to this name, after accounting for + any non-ASCII characters. + + """ + return str(tablename) + + +class NameForScalarRelationshipType(Protocol): + def __call__( + self, + base: Type[Any], + local_cls: Type[Any], + referred_cls: Type[Any], + constraint: ForeignKeyConstraint, + ) -> str: ... + + +def name_for_scalar_relationship( + base: Type[Any], + local_cls: Type[Any], + referred_cls: Type[Any], + constraint: ForeignKeyConstraint, +) -> str: + """Return the attribute name that should be used to refer from one + class to another, for a scalar object reference. + + The default implementation is:: + + return referred_cls.__name__.lower() + + Alternate implementations can be specified using the + :paramref:`.AutomapBase.prepare.name_for_scalar_relationship` + parameter. + + :param base: the :class:`.AutomapBase` class doing the prepare. + + :param local_cls: the class to be mapped on the local side. + + :param referred_cls: the class to be mapped on the referring side. + + :param constraint: the :class:`_schema.ForeignKeyConstraint` that is being + inspected to produce this relationship. + + """ + return referred_cls.__name__.lower() + + +class NameForCollectionRelationshipType(Protocol): + def __call__( + self, + base: Type[Any], + local_cls: Type[Any], + referred_cls: Type[Any], + constraint: ForeignKeyConstraint, + ) -> str: ... + + +def name_for_collection_relationship( + base: Type[Any], + local_cls: Type[Any], + referred_cls: Type[Any], + constraint: ForeignKeyConstraint, +) -> str: + """Return the attribute name that should be used to refer from one + class to another, for a collection reference. 
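+
+    For example, under the default naming scheme shown below, the
+    ``user``/``address`` schema from :ref:`automap_basic_use` produces a
+    ``User.address_collection`` attribute collecting ``Address`` objects.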
+ + The default implementation is:: + + return referred_cls.__name__.lower() + "_collection" + + Alternate implementations + can be specified using the + :paramref:`.AutomapBase.prepare.name_for_collection_relationship` + parameter. + + :param base: the :class:`.AutomapBase` class doing the prepare. + + :param local_cls: the class to be mapped on the local side. + + :param referred_cls: the class to be mapped on the referring side. + + :param constraint: the :class:`_schema.ForeignKeyConstraint` that is being + inspected to produce this relationship. + + """ + return referred_cls.__name__.lower() + "_collection" + + +class GenerateRelationshipType(Protocol): + @overload + def __call__( + self, + base: Type[Any], + direction: RelationshipDirection, + return_fn: Callable[..., Relationship[Any]], + attrname: str, + local_cls: Type[Any], + referred_cls: Type[Any], + **kw: Any, + ) -> Relationship[Any]: ... + + @overload + def __call__( + self, + base: Type[Any], + direction: RelationshipDirection, + return_fn: Callable[..., ORMBackrefArgument], + attrname: str, + local_cls: Type[Any], + referred_cls: Type[Any], + **kw: Any, + ) -> ORMBackrefArgument: ... + + def __call__( + self, + base: Type[Any], + direction: RelationshipDirection, + return_fn: Union[ + Callable[..., Relationship[Any]], Callable[..., ORMBackrefArgument] + ], + attrname: str, + local_cls: Type[Any], + referred_cls: Type[Any], + **kw: Any, + ) -> Union[ORMBackrefArgument, Relationship[Any]]: ... + + +@overload +def generate_relationship( + base: Type[Any], + direction: RelationshipDirection, + return_fn: Callable[..., Relationship[Any]], + attrname: str, + local_cls: Type[Any], + referred_cls: Type[Any], + **kw: Any, +) -> Relationship[Any]: ... + + +@overload +def generate_relationship( + base: Type[Any], + direction: RelationshipDirection, + return_fn: Callable[..., ORMBackrefArgument], + attrname: str, + local_cls: Type[Any], + referred_cls: Type[Any], + **kw: Any, +) -> ORMBackrefArgument: ... + + +def generate_relationship( + base: Type[Any], + direction: RelationshipDirection, + return_fn: Union[ + Callable[..., Relationship[Any]], Callable[..., ORMBackrefArgument] + ], + attrname: str, + local_cls: Type[Any], + referred_cls: Type[Any], + **kw: Any, +) -> Union[Relationship[Any], ORMBackrefArgument]: + r"""Generate a :func:`_orm.relationship` or :func:`.backref` + on behalf of two + mapped classes. + + An alternate implementation of this function can be specified using the + :paramref:`.AutomapBase.prepare.generate_relationship` parameter. + + The default implementation of this function is as follows:: + + if return_fn is backref: + return return_fn(attrname, **kw) + elif return_fn is relationship: + return return_fn(referred_cls, **kw) + else: + raise TypeError("Unknown relationship function: %s" % return_fn) + + :param base: the :class:`.AutomapBase` class doing the prepare. + + :param direction: indicate the "direction" of the relationship; this will + be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOMANY`. + + :param return_fn: the function that is used by default to create the + relationship. This will be either :func:`_orm.relationship` or + :func:`.backref`. The :func:`.backref` function's result will be used to + produce a new :func:`_orm.relationship` in a second step, + so it is critical + that user-defined implementations correctly differentiate between the two + functions, if a custom relationship function is being used. 
+ + :param attrname: the attribute name to which this relationship is being + assigned. If the value of :paramref:`.generate_relationship.return_fn` is + the :func:`.backref` function, then this name is the name that is being + assigned to the backref. + + :param local_cls: the "local" class to which this relationship or backref + will be locally present. + + :param referred_cls: the "referred" class to which the relationship or + backref refers to. + + :param \**kw: all additional keyword arguments are passed along to the + function. + + :return: a :func:`_orm.relationship` or :func:`.backref` construct, + as dictated + by the :paramref:`.generate_relationship.return_fn` parameter. + + """ + + if return_fn is backref: + return return_fn(attrname, **kw) + elif return_fn is relationship: + return return_fn(referred_cls, **kw) + else: + raise TypeError("Unknown relationship function: %s" % return_fn) + + +ByModuleProperties = Properties[Union["ByModuleProperties", Type[Any]]] + + +class AutomapBase: + """Base class for an "automap" schema. + + The :class:`.AutomapBase` class can be compared to the "declarative base" + class that is produced by the :func:`.declarative.declarative_base` + function. In practice, the :class:`.AutomapBase` class is always used + as a mixin along with an actual declarative base. + + A new subclassable :class:`.AutomapBase` is typically instantiated + using the :func:`.automap_base` function. + + .. seealso:: + + :ref:`automap_toplevel` + + """ + + __abstract__ = True + + classes: ClassVar[Properties[Type[Any]]] + """An instance of :class:`.util.Properties` containing classes. + + This object behaves much like the ``.c`` collection on a table. Classes + are present under the name they were given, e.g.:: + + Base = automap_base() + Base.prepare(autoload_with=some_engine) + + User, Address = Base.classes.User, Base.classes.Address + + For class names that overlap with a method name of + :class:`.util.Properties`, such as ``items()``, the getitem form + is also supported:: + + Item = Base.classes["items"] + + """ + + by_module: ClassVar[ByModuleProperties] + """An instance of :class:`.util.Properties` containing a hierarchal + structure of dot-separated module names linked to classes. + + This collection is an alternative to the :attr:`.AutomapBase.classes` + collection that is useful when making use of the + :paramref:`.AutomapBase.prepare.modulename_for_table` parameter, which will + apply distinct ``__module__`` attributes to generated classes. + + The default ``__module__`` an automap-generated class is + ``sqlalchemy.ext.automap``; to access this namespace using + :attr:`.AutomapBase.by_module` looks like:: + + User = Base.by_module.sqlalchemy.ext.automap.User + + If a class had a ``__module__`` of ``mymodule.account``, accessing + this namespace looks like:: + + MyClass = Base.by_module.mymodule.account.MyClass + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`automap_by_module` + + """ + + metadata: ClassVar[MetaData] + """Refers to the :class:`_schema.MetaData` collection that will be used + for new :class:`_schema.Table` objects. + + .. seealso:: + + :ref:`orm_declarative_metadata` + + """ + + _sa_automapbase_bookkeeping: ClassVar[_Bookkeeping] + + @classmethod + @util.deprecated_params( + engine=( + "2.0", + "The :paramref:`_automap.AutomapBase.prepare.engine` parameter " + "is deprecated and will be removed in a future release. 
" + "Please use the " + ":paramref:`_automap.AutomapBase.prepare.autoload_with` " + "parameter.", + ), + reflect=( + "2.0", + "The :paramref:`_automap.AutomapBase.prepare.reflect` " + "parameter is deprecated and will be removed in a future " + "release. Reflection is enabled when " + ":paramref:`_automap.AutomapBase.prepare.autoload_with` " + "is passed.", + ), + ) + def prepare( + cls: Type[AutomapBase], + autoload_with: Optional[Engine] = None, + engine: Optional[Any] = None, + reflect: bool = False, + schema: Optional[str] = None, + classname_for_table: Optional[PythonNameForTableType] = None, + modulename_for_table: Optional[PythonNameForTableType] = None, + collection_class: Optional[Any] = None, + name_for_scalar_relationship: Optional[ + NameForScalarRelationshipType + ] = None, + name_for_collection_relationship: Optional[ + NameForCollectionRelationshipType + ] = None, + generate_relationship: Optional[GenerateRelationshipType] = None, + reflection_options: Union[ + Dict[_KT, _VT], immutabledict[_KT, _VT] + ] = util.EMPTY_DICT, + ) -> None: + """Extract mapped classes and relationships from the + :class:`_schema.MetaData` and perform mappings. + + For full documentation and examples see + :ref:`automap_basic_use`. + + :param autoload_with: an :class:`_engine.Engine` or + :class:`_engine.Connection` with which + to perform schema reflection; when specified, the + :meth:`_schema.MetaData.reflect` method will be invoked within + the scope of this method. + + :param engine: legacy; use :paramref:`.AutomapBase.autoload_with`. + Used to indicate the :class:`_engine.Engine` or + :class:`_engine.Connection` with which to reflect tables with, + if :paramref:`.AutomapBase.reflect` is True. + + :param reflect: legacy; use :paramref:`.AutomapBase.autoload_with`. + Indicates that :meth:`_schema.MetaData.reflect` should be invoked. + + :param classname_for_table: callable function which will be used to + produce new class names, given a table name. Defaults to + :func:`.classname_for_table`. + + :param modulename_for_table: callable function which will be used to + produce the effective ``__module__`` for an internally generated + class, to allow for multiple classes of the same name in a single + automap base which would be in different "modules". + + Defaults to ``None``, which will indicate that ``__module__`` will not + be set explicitly; the Python runtime will use the value + ``sqlalchemy.ext.automap`` for these classes. + + When assigning ``__module__`` to generated classes, they can be + accessed based on dot-separated module names using the + :attr:`.AutomapBase.by_module` collection. Classes that have + an explicit ``__module_`` assigned using this hook do **not** get + placed into the :attr:`.AutomapBase.classes` collection, only + into :attr:`.AutomapBase.by_module`. + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`automap_by_module` + + :param name_for_scalar_relationship: callable function which will be + used to produce relationship names for scalar relationships. Defaults + to :func:`.name_for_scalar_relationship`. + + :param name_for_collection_relationship: callable function which will + be used to produce relationship names for collection-oriented + relationships. Defaults to :func:`.name_for_collection_relationship`. + + :param generate_relationship: callable function which will be used to + actually generate :func:`_orm.relationship` and :func:`.backref` + constructs. Defaults to :func:`.generate_relationship`. 
+ + :param collection_class: the Python collection class that will be used + when a new :func:`_orm.relationship` + object is created that represents a + collection. Defaults to ``list``. + + :param schema: Schema name to reflect when reflecting tables using + the :paramref:`.AutomapBase.prepare.autoload_with` parameter. The name + is passed to the :paramref:`_schema.MetaData.reflect.schema` parameter + of :meth:`_schema.MetaData.reflect`. When omitted, the default schema + in use by the database connection is used. + + .. note:: The :paramref:`.AutomapBase.prepare.schema` + parameter supports reflection of a single schema at a time. + In order to include tables from many schemas, use + multiple calls to :meth:`.AutomapBase.prepare`. + + For an overview of multiple-schema automap including the use + of additional naming conventions to resolve table name + conflicts, see the section :ref:`automap_by_module`. + + .. versionadded:: 2.0 :meth:`.AutomapBase.prepare` supports being + directly invoked any number of times, keeping track of tables + that have already been processed to avoid processing them + a second time. + + :param reflection_options: When present, this dictionary of options + will be passed to :meth:`_schema.MetaData.reflect` + to supply general reflection-specific options like ``only`` and/or + dialect-specific options like ``oracle_resolve_synonyms``. + + .. versionadded:: 1.4 + + """ + + for mr in cls.__mro__: + if "_sa_automapbase_bookkeeping" in mr.__dict__: + automap_base = cast("Type[AutomapBase]", mr) + break + else: + assert False, "Can't locate automap base in class hierarchy" + + glbls = globals() + if classname_for_table is None: + classname_for_table = glbls["classname_for_table"] + if name_for_scalar_relationship is None: + name_for_scalar_relationship = glbls[ + "name_for_scalar_relationship" + ] + if name_for_collection_relationship is None: + name_for_collection_relationship = glbls[ + "name_for_collection_relationship" + ] + if generate_relationship is None: + generate_relationship = glbls["generate_relationship"] + if collection_class is None: + collection_class = list + + if autoload_with: + reflect = True + + if engine: + autoload_with = engine + + if reflect: + assert autoload_with + opts = dict( + schema=schema, + extend_existing=True, + autoload_replace=False, + ) + if reflection_options: + opts.update(reflection_options) + cls.metadata.reflect(autoload_with, **opts) # type: ignore[arg-type] # noqa: E501 + + with _CONFIGURE_MUTEX: + table_to_map_config: Union[ + Dict[Optional[Table], _DeferredMapperConfig], + Dict[Table, _DeferredMapperConfig], + ] = { + cast("Table", m.local_table): m + for m in _DeferredMapperConfig.classes_for_base( + cls, sort=False + ) + } + + many_to_many: List[ + Tuple[Table, Table, List[ForeignKeyConstraint], Table] + ] + many_to_many = [] + + bookkeeping = automap_base._sa_automapbase_bookkeeping + metadata_tables = cls.metadata.tables + + for table_key in set(metadata_tables).difference( + bookkeeping.table_keys + ): + table = metadata_tables[table_key] + bookkeeping.table_keys.add(table_key) + + lcl_m2m, rem_m2m, m2m_const = _is_many_to_many(cls, table) + if lcl_m2m is not None: + assert rem_m2m is not None + assert m2m_const is not None + many_to_many.append((lcl_m2m, rem_m2m, m2m_const, table)) + elif not table.primary_key: + continue + elif table not in table_to_map_config: + clsdict: Dict[str, Any] = {"__table__": table} + if modulename_for_table is not None: + new_module = modulename_for_table( + cls, table.name, table + ) 
+ if new_module is not None: + clsdict["__module__"] = new_module + else: + new_module = None + + newname = classname_for_table(cls, table.name, table) + if new_module is None and newname in cls.classes: + util.warn( + "Ignoring duplicate class name " + f"'{newname}' " + "received in automap base for table " + f"{table.key} without " + "``__module__`` being set; consider using the " + "``modulename_for_table`` hook" + ) + continue + + mapped_cls = type( + newname, + (automap_base,), + clsdict, + ) + map_config = _DeferredMapperConfig.config_for_cls( + mapped_cls + ) + assert map_config.cls.__name__ == newname + if new_module is None: + cls.classes[newname] = mapped_cls + + by_module_properties: ByModuleProperties = cls.by_module + for token in map_config.cls.__module__.split("."): + if token not in by_module_properties: + by_module_properties[token] = util.Properties({}) + + props = by_module_properties[token] + + # we can assert this because the clsregistry + # module would have raised if there was a mismatch + # between modules/classes already. + # see test_cls_schema_name_conflict + assert isinstance(props, Properties) + by_module_properties = props + + by_module_properties[map_config.cls.__name__] = mapped_cls + + table_to_map_config[table] = map_config + + for map_config in table_to_map_config.values(): + _relationships_for_fks( + automap_base, + map_config, + table_to_map_config, + collection_class, + name_for_scalar_relationship, + name_for_collection_relationship, + generate_relationship, + ) + + for lcl_m2m, rem_m2m, m2m_const, table in many_to_many: + _m2m_relationship( + automap_base, + lcl_m2m, + rem_m2m, + m2m_const, + table, + table_to_map_config, + collection_class, + name_for_scalar_relationship, + name_for_collection_relationship, + generate_relationship, + ) + + for map_config in _DeferredMapperConfig.classes_for_base( + automap_base + ): + map_config.map() + + _sa_decl_prepare = True + """Indicate that the mapping of classes should be deferred. + + The presence of this attribute name indicates to declarative + that the call to mapper() should not occur immediately; instead, + information about the table and attributes to be mapped are gathered + into an internal structure called _DeferredMapperConfig. These + objects can be collected later using classes_for_base(), additional + mapping decisions can be made, and then the map() method will actually + apply the mapping. + + The only real reason this deferral of the whole + thing is needed is to support primary key columns that aren't reflected + yet when the class is declared; everything else can theoretically be + added to the mapper later. However, the _DeferredMapperConfig is a + nice interface in any case which exists at that not usually exposed point + at which declarative has the class and the Table but hasn't called + mapper() yet. + + """ + + @classmethod + def _sa_raise_deferred_config(cls) -> NoReturn: + raise orm_exc.UnmappedClassError( + cls, + msg="Class %s is a subclass of AutomapBase. " + "Mappings are not produced until the .prepare() " + "method is called on the class hierarchy." + % orm_exc._safe_cls_name(cls), + ) + + +@dataclasses.dataclass +class _Bookkeeping: + __slots__ = ("table_keys",) + + table_keys: Set[str] + + +def automap_base( + declarative_base: Optional[Type[Any]] = None, **kw: Any +) -> Any: + r"""Produce a declarative automap base. 
+ + This function produces a new base class that is a product of the + :class:`.AutomapBase` class as well a declarative base produced by + :func:`.declarative.declarative_base`. + + All parameters other than ``declarative_base`` are keyword arguments + that are passed directly to the :func:`.declarative.declarative_base` + function. + + :param declarative_base: an existing class produced by + :func:`.declarative.declarative_base`. When this is passed, the function + no longer invokes :func:`.declarative.declarative_base` itself, and all + other keyword arguments are ignored. + + :param \**kw: keyword arguments are passed along to + :func:`.declarative.declarative_base`. + + """ + if declarative_base is None: + Base = _declarative_base(**kw) + else: + Base = declarative_base + + return type( + Base.__name__, + (AutomapBase, Base), + { + "__abstract__": True, + "classes": util.Properties({}), + "by_module": util.Properties({}), + "_sa_automapbase_bookkeeping": _Bookkeeping(set()), + }, + ) + + +def _is_many_to_many( + automap_base: Type[Any], table: Table +) -> Tuple[ + Optional[Table], Optional[Table], Optional[list[ForeignKeyConstraint]] +]: + fk_constraints = [ + const + for const in table.constraints + if isinstance(const, ForeignKeyConstraint) + ] + if len(fk_constraints) != 2: + return None, None, None + + cols: List[Column[Any]] = sum( + [ + [fk.parent for fk in fk_constraint.elements] + for fk_constraint in fk_constraints + ], + [], + ) + + if set(cols) != set(table.c): + return None, None, None + + return ( + fk_constraints[0].elements[0].column.table, + fk_constraints[1].elements[0].column.table, + fk_constraints, + ) + + +def _relationships_for_fks( + automap_base: Type[Any], + map_config: _DeferredMapperConfig, + table_to_map_config: Union[ + Dict[Optional[Table], _DeferredMapperConfig], + Dict[Table, _DeferredMapperConfig], + ], + collection_class: type, + name_for_scalar_relationship: NameForScalarRelationshipType, + name_for_collection_relationship: NameForCollectionRelationshipType, + generate_relationship: GenerateRelationshipType, +) -> None: + local_table = cast("Optional[Table]", map_config.local_table) + local_cls = cast( + "Optional[Type[Any]]", map_config.cls + ) # derived from a weakref, may be None + + if local_table is None or local_cls is None: + return + for constraint in local_table.constraints: + if isinstance(constraint, ForeignKeyConstraint): + fks = constraint.elements + referred_table = fks[0].column.table + referred_cfg = table_to_map_config.get(referred_table, None) + if referred_cfg is None: + continue + referred_cls = referred_cfg.cls + + if local_cls is not referred_cls and issubclass( + local_cls, referred_cls + ): + continue + + relationship_name = name_for_scalar_relationship( + automap_base, local_cls, referred_cls, constraint + ) + backref_name = name_for_collection_relationship( + automap_base, referred_cls, local_cls, constraint + ) + + o2m_kws: Dict[str, Union[str, bool]] = {} + nullable = False not in {fk.parent.nullable for fk in fks} + if not nullable: + o2m_kws["cascade"] = "all, delete-orphan" + + if ( + constraint.ondelete + and constraint.ondelete.lower() == "cascade" + ): + o2m_kws["passive_deletes"] = True + else: + if ( + constraint.ondelete + and constraint.ondelete.lower() == "set null" + ): + o2m_kws["passive_deletes"] = True + + create_backref = backref_name not in referred_cfg.properties + + if relationship_name not in map_config.properties: + if create_backref: + backref_obj = generate_relationship( + automap_base, + 
interfaces.ONETOMANY, + backref, + backref_name, + referred_cls, + local_cls, + collection_class=collection_class, + **o2m_kws, + ) + else: + backref_obj = None + rel = generate_relationship( + automap_base, + interfaces.MANYTOONE, + relationship, + relationship_name, + local_cls, + referred_cls, + foreign_keys=[fk.parent for fk in constraint.elements], + backref=backref_obj, + remote_side=[fk.column for fk in constraint.elements], + ) + if rel is not None: + map_config.properties[relationship_name] = rel + if not create_backref: + referred_cfg.properties[ + backref_name + ].back_populates = relationship_name # type: ignore[union-attr] # noqa: E501 + elif create_backref: + rel = generate_relationship( + automap_base, + interfaces.ONETOMANY, + relationship, + backref_name, + referred_cls, + local_cls, + foreign_keys=[fk.parent for fk in constraint.elements], + back_populates=relationship_name, + collection_class=collection_class, + **o2m_kws, + ) + if rel is not None: + referred_cfg.properties[backref_name] = rel + map_config.properties[ + relationship_name + ].back_populates = backref_name # type: ignore[union-attr] + + +def _m2m_relationship( + automap_base: Type[Any], + lcl_m2m: Table, + rem_m2m: Table, + m2m_const: List[ForeignKeyConstraint], + table: Table, + table_to_map_config: Union[ + Dict[Optional[Table], _DeferredMapperConfig], + Dict[Table, _DeferredMapperConfig], + ], + collection_class: type, + name_for_scalar_relationship: NameForCollectionRelationshipType, + name_for_collection_relationship: NameForCollectionRelationshipType, + generate_relationship: GenerateRelationshipType, +) -> None: + map_config = table_to_map_config.get(lcl_m2m, None) + referred_cfg = table_to_map_config.get(rem_m2m, None) + if map_config is None or referred_cfg is None: + return + + local_cls = map_config.cls + referred_cls = referred_cfg.cls + + relationship_name = name_for_collection_relationship( + automap_base, local_cls, referred_cls, m2m_const[0] + ) + backref_name = name_for_collection_relationship( + automap_base, referred_cls, local_cls, m2m_const[1] + ) + + create_backref = backref_name not in referred_cfg.properties + + if table in table_to_map_config: + overlaps = "__*" + else: + overlaps = None + + if relationship_name not in map_config.properties: + if create_backref: + backref_obj = generate_relationship( + automap_base, + interfaces.MANYTOMANY, + backref, + backref_name, + referred_cls, + local_cls, + collection_class=collection_class, + overlaps=overlaps, + ) + else: + backref_obj = None + + rel = generate_relationship( + automap_base, + interfaces.MANYTOMANY, + relationship, + relationship_name, + local_cls, + referred_cls, + overlaps=overlaps, + secondary=table, + primaryjoin=and_( + fk.column == fk.parent for fk in m2m_const[0].elements + ), # type: ignore [arg-type] + secondaryjoin=and_( + fk.column == fk.parent for fk in m2m_const[1].elements + ), # type: ignore [arg-type] + backref=backref_obj, + collection_class=collection_class, + ) + if rel is not None: + map_config.properties[relationship_name] = rel + + if not create_backref: + referred_cfg.properties[ + backref_name + ].back_populates = relationship_name # type: ignore[union-attr] # noqa: E501 + elif create_backref: + rel = generate_relationship( + automap_base, + interfaces.MANYTOMANY, + relationship, + backref_name, + referred_cls, + local_cls, + overlaps=overlaps, + secondary=table, + primaryjoin=and_( + fk.column == fk.parent for fk in m2m_const[1].elements + ), # type: ignore [arg-type] + secondaryjoin=and_( + 
fk.column == fk.parent for fk in m2m_const[0].elements + ), # type: ignore [arg-type] + back_populates=relationship_name, + collection_class=collection_class, + ) + if rel is not None: + referred_cfg.properties[backref_name] = rel + map_config.properties[ + relationship_name + ].back_populates = backref_name # type: ignore[union-attr] diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/baked.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/baked.py new file mode 100644 index 00000000..60f7ae66 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/baked.py @@ -0,0 +1,574 @@ +# ext/baked.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +"""Baked query extension. + +Provides a creational pattern for the :class:`.query.Query` object which +allows the fully constructed object, Core select statement, and string +compiled result to be fully cached. + + +""" + +import collections.abc as collections_abc +import logging + +from .. import exc as sa_exc +from .. import util +from ..orm import exc as orm_exc +from ..orm.query import Query +from ..orm.session import Session +from ..sql import func +from ..sql import literal_column +from ..sql import util as sql_util + + +log = logging.getLogger(__name__) + + +class Bakery: + """Callable which returns a :class:`.BakedQuery`. + + This object is returned by the class method + :meth:`.BakedQuery.bakery`. It exists as an object + so that the "cache" can be easily inspected. + + .. versionadded:: 1.2 + + + """ + + __slots__ = "cls", "cache" + + def __init__(self, cls_, cache): + self.cls = cls_ + self.cache = cache + + def __call__(self, initial_fn, *args): + return self.cls(self.cache, initial_fn, args) + + +class BakedQuery: + """A builder object for :class:`.query.Query` objects.""" + + __slots__ = "steps", "_bakery", "_cache_key", "_spoiled" + + def __init__(self, bakery, initial_fn, args=()): + self._cache_key = () + self._update_cache_key(initial_fn, args) + self.steps = [initial_fn] + self._spoiled = False + self._bakery = bakery + + @classmethod + def bakery(cls, size=200, _size_alert=None): + """Construct a new bakery. + + :return: an instance of :class:`.Bakery` + + """ + + return Bakery(cls, util.LRUCache(size, size_alert=_size_alert)) + + def _clone(self): + b1 = BakedQuery.__new__(BakedQuery) + b1._cache_key = self._cache_key + b1.steps = list(self.steps) + b1._bakery = self._bakery + b1._spoiled = self._spoiled + return b1 + + def _update_cache_key(self, fn, args=()): + self._cache_key += (fn.__code__,) + args + + def __iadd__(self, other): + if isinstance(other, tuple): + self.add_criteria(*other) + else: + self.add_criteria(other) + return self + + def __add__(self, other): + if isinstance(other, tuple): + return self.with_criteria(*other) + else: + return self.with_criteria(other) + + def add_criteria(self, fn, *args): + """Add a criteria function to this :class:`.BakedQuery`. + + This is equivalent to using the ``+=`` operator to + modify a :class:`.BakedQuery` in-place. + + """ + self._update_cache_key(fn, args) + self.steps.append(fn) + return self + + def with_criteria(self, fn, *args): + """Add a criteria function to a :class:`.BakedQuery` cloned from this + one. + + This is equivalent to using the ``+`` operator to + produce a new :class:`.BakedQuery` with modifications. 
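+
+        E.g., a brief sketch (``bq`` is an existing :class:`.BakedQuery`,
+        ``User`` an assumed mapped class)::
+
+            bq2 = bq.with_criteria(lambda q: q.filter(User.name == "spongebob"))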
+ + """ + return self._clone().add_criteria(fn, *args) + + def for_session(self, session): + """Return a :class:`_baked.Result` object for this + :class:`.BakedQuery`. + + This is equivalent to calling the :class:`.BakedQuery` as a + Python callable, e.g. ``result = my_baked_query(session)``. + + """ + return Result(self, session) + + def __call__(self, session): + return self.for_session(session) + + def spoil(self, full=False): + """Cancel any query caching that will occur on this BakedQuery object. + + The BakedQuery can continue to be used normally, however additional + creational functions will not be cached; they will be called + on every invocation. + + This is to support the case where a particular step in constructing + a baked query disqualifies the query from being cacheable, such + as a variant that relies upon some uncacheable value. + + :param full: if False, only functions added to this + :class:`.BakedQuery` object subsequent to the spoil step will be + non-cached; the state of the :class:`.BakedQuery` up until + this point will be pulled from the cache. If True, then the + entire :class:`_query.Query` object is built from scratch each + time, with all creational functions being called on each + invocation. + + """ + if not full and not self._spoiled: + _spoil_point = self._clone() + _spoil_point._cache_key += ("_query_only",) + self.steps = [_spoil_point._retrieve_baked_query] + self._spoiled = True + return self + + def _effective_key(self, session): + """Return the key that actually goes into the cache dictionary for + this :class:`.BakedQuery`, taking into account the given + :class:`.Session`. + + This basically means we also will include the session's query_class, + as the actual :class:`_query.Query` object is part of what's cached + and needs to match the type of :class:`_query.Query` that a later + session will want to use. + + """ + return self._cache_key + (session._query_cls,) + + def _with_lazyload_options(self, options, effective_path, cache_path=None): + """Cloning version of _add_lazyload_options.""" + q = self._clone() + q._add_lazyload_options(options, effective_path, cache_path=cache_path) + return q + + def _add_lazyload_options(self, options, effective_path, cache_path=None): + """Used by per-state lazy loaders to add options to the + "lazy load" query from a parent query. + + Creates a cache key based on given load path and query options; + if a repeatable cache key cannot be generated, the query is + "spoiled" so that it won't use caching. + + """ + + key = () + + if not cache_path: + cache_path = effective_path + + for opt in options: + if opt._is_legacy_option or opt._is_compile_state: + ck = opt._generate_cache_key() + if ck is None: + self.spoil(full=True) + else: + assert not ck[1], ( + "loader options with variable bound parameters " + "not supported with baked queries. Please " + "use new-style select() statements for cached " + "ORM queries." 
+ ) + key += ck[0] + + self.add_criteria( + lambda q: q._with_current_path(effective_path).options(*options), + cache_path.path, + key, + ) + + def _retrieve_baked_query(self, session): + query = self._bakery.get(self._effective_key(session), None) + if query is None: + query = self._as_query(session) + self._bakery[self._effective_key(session)] = query.with_session( + None + ) + return query.with_session(session) + + def _bake(self, session): + query = self._as_query(session) + query.session = None + + # in 1.4, this is where before_compile() event is + # invoked + statement = query._statement_20() + + # if the query is not safe to cache, we still do everything as though + # we did cache it, since the receiver of _bake() assumes subqueryload + # context was set up, etc. + # + # note also we want to cache the statement itself because this + # allows the statement itself to hold onto its cache key that is + # used by the Connection, which in itself is more expensive to + # generate than what BakedQuery was able to provide in 1.3 and prior + + if statement._compile_options._bake_ok: + self._bakery[self._effective_key(session)] = ( + query, + statement, + ) + + return query, statement + + def to_query(self, query_or_session): + """Return the :class:`_query.Query` object for use as a subquery. + + This method should be used within the lambda callable being used + to generate a step of an enclosing :class:`.BakedQuery`. The + parameter should normally be the :class:`_query.Query` object that + is passed to the lambda:: + + sub_bq = self.bakery(lambda s: s.query(User.name)) + sub_bq += lambda q: q.filter( + User.id == Address.user_id).correlate(Address) + + main_bq = self.bakery(lambda s: s.query(Address)) + main_bq += lambda q: q.filter( + sub_bq.to_query(q).exists()) + + In the case where the subquery is used in the first callable against + a :class:`.Session`, the :class:`.Session` is also accepted:: + + sub_bq = self.bakery(lambda s: s.query(User.name)) + sub_bq += lambda q: q.filter( + User.id == Address.user_id).correlate(Address) + + main_bq = self.bakery( + lambda s: s.query( + Address.id, sub_bq.to_query(s).scalar_subquery()) + ) + + :param query_or_session: a :class:`_query.Query` object or a + :class:`.Session` object, which is assumed to be within the context + of an enclosing :class:`.BakedQuery` callable. + + + .. versionadded:: 1.3 + + + """ + + if isinstance(query_or_session, Session): + session = query_or_session + elif isinstance(query_or_session, Query): + session = query_or_session.session + if session is None: + raise sa_exc.ArgumentError( + "Given Query needs to be associated with a Session" + ) + else: + raise TypeError( + "Query or Session object expected, got %r." + % type(query_or_session) + ) + return self._as_query(session) + + def _as_query(self, session): + query = self.steps[0](session) + + for step in self.steps[1:]: + query = step(query) + + return query + + +class Result: + """Invokes a :class:`.BakedQuery` against a :class:`.Session`. + + The :class:`_baked.Result` object is where the actual :class:`.query.Query` + object gets created, or retrieved from the cache, + against a target :class:`.Session`, and is then invoked for results.
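+
+    A minimal sketch of the flow, assuming a ``bakery`` created via
+    :meth:`.BakedQuery.bakery` and a hypothetical mapped ``User`` class::
+
+        bq = bakery(lambda s: s.query(User))
+        result = bq(session)    # a Result; no SQL has been emitted yet
+        users = result.all()    # query is baked (or pulled from cache) and run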
+ + """ + + __slots__ = "bq", "session", "_params", "_post_criteria" + + def __init__(self, bq, session): + self.bq = bq + self.session = session + self._params = {} + self._post_criteria = [] + + def params(self, *args, **kw): + """Specify parameters to be replaced into the string SQL statement.""" + + if len(args) == 1: + kw.update(args[0]) + elif len(args) > 0: + raise sa_exc.ArgumentError( + "params() takes zero or one positional argument, " + "which is a dictionary." + ) + self._params.update(kw) + return self + + def _using_post_criteria(self, fns): + if fns: + self._post_criteria.extend(fns) + return self + + def with_post_criteria(self, fn): + """Add a criteria function that will be applied post-cache. + + This adds a function that will be run against the + :class:`_query.Query` object after it is retrieved from the + cache. This currently includes **only** the + :meth:`_query.Query.params` and :meth:`_query.Query.execution_options` + methods. + + .. warning:: :meth:`_baked.Result.with_post_criteria` + functions are applied + to the :class:`_query.Query` + object **after** the query's SQL statement + object has been retrieved from the cache. Only + :meth:`_query.Query.params` and + :meth:`_query.Query.execution_options` + methods should be used. + + + .. versionadded:: 1.2 + + + """ + return self._using_post_criteria([fn]) + + def _as_query(self): + q = self.bq._as_query(self.session).params(self._params) + for fn in self._post_criteria: + q = fn(q) + return q + + def __str__(self): + return str(self._as_query()) + + def __iter__(self): + return self._iter().__iter__() + + def _iter(self): + bq = self.bq + + if not self.session.enable_baked_queries or bq._spoiled: + return self._as_query()._iter() + + query, statement = bq._bakery.get( + bq._effective_key(self.session), (None, None) + ) + if query is None: + query, statement = bq._bake(self.session) + + if self._params: + q = query.params(self._params) + else: + q = query + for fn in self._post_criteria: + q = fn(q) + + params = q._params + execution_options = dict(q._execution_options) + execution_options.update( + { + "_sa_orm_load_options": q.load_options, + "compiled_cache": bq._bakery, + } + ) + + result = self.session.execute( + statement, params, execution_options=execution_options + ) + if result._attributes.get("is_single_entity", False): + result = result.scalars() + + if result._attributes.get("filtered", False): + result = result.unique() + + return result + + def count(self): + """return the 'count'. + + Equivalent to :meth:`_query.Query.count`. + + Note this uses a subquery to ensure an accurate count regardless + of the structure of the original statement. + + """ + + col = func.count(literal_column("*")) + bq = self.bq.with_criteria(lambda q: q._legacy_from_self(col)) + return bq.for_session(self.session).params(self._params).scalar() + + def scalar(self): + """Return the first element of the first result or None + if no rows present. If multiple rows are returned, + raises MultipleResultsFound. + + Equivalent to :meth:`_query.Query.scalar`. + + """ + try: + ret = self.one() + if not isinstance(ret, collections_abc.Sequence): + return ret + return ret[0] + except orm_exc.NoResultFound: + return None + + def first(self): + """Return the first row. + + Equivalent to :meth:`_query.Query.first`. 
+ + """ + + bq = self.bq.with_criteria(lambda q: q.slice(0, 1)) + return ( + bq.for_session(self.session) + .params(self._params) + ._using_post_criteria(self._post_criteria) + ._iter() + .first() + ) + + def one(self): + """Return exactly one result or raise an exception. + + Equivalent to :meth:`_query.Query.one`. + + """ + return self._iter().one() + + def one_or_none(self): + """Return one or zero results, or raise an exception for multiple + rows. + + Equivalent to :meth:`_query.Query.one_or_none`. + + """ + return self._iter().one_or_none() + + def all(self): + """Return all rows. + + Equivalent to :meth:`_query.Query.all`. + + """ + return self._iter().all() + + def get(self, ident): + """Retrieve an object based on identity. + + Equivalent to :meth:`_query.Query.get`. + + """ + + query = self.bq.steps[0](self.session) + return query._get_impl(ident, self._load_on_pk_identity) + + def _load_on_pk_identity(self, session, query, primary_key_identity, **kw): + """Load the given primary key identity from the database.""" + + mapper = query._raw_columns[0]._annotations["parententity"] + + _get_clause, _get_params = mapper._get_clause + + def setup(query): + _lcl_get_clause = _get_clause + q = query._clone() + q._get_condition() + q._order_by = None + + # None present in ident - turn those comparisons + # into "IS NULL" + if None in primary_key_identity: + nones = { + _get_params[col].key + for col, value in zip( + mapper.primary_key, primary_key_identity + ) + if value is None + } + _lcl_get_clause = sql_util.adapt_criterion_to_null( + _lcl_get_clause, nones + ) + + # TODO: can mapper._get_clause be pre-adapted? + q._where_criteria = ( + sql_util._deep_annotate(_lcl_get_clause, {"_orm_adapt": True}), + ) + + for fn in self._post_criteria: + q = fn(q) + return q + + # cache the query against a key that includes + # which positions in the primary key are NULL + # (remember, we can map to an OUTER JOIN) + bq = self.bq + + # add the clause we got from mapper._get_clause to the cache + # key so that if a race causes multiple calls to _get_clause, + # we've cached on ours + bq = bq._clone() + bq._cache_key += (_get_clause,) + + bq = bq.with_criteria( + setup, tuple(elem is None for elem in primary_key_identity) + ) + + params = { + _get_params[primary_key].key: id_val + for id_val, primary_key in zip( + primary_key_identity, mapper.primary_key + ) + } + + result = list(bq.for_session(self.session).params(**params)) + l = len(result) + if l > 1: + raise orm_exc.MultipleResultsFound() + elif l: + return result[0] + else: + return None + + +bakery = BakedQuery.bakery diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/compiler.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/compiler.py new file mode 100644 index 00000000..01462ad0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/compiler.py @@ -0,0 +1,555 @@ +# ext/compiler.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +r"""Provides an API for creation of custom ClauseElements and compilers. 
+ +Synopsis +======== + +Usage involves the creation of one or more +:class:`~sqlalchemy.sql.expression.ClauseElement` subclasses and one or +more callables defining its compilation:: + + from sqlalchemy.ext.compiler import compiles + from sqlalchemy.sql.expression import ColumnClause + + class MyColumn(ColumnClause): + inherit_cache = True + + @compiles(MyColumn) + def compile_mycolumn(element, compiler, **kw): + return "[%s]" % element.name + +Above, ``MyColumn`` extends :class:`~sqlalchemy.sql.expression.ColumnClause`, +the base expression element for named column objects. The ``compiles`` +decorator registers itself with the ``MyColumn`` class so that it is invoked +when the object is compiled to a string:: + + from sqlalchemy import select + + s = select(MyColumn('x'), MyColumn('y')) + print(str(s)) + +Produces:: + + SELECT [x], [y] + +Dialect-specific compilation rules +================================== + +Compilers can also be made dialect-specific. The appropriate compiler will be +invoked for the dialect in use:: + + from sqlalchemy.schema import DDLElement + + class AlterColumn(DDLElement): + inherit_cache = False + + def __init__(self, column, cmd): + self.column = column + self.cmd = cmd + + @compiles(AlterColumn) + def visit_alter_column(element, compiler, **kw): + return "ALTER COLUMN %s ..." % element.column.name + + @compiles(AlterColumn, 'postgresql') + def visit_alter_column(element, compiler, **kw): + return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name, + element.column.name) + +The second ``visit_alter_column`` will be invoked when any ``postgresql`` +dialect is used. + +.. _compilerext_compiling_subelements: + +Compiling sub-elements of a custom expression construct +======================================================= + +The ``compiler`` argument is the +:class:`~sqlalchemy.engine.interfaces.Compiled` object in use. This object +can be inspected for any information about the in-progress compilation, +including ``compiler.dialect``, ``compiler.statement`` etc. The +:class:`~sqlalchemy.sql.compiler.SQLCompiler` and +:class:`~sqlalchemy.sql.compiler.DDLCompiler` both include a ``process()`` +method which can be used for compilation of embedded attributes:: + + from sqlalchemy.sql.expression import Executable, ClauseElement + + class InsertFromSelect(Executable, ClauseElement): + inherit_cache = False + + def __init__(self, table, select): + self.table = table + self.select = select + + @compiles(InsertFromSelect) + def visit_insert_from_select(element, compiler, **kw): + return "INSERT INTO %s (%s)" % ( + compiler.process(element.table, asfrom=True, **kw), + compiler.process(element.select, **kw) + ) + + insert = InsertFromSelect(t1, select(t1).where(t1.c.x>5)) + print(insert) + +Produces:: + + "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z + FROM mytable WHERE mytable.x > :x_1)" + +.. note:: + + The above ``InsertFromSelect`` construct is only an example; this actual + functionality is already available using the + :meth:`_expression.Insert.from_select` method. + + +Cross Compiling between SQL and DDL compilers +--------------------------------------------- + +SQL and DDL constructs are each compiled using different base compilers - +``SQLCompiler`` and ``DDLCompiler``. A common need is to access the +compilation rules of SQL expressions from within a DDL expression.
The +``DDLCompiler`` includes an accessor ``sql_compiler`` for this reason, such as +below where we generate a CHECK constraint that embeds a SQL expression:: + + @compiles(MyConstraint) + def compile_my_constraint(constraint, ddlcompiler, **kw): + kw['literal_binds'] = True + return "CONSTRAINT %s CHECK (%s)" % ( + constraint.name, + ddlcompiler.sql_compiler.process( + constraint.expression, **kw) + ) + +Above, we add an additional flag to the process step as called by +:meth:`.SQLCompiler.process`, which is the ``literal_binds`` flag. This +indicates that any SQL expression which refers to a :class:`.BindParameter` +object or other "literal" object such as those which refer to strings or +integers should be rendered **in-place**, rather than being referred to as +a bound parameter; when emitting DDL, bound parameters are typically not +supported. + + +Changing the default compilation of existing constructs +======================================================= + +The compiler extension applies just as well to the existing constructs. When +overriding the compilation of a built in SQL construct, the @compiles +decorator is invoked upon the appropriate class (be sure to use the class, +i.e. ``Insert`` or ``Select``, instead of the creation function such +as ``insert()`` or ``select()``). + +Within the new compilation function, to get at the "original" compilation +routine, use the appropriate visit_XXX method - this +because compiler.process() will call upon the overriding routine and cause +an endless loop. Such as, to add "prefix" to all insert statements:: + + from sqlalchemy.sql.expression import Insert + + @compiles(Insert) + def prefix_inserts(insert, compiler, **kw): + return compiler.visit_insert(insert.prefix_with("some prefix"), **kw) + +The above compiler will prefix all INSERT statements with "some prefix" when +compiled. + +.. _type_compilation_extension: + +Changing Compilation of Types +============================= + +``compiler`` works for types, too, such as below where we implement the +MS-SQL specific 'max' keyword for ``String``/``VARCHAR``:: + + @compiles(String, 'mssql') + @compiles(VARCHAR, 'mssql') + def compile_varchar(element, compiler, **kw): + if element.length == 'max': + return "VARCHAR('max')" + else: + return compiler.visit_VARCHAR(element, **kw) + + foo = Table('foo', metadata, + Column('data', VARCHAR('max')) + ) + +Subclassing Guidelines +====================== + +A big part of using the compiler extension is subclassing SQLAlchemy +expression constructs. To make this easier, the expression and +schema packages feature a set of "bases" intended for common tasks. +A synopsis is as follows: + +* :class:`~sqlalchemy.sql.expression.ClauseElement` - This is the root + expression class. Any SQL expression can be derived from this base, and is + probably the best choice for longer constructs such as specialized INSERT + statements. + +* :class:`~sqlalchemy.sql.expression.ColumnElement` - The root of all + "column-like" elements. Anything that you'd place in the "columns" clause of + a SELECT statement (as well as order by and group by) can derive from this - + the object will automatically have Python "comparison" behavior. + + :class:`~sqlalchemy.sql.expression.ColumnElement` classes want to have a + ``type`` member which is expression's return type. 
This can be established + at the instance level in the constructor, or at the class level if it's + generally constant:: + + class timestamp(ColumnElement): + type = TIMESTAMP() + inherit_cache = True + +* :class:`~sqlalchemy.sql.functions.FunctionElement` - This is a hybrid of a + ``ColumnElement`` and a "from clause" like object, and represents a SQL + function or stored procedure type of call. Since most databases support + statements along the line of "SELECT FROM <some function>" + ``FunctionElement`` adds in the ability to be used in the FROM clause of a + ``select()`` construct:: + + from sqlalchemy.sql.expression import FunctionElement + + class coalesce(FunctionElement): + name = 'coalesce' + inherit_cache = True + + @compiles(coalesce) + def compile(element, compiler, **kw): + return "coalesce(%s)" % compiler.process(element.clauses, **kw) + + @compiles(coalesce, 'oracle') + def compile(element, compiler, **kw): + if len(element.clauses) > 2: + raise TypeError("coalesce only supports two arguments on Oracle") + return "nvl(%s)" % compiler.process(element.clauses, **kw) + +* :class:`.ExecutableDDLElement` - The root of all DDL expressions, + like CREATE TABLE, ALTER TABLE, etc. Compilation of + :class:`.ExecutableDDLElement` subclasses is issued by a + :class:`.DDLCompiler` instead of a :class:`.SQLCompiler`. + :class:`.ExecutableDDLElement` can also be used as an event hook in + conjunction with event hooks like :meth:`.DDLEvents.before_create` and + :meth:`.DDLEvents.after_create`, allowing the construct to be invoked + automatically during CREATE TABLE and DROP TABLE sequences. + + .. seealso:: + + :ref:`metadata_ddl_toplevel` - contains examples of associating + :class:`.DDL` objects (which are themselves :class:`.ExecutableDDLElement` + instances) with :class:`.DDLEvents` event hooks. + +* :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which + should be used with any expression class that represents a "standalone" + SQL statement that can be passed directly to an ``execute()`` method. It + is already implicit within ``DDLElement`` and ``FunctionElement``. + +Most of the above constructs also respond to SQL statement caching. A +subclassed construct will want to define the caching behavior for the object, +which usually means setting the flag ``inherit_cache`` to the value of +``False`` or ``True``. See the next section :ref:`compilerext_caching` +for background. + + +.. _compilerext_caching: + +Enabling Caching Support for Custom Constructs +============================================== + +SQLAlchemy as of version 1.4 includes a +:ref:`SQL compilation caching facility <sql_caching>` which will allow +equivalent SQL constructs to cache their stringified form, along with other +structural information used to fetch results from the statement. + +For reasons discussed at :ref:`caching_caveats`, the implementation of this +caching system takes a conservative approach towards including custom SQL +constructs and/or subclasses within the caching system. This includes that +any user-defined SQL constructs, including all the examples for this +extension, will not participate in caching by default unless they positively +assert that they are able to do so. The :attr:`.HasCacheKey.inherit_cache` +attribute when set to ``True`` at the class level of a specific subclass +will indicate that instances of this class may be safely cached, using the +cache key generation scheme of the immediate superclass.
This applies +for example to the "synopsis" example indicated previously:: + + class MyColumn(ColumnClause): + inherit_cache = True + + @compiles(MyColumn) + def compile_mycolumn(element, compiler, **kw): + return "[%s]" % element.name + +Above, the ``MyColumn`` class does not include any new state that +affects its SQL compilation; the cache key of ``MyColumn`` instances will +make use of that of the ``ColumnClause`` superclass, meaning it will take +into account the class of the object (``MyColumn``), the string name and +datatype of the object:: + + >>> MyColumn("some_name", String())._generate_cache_key() + CacheKey( + key=('0', <class '__main__.MyColumn'>, + 'name', 'some_name', + 'type', (<class 'sqlalchemy.sql.sqltypes.String'>, + ('length', None), ('collation', None)) + ), bindparams=[]) + +For objects that are likely to be **used liberally as components within many +larger statements**, such as :class:`_schema.Column` subclasses and custom SQL +datatypes, it's important that **caching be enabled as much as possible**, as +this may otherwise negatively affect performance. + +An example of an object that **does** contain state which affects its SQL +compilation is the one illustrated at :ref:`compilerext_compiling_subelements`; +this is an "INSERT FROM SELECT" construct that combines together a +:class:`_schema.Table` as well as a :class:`_sql.Select` construct, each of +which independently affect the SQL string generation of the construct. For +this class, the example illustrates that it simply does not participate in +caching:: + + class InsertFromSelect(Executable, ClauseElement): + inherit_cache = False + + def __init__(self, table, select): + self.table = table + self.select = select + + @compiles(InsertFromSelect) + def visit_insert_from_select(element, compiler, **kw): + return "INSERT INTO %s (%s)" % ( + compiler.process(element.table, asfrom=True, **kw), + compiler.process(element.select, **kw) + ) + +While it is also possible that the above ``InsertFromSelect`` could be made to +produce a cache key that is composed of that of the :class:`_schema.Table` and +:class:`_sql.Select` components together, the API for this is not at the moment +fully public. However, for an "INSERT FROM SELECT" construct, which is only +used by itself for specific operations, caching is not as critical as in the +previous example. + +For objects that are **used in relative isolation and are generally +standalone**, such as custom :term:`DML` constructs like an "INSERT FROM +SELECT", **caching is generally less critical** as the lack of caching for such +a construct will have only localized implications for that specific operation. + + +Further Examples +================ + +"UTC timestamp" function +------------------------- + +A function that works like "CURRENT_TIMESTAMP" except applies the +appropriate conversions so that the time is in UTC time. Timestamps are best +stored in relational databases as UTC, without time zones. UTC so that your +database doesn't think time has gone backwards in the hour when daylight +savings ends, without timezones because timezones are like character +encodings - they're best applied only at the endpoints of an application +(i.e. convert to UTC upon user input, re-apply desired timezone upon display).
+ +For PostgreSQL and Microsoft SQL Server:: + + from sqlalchemy.sql import expression + from sqlalchemy.ext.compiler import compiles + from sqlalchemy.types import DateTime + + class utcnow(expression.FunctionElement): + type = DateTime() + inherit_cache = True + + @compiles(utcnow, 'postgresql') + def pg_utcnow(element, compiler, **kw): + return "TIMEZONE('utc', CURRENT_TIMESTAMP)" + + @compiles(utcnow, 'mssql') + def ms_utcnow(element, compiler, **kw): + return "GETUTCDATE()" + +Example usage:: + + from sqlalchemy import ( + Table, Column, Integer, String, DateTime, MetaData + ) + metadata = MetaData() + event = Table("event", metadata, + Column("id", Integer, primary_key=True), + Column("description", String(50), nullable=False), + Column("timestamp", DateTime, server_default=utcnow()) + ) + +"GREATEST" function +------------------- + +The "GREATEST" function is given any number of arguments and returns the one +that is of the highest value - it's equivalent to Python's ``max`` +function. A SQL-standard version versus a CASE-based version which only +accommodates two arguments:: + + from sqlalchemy.sql import expression, case + from sqlalchemy.ext.compiler import compiles + from sqlalchemy.types import Numeric + + class greatest(expression.FunctionElement): + type = Numeric() + name = 'greatest' + inherit_cache = True + + @compiles(greatest) + def default_greatest(element, compiler, **kw): + return compiler.visit_function(element) + + @compiles(greatest, 'sqlite') + @compiles(greatest, 'mssql') + @compiles(greatest, 'oracle') + def case_greatest(element, compiler, **kw): + arg1, arg2 = list(element.clauses) + return compiler.process(case((arg1 > arg2, arg1), else_=arg2), **kw) + +Example usage:: + + Session.query(Account).\ + filter( + greatest( + Account.checking_balance, + Account.savings_balance) > 10000 + ) + +"false" expression +------------------ + +Render a "false" constant expression, rendering as "0" on platforms that +don't have a "false" constant:: + + from sqlalchemy.sql import expression + from sqlalchemy.ext.compiler import compiles + + class sql_false(expression.ColumnElement): + inherit_cache = True + + @compiles(sql_false) + def default_false(element, compiler, **kw): + return "false" + + @compiles(sql_false, 'mssql') + @compiles(sql_false, 'mysql') + @compiles(sql_false, 'oracle') + def int_false(element, compiler, **kw): + return "0" + +Example usage:: + + from sqlalchemy import select, union_all + + exp = union_all( + select(users.c.name, sql_false().label("enrolled")), + select(customers.c.name, customers.c.enrolled) + ) + +""" +from .. import exc +from ..sql import sqltypes + + +def compiles(class_, *specs): + """Register a function as a compiler for a + given :class:`_expression.ClauseElement` type.""" + + def decorate(fn): + # get an existing @compiles handler + existing = class_.__dict__.get("_compiler_dispatcher", None) + + # get the original handler. All ClauseElement classes have one + # of these, but some TypeEngine classes will not. + existing_dispatch = getattr(class_, "_compiler_dispatch", None) + + if not existing: + existing = _dispatcher() + + if existing_dispatch: + + def _wrap_existing_dispatch(element, compiler, **kw): + try: + return existing_dispatch(element, compiler, **kw) + except exc.UnsupportedCompilationError as uce: + raise exc.UnsupportedCompilationError( + compiler, + type(element), + message="%s construct has no default " + "compilation handler."
% type(element), + ) from uce + + existing.specs["default"] = _wrap_existing_dispatch + + # TODO: why is the lambda needed ? + setattr( + class_, + "_compiler_dispatch", + lambda *arg, **kw: existing(*arg, **kw), + ) + setattr(class_, "_compiler_dispatcher", existing) + + if specs: + for s in specs: + existing.specs[s] = fn + + else: + existing.specs["default"] = fn + return fn + + return decorate + + +def deregister(class_): + """Remove all custom compilers associated with a given + :class:`_expression.ClauseElement` type. + + """ + + if hasattr(class_, "_compiler_dispatcher"): + class_._compiler_dispatch = class_._original_compiler_dispatch + del class_._compiler_dispatcher + + +class _dispatcher: + def __init__(self): + self.specs = {} + + def __call__(self, element, compiler, **kw): + # TODO: yes, this could also switch off of DBAPI in use. + fn = self.specs.get(compiler.dialect.name, None) + if not fn: + try: + fn = self.specs["default"] + except KeyError as ke: + raise exc.UnsupportedCompilationError( + compiler, + type(element), + message="%s construct has no default " + "compilation handler." % type(element), + ) from ke + + # if compilation includes add_to_result_map, collect add_to_result_map + # arguments from the user-defined callable, which are probably none + # because this is not public API. if it wasn't called, then call it + # ourselves. + arm = kw.get("add_to_result_map", None) + if arm: + arm_collection = [] + kw["add_to_result_map"] = lambda *args: arm_collection.append(args) + + expr = fn(element, compiler, **kw) + + if arm: + if not arm_collection: + arm_collection.append( + (None, None, (element,), sqltypes.NULLTYPE) + ) + for tup in arm_collection: + arm(*tup) + return expr diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/declarative/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/declarative/__init__.py new file mode 100644 index 00000000..37da4037 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/declarative/__init__.py @@ -0,0 +1,65 @@ +# ext/declarative/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +from .extensions import AbstractConcreteBase +from .extensions import ConcreteBase +from .extensions import DeferredReflection +from ... import util +from ...orm.decl_api import as_declarative as _as_declarative +from ...orm.decl_api import declarative_base as _declarative_base +from ...orm.decl_api import DeclarativeMeta +from ...orm.decl_api import declared_attr +from ...orm.decl_api import has_inherited_table as _has_inherited_table +from ...orm.decl_api import synonym_for as _synonym_for + + +@util.moved_20( + "The ``declarative_base()`` function is now available as " + ":func:`sqlalchemy.orm.declarative_base`." +) +def declarative_base(*arg, **kw): + return _declarative_base(*arg, **kw) + + +@util.moved_20( + "The ``as_declarative()`` function is now available as " + ":func:`sqlalchemy.orm.as_declarative`" +) +def as_declarative(*arg, **kw): + return _as_declarative(*arg, **kw) + + +@util.moved_20( + "The ``has_inherited_table()`` function is now available as " + ":func:`sqlalchemy.orm.has_inherited_table`." 
+) +def has_inherited_table(*arg, **kw): + return _has_inherited_table(*arg, **kw) + + +@util.moved_20( + "The ``synonym_for()`` function is now available as " + ":func:`sqlalchemy.orm.synonym_for`" +) +def synonym_for(*arg, **kw): + return _synonym_for(*arg, **kw) + + +__all__ = [ + "declarative_base", + "synonym_for", + "has_inherited_table", + "instrument_declarative", + "declared_attr", + "as_declarative", + "ConcreteBase", + "AbstractConcreteBase", + "DeclarativeMeta", + "DeferredReflection", +] diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/declarative/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/declarative/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..4842283e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/declarative/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/declarative/__pycache__/extensions.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/declarative/__pycache__/extensions.cpython-312.pyc new file mode 100644 index 00000000..f615f5ae Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/declarative/__pycache__/extensions.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/declarative/extensions.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/declarative/extensions.py new file mode 100644 index 00000000..c0f7e340 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/declarative/extensions.py @@ -0,0 +1,548 @@ +# ext/declarative/extensions.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +"""Public API functions and helpers for declarative.""" +from __future__ import annotations + +import collections +import contextlib +from typing import Any +from typing import Callable +from typing import TYPE_CHECKING +from typing import Union + +from ... import exc as sa_exc +from ...engine import Connection +from ...engine import Engine +from ...orm import exc as orm_exc +from ...orm import relationships +from ...orm.base import _mapper_or_none +from ...orm.clsregistry import _resolver +from ...orm.decl_base import _DeferredMapperConfig +from ...orm.util import polymorphic_union +from ...schema import Table +from ...util import OrderedDict + +if TYPE_CHECKING: + from ...sql.schema import MetaData + + +class ConcreteBase: + """A helper class for 'concrete' declarative mappings. + + :class:`.ConcreteBase` will use the :func:`.polymorphic_union` + function automatically, against all tables mapped as a subclass + to this class. The function is called via the + ``__declare_first__()`` function, which is essentially + a hook for the :meth:`.before_configured` event. + + :class:`.ConcreteBase` produces a mapped + table for the class itself. Compare to :class:`.AbstractConcreteBase`, + which does not.
+ + Example:: + + from sqlalchemy.ext.declarative import ConcreteBase + + class Employee(ConcreteBase, Base): + __tablename__ = 'employee' + employee_id = Column(Integer, primary_key=True) + name = Column(String(50)) + __mapper_args__ = { + 'polymorphic_identity':'employee', + 'concrete':True} + + class Manager(Employee): + __tablename__ = 'manager' + employee_id = Column(Integer, primary_key=True) + name = Column(String(50)) + manager_data = Column(String(40)) + __mapper_args__ = { + 'polymorphic_identity':'manager', + 'concrete':True} + + + The name of the discriminator column used by :func:`.polymorphic_union` + defaults to the name ``type``. To suit the use case of a mapping where an + actual column in a mapped table is already named ``type``, the + discriminator name can be configured by setting the + ``_concrete_discriminator_name`` attribute:: + + class Employee(ConcreteBase, Base): + _concrete_discriminator_name = '_concrete_discriminator' + + .. versionadded:: 1.3.19 Added the ``_concrete_discriminator_name`` + attribute to :class:`_declarative.ConcreteBase` so that the + virtual discriminator column name can be customized. + + .. versionchanged:: 1.4.2 The ``_concrete_discriminator_name`` attribute + need only be placed on the basemost class to take correct effect for + all subclasses. An explicit error message is now raised if the + mapped column names conflict with the discriminator name, whereas + in the 1.3.x series there would be some warnings and then a non-useful + query would be generated. + + .. seealso:: + + :class:`.AbstractConcreteBase` + + :ref:`concrete_inheritance` + + + """ + + @classmethod + def _create_polymorphic_union(cls, mappers, discriminator_name): + return polymorphic_union( + OrderedDict( + (mp.polymorphic_identity, mp.local_table) for mp in mappers + ), + discriminator_name, + "pjoin", + ) + + @classmethod + def __declare_first__(cls): + m = cls.__mapper__ + if m.with_polymorphic: + return + + discriminator_name = ( + getattr(cls, "_concrete_discriminator_name", None) or "type" + ) + + mappers = list(m.self_and_descendants) + pjoin = cls._create_polymorphic_union(mappers, discriminator_name) + m._set_with_polymorphic(("*", pjoin)) + m._set_polymorphic_on(pjoin.c[discriminator_name]) + + +class AbstractConcreteBase(ConcreteBase): + """A helper class for 'concrete' declarative mappings. + + :class:`.AbstractConcreteBase` will use the :func:`.polymorphic_union` + function automatically, against all tables mapped as a subclass + to this class. The function is called via the + ``__declare_first__()`` function, which is essentially + a hook for the :meth:`.before_configured` event. + + :class:`.AbstractConcreteBase` applies :class:`_orm.Mapper` for its + immediately inheriting class, as would occur for any other + declarative mapped class. However, the :class:`_orm.Mapper` is not + mapped to any particular :class:`.Table` object. Instead, it's + mapped directly to the "polymorphic" selectable produced by + :func:`.polymorphic_union`, and performs no persistence operations on its + own. Compare to :class:`.ConcreteBase`, which maps its + immediately inheriting class to an actual + :class:`.Table` that stores rows directly. + + .. note:: + + The :class:`.AbstractConcreteBase` delays the mapper creation of the + base class until all the subclasses have been defined, + as it needs to create a mapping against a selectable that will include + all subclass tables. 
In order to achieve this, it waits for the + **mapper configuration event** to occur, at which point it scans + through all the configured subclasses and sets up a mapping that will + query against all subclasses at once. + + While this event is normally invoked automatically, in the case of + :class:`.AbstractConcreteBase`, it may be necessary to invoke it + explicitly after **all** subclass mappings are defined, if the first + operation is to be a query against this base class. To do so, once all + the desired classes have been configured, the + :meth:`_orm.registry.configure` method on the :class:`_orm.registry` + in use can be invoked, which is available in relation to a particular + declarative base class:: + + Base.registry.configure() + + Example:: + + from sqlalchemy.orm import DeclarativeBase + from sqlalchemy.ext.declarative import AbstractConcreteBase + + class Base(DeclarativeBase): + pass + + class Employee(AbstractConcreteBase, Base): + pass + + class Manager(Employee): + __tablename__ = 'manager' + employee_id = Column(Integer, primary_key=True) + name = Column(String(50)) + manager_data = Column(String(40)) + + __mapper_args__ = { + 'polymorphic_identity':'manager', + 'concrete':True + } + + Base.registry.configure() + + The abstract base class is handled by declarative in a special way; + at class configuration time, it behaves like a declarative mixin + or an ``__abstract__`` base class. Once classes are configured + and mappings are produced, it then gets mapped itself, but + after all of its descendants. This is a very unique system of mapping + not found in any other SQLAlchemy API feature. + + Using this approach, we can specify columns and properties + that will take place on mapped subclasses, in the way that + we normally do as in :ref:`declarative_mixins`:: + + from sqlalchemy.ext.declarative import AbstractConcreteBase + + class Company(Base): + __tablename__ = 'company' + id = Column(Integer, primary_key=True) + + class Employee(AbstractConcreteBase, Base): + strict_attrs = True + + employee_id = Column(Integer, primary_key=True) + + @declared_attr + def company_id(cls): + return Column(ForeignKey('company.id')) + + @declared_attr + def company(cls): + return relationship("Company") + + class Manager(Employee): + __tablename__ = 'manager' + + name = Column(String(50)) + manager_data = Column(String(40)) + + __mapper_args__ = { + 'polymorphic_identity':'manager', + 'concrete':True + } + + Base.registry.configure() + + When we make use of our mappings however, both ``Manager`` and + ``Employee`` will have an independently usable ``.company`` attribute:: + + session.execute( + select(Employee).filter(Employee.company.has(id=5)) + ) + + :param strict_attrs: when specified on the base class, "strict" attribute + mode is enabled which attempts to limit ORM mapped attributes on the + base class to only those that are immediately present, while still + preserving "polymorphic" loading behavior. + + .. versionadded:: 2.0 + + .. seealso:: + + :class:`.ConcreteBase` + + :ref:`concrete_inheritance` + + :ref:`abstract_concrete_base` + + """ + + __no_table__ = True + + @classmethod + def __declare_first__(cls): + cls._sa_decl_prepare_nocascade() + + @classmethod + def _sa_decl_prepare_nocascade(cls): + if getattr(cls, "__mapper__", None): + return + + to_map = _DeferredMapperConfig.config_for_cls(cls) + + # can't rely on 'self_and_descendants' here + # since technically an immediate subclass + # might not be mapped, but a subclass + # may be. 
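+        # so instead, walk the full __subclasses__() tree below and collect
+        # a mapper for each descendant class that is actually mapped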
+ mappers = [] + stack = list(cls.__subclasses__()) + while stack: + klass = stack.pop() + stack.extend(klass.__subclasses__()) + mn = _mapper_or_none(klass) + if mn is not None: + mappers.append(mn) + + discriminator_name = ( + getattr(cls, "_concrete_discriminator_name", None) or "type" + ) + pjoin = cls._create_polymorphic_union(mappers, discriminator_name) + + # For columns that were declared on the class, these + # are normally ignored with the "__no_table__" mapping, + # unless they have a different attribute key vs. col name + # and are in the properties argument. + # In that case, ensure we update the properties entry + # to the correct column from the pjoin target table. + declared_cols = set(to_map.declared_columns) + declared_col_keys = {c.key for c in declared_cols} + for k, v in list(to_map.properties.items()): + if v in declared_cols: + to_map.properties[k] = pjoin.c[v.key] + declared_col_keys.remove(v.key) + + to_map.local_table = pjoin + + strict_attrs = cls.__dict__.get("strict_attrs", False) + + m_args = to_map.mapper_args_fn or dict + + def mapper_args(): + args = m_args() + args["polymorphic_on"] = pjoin.c[discriminator_name] + args["polymorphic_abstract"] = True + if strict_attrs: + args["include_properties"] = ( + set(pjoin.primary_key) + | declared_col_keys + | {discriminator_name} + ) + args["with_polymorphic"] = ("*", pjoin) + return args + + to_map.mapper_args_fn = mapper_args + + to_map.map() + + stack = [cls] + while stack: + scls = stack.pop(0) + stack.extend(scls.__subclasses__()) + sm = _mapper_or_none(scls) + if sm and sm.concrete and sm.inherits is None: + for sup_ in scls.__mro__[1:]: + sup_sm = _mapper_or_none(sup_) + if sup_sm: + sm._set_concrete_base(sup_sm) + break + + @classmethod + def _sa_raise_deferred_config(cls): + raise orm_exc.UnmappedClassError( + cls, + msg="Class %s is a subclass of AbstractConcreteBase and " + "has a mapping pending until all subclasses are defined. " + "Call the sqlalchemy.orm.configure_mappers() function after " + "all subclasses have been defined to " + "complete the mapping of this class." + % orm_exc._safe_cls_name(cls), + ) + + +class DeferredReflection: + """A helper class for construction of mappings based on + a deferred reflection step. + + Normally, declarative can be used with reflection by + setting a :class:`_schema.Table` object using autoload_with=engine + as the ``__table__`` attribute on a declarative class. + The caveat is that the :class:`_schema.Table` must be fully + reflected, or at the very least have a primary key column, + at the point at which a normal declarative mapping is + constructed, meaning the :class:`_engine.Engine` must be available + at class declaration time. + + The :class:`.DeferredReflection` mixin moves the construction + of mappers to be at a later point, after a specific + method is called which first reflects all :class:`_schema.Table` + objects created so far. Classes can define it as such:: + + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.ext.declarative import DeferredReflection + Base = declarative_base() + + class MyClass(DeferredReflection, Base): + __tablename__ = 'mytable' + + Above, ``MyClass`` is not yet mapped. 
After a series of + classes have been defined in the above fashion, all tables + can be reflected and mappings created using + :meth:`.prepare`:: + + engine = create_engine("someengine://...") + DeferredReflection.prepare(engine) + + The :class:`.DeferredReflection` mixin can be applied to individual + classes, used as the base for the declarative base itself, + or used in a custom abstract class. Using an abstract base + allows only a subset of classes to be prepared for a + particular prepare step, which is necessary for applications + that use more than one engine. For example, if an application + has two engines, you might use two bases, and prepare each + separately, e.g.:: + + class ReflectedOne(DeferredReflection, Base): + __abstract__ = True + + class ReflectedTwo(DeferredReflection, Base): + __abstract__ = True + + class MyClass(ReflectedOne): + __tablename__ = 'mytable' + + class MyOtherClass(ReflectedOne): + __tablename__ = 'myothertable' + + class YetAnotherClass(ReflectedTwo): + __tablename__ = 'yetanothertable' + + # ... etc. + + Above, the class hierarchies for ``ReflectedOne`` and + ``ReflectedTwo`` can be configured separately:: + + ReflectedOne.prepare(engine_one) + ReflectedTwo.prepare(engine_two) + + .. seealso:: + + :ref:`orm_declarative_reflected_deferred_reflection` - in the + :ref:`orm_declarative_table_config_toplevel` section. + + """ + + @classmethod + def prepare( + cls, bind: Union[Engine, Connection], **reflect_kw: Any + ) -> None: + r"""Reflect all :class:`_schema.Table` objects for all current + :class:`.DeferredReflection` subclasses + + :param bind: :class:`_engine.Engine` or :class:`_engine.Connection` + instance + + .. versionchanged:: 2.0.16 A :class:`_engine.Connection` is also + accepted. + + :param \**reflect_kw: additional keyword arguments passed to + :meth:`_schema.MetaData.reflect`, such as + :paramref:`_schema.MetaData.reflect.views`. + + ..
versionadded:: 2.0.16 + + """ + + to_map = _DeferredMapperConfig.classes_for_base(cls) + + metadata_to_table = collections.defaultdict(set) + + # first collect the primary __table__ for each class into a + # collection of metadata/schemaname -> table names + for thingy in to_map: + if thingy.local_table is not None: + metadata_to_table[ + (thingy.local_table.metadata, thingy.local_table.schema) + ].add(thingy.local_table.name) + + # then reflect all those tables into their metadatas + + if isinstance(bind, Connection): + conn = bind + ctx = contextlib.nullcontext(enter_result=conn) + elif isinstance(bind, Engine): + ctx = bind.connect() + else: + raise sa_exc.ArgumentError( + f"Expected Engine or Connection, got {bind!r}" + ) + + with ctx as conn: + for (metadata, schema), table_names in metadata_to_table.items(): + metadata.reflect( + conn, + only=table_names, + schema=schema, + extend_existing=True, + autoload_replace=False, + **reflect_kw, + ) + + metadata_to_table.clear() + + # .map() each class, then go through relationships and look + # for secondary + for thingy in to_map: + thingy.map() + + mapper = thingy.cls.__mapper__ + metadata = mapper.class_.metadata + + for rel in mapper._props.values(): + if ( + isinstance(rel, relationships.RelationshipProperty) + and rel._init_args.secondary._is_populated() + ): + secondary_arg = rel._init_args.secondary + + if isinstance(secondary_arg.argument, Table): + secondary_table = secondary_arg.argument + metadata_to_table[ + ( + secondary_table.metadata, + secondary_table.schema, + ) + ].add(secondary_table.name) + elif isinstance(secondary_arg.argument, str): + _, resolve_arg = _resolver(rel.parent.class_, rel) + + resolver = resolve_arg( + secondary_arg.argument, True + ) + metadata_to_table[ + (metadata, thingy.local_table.schema) + ].add(secondary_arg.argument) + + resolver._resolvers += ( + cls._sa_deferred_table_resolver(metadata), + ) + + secondary_arg.argument = resolver() + + for (metadata, schema), table_names in metadata_to_table.items(): + metadata.reflect( + conn, + only=table_names, + schema=schema, + extend_existing=True, + autoload_replace=False, + ) + + @classmethod + def _sa_deferred_table_resolver( + cls, metadata: MetaData + ) -> Callable[[str], Table]: + def _resolve(key: str) -> Table: + # reflection has already occurred so this Table would have + # its contents already + return Table(key, metadata) + + return _resolve + + _sa_decl_prepare = True + + @classmethod + def _sa_raise_deferred_config(cls): + raise orm_exc.UnmappedClassError( + cls, + msg="Class %s is a subclass of DeferredReflection. " + "Mappings are not produced until the .prepare() " + "method is called on the class hierarchy." + % orm_exc._safe_cls_name(cls), + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/horizontal_shard.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/horizontal_shard.py new file mode 100644 index 00000000..d8ee819f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/horizontal_shard.py @@ -0,0 +1,481 @@ +# ext/horizontal_shard.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Horizontal sharding support. + +Defines a rudimental 'horizontal sharding' system which allows a Session to +distribute queries and persistence operations across multiple databases. 
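+
+A minimal configuration sketch (the engines and the three chooser callables
+here are hypothetical; see :class:`.ShardedSession` for their signatures)::
+
+    from sqlalchemy.orm import sessionmaker
+
+    Session = sessionmaker(
+        class_=ShardedSession,
+        shard_chooser=my_shard_chooser,
+        identity_chooser=my_identity_chooser,
+        execute_chooser=my_execute_chooser,
+        shards={"shard1": engine1, "shard2": engine2},
+    )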
+ +For a usage example, see the :ref:`examples_sharding` example included in +the source distribution. + +.. deepalchemy:: The horizontal sharding extension is an advanced feature, + involving a complex statement -> database interaction as well as + use of semi-public APIs for non-trivial cases. Simpler approaches to + referring to multiple database "shards", most commonly using a distinct + :class:`_orm.Session` per "shard", should always be considered first + before using this more complex and less-production-tested system. + + + +""" +from __future__ import annotations + +from typing import Any +from typing import Callable +from typing import Dict +from typing import Iterable +from typing import Optional +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from .. import event +from .. import exc +from .. import inspect +from .. import util +from ..orm import PassiveFlag +from ..orm._typing import OrmExecuteOptionsParameter +from ..orm.interfaces import ORMOption +from ..orm.mapper import Mapper +from ..orm.query import Query +from ..orm.session import _BindArguments +from ..orm.session import _PKIdentityArgument +from ..orm.session import Session +from ..util.typing import Protocol +from ..util.typing import Self + +if TYPE_CHECKING: + from ..engine.base import Connection + from ..engine.base import Engine + from ..engine.base import OptionEngine + from ..engine.result import IteratorResult + from ..engine.result import Result + from ..orm import LoaderCallableStatus + from ..orm._typing import _O + from ..orm.bulk_persistence import BulkUDCompileState + from ..orm.context import QueryContext + from ..orm.session import _EntityBindKey + from ..orm.session import _SessionBind + from ..orm.session import ORMExecuteState + from ..orm.state import InstanceState + from ..sql import Executable + from ..sql._typing import _TP + from ..sql.elements import ClauseElement + +__all__ = ["ShardedSession", "ShardedQuery"] + +_T = TypeVar("_T", bound=Any) + + +ShardIdentifier = str + + +class ShardChooser(Protocol): + def __call__( + self, + mapper: Optional[Mapper[_T]], + instance: Any, + clause: Optional[ClauseElement], + ) -> Any: ... + + +class IdentityChooser(Protocol): + def __call__( + self, + mapper: Mapper[_T], + primary_key: _PKIdentityArgument, + *, + lazy_loaded_from: Optional[InstanceState[Any]], + execution_options: OrmExecuteOptionsParameter, + bind_arguments: _BindArguments, + **kw: Any, + ) -> Any: ... + + +class ShardedQuery(Query[_T]): + """Query class used with :class:`.ShardedSession`. + + .. legacy:: The :class:`.ShardedQuery` is a subclass of the legacy + :class:`.Query` class. The :class:`.ShardedSession` now supports + 2.0 style execution via the :meth:`.ShardedSession.execute` method. + + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + assert isinstance(self.session, ShardedSession) + + self.identity_chooser = self.session.identity_chooser + self.execute_chooser = self.session.execute_chooser + self._shard_id = None + + def set_shard(self, shard_id: ShardIdentifier) -> Self: + """Return a new query, limited to a single shard ID. + + All subsequent operations with the returned query will + be against the single shard regardless of other state.
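+
+        E.g., a minimal sketch (``User`` is a hypothetical mapped class and
+        ``session`` is assumed to be a :class:`.ShardedSession`)::
+
+            q = session.query(User).set_shard("shard1")
+            users = q.all()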
+ + The shard_id can be passed for a 2.0 style execution to the + bind_arguments dictionary of :meth:`.Session.execute`:: + + results = session.execute( + stmt, + bind_arguments={"shard_id": "my_shard"} + ) + + """ + return self.execution_options(_sa_shard_id=shard_id) + + +class ShardedSession(Session): + shard_chooser: ShardChooser + identity_chooser: IdentityChooser + execute_chooser: Callable[[ORMExecuteState], Iterable[Any]] + + def __init__( + self, + shard_chooser: ShardChooser, + identity_chooser: Optional[IdentityChooser] = None, + execute_chooser: Optional[ + Callable[[ORMExecuteState], Iterable[Any]] + ] = None, + shards: Optional[Dict[str, Any]] = None, + query_cls: Type[Query[_T]] = ShardedQuery, + *, + id_chooser: Optional[ + Callable[[Query[_T], Iterable[_T]], Iterable[Any]] + ] = None, + query_chooser: Optional[Callable[[Executable], Iterable[Any]]] = None, + **kwargs: Any, + ) -> None: + """Construct a ShardedSession. + + :param shard_chooser: A callable which, passed a Mapper, a mapped + instance, and possibly a SQL clause, returns a shard ID. This id + may be based off of the attributes present within the object, or on + some round-robin scheme. If the scheme is based on a selection, it + should set whatever state on the instance to mark it in the future as + participating in that shard. + + :param identity_chooser: A callable, passed a Mapper and primary key + argument, which should return a list of shard ids where this + primary key might reside. + + .. versionchanged:: 2.0 The ``identity_chooser`` parameter + supersedes the ``id_chooser`` parameter. + + :param execute_chooser: For a given :class:`.ORMExecuteState`, + returns the list of shard_ids + where the query should be issued. Results from all shards returned + will be combined together into a single listing. + + .. versionchanged:: 1.4 The ``execute_chooser`` parameter + supersedes the ``query_chooser`` parameter. + + :param shards: A dictionary of string shard names + to :class:`~sqlalchemy.engine.Engine` objects. + + """ + super().__init__(query_cls=query_cls, **kwargs) + + event.listen( + self, "do_orm_execute", execute_and_instances, retval=True + ) + self.shard_chooser = shard_chooser + + if id_chooser: + _id_chooser = id_chooser + util.warn_deprecated( + "The ``id_chooser`` parameter is deprecated; " + "please use ``identity_chooser``.", + "2.0", + ) + + def _legacy_identity_chooser( + mapper: Mapper[_T], + primary_key: _PKIdentityArgument, + *, + lazy_loaded_from: Optional[InstanceState[Any]], + execution_options: OrmExecuteOptionsParameter, + bind_arguments: _BindArguments, + **kw: Any, + ) -> Any: + q = self.query(mapper) + if lazy_loaded_from: + q = q._set_lazyload_from(lazy_loaded_from) + return _id_chooser(q, primary_key) + + self.identity_chooser = _legacy_identity_chooser + elif identity_chooser: + self.identity_chooser = identity_chooser + else: + raise exc.ArgumentError( + "identity_chooser or id_chooser is required" + ) + + if query_chooser: + _query_chooser = query_chooser + util.warn_deprecated( + "The ``query_chooser`` parameter is deprecated; " + "please use ``execute_chooser``.", + "1.4", + ) + if execute_chooser: + raise exc.ArgumentError( + "Can't pass query_chooser and execute_chooser " + "at the same time." 
+ ) + + def _default_execute_chooser( + orm_context: ORMExecuteState, + ) -> Iterable[Any]: + return _query_chooser(orm_context.statement) + + if execute_chooser is None: + execute_chooser = _default_execute_chooser + + if execute_chooser is None: + raise exc.ArgumentError( + "execute_chooser or query_chooser is required" + ) + self.execute_chooser = execute_chooser + self.__shards: Dict[ShardIdentifier, _SessionBind] = {} + if shards is not None: + for k in shards: + self.bind_shard(k, shards[k]) + + def _identity_lookup( + self, + mapper: Mapper[_O], + primary_key_identity: Union[Any, Tuple[Any, ...]], + identity_token: Optional[Any] = None, + passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, + lazy_loaded_from: Optional[InstanceState[Any]] = None, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Union[Optional[_O], LoaderCallableStatus]: + """override the default :meth:`.Session._identity_lookup` method so + that we search for a given non-token primary key identity across all + possible identity tokens (e.g. shard ids). + + .. versionchanged:: 1.4 Moved :meth:`.Session._identity_lookup` from + the :class:`_query.Query` object to the :class:`.Session`. + + """ + + if identity_token is not None: + obj = super()._identity_lookup( + mapper, + primary_key_identity, + identity_token=identity_token, + **kw, + ) + + return obj + else: + for shard_id in self.identity_chooser( + mapper, + primary_key_identity, + lazy_loaded_from=lazy_loaded_from, + execution_options=execution_options, + bind_arguments=dict(bind_arguments) if bind_arguments else {}, + ): + obj2 = super()._identity_lookup( + mapper, + primary_key_identity, + identity_token=shard_id, + lazy_loaded_from=lazy_loaded_from, + **kw, + ) + if obj2 is not None: + return obj2 + + return None + + def _choose_shard_and_assign( + self, + mapper: Optional[_EntityBindKey[_O]], + instance: Any, + **kw: Any, + ) -> Any: + if instance is not None: + state = inspect(instance) + if state.key: + token = state.key[2] + assert token is not None + return token + elif state.identity_token: + return state.identity_token + + assert isinstance(mapper, Mapper) + shard_id = self.shard_chooser(mapper, instance, **kw) + if instance is not None: + state.identity_token = shard_id + return shard_id + + def connection_callable( # type: ignore [override] + self, + mapper: Optional[Mapper[_T]] = None, + instance: Optional[Any] = None, + shard_id: Optional[ShardIdentifier] = None, + **kw: Any, + ) -> Connection: + """Provide a :class:`_engine.Connection` to use in the unit of work + flush process. 
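+
+        If ``shard_id`` is not given, one is chosen via the session's
+        shard chooser logic (and assigned to the instance, when present)
+        before the appropriate bind is located.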
+ + """ + + if shard_id is None: + shard_id = self._choose_shard_and_assign(mapper, instance) + + if self.in_transaction(): + trans = self.get_transaction() + assert trans is not None + return trans.connection(mapper, shard_id=shard_id) + else: + bind = self.get_bind( + mapper=mapper, shard_id=shard_id, instance=instance + ) + + if isinstance(bind, Engine): + return bind.connect(**kw) + else: + assert isinstance(bind, Connection) + return bind + + def get_bind( + self, + mapper: Optional[_EntityBindKey[_O]] = None, + *, + shard_id: Optional[ShardIdentifier] = None, + instance: Optional[Any] = None, + clause: Optional[ClauseElement] = None, + **kw: Any, + ) -> _SessionBind: + if shard_id is None: + shard_id = self._choose_shard_and_assign( + mapper, instance=instance, clause=clause + ) + assert shard_id is not None + return self.__shards[shard_id] + + def bind_shard( + self, shard_id: ShardIdentifier, bind: Union[Engine, OptionEngine] + ) -> None: + self.__shards[shard_id] = bind + + +class set_shard_id(ORMOption): + """a loader option for statements to apply a specific shard id to the + primary query as well as for additional relationship and column + loaders. + + The :class:`_horizontal.set_shard_id` option may be applied using + the :meth:`_sql.Executable.options` method of any executable statement:: + + stmt = ( + select(MyObject). + where(MyObject.name == 'some name'). + options(set_shard_id("shard1")) + ) + + Above, the statement when invoked will limit to the "shard1" shard + identifier for the primary query as well as for all relationship and + column loading strategies, including eager loaders such as + :func:`_orm.selectinload`, deferred column loaders like :func:`_orm.defer`, + and the lazy relationship loader :func:`_orm.lazyload`. + + In this way, the :class:`_horizontal.set_shard_id` option has much wider + scope than using the "shard_id" argument within the + :paramref:`_orm.Session.execute.bind_arguments` dictionary. + + + .. versionadded:: 2.0.0 + + """ + + __slots__ = ("shard_id", "propagate_to_loaders") + + def __init__( + self, shard_id: ShardIdentifier, propagate_to_loaders: bool = True + ): + """Construct a :class:`_horizontal.set_shard_id` option. + + :param shard_id: shard identifier + :param propagate_to_loaders: if left at its default of ``True``, the + shard option will take place for lazy loaders such as + :func:`_orm.lazyload` and :func:`_orm.defer`; if False, the option + will not be propagated to loaded objects. Note that :func:`_orm.defer` + always limits to the shard_id of the parent row in any case, so the + parameter only has a net effect on the behavior of the + :func:`_orm.lazyload` strategy. 
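+
+        E.g., to limit only the primary statement and not subsequent lazy
+        loaders, a sketch re-using the ``MyObject`` mapping from above::
+
+            stmt = select(MyObject).options(
+                set_shard_id("shard1", propagate_to_loaders=False)
+            )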
+ + """ + self.shard_id = shard_id + self.propagate_to_loaders = propagate_to_loaders + + +def execute_and_instances( + orm_context: ORMExecuteState, +) -> Union[Result[_T], IteratorResult[_TP]]: + active_options: Union[ + None, + QueryContext.default_load_options, + Type[QueryContext.default_load_options], + BulkUDCompileState.default_update_options, + Type[BulkUDCompileState.default_update_options], + ] + + if orm_context.is_select: + active_options = orm_context.load_options + + elif orm_context.is_update or orm_context.is_delete: + active_options = orm_context.update_delete_options + else: + active_options = None + + session = orm_context.session + assert isinstance(session, ShardedSession) + + def iter_for_shard( + shard_id: ShardIdentifier, + ) -> Union[Result[_T], IteratorResult[_TP]]: + bind_arguments = dict(orm_context.bind_arguments) + bind_arguments["shard_id"] = shard_id + + orm_context.update_execution_options(identity_token=shard_id) + return orm_context.invoke_statement(bind_arguments=bind_arguments) + + for orm_opt in orm_context._non_compile_orm_options: + # TODO: if we had an ORMOption that gets applied at ORM statement + # execution time, that would allow this to be more generalized. + # for now just iterate and look for our options + if isinstance(orm_opt, set_shard_id): + shard_id = orm_opt.shard_id + break + else: + if active_options and active_options._identity_token is not None: + shard_id = active_options._identity_token + elif "_sa_shard_id" in orm_context.execution_options: + shard_id = orm_context.execution_options["_sa_shard_id"] + elif "shard_id" in orm_context.bind_arguments: + shard_id = orm_context.bind_arguments["shard_id"] + else: + shard_id = None + + if shard_id is not None: + return iter_for_shard(shard_id) + else: + partial = [] + for shard_id in session.execute_chooser(orm_context): + result_ = iter_for_shard(shard_id) + partial.append(result_) + return partial[0].merge(*partial[1:]) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/hybrid.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/hybrid.py new file mode 100644 index 00000000..ee8d6a78 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/hybrid.py @@ -0,0 +1,1514 @@ +# ext/hybrid.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +r"""Define attributes on ORM-mapped classes that have "hybrid" behavior. + +"hybrid" means the attribute has distinct behaviors defined at the +class level and at the instance level. + +The :mod:`~sqlalchemy.ext.hybrid` extension provides a special form of +method decorator and has minimal dependencies on the rest of SQLAlchemy. +Its basic theory of operation can work with any descriptor-based expression +system. + +Consider a mapping ``Interval``, representing integer ``start`` and ``end`` +values. We can define higher level functions on mapped classes that produce SQL +expressions at the class level, and Python expression evaluation at the +instance level. 
Below, each function decorated with :class:`.hybrid_method` or +:class:`.hybrid_property` may receive ``self`` as an instance of the class, or +may receive the class directly, depending on context:: + + from __future__ import annotations + + from sqlalchemy.ext.hybrid import hybrid_method + from sqlalchemy.ext.hybrid import hybrid_property + from sqlalchemy.orm import DeclarativeBase + from sqlalchemy.orm import Mapped + from sqlalchemy.orm import mapped_column + + + class Base(DeclarativeBase): + pass + + class Interval(Base): + __tablename__ = 'interval' + + id: Mapped[int] = mapped_column(primary_key=True) + start: Mapped[int] + end: Mapped[int] + + def __init__(self, start: int, end: int): + self.start = start + self.end = end + + @hybrid_property + def length(self) -> int: + return self.end - self.start + + @hybrid_method + def contains(self, point: int) -> bool: + return (self.start <= point) & (point <= self.end) + + @hybrid_method + def intersects(self, other: Interval) -> bool: + return self.contains(other.start) | self.contains(other.end) + + +Above, the ``length`` property returns the difference between the +``end`` and ``start`` attributes. With an instance of ``Interval``, +this subtraction occurs in Python, using normal Python descriptor +mechanics:: + + >>> i1 = Interval(5, 10) + >>> i1.length + 5 + +When dealing with the ``Interval`` class itself, the :class:`.hybrid_property` +descriptor evaluates the function body given the ``Interval`` class as +the argument, which when evaluated with SQLAlchemy expression mechanics +returns a new SQL expression: + +.. sourcecode:: pycon+sql + + >>> from sqlalchemy import select + >>> print(select(Interval.length)) + {printsql}SELECT interval."end" - interval.start AS length + FROM interval{stop} + + + >>> print(select(Interval).filter(Interval.length > 10)) + {printsql}SELECT interval.id, interval.start, interval."end" + FROM interval + WHERE interval."end" - interval.start > :param_1 + +Filtering methods such as :meth:`.Select.filter_by` are supported +with hybrid attributes as well: + +.. sourcecode:: pycon+sql + + >>> print(select(Interval).filter_by(length=5)) + {printsql}SELECT interval.id, interval.start, interval."end" + FROM interval + WHERE interval."end" - interval.start = :param_1 + +The ``Interval`` class example also illustrates two methods, +``contains()`` and ``intersects()``, decorated with +:class:`.hybrid_method`. This decorator applies the same idea to +methods that :class:`.hybrid_property` applies to attributes. The +methods return boolean values, and take advantage of the Python ``|`` +and ``&`` bitwise operators to produce equivalent instance-level and +SQL expression-level boolean behavior: + +.. 
sourcecode:: pycon+sql + + >>> i1.contains(6) + True + >>> i1.contains(15) + False + >>> i1.intersects(Interval(7, 18)) + True + >>> i1.intersects(Interval(25, 29)) + False + + >>> print(select(Interval).filter(Interval.contains(15))) + {printsql}SELECT interval.id, interval.start, interval."end" + FROM interval + WHERE interval.start <= :start_1 AND interval."end" > :end_1{stop} + + >>> ia = aliased(Interval) + >>> print(select(Interval, ia).filter(Interval.intersects(ia))) + {printsql}SELECT interval.id, interval.start, + interval."end", interval_1.id AS interval_1_id, + interval_1.start AS interval_1_start, interval_1."end" AS interval_1_end + FROM interval, interval AS interval_1 + WHERE interval.start <= interval_1.start + AND interval."end" > interval_1.start + OR interval.start <= interval_1."end" + AND interval."end" > interval_1."end"{stop} + +.. _hybrid_distinct_expression: + +Defining Expression Behavior Distinct from Attribute Behavior +-------------------------------------------------------------- + +In the previous section, our usage of the ``&`` and ``|`` bitwise operators +within the ``Interval.contains`` and ``Interval.intersects`` methods was +fortunate, considering our functions operated on two boolean values to return a +new one. In many cases, the construction of an in-Python function and a +SQLAlchemy SQL expression have enough differences that two separate Python +expressions should be defined. The :mod:`~sqlalchemy.ext.hybrid` decorator +defines a **modifier** :meth:`.hybrid_property.expression` for this purpose. As an +example we'll define the radius of the interval, which requires the usage of +the absolute value function:: + + from sqlalchemy import ColumnElement + from sqlalchemy import Float + from sqlalchemy import func + from sqlalchemy import type_coerce + + class Interval(Base): + # ... + + @hybrid_property + def radius(self) -> float: + return abs(self.length) / 2 + + @radius.inplace.expression + @classmethod + def _radius_expression(cls) -> ColumnElement[float]: + return type_coerce(func.abs(cls.length) / 2, Float) + +In the above example, the :class:`.hybrid_property` first assigned to the +name ``Interval.radius`` is amended by a subsequent method called +``Interval._radius_expression``, using the decorator +``@radius.inplace.expression``, which chains together two modifiers +:attr:`.hybrid_property.inplace` and :attr:`.hybrid_property.expression`. +The use of :attr:`.hybrid_property.inplace` indicates that the +:meth:`.hybrid_property.expression` modifier should mutate the +existing hybrid object at ``Interval.radius`` in place, without creating a +new object. Notes on this modifier and its +rationale are discussed in the next section :ref:`hybrid_pep484_naming`. +The use of ``@classmethod`` is optional, and is strictly to give typing +tools a hint that ``cls`` in this case is expected to be the ``Interval`` +class, and not an instance of ``Interval``. + +.. note:: :attr:`.hybrid_property.inplace` as well as the use of ``@classmethod`` + for proper typing support are available as of SQLAlchemy 2.0.4, and will + not work in earlier versions. + +With ``Interval.radius`` now including an expression element, the SQL +function ``ABS()`` is returned when accessing ``Interval.radius`` +at the class level: + +.. 
sourcecode:: pycon+sql
+
+    >>> from sqlalchemy import select
+    >>> print(select(Interval).filter(Interval.radius > 5))
+    {printsql}SELECT interval.id, interval.start, interval."end"
+    FROM interval
+    WHERE abs(interval."end" - interval.start) / :abs_1 > :param_1
+
+
+.. _hybrid_pep484_naming:
+
+Using ``inplace`` to create pep-484 compliant hybrid properties
+---------------------------------------------------------------
+
+In the previous section, a :class:`.hybrid_property` decorator is illustrated
+which includes two separate method-level functions being decorated, both
+to produce a single object attribute referenced as ``Interval.radius``.
+
+There are actually several different modifiers we can use for
+:class:`.hybrid_property`, including :meth:`.hybrid_property.expression`,
+:meth:`.hybrid_property.setter` and :meth:`.hybrid_property.update_expression`.
+
+SQLAlchemy's :class:`.hybrid_property` decorator intends that adding on these
+methods may be done in the same manner as Python's built-in
+``@property`` decorator, where idiomatic use is to continue to redefine the
+attribute repeatedly, using the **same attribute name** each time, as in the
+example below that illustrates the use of :meth:`.hybrid_property.setter` and
+:meth:`.hybrid_property.expression` for the ``Interval.radius`` descriptor::
+
+    # correct use, however is not accepted by pep-484 tooling
+
+    class Interval(Base):
+        # ...
+
+        @hybrid_property
+        def radius(self):
+            return abs(self.length) / 2
+
+        @radius.setter
+        def radius(self, value):
+            self.length = value * 2
+
+        @radius.expression
+        def radius(cls):
+            return type_coerce(func.abs(cls.length) / 2, Float)
+
+Above, there are three ``Interval.radius`` methods, but as each is decorated,
+first by the :class:`.hybrid_property` decorator and then by the
+``@radius`` name itself, the end effect is that ``Interval.radius`` is
+a single attribute with three different functions contained within it.
+This style of use is taken from `Python's documented use of @property
+<https://docs.python.org/3/library/functions.html#property>`_.
+It is important to note that due to the way both ``@property`` and
+:class:`.hybrid_property` work, a **copy of the descriptor is made each time**.
+That is, each call to ``@radius.expression``, ``@radius.setter`` etc.
+makes a new object entirely.  This allows the attribute to be re-defined in
+subclasses without issue (see :ref:`hybrid_reuse_subclass` later in this
+section for how this is used).
+
+However, the above approach is not compatible with typing tools such as
+mypy and pyright.  Python's own ``@property`` decorator does not have this
+limitation only because
+`these tools hardcode the behavior of @property
+<https://github.com/python/typing/discussions/1102>`_, meaning this syntax
+is not available to SQLAlchemy under :pep:`484` compliance.
+
+In order to produce a reasonable syntax while remaining typing compliant,
+the :attr:`.hybrid_property.inplace` decorator allows the same
+decorator to be re-used with different method names, while still producing
+a single decorator under one name::
+
+    # correct use which is also accepted by pep-484 tooling
+
+    class Interval(Base):
+        # ...
+
+        @hybrid_property
+        def radius(self) -> float:
+            return abs(self.length) / 2
+
+        @radius.inplace.setter
+        def _radius_setter(self, value: float) -> None:
+            # for example only
+            self.length = value * 2
+
+        @radius.inplace.expression
+        @classmethod
+        def _radius_expression(cls) -> ColumnElement[float]:
+            return type_coerce(func.abs(cls.length) / 2, Float)
+
+Using :attr:`.hybrid_property.inplace` further indicates that the decorator
+should not make a new copy, thereby maintaining the
+``Interval.radius`` name while allowing the additional methods
+``Interval._radius_setter`` and ``Interval._radius_expression`` to be
+differently named.
+
+
+.. versionadded:: 2.0.4 Added :attr:`.hybrid_property.inplace` to allow
+   less verbose construction of composite :class:`.hybrid_property` objects
+   while not having to use repeated method names.  Additionally allowed the
+   use of ``@classmethod`` within :attr:`.hybrid_property.expression`,
+   :attr:`.hybrid_property.update_expression`, and
+   :attr:`.hybrid_property.comparator` to allow typing tools to identify
+   ``cls`` as a class and not an instance in the method signature.
+
+
+Defining Setters
+----------------
+
+The :meth:`.hybrid_property.setter` modifier allows the construction of a
+custom setter method that can modify values on the object::
+
+    class Interval(Base):
+        # ...
+
+        @hybrid_property
+        def length(self) -> int:
+            return self.end - self.start
+
+        @length.inplace.setter
+        def _length_setter(self, value: int) -> None:
+            self.end = self.start + value
+
+The ``_length_setter()`` method is now called when the attribute is set::
+
+    >>> i1 = Interval(5, 10)
+    >>> i1.length
+    5
+    >>> i1.length = 12
+    >>> i1.end
+    17
+
+.. _hybrid_bulk_update:
+
+Allowing Bulk ORM Update
+------------------------
+
+A hybrid can define a custom "UPDATE" handler for use with
+ORM-enabled updates, allowing the hybrid to be used in the
+SET clause of the UPDATE statement.
+
+Normally, when using a hybrid with :func:`_sql.update`, the SQL
+expression is used as the column that's the target of the SET.  If our
+``Interval`` class had a hybrid ``start_point`` that linked to
+``Interval.start``, this could be substituted directly::
+
+    from sqlalchemy import update
+    stmt = update(Interval).values({Interval.start_point: 10})
+
+However, a composite hybrid like ``Interval.length`` represents more than
+one column.  Using the :meth:`.hybrid_property.update_expression` decorator,
+we can set up a handler that processes a value passed in the VALUES
+expression into the individual columns affected.
+A handler that works similarly to our setter would be::
+
+    from typing import List, Tuple, Any
+
+    class Interval(Base):
+        # ...
+
+        @hybrid_property
+        def length(self) -> int:
+            return self.end - self.start
+
+        @length.inplace.setter
+        def _length_setter(self, value: int) -> None:
+            self.end = self.start + value
+
+        @length.inplace.update_expression
+        def _length_update_expression(cls, value: Any) -> List[Tuple[Any, Any]]:
+            return [
+                (cls.end, cls.start + value)
+            ]
+
+Above, if we use ``Interval.length`` in an UPDATE expression, we get
+a hybrid SET expression:
+
+.. sourcecode:: pycon+sql
+
+
+    >>> from sqlalchemy import update
+    >>> print(update(Interval).values({Interval.length: 25}))
+    {printsql}UPDATE interval SET "end"=(interval.start + :start_1)
+
+This SET expression is accommodated by the ORM automatically.
+
+.. 
seealso::
+
+    :ref:`orm_expression_update_delete` - includes background on ORM-enabled
+    UPDATE statements
+
+
+Working with Relationships
+--------------------------
+
+There's no essential difference when creating hybrids that work with
+related objects as opposed to column-based data. The need for distinct
+expressions tends to be greater.  The two variants we'll illustrate
+are the "join-dependent" hybrid, and the "correlated subquery" hybrid.
+
+Join-Dependent Relationship Hybrid
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Consider the following declarative
+mapping which relates a ``User`` to a ``SavingsAccount``::
+
+    from __future__ import annotations
+
+    from decimal import Decimal
+    from typing import cast
+    from typing import List
+    from typing import Optional
+
+    from sqlalchemy import ForeignKey
+    from sqlalchemy import Numeric
+    from sqlalchemy import String
+    from sqlalchemy import SQLColumnExpression
+    from sqlalchemy.ext.hybrid import hybrid_property
+    from sqlalchemy.orm import DeclarativeBase
+    from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm import mapped_column
+    from sqlalchemy.orm import relationship
+
+
+    class Base(DeclarativeBase):
+        pass
+
+
+    class SavingsAccount(Base):
+        __tablename__ = 'account'
+        id: Mapped[int] = mapped_column(primary_key=True)
+        user_id: Mapped[int] = mapped_column(ForeignKey('user.id'))
+        balance: Mapped[Decimal] = mapped_column(Numeric(15, 5))
+
+        owner: Mapped[User] = relationship(back_populates="accounts")
+
+    class User(Base):
+        __tablename__ = 'user'
+        id: Mapped[int] = mapped_column(primary_key=True)
+        name: Mapped[str] = mapped_column(String(100))
+
+        accounts: Mapped[List[SavingsAccount]] = relationship(
+            back_populates="owner", lazy="selectin"
+        )
+
+        @hybrid_property
+        def balance(self) -> Optional[Decimal]:
+            if self.accounts:
+                return self.accounts[0].balance
+            else:
+                return None
+
+        @balance.inplace.setter
+        def _balance_setter(self, value: Optional[Decimal]) -> None:
+            assert value is not None
+
+            if not self.accounts:
+                account = SavingsAccount(owner=self)
+            else:
+                account = self.accounts[0]
+            account.balance = value
+
+        @balance.inplace.expression
+        @classmethod
+        def _balance_expression(cls) -> SQLColumnExpression[Optional[Decimal]]:
+            return cast("SQLColumnExpression[Optional[Decimal]]", SavingsAccount.balance)
+
+The above hybrid property ``balance`` works with the first
+``SavingsAccount`` entry in the list of accounts for this user.   The
+in-Python getter/setter methods can treat ``accounts`` as a Python
+list available on ``self``.
+
+.. tip::  The ``User.balance`` getter in the above example accesses the
+   ``self.accounts`` collection, which will normally be loaded via the
+   :func:`.selectinload` loader strategy configured on the ``User.accounts``
+   :func:`_orm.relationship`. The default loader strategy when not otherwise
+   stated on :func:`_orm.relationship` is :func:`.lazyload`, which emits SQL on
+   demand. When using asyncio, on-demand loaders such as :func:`.lazyload` are
+   not supported, so care should be taken to ensure the ``self.accounts``
+   collection is accessible to this hybrid accessor when using asyncio.
+
+At the expression level, it's expected that the ``User`` class will
+be used in a context where a suitable join to
+``SavingsAccount`` is present:
+
+.. sourcecode:: pycon+sql
+
+    >>> from sqlalchemy import select
+    >>> print(select(User, User.balance).
+    ... 
join(User.accounts).filter(User.balance > 5000))
+    {printsql}SELECT "user".id AS user_id, "user".name AS user_name,
+    account.balance AS account_balance
+    FROM "user" JOIN account ON "user".id = account.user_id
+    WHERE account.balance > :balance_1
+
+Note, however, that while the instance level accessors need to worry
+about whether ``self.accounts`` is even present, this issue expresses
+itself differently at the SQL expression level, where we would
+essentially use an outer join:
+
+.. sourcecode:: pycon+sql
+
+    >>> from sqlalchemy import select
+    >>> from sqlalchemy import or_
+    >>> print(select(User, User.balance).outerjoin(User.accounts).
+    ...       filter(or_(User.balance < 5000, User.balance == None)))
+    {printsql}SELECT "user".id AS user_id, "user".name AS user_name,
+    account.balance AS account_balance
+    FROM "user" LEFT OUTER JOIN account ON "user".id = account.user_id
+    WHERE account.balance < :balance_1 OR account.balance IS NULL
+
+Correlated Subquery Relationship Hybrid
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+We can, of course, forgo being dependent on the enclosing query's usage
+of joins in favor of the correlated subquery, which can portably be packed
+into a single column expression. A correlated subquery is more portable, but
+often performs more poorly at the SQL level. Using the same technique
+illustrated at :ref:`mapper_column_property_sql_expressions`,
+we can adjust our ``SavingsAccount`` example to aggregate the balances for
+*all* accounts, and use a correlated subquery for the column expression::
+
+    from __future__ import annotations
+
+    from decimal import Decimal
+    from typing import List
+
+    from sqlalchemy import ForeignKey
+    from sqlalchemy import func
+    from sqlalchemy import Numeric
+    from sqlalchemy import select
+    from sqlalchemy import SQLColumnExpression
+    from sqlalchemy import String
+    from sqlalchemy.ext.hybrid import hybrid_property
+    from sqlalchemy.orm import DeclarativeBase
+    from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm import mapped_column
+    from sqlalchemy.orm import relationship
+
+
+    class Base(DeclarativeBase):
+        pass
+
+
+    class SavingsAccount(Base):
+        __tablename__ = 'account'
+        id: Mapped[int] = mapped_column(primary_key=True)
+        user_id: Mapped[int] = mapped_column(ForeignKey('user.id'))
+        balance: Mapped[Decimal] = mapped_column(Numeric(15, 5))
+
+        owner: Mapped[User] = relationship(back_populates="accounts")
+
+    class User(Base):
+        __tablename__ = 'user'
+        id: Mapped[int] = mapped_column(primary_key=True)
+        name: Mapped[str] = mapped_column(String(100))
+
+        accounts: Mapped[List[SavingsAccount]] = relationship(
+            back_populates="owner", lazy="selectin"
+        )
+
+        @hybrid_property
+        def balance(self) -> Decimal:
+            return sum((acc.balance for acc in self.accounts), start=Decimal("0"))
+
+        @balance.inplace.expression
+        @classmethod
+        def _balance_expression(cls) -> SQLColumnExpression[Decimal]:
+            return (
+                select(func.sum(SavingsAccount.balance))
+                .where(SavingsAccount.user_id == cls.id)
+                .label("total_balance")
+            )
+
+
+The above recipe will give us the ``balance`` column which renders
+a correlated SELECT:
+
+.. sourcecode:: pycon+sql
+
+    >>> from sqlalchemy import select
+    >>> print(select(User).filter(User.balance > 400))
+    {printsql}SELECT "user".id, "user".name
+    FROM "user"
+    WHERE (
+        SELECT sum(account.balance) AS sum_1 FROM account
+        WHERE account.user_id = "user".id
+    ) > :param_1
+
+
+.. 
_hybrid_custom_comparators: + +Building Custom Comparators +--------------------------- + +The hybrid property also includes a helper that allows construction of +custom comparators. A comparator object allows one to customize the +behavior of each SQLAlchemy expression operator individually. They +are useful when creating custom types that have some highly +idiosyncratic behavior on the SQL side. + +.. note:: The :meth:`.hybrid_property.comparator` decorator introduced + in this section **replaces** the use of the + :meth:`.hybrid_property.expression` decorator. + They cannot be used together. + +The example class below allows case-insensitive comparisons on the attribute +named ``word_insensitive``:: + + from __future__ import annotations + + from typing import Any + + from sqlalchemy import ColumnElement + from sqlalchemy import func + from sqlalchemy.ext.hybrid import Comparator + from sqlalchemy.ext.hybrid import hybrid_property + from sqlalchemy.orm import DeclarativeBase + from sqlalchemy.orm import Mapped + from sqlalchemy.orm import mapped_column + + class Base(DeclarativeBase): + pass + + + class CaseInsensitiveComparator(Comparator[str]): + def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 + return func.lower(self.__clause_element__()) == func.lower(other) + + class SearchWord(Base): + __tablename__ = 'searchword' + + id: Mapped[int] = mapped_column(primary_key=True) + word: Mapped[str] + + @hybrid_property + def word_insensitive(self) -> str: + return self.word.lower() + + @word_insensitive.inplace.comparator + @classmethod + def _word_insensitive_comparator(cls) -> CaseInsensitiveComparator: + return CaseInsensitiveComparator(cls.word) + +Above, SQL expressions against ``word_insensitive`` will apply the ``LOWER()`` +SQL function to both sides: + +.. sourcecode:: pycon+sql + + >>> from sqlalchemy import select + >>> print(select(SearchWord).filter_by(word_insensitive="Trucks")) + {printsql}SELECT searchword.id, searchword.word + FROM searchword + WHERE lower(searchword.word) = lower(:lower_1) + + +The ``CaseInsensitiveComparator`` above implements part of the +:class:`.ColumnOperators` interface. A "coercion" operation like +lowercasing can be applied to all comparison operations (i.e. ``eq``, +``lt``, ``gt``, etc.) using :meth:`.Operators.operate`:: + + class CaseInsensitiveComparator(Comparator): + def operate(self, op, other, **kwargs): + return op( + func.lower(self.__clause_element__()), + func.lower(other), + **kwargs, + ) + +.. _hybrid_reuse_subclass: + +Reusing Hybrid Properties across Subclasses +------------------------------------------- + +A hybrid can be referred to from a superclass, to allow modifying +methods like :meth:`.hybrid_property.getter`, :meth:`.hybrid_property.setter` +to be used to redefine those methods on a subclass. This is similar to +how the standard Python ``@property`` object works:: + + class FirstNameOnly(Base): + # ... + + first_name: Mapped[str] + + @hybrid_property + def name(self) -> str: + return self.first_name + + @name.inplace.setter + def _name_setter(self, value: str) -> None: + self.first_name = value + + class FirstNameLastName(FirstNameOnly): + # ... 
+
+        last_name: Mapped[str]
+
+        # 'inplace' is not used here; calling getter creates a copy
+        # of FirstNameOnly.name that is local to FirstNameLastName
+        @FirstNameOnly.name.getter
+        def name(self) -> str:
+            return self.first_name + ' ' + self.last_name
+
+        @name.inplace.setter
+        def _name_setter(self, value: str) -> None:
+            self.first_name, self.last_name = value.split(' ', 1)
+
+Above, the ``FirstNameLastName`` class refers to the hybrid from
+``FirstNameOnly.name`` to repurpose its getter and setter for the subclass.
+
+When overriding :meth:`.hybrid_property.expression` and
+:meth:`.hybrid_property.comparator` alone as the first reference to the
+superclass, these names conflict with the same-named accessors on the
+class-level :class:`.QueryableAttribute` object returned at the class
+level.  To override these methods when referring directly to the parent
+class descriptor, add the special qualifier
+:attr:`.hybrid_property.overrides`, which will de-reference the
+instrumented attribute back to the hybrid object::
+
+    class FirstNameLastName(FirstNameOnly):
+        # ...
+
+        last_name: Mapped[str]
+
+        @FirstNameOnly.name.overrides.expression
+        @classmethod
+        def name(cls):
+            return func.concat(cls.first_name, ' ', cls.last_name)
+
+
+Hybrid Value Objects
+--------------------
+
+Note in our previous example, if we were to compare the ``word_insensitive``
+attribute of a ``SearchWord`` instance to a plain Python string, the plain
+Python string would not be coerced to lower case - the
+``CaseInsensitiveComparator`` we built, being returned by
+``@word_insensitive.comparator``, only applies to the SQL side.
+
+A more comprehensive form of the custom comparator is to construct a *Hybrid
+Value Object*. This technique applies the target value or expression to a value
+object which is then returned by the accessor in all cases.   The value object
+allows control of all operations upon the value as well as how compared values
+are treated, both on the SQL expression side as well as the Python value side.
+Replacing the previous ``CaseInsensitiveComparator`` class with a new
+``CaseInsensitiveWord`` class::
+
+    class CaseInsensitiveWord(Comparator):
+        "Hybrid value representing a lower case representation of a word."
+
+        def __init__(self, word):
+            if isinstance(word, str):
+                self.word = word.lower()
+            elif isinstance(word, CaseInsensitiveWord):
+                self.word = word.word
+            else:
+                self.word = func.lower(word)
+
+        def operate(self, op, other, **kwargs):
+            if not isinstance(other, CaseInsensitiveWord):
+                other = CaseInsensitiveWord(other)
+            return op(self.word, other.word, **kwargs)
+
+        def __clause_element__(self):
+            return self.word
+
+        def __str__(self):
+            return self.word
+
+        key = 'word'
+        "Label to apply to Query tuple results"
+
+Above, the ``CaseInsensitiveWord`` object represents ``self.word``, which may
+be a SQL function, or may be a Python native.   By overriding ``operate()`` and
+``__clause_element__()`` to work in terms of ``self.word``, all comparison
+operations will work against the "converted" form of ``word``, whether it be
+SQL side or Python side. 
Our ``SearchWord`` class can now deliver the +``CaseInsensitiveWord`` object unconditionally from a single hybrid call:: + + class SearchWord(Base): + __tablename__ = 'searchword' + id: Mapped[int] = mapped_column(primary_key=True) + word: Mapped[str] + + @hybrid_property + def word_insensitive(self) -> CaseInsensitiveWord: + return CaseInsensitiveWord(self.word) + +The ``word_insensitive`` attribute now has case-insensitive comparison behavior +universally, including SQL expression vs. Python expression (note the Python +value is converted to lower case on the Python side here): + +.. sourcecode:: pycon+sql + + >>> print(select(SearchWord).filter_by(word_insensitive="Trucks")) + {printsql}SELECT searchword.id AS searchword_id, searchword.word AS searchword_word + FROM searchword + WHERE lower(searchword.word) = :lower_1 + +SQL expression versus SQL expression: + +.. sourcecode:: pycon+sql + + >>> from sqlalchemy.orm import aliased + >>> sw1 = aliased(SearchWord) + >>> sw2 = aliased(SearchWord) + >>> print( + ... select(sw1.word_insensitive, sw2.word_insensitive).filter( + ... sw1.word_insensitive > sw2.word_insensitive + ... ) + ... ) + {printsql}SELECT lower(searchword_1.word) AS lower_1, + lower(searchword_2.word) AS lower_2 + FROM searchword AS searchword_1, searchword AS searchword_2 + WHERE lower(searchword_1.word) > lower(searchword_2.word) + +Python only expression:: + + >>> ws1 = SearchWord(word="SomeWord") + >>> ws1.word_insensitive == "sOmEwOrD" + True + >>> ws1.word_insensitive == "XOmEwOrX" + False + >>> print(ws1.word_insensitive) + someword + +The Hybrid Value pattern is very useful for any kind of value that may have +multiple representations, such as timestamps, time deltas, units of +measurement, currencies and encrypted passwords. + +.. seealso:: + + `Hybrids and Value Agnostic Types + `_ + - on the techspot.zzzeek.org blog + + `Value Agnostic Types, Part II + `_ - + on the techspot.zzzeek.org blog + + +""" # noqa + +from __future__ import annotations + +from typing import Any +from typing import Callable +from typing import cast +from typing import Generic +from typing import List +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from .. 
import util
+from ..orm import attributes
+from ..orm import InspectionAttrExtensionType
+from ..orm import interfaces
+from ..orm import ORMDescriptor
+from ..orm.attributes import QueryableAttribute
+from ..sql import roles
+from ..sql._typing import is_has_clause_element
+from ..sql.elements import ColumnElement
+from ..sql.elements import SQLCoreOperations
+from ..util.typing import Concatenate
+from ..util.typing import Literal
+from ..util.typing import ParamSpec
+from ..util.typing import Protocol
+from ..util.typing import Self
+
+if TYPE_CHECKING:
+    from ..orm.interfaces import MapperProperty
+    from ..orm.util import AliasedInsp
+    from ..sql import SQLColumnExpression
+    from ..sql._typing import _ColumnExpressionArgument
+    from ..sql._typing import _DMLColumnArgument
+    from ..sql._typing import _HasClauseElement
+    from ..sql._typing import _InfoType
+    from ..sql.operators import OperatorType
+
+_P = ParamSpec("_P")
+_R = TypeVar("_R")
+_T = TypeVar("_T", bound=Any)
+_TE = TypeVar("_TE", bound=Any)
+_T_co = TypeVar("_T_co", bound=Any, covariant=True)
+_T_con = TypeVar("_T_con", bound=Any, contravariant=True)
+
+
+class HybridExtensionType(InspectionAttrExtensionType):
+    HYBRID_METHOD = "HYBRID_METHOD"
+    """Symbol indicating an :class:`InspectionAttr` that's
+    of type :class:`.hybrid_method`.
+
+    Is assigned to the :attr:`.InspectionAttr.extension_type`
+    attribute.
+
+    .. seealso::
+
+        :attr:`_orm.Mapper.all_orm_attributes`
+
+    """
+
+    HYBRID_PROPERTY = "HYBRID_PROPERTY"
+    """Symbol indicating an :class:`InspectionAttr` that's
+    of type :class:`.hybrid_property`.
+
+    Is assigned to the :attr:`.InspectionAttr.extension_type`
+    attribute.
+
+    .. seealso::
+
+        :attr:`_orm.Mapper.all_orm_attributes`
+
+    """
+
+
+class _HybridGetterType(Protocol[_T_co]):
+    def __call__(s, self: Any) -> _T_co: ...
+
+
+class _HybridSetterType(Protocol[_T_con]):
+    def __call__(s, self: Any, value: _T_con) -> None: ...
+
+
+class _HybridUpdaterType(Protocol[_T_con]):
+    def __call__(
+        s,
+        cls: Any,
+        value: Union[_T_con, _ColumnExpressionArgument[_T_con]],
+    ) -> List[Tuple[_DMLColumnArgument, Any]]: ...
+
+
+class _HybridDeleterType(Protocol[_T_co]):
+    def __call__(s, self: Any) -> None: ...
+
+
+class _HybridExprCallableType(Protocol[_T_co]):
+    def __call__(
+        s, cls: Any
+    ) -> Union[_HasClauseElement[_T_co], SQLColumnExpression[_T_co]]: ...
+
+
+class _HybridComparatorCallableType(Protocol[_T]):
+    def __call__(self, cls: Any) -> Comparator[_T]: ...
+
+
+class _HybridClassLevelAccessor(QueryableAttribute[_T]):
+    """Describe the object returned by a hybrid_property() when
+    called as a class-level descriptor.
+
+    """
+
+    if TYPE_CHECKING:
+
+        def getter(
+            self, fget: _HybridGetterType[_T]
+        ) -> hybrid_property[_T]: ...
+
+        def setter(
+            self, fset: _HybridSetterType[_T]
+        ) -> hybrid_property[_T]: ...
+
+        def deleter(
+            self, fdel: _HybridDeleterType[_T]
+        ) -> hybrid_property[_T]: ...
+
+        @property
+        def overrides(self) -> hybrid_property[_T]: ...
+
+        def update_expression(
+            self, meth: _HybridUpdaterType[_T]
+        ) -> hybrid_property[_T]: ...
+
+
+class hybrid_method(interfaces.InspectionAttrInfo, Generic[_P, _R]):
+    """A decorator which allows definition of a Python object method with both
+    instance-level and class-level behavior.
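+
+    When accessed from an instance, the plain Python function is invoked
+    with ``self`` in the usual way; when accessed from the class, the
+    expression-producing function is invoked with the class, returning a
+    SQL expression (see ``__get__`` below).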
+ + """ + + is_attribute = True + extension_type = HybridExtensionType.HYBRID_METHOD + + def __init__( + self, + func: Callable[Concatenate[Any, _P], _R], + expr: Optional[ + Callable[Concatenate[Any, _P], SQLCoreOperations[_R]] + ] = None, + ): + """Create a new :class:`.hybrid_method`. + + Usage is typically via decorator:: + + from sqlalchemy.ext.hybrid import hybrid_method + + class SomeClass: + @hybrid_method + def value(self, x, y): + return self._value + x + y + + @value.expression + @classmethod + def value(cls, x, y): + return func.some_function(cls._value, x, y) + + """ + self.func = func + if expr is not None: + self.expression(expr) + else: + self.expression(func) # type: ignore + + @property + def inplace(self) -> Self: + """Return the inplace mutator for this :class:`.hybrid_method`. + + The :class:`.hybrid_method` class already performs "in place" mutation + when the :meth:`.hybrid_method.expression` decorator is called, + so this attribute returns Self. + + .. versionadded:: 2.0.4 + + .. seealso:: + + :ref:`hybrid_pep484_naming` + + """ + return self + + @overload + def __get__( + self, instance: Literal[None], owner: Type[object] + ) -> Callable[_P, SQLCoreOperations[_R]]: ... + + @overload + def __get__( + self, instance: object, owner: Type[object] + ) -> Callable[_P, _R]: ... + + def __get__( + self, instance: Optional[object], owner: Type[object] + ) -> Union[Callable[_P, _R], Callable[_P, SQLCoreOperations[_R]]]: + if instance is None: + return self.expr.__get__(owner, owner) # type: ignore + else: + return self.func.__get__(instance, owner) # type: ignore + + def expression( + self, expr: Callable[Concatenate[Any, _P], SQLCoreOperations[_R]] + ) -> hybrid_method[_P, _R]: + """Provide a modifying decorator that defines a + SQL-expression producing method.""" + + self.expr = expr + if not self.expr.__doc__: + self.expr.__doc__ = self.func.__doc__ + return self + + +def _unwrap_classmethod(meth: _T) -> _T: + if isinstance(meth, classmethod): + return meth.__func__ # type: ignore + else: + return meth + + +class hybrid_property(interfaces.InspectionAttrInfo, ORMDescriptor[_T]): + """A decorator which allows definition of a Python descriptor with both + instance-level and class-level behavior. + + """ + + is_attribute = True + extension_type = HybridExtensionType.HYBRID_PROPERTY + + __name__: str + + def __init__( + self, + fget: _HybridGetterType[_T], + fset: Optional[_HybridSetterType[_T]] = None, + fdel: Optional[_HybridDeleterType[_T]] = None, + expr: Optional[_HybridExprCallableType[_T]] = None, + custom_comparator: Optional[Comparator[_T]] = None, + update_expr: Optional[_HybridUpdaterType[_T]] = None, + ): + """Create a new :class:`.hybrid_property`. + + Usage is typically via decorator:: + + from sqlalchemy.ext.hybrid import hybrid_property + + class SomeClass: + @hybrid_property + def value(self): + return self._value + + @value.setter + def value(self, value): + self._value = value + + """ + self.fget = fget + self.fset = fset + self.fdel = fdel + self.expr = _unwrap_classmethod(expr) + self.custom_comparator = _unwrap_classmethod(custom_comparator) + self.update_expr = _unwrap_classmethod(update_expr) + util.update_wrapper(self, fget) # type: ignore[arg-type] + + @overload + def __get__(self, instance: Any, owner: Literal[None]) -> Self: ... + + @overload + def __get__( + self, instance: Literal[None], owner: Type[object] + ) -> _HybridClassLevelAccessor[_T]: ... + + @overload + def __get__(self, instance: object, owner: Type[object]) -> _T: ... 
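+
+    # implementation behind the overloads above: with no owner, return the
+    # hybrid itself; for class-level access, return the wrapped SQL
+    # expression comparator; for instance-level access, call the plain
+    # Python getter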
+
+    def __get__(
+        self, instance: Optional[object], owner: Optional[Type[object]]
+    ) -> Union[hybrid_property[_T], _HybridClassLevelAccessor[_T], _T]:
+        if owner is None:
+            return self
+        elif instance is None:
+            return self._expr_comparator(owner)
+        else:
+            return self.fget(instance)
+
+    def __set__(self, instance: object, value: Any) -> None:
+        if self.fset is None:
+            raise AttributeError("can't set attribute")
+        self.fset(instance, value)
+
+    def __delete__(self, instance: object) -> None:
+        if self.fdel is None:
+            raise AttributeError("can't delete attribute")
+        self.fdel(instance)
+
+    def _copy(self, **kw: Any) -> hybrid_property[_T]:
+        defaults = {
+            key: value
+            for key, value in self.__dict__.items()
+            if not key.startswith("_")
+        }
+        defaults.update(**kw)
+        return type(self)(**defaults)
+
+    @property
+    def overrides(self) -> Self:
+        """Prefix for a method that is overriding an existing attribute.
+
+        The :attr:`.hybrid_property.overrides` accessor just returns
+        this hybrid object, which when called at the class level from
+        a parent class, will de-reference the "instrumented attribute"
+        normally returned at this level, and allow modifying decorators
+        like :meth:`.hybrid_property.expression` and
+        :meth:`.hybrid_property.comparator`
+        to be used without conflicting with the same-named attributes
+        normally present on the :class:`.QueryableAttribute`::
+
+            class SuperClass:
+                # ...
+
+                @hybrid_property
+                def foobar(self):
+                    return self._foobar
+
+            class SubClass(SuperClass):
+                # ...
+
+                @SuperClass.foobar.overrides.expression
+                def foobar(cls):
+                    return func.subfoobar(cls._foobar)
+
+        .. versionadded:: 1.2
+
+        .. seealso::
+
+            :ref:`hybrid_reuse_subclass`
+
+        """
+        return self
+
+    class _InPlace(Generic[_TE]):
+        """A builder helper for .hybrid_property.
+
+        .. versionadded:: 2.0.4
+
+        """
+
+        __slots__ = ("attr",)
+
+        def __init__(self, attr: hybrid_property[_TE]):
+            self.attr = attr
+
+        def _set(self, **kw: Any) -> hybrid_property[_TE]:
+            for k, v in kw.items():
+                setattr(self.attr, k, _unwrap_classmethod(v))
+            return self.attr
+
+        def getter(self, fget: _HybridGetterType[_TE]) -> hybrid_property[_TE]:
+            return self._set(fget=fget)
+
+        def setter(self, fset: _HybridSetterType[_TE]) -> hybrid_property[_TE]:
+            return self._set(fset=fset)
+
+        def deleter(
+            self, fdel: _HybridDeleterType[_TE]
+        ) -> hybrid_property[_TE]:
+            return self._set(fdel=fdel)
+
+        def expression(
+            self, expr: _HybridExprCallableType[_TE]
+        ) -> hybrid_property[_TE]:
+            return self._set(expr=expr)
+
+        def comparator(
+            self, comparator: _HybridComparatorCallableType[_TE]
+        ) -> hybrid_property[_TE]:
+            return self._set(custom_comparator=comparator)
+
+        def update_expression(
+            self, meth: _HybridUpdaterType[_TE]
+        ) -> hybrid_property[_TE]:
+            return self._set(update_expr=meth)
+
+    @property
+    def inplace(self) -> _InPlace[_T]:
+        """Return the inplace mutator for this :class:`.hybrid_property`.
+
+        This is to allow in-place mutation of the hybrid, allowing the first
+        hybrid method of a certain name to be re-used in order to add
+        more methods without having to name those methods the same, e.g.::
+
+            class Interval(Base):
+                # ...
+
+                @hybrid_property
+                def radius(self) -> float:
+                    return abs(self.length) / 2
+
+                @radius.inplace.setter
+                def _radius_setter(self, value: float) -> None:
+                    self.length = value * 2
+
+                @radius.inplace.expression
+                def _radius_expression(cls) -> ColumnElement[float]:
+                    return type_coerce(func.abs(cls.length) / 2, Float)
+
+        .. versionadded:: 2.0.4
+
+        .. 
seealso::
+
+            :ref:`hybrid_pep484_naming`
+
+        """
+        return hybrid_property._InPlace(self)
+
+    def getter(self, fget: _HybridGetterType[_T]) -> hybrid_property[_T]:
+        """Provide a modifying decorator that defines a getter method.
+
+        .. versionadded:: 1.2
+
+        """
+
+        return self._copy(fget=fget)
+
+    def setter(self, fset: _HybridSetterType[_T]) -> hybrid_property[_T]:
+        """Provide a modifying decorator that defines a setter method."""
+
+        return self._copy(fset=fset)
+
+    def deleter(self, fdel: _HybridDeleterType[_T]) -> hybrid_property[_T]:
+        """Provide a modifying decorator that defines a deletion method."""
+
+        return self._copy(fdel=fdel)
+
+    def expression(
+        self, expr: _HybridExprCallableType[_T]
+    ) -> hybrid_property[_T]:
+        """Provide a modifying decorator that defines a SQL-expression
+        producing method.
+
+        When a hybrid is invoked at the class level, the SQL expression given
+        here is wrapped inside of a specialized :class:`.QueryableAttribute`,
+        which is the same kind of object used by the ORM to represent other
+        mapped attributes.   The reason for this is so that other class-level
+        attributes such as docstrings and a reference to the hybrid itself may
+        be maintained within the structure that's returned, without any
+        modifications to the original SQL expression passed in.
+
+        .. note::
+
+           When referring to a hybrid property from an owning class (e.g.
+           ``SomeClass.some_hybrid``), an instance of
+           :class:`.QueryableAttribute` is returned, representing the
+           expression or comparator object as well as this hybrid object.
+           However, that object itself has accessors called ``expression`` and
+           ``comparator``; so when attempting to override these decorators on a
+           subclass, it may be necessary to qualify it using the
+           :attr:`.hybrid_property.overrides` modifier first.  See that
+           modifier for details.
+
+        .. seealso::
+
+            :ref:`hybrid_distinct_expression`
+
+        """
+
+        return self._copy(expr=expr)
+
+    def comparator(
+        self, comparator: _HybridComparatorCallableType[_T]
+    ) -> hybrid_property[_T]:
+        """Provide a modifying decorator that defines a custom
+        comparator producing method.
+
+        The return value of the decorated method should be an instance of
+        :class:`~.hybrid.Comparator`.
+
+        .. note::  The :meth:`.hybrid_property.comparator` decorator
+           **replaces** the use of the :meth:`.hybrid_property.expression`
+           decorator.  They cannot be used together.
+
+        When a hybrid is invoked at the class level, the
+        :class:`~.hybrid.Comparator` object given here is wrapped inside of a
+        specialized :class:`.QueryableAttribute`, which is the same kind of
+        object used by the ORM to represent other mapped attributes.   The
+        reason for this is so that other class-level attributes such as
+        docstrings and a reference to the hybrid itself may be maintained
+        within the structure that's returned, without any modifications to the
+        original comparator object passed in.
+
+        .. note::
+
+           When referring to a hybrid property from an owning class (e.g.
+           ``SomeClass.some_hybrid``), an instance of
+           :class:`.QueryableAttribute` is returned, representing the
+           expression or comparator object as well as this hybrid object.
+           However, that object itself has accessors called ``expression`` and
+           ``comparator``; so when attempting to override these decorators on a
+           subclass, it may be necessary to qualify it using the
+           :attr:`.hybrid_property.overrides` modifier first.  See that
+           modifier for details.
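+
+        .. seealso::
+
+            :ref:`hybrid_custom_comparators`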
+ + """ + return self._copy(custom_comparator=comparator) + + def update_expression( + self, meth: _HybridUpdaterType[_T] + ) -> hybrid_property[_T]: + """Provide a modifying decorator that defines an UPDATE tuple + producing method. + + The method accepts a single value, which is the value to be + rendered into the SET clause of an UPDATE statement. The method + should then process this value into individual column expressions + that fit into the ultimate SET clause, and return them as a + sequence of 2-tuples. Each tuple + contains a column expression as the key and a value to be rendered. + + E.g.:: + + class Person(Base): + # ... + + first_name = Column(String) + last_name = Column(String) + + @hybrid_property + def fullname(self): + return first_name + " " + last_name + + @fullname.update_expression + def fullname(cls, value): + fname, lname = value.split(" ", 1) + return [ + (cls.first_name, fname), + (cls.last_name, lname) + ] + + .. versionadded:: 1.2 + + """ + return self._copy(update_expr=meth) + + @util.memoized_property + def _expr_comparator( + self, + ) -> Callable[[Any], _HybridClassLevelAccessor[_T]]: + if self.custom_comparator is not None: + return self._get_comparator(self.custom_comparator) + elif self.expr is not None: + return self._get_expr(self.expr) + else: + return self._get_expr(cast(_HybridExprCallableType[_T], self.fget)) + + def _get_expr( + self, expr: _HybridExprCallableType[_T] + ) -> Callable[[Any], _HybridClassLevelAccessor[_T]]: + def _expr(cls: Any) -> ExprComparator[_T]: + return ExprComparator(cls, expr(cls), self) + + util.update_wrapper(_expr, expr) + + return self._get_comparator(_expr) + + def _get_comparator( + self, comparator: Any + ) -> Callable[[Any], _HybridClassLevelAccessor[_T]]: + proxy_attr = attributes.create_proxied_attribute(self) + + def expr_comparator( + owner: Type[object], + ) -> _HybridClassLevelAccessor[_T]: + # because this is the descriptor protocol, we don't really know + # what our attribute name is. so search for it through the + # MRO. + for lookup in owner.__mro__: + if self.__name__ in lookup.__dict__: + if lookup.__dict__[self.__name__] is self: + name = self.__name__ + break + else: + name = attributes._UNKNOWN_ATTR_KEY # type: ignore[assignment] + + return cast( + "_HybridClassLevelAccessor[_T]", + proxy_attr( + owner, + name, + self, + comparator(owner), + doc=comparator.__doc__ or self.__doc__, + ), + ) + + return expr_comparator + + +class Comparator(interfaces.PropComparator[_T]): + """A helper class that allows easy construction of custom + :class:`~.orm.interfaces.PropComparator` + classes for usage with hybrids.""" + + def __init__( + self, expression: Union[_HasClauseElement[_T], SQLColumnExpression[_T]] + ): + self.expression = expression + + def __clause_element__(self) -> roles.ColumnsClauseRole: + expr = self.expression + if is_has_clause_element(expr): + ret_expr = expr.__clause_element__() + else: + if TYPE_CHECKING: + assert isinstance(expr, ColumnElement) + ret_expr = expr + + if TYPE_CHECKING: + # see test_hybrid->test_expression_isnt_clause_element + # that exercises the usual place this is caught if not + # true + assert isinstance(ret_expr, ColumnElement) + return ret_expr + + @util.non_memoized_property + def property(self) -> interfaces.MapperProperty[_T]: + raise NotImplementedError() + + def adapt_to_entity( + self, adapt_to_entity: AliasedInsp[Any] + ) -> Comparator[_T]: + # interesting.... 
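+        # assumption: the expression wrapped here is typically re-generated
+        # per owning class by the hybrid (see _get_expr() above), so no
+        # per-entity adaption is attempted and the comparator is returned
+        # unchanged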
+ return self + + +class ExprComparator(Comparator[_T]): + def __init__( + self, + cls: Type[Any], + expression: Union[_HasClauseElement[_T], SQLColumnExpression[_T]], + hybrid: hybrid_property[_T], + ): + self.cls = cls + self.expression = expression + self.hybrid = hybrid + + def __getattr__(self, key: str) -> Any: + return getattr(self.expression, key) + + @util.ro_non_memoized_property + def info(self) -> _InfoType: + return self.hybrid.info + + def _bulk_update_tuples( + self, value: Any + ) -> Sequence[Tuple[_DMLColumnArgument, Any]]: + if isinstance(self.expression, attributes.QueryableAttribute): + return self.expression._bulk_update_tuples(value) + elif self.hybrid.update_expr is not None: + return self.hybrid.update_expr(self.cls, value) + else: + return [(self.expression, value)] + + @util.non_memoized_property + def property(self) -> MapperProperty[_T]: + # this accessor is not normally used, however is accessed by things + # like ORM synonyms if the hybrid is used in this context; the + # .property attribute is not necessarily accessible + return self.expression.property # type: ignore + + def operate( + self, op: OperatorType, *other: Any, **kwargs: Any + ) -> ColumnElement[Any]: + return op(self.expression, *other, **kwargs) + + def reverse_operate( + self, op: OperatorType, other: Any, **kwargs: Any + ) -> ColumnElement[Any]: + return op(other, self.expression, **kwargs) # type: ignore diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/indexable.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/indexable.py new file mode 100644 index 00000000..3c419308 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/indexable.py @@ -0,0 +1,341 @@ +# ext/indexable.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +"""Define attributes on ORM-mapped classes that have "index" attributes for +columns with :class:`_types.Indexable` types. + +"index" means the attribute is associated with an element of an +:class:`_types.Indexable` column with the predefined index to access it. +The :class:`_types.Indexable` types include types such as +:class:`_types.ARRAY`, :class:`_types.JSON` and +:class:`_postgresql.HSTORE`. + + + +The :mod:`~sqlalchemy.ext.indexable` extension provides +:class:`_schema.Column`-like interface for any element of an +:class:`_types.Indexable` typed column. In simple cases, it can be +treated as a :class:`_schema.Column` - mapped attribute. + +Synopsis +======== + +Given ``Person`` as a model with a primary key and JSON data field. +While this field may have any number of elements encoded within it, +we would like to refer to the element called ``name`` individually +as a dedicated attribute which behaves like a standalone column:: + + from sqlalchemy import Column, JSON, Integer + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.ext.indexable import index_property + + Base = declarative_base() + + class Person(Base): + __tablename__ = 'person' + + id = Column(Integer, primary_key=True) + data = Column(JSON) + + name = index_property('data', 'name') + + +Above, the ``name`` attribute now behaves like a mapped column. 
We
+can compose a new ``Person`` and set the value of ``name``::
+
+    >>> person = Person(name='Alchemist')
+
+The value is now accessible::
+
+    >>> person.name
+    'Alchemist'
+
+Behind the scenes, the JSON field was initialized to a new blank dictionary
+and the field was set::
+
+    >>> person.data
+    {'name': 'Alchemist'}
+
+The field is mutable in place::
+
+    >>> person.name = 'Renamed'
+    >>> person.name
+    'Renamed'
+    >>> person.data
+    {'name': 'Renamed'}
+
+When using :class:`.index_property`, the change that we make to the indexable
+structure is also automatically tracked as history; we no longer need
+to use :class:`~.mutable.MutableDict` in order to track this change
+for the unit of work.
+
+Deletions work normally as well::
+
+    >>> del person.name
+    >>> person.data
+    {}
+
+Above, deletion of ``person.name`` deletes the value from the dictionary,
+but not the dictionary itself.
+
+A missing key will produce ``AttributeError``::
+
+    >>> person = Person()
+    >>> person.name
+    ...
+    AttributeError: 'name'
+
+Unless you set a default value::
+
+    >>> class Person(Base):
+    >>>     __tablename__ = 'person'
+    >>>
+    >>>     id = Column(Integer, primary_key=True)
+    >>>     data = Column(JSON)
+    >>>
+    >>>     name = index_property('data', 'name', default=None)  # See default
+
+    >>> person = Person()
+    >>> print(person.name)
+    None
+
+
+The attributes are also accessible at the class level.
+Below, we illustrate ``Person.name`` used to generate
+an indexed SQL criterion::
+
+    >>> from sqlalchemy.orm import Session
+    >>> session = Session()
+    >>> query = session.query(Person).filter(Person.name == 'Alchemist')
+
+The above query is equivalent to::
+
+    >>> query = session.query(Person).filter(Person.data['name'] == 'Alchemist')
+
+Multiple :class:`.index_property` objects can be chained to produce
+multiple levels of indexing::
+
+    from sqlalchemy import Column, JSON, Integer
+    from sqlalchemy.ext.declarative import declarative_base
+    from sqlalchemy.ext.indexable import index_property
+
+    Base = declarative_base()
+
+    class Person(Base):
+        __tablename__ = 'person'
+
+        id = Column(Integer, primary_key=True)
+        data = Column(JSON)
+
+        birthday = index_property('data', 'birthday')
+        year = index_property('birthday', 'year')
+        month = index_property('birthday', 'month')
+        day = index_property('birthday', 'day')
+
+Given the above mapping, a query such as::
+
+    q = session.query(Person).filter(Person.year == '1980')
+
+will render on a PostgreSQL backend as::
+
+    SELECT person.id, person.data
+    FROM person
+    WHERE person.data -> %(data_1)s -> %(param_1)s = %(param_2)s
+
+Default Values
+==============
+
+:class:`.index_property` includes special behaviors for when the indexed
+data structure does not exist, and a set operation is called:
+
+* For an :class:`.index_property` that is given an integer index value,
+  the default data structure will be a Python list of ``None`` values,
+  at least as long as the index value; the value is then set at its
+  place in the list.  This means for an index value of zero, the list
+  will be initialized to ``[None]`` before setting the given value,
+  and for an index value of five, the list will be initialized to
+  ``[None, None, None, None, None, None]`` before setting the sixth
+  element (at zero-based index five) to the given value.  Note that an
+  existing list is **not** extended in place to receive a value.
+
+* For an :class:`.index_property` that is given any other kind of index
+  value (e.g. strings usually), a Python dictionary is used as the
+  default data structure.
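+  For example, an :class:`.index_property` against a string key such as
+  ``index_property('data', 'name')`` will initialize a ``None`` column
+  value to ``{}`` before setting the ``'name'`` key within it.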
+ +* The default data structure can be set to any Python callable using the + :paramref:`.index_property.datatype` parameter, overriding the previous + rules. + + +Subclassing +=========== + +:class:`.index_property` can be subclassed, in particular for the common +use case of providing coercion of values or SQL expressions as they are +accessed. Below is a common recipe for use with a PostgreSQL JSON type, +where we want to also include automatic casting plus ``astext()``:: + + class pg_json_property(index_property): + def __init__(self, attr_name, index, cast_type): + super(pg_json_property, self).__init__(attr_name, index) + self.cast_type = cast_type + + def expr(self, model): + expr = super(pg_json_property, self).expr(model) + return expr.astext.cast(self.cast_type) + +The above subclass can be used with the PostgreSQL-specific +version of :class:`_postgresql.JSON`:: + + from sqlalchemy import Column, Integer + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.dialects.postgresql import JSON + + Base = declarative_base() + + class Person(Base): + __tablename__ = 'person' + + id = Column(Integer, primary_key=True) + data = Column(JSON) + + age = pg_json_property('data', 'age', Integer) + +The ``age`` attribute at the instance level works as before; however +when rendering SQL, PostgreSQL's ``->>`` operator will be used +for indexed access, instead of the usual index operator of ``->``:: + + >>> query = session.query(Person).filter(Person.age < 20) + +The above query will render:: + + SELECT person.id, person.data + FROM person + WHERE CAST(person.data ->> %(data_1)s AS INTEGER) < %(param_1)s + +""" # noqa +from .. import inspect +from ..ext.hybrid import hybrid_property +from ..orm.attributes import flag_modified + + +__all__ = ["index_property"] + + +class index_property(hybrid_property): # noqa + """A property generator. The generated property describes an object + attribute that corresponds to an :class:`_types.Indexable` + column. + + .. seealso:: + + :mod:`sqlalchemy.ext.indexable` + + """ + + _NO_DEFAULT_ARGUMENT = object() + + def __init__( + self, + attr_name, + index, + default=_NO_DEFAULT_ARGUMENT, + datatype=None, + mutable=True, + onebased=True, + ): + """Create a new :class:`.index_property`. + + :param attr_name: + An attribute name of an `Indexable` typed column, or other + attribute that returns an indexable structure. + :param index: + The index to be used for getting and setting this value. This + should be the Python-side index value for integers. + :param default: + A value which will be returned instead of `AttributeError` + when there is not a value at given index. + :param datatype: default datatype to use when the field is empty. + By default, this is derived from the type of index used; a + Python list for an integer index, or a Python dictionary for + any other style of index. For a list, the list will be + initialized to a list of None values that is at least + ``index`` elements long. + :param mutable: if False, writes and deletes to the attribute will + be disallowed. + :param onebased: assume the SQL representation of this value is + one-based; that is, the first index in SQL is 1, not zero. 
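+
+        E.g., a minimal sketch of an integer index against the ``data``
+        column from the examples above::
+
+            # Python-side index 0; rendered as index 1 in SQL when
+            # onebased=True (the default)
+            first = index_property('data', 0)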
+ """ + + if mutable: + super().__init__(self.fget, self.fset, self.fdel, self.expr) + else: + super().__init__(self.fget, None, None, self.expr) + self.attr_name = attr_name + self.index = index + self.default = default + is_numeric = isinstance(index, int) + onebased = is_numeric and onebased + + if datatype is not None: + self.datatype = datatype + else: + if is_numeric: + self.datatype = lambda: [None for x in range(index + 1)] + else: + self.datatype = dict + self.onebased = onebased + + def _fget_default(self, err=None): + if self.default == self._NO_DEFAULT_ARGUMENT: + raise AttributeError(self.attr_name) from err + else: + return self.default + + def fget(self, instance): + attr_name = self.attr_name + column_value = getattr(instance, attr_name) + if column_value is None: + return self._fget_default() + try: + value = column_value[self.index] + except (KeyError, IndexError) as err: + return self._fget_default(err) + else: + return value + + def fset(self, instance, value): + attr_name = self.attr_name + column_value = getattr(instance, attr_name, None) + if column_value is None: + column_value = self.datatype() + setattr(instance, attr_name, column_value) + column_value[self.index] = value + setattr(instance, attr_name, column_value) + if attr_name in inspect(instance).mapper.attrs: + flag_modified(instance, attr_name) + + def fdel(self, instance): + attr_name = self.attr_name + column_value = getattr(instance, attr_name) + if column_value is None: + raise AttributeError(self.attr_name) + try: + del column_value[self.index] + except KeyError as err: + raise AttributeError(self.attr_name) from err + else: + setattr(instance, attr_name, column_value) + flag_modified(instance, attr_name) + + def expr(self, model): + column = getattr(model, self.attr_name) + index = self.index + if self.onebased: + index += 1 + return column[index] diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/instrumentation.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/instrumentation.py new file mode 100644 index 00000000..5f3c7128 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/instrumentation.py @@ -0,0 +1,450 @@ +# ext/instrumentation.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +"""Extensible class instrumentation. + +The :mod:`sqlalchemy.ext.instrumentation` package provides for alternate +systems of class instrumentation within the ORM. Class instrumentation +refers to how the ORM places attributes on the class which maintain +data and track changes to that data, as well as event hooks installed +on the class. + +.. note:: + The extension package is provided for the benefit of integration + with other object management packages, which already perform + their own instrumentation. It is not intended for general use. + +For examples of how the instrumentation extension is used, +see the example :ref:`examples_instrumentation`. + +""" +import weakref + +from .. 
import util +from ..orm import attributes +from ..orm import base as orm_base +from ..orm import collections +from ..orm import exc as orm_exc +from ..orm import instrumentation as orm_instrumentation +from ..orm import util as orm_util +from ..orm.instrumentation import _default_dict_getter +from ..orm.instrumentation import _default_manager_getter +from ..orm.instrumentation import _default_opt_manager_getter +from ..orm.instrumentation import _default_state_getter +from ..orm.instrumentation import ClassManager +from ..orm.instrumentation import InstrumentationFactory + + +INSTRUMENTATION_MANAGER = "__sa_instrumentation_manager__" +"""Attribute, elects custom instrumentation when present on a mapped class. + +Allows a class to specify a slightly or wildly different technique for +tracking changes made to mapped attributes and collections. + +Only one instrumentation implementation is allowed in a given object +inheritance hierarchy. + +The value of this attribute must be a callable and will be passed a class +object. The callable must return one of: + + - An instance of an :class:`.InstrumentationManager` or subclass + - An object implementing all or some of InstrumentationManager (TODO) + - A dictionary of callables, implementing all or some of the above (TODO) + - An instance of a :class:`.ClassManager` or subclass + +This attribute is consulted by SQLAlchemy instrumentation +resolution, once the :mod:`sqlalchemy.ext.instrumentation` module +has been imported. If custom finders are installed in the global +instrumentation_finders list, they may or may not choose to honor this +attribute. + +""" + + +def find_native_user_instrumentation_hook(cls): + """Find user-specified instrumentation management for a class.""" + return getattr(cls, INSTRUMENTATION_MANAGER, None) + + +instrumentation_finders = [find_native_user_instrumentation_hook] +"""An extensible sequence of callables which return instrumentation +implementations + +When a class is registered, each callable will be passed a class object. +If None is returned, the +next finder in the sequence is consulted. Otherwise the return must be an +instrumentation factory that follows the same guidelines as +sqlalchemy.ext.instrumentation.INSTRUMENTATION_MANAGER. + +By default, the only finder is find_native_user_instrumentation_hook, which +searches for INSTRUMENTATION_MANAGER. If all finders return None, standard +ClassManager instrumentation is used. + +""" + + +class ExtendedInstrumentationRegistry(InstrumentationFactory): + """Extends :class:`.InstrumentationFactory` with additional + bookkeeping, to accommodate multiple types of + class managers. 
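+    Once a custom manager is registered, the registry also swaps the
+    module-level state/dict/manager lookup functions for the more
+    expensive extended versions (see ``_install_instrumented_lookups()``
+    below).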
+ + """ + + _manager_finders = weakref.WeakKeyDictionary() + _state_finders = weakref.WeakKeyDictionary() + _dict_finders = weakref.WeakKeyDictionary() + _extended = False + + def _locate_extended_factory(self, class_): + for finder in instrumentation_finders: + factory = finder(class_) + if factory is not None: + manager = self._extended_class_manager(class_, factory) + return manager, factory + else: + return None, None + + def _check_conflicts(self, class_, factory): + existing_factories = self._collect_management_factories_for( + class_ + ).difference([factory]) + if existing_factories: + raise TypeError( + "multiple instrumentation implementations specified " + "in %s inheritance hierarchy: %r" + % (class_.__name__, list(existing_factories)) + ) + + def _extended_class_manager(self, class_, factory): + manager = factory(class_) + if not isinstance(manager, ClassManager): + manager = _ClassInstrumentationAdapter(class_, manager) + + if factory != ClassManager and not self._extended: + # somebody invoked a custom ClassManager. + # reinstall global "getter" functions with the more + # expensive ones. + self._extended = True + _install_instrumented_lookups() + + self._manager_finders[class_] = manager.manager_getter() + self._state_finders[class_] = manager.state_getter() + self._dict_finders[class_] = manager.dict_getter() + return manager + + def _collect_management_factories_for(self, cls): + """Return a collection of factories in play or specified for a + hierarchy. + + Traverses the entire inheritance graph of a cls and returns a + collection of instrumentation factories for those classes. Factories + are extracted from active ClassManagers, if available, otherwise + instrumentation_finders is consulted. + + """ + hierarchy = util.class_hierarchy(cls) + factories = set() + for member in hierarchy: + manager = self.opt_manager_of_class(member) + if manager is not None: + factories.add(manager.factory) + else: + for finder in instrumentation_finders: + factory = finder(member) + if factory is not None: + break + else: + factory = None + factories.add(factory) + factories.discard(None) + return factories + + def unregister(self, class_): + super().unregister(class_) + if class_ in self._manager_finders: + del self._manager_finders[class_] + del self._state_finders[class_] + del self._dict_finders[class_] + + def opt_manager_of_class(self, cls): + try: + finder = self._manager_finders.get( + cls, _default_opt_manager_getter + ) + except TypeError: + # due to weakref lookup on invalid object + return None + else: + return finder(cls) + + def manager_of_class(self, cls): + try: + finder = self._manager_finders.get(cls, _default_manager_getter) + except TypeError: + # due to weakref lookup on invalid object + raise orm_exc.UnmappedClassError( + cls, f"Can't locate an instrumentation manager for class {cls}" + ) + else: + manager = finder(cls) + if manager is None: + raise orm_exc.UnmappedClassError( + cls, + f"Can't locate an instrumentation manager for class {cls}", + ) + return manager + + def state_of(self, instance): + if instance is None: + raise AttributeError("None has no persistent state.") + return self._state_finders.get( + instance.__class__, _default_state_getter + )(instance) + + def dict_of(self, instance): + if instance is None: + raise AttributeError("None has no persistent state.") + return self._dict_finders.get( + instance.__class__, _default_dict_getter + )(instance) + + +orm_instrumentation._instrumentation_factory = _instrumentation_factory = ( + 
ExtendedInstrumentationRegistry() +) +orm_instrumentation.instrumentation_finders = instrumentation_finders + + +class InstrumentationManager: + """User-defined class instrumentation extension. + + :class:`.InstrumentationManager` can be subclassed in order + to change + how class instrumentation proceeds. This class exists for + the purposes of integration with other object management + frameworks which would like to entirely modify the + instrumentation methodology of the ORM, and is not intended + for regular usage. For interception of class instrumentation + events, see :class:`.InstrumentationEvents`. + + The API for this class should be considered as semi-stable, + and may change slightly with new releases. + + """ + + # r4361 added a mandatory (cls) constructor to this interface. + # given that, perhaps class_ should be dropped from all of these + # signatures. + + def __init__(self, class_): + pass + + def manage(self, class_, manager): + setattr(class_, "_default_class_manager", manager) + + def unregister(self, class_, manager): + delattr(class_, "_default_class_manager") + + def manager_getter(self, class_): + def get(cls): + return cls._default_class_manager + + return get + + def instrument_attribute(self, class_, key, inst): + pass + + def post_configure_attribute(self, class_, key, inst): + pass + + def install_descriptor(self, class_, key, inst): + setattr(class_, key, inst) + + def uninstall_descriptor(self, class_, key): + delattr(class_, key) + + def install_member(self, class_, key, implementation): + setattr(class_, key, implementation) + + def uninstall_member(self, class_, key): + delattr(class_, key) + + def instrument_collection_class(self, class_, key, collection_class): + return collections.prepare_instrumentation(collection_class) + + def get_instance_dict(self, class_, instance): + return instance.__dict__ + + def initialize_instance_dict(self, class_, instance): + pass + + def install_state(self, class_, instance, state): + setattr(instance, "_default_state", state) + + def remove_state(self, class_, instance): + delattr(instance, "_default_state") + + def state_getter(self, class_): + return lambda instance: getattr(instance, "_default_state") + + def dict_getter(self, class_): + return lambda inst: self.get_instance_dict(class_, inst) + + +class _ClassInstrumentationAdapter(ClassManager): + """Adapts a user-defined InstrumentationManager to a ClassManager.""" + + def __init__(self, class_, override): + self._adapted = override + self._get_state = self._adapted.state_getter(class_) + self._get_dict = self._adapted.dict_getter(class_) + + ClassManager.__init__(self, class_) + + def manage(self): + self._adapted.manage(self.class_, self) + + def unregister(self): + self._adapted.unregister(self.class_, self) + + def manager_getter(self): + return self._adapted.manager_getter(self.class_) + + def instrument_attribute(self, key, inst, propagated=False): + ClassManager.instrument_attribute(self, key, inst, propagated) + if not propagated: + self._adapted.instrument_attribute(self.class_, key, inst) + + def post_configure_attribute(self, key): + super().post_configure_attribute(key) + self._adapted.post_configure_attribute(self.class_, key, self[key]) + + def install_descriptor(self, key, inst): + self._adapted.install_descriptor(self.class_, key, inst) + + def uninstall_descriptor(self, key): + self._adapted.uninstall_descriptor(self.class_, key) + + def install_member(self, key, implementation): + self._adapted.install_member(self.class_, key, implementation) + + 
def uninstall_member(self, key): + self._adapted.uninstall_member(self.class_, key) + + def instrument_collection_class(self, key, collection_class): + return self._adapted.instrument_collection_class( + self.class_, key, collection_class + ) + + def initialize_collection(self, key, state, factory): + delegate = getattr(self._adapted, "initialize_collection", None) + if delegate: + return delegate(key, state, factory) + else: + return ClassManager.initialize_collection( + self, key, state, factory + ) + + def new_instance(self, state=None): + instance = self.class_.__new__(self.class_) + self.setup_instance(instance, state) + return instance + + def _new_state_if_none(self, instance): + """Install a default InstanceState if none is present. + + A private convenience method used by the __init__ decorator. + """ + if self.has_state(instance): + return False + else: + return self.setup_instance(instance) + + def setup_instance(self, instance, state=None): + self._adapted.initialize_instance_dict(self.class_, instance) + + if state is None: + state = self._state_constructor(instance, self) + + # the given instance is assumed to have no state + self._adapted.install_state(self.class_, instance, state) + return state + + def teardown_instance(self, instance): + self._adapted.remove_state(self.class_, instance) + + def has_state(self, instance): + try: + self._get_state(instance) + except orm_exc.NO_STATE: + return False + else: + return True + + def state_getter(self): + return self._get_state + + def dict_getter(self): + return self._get_dict + + +def _install_instrumented_lookups(): + """Replace global class/object management functions + with ExtendedInstrumentationRegistry implementations, which + allow multiple types of class managers to be present, + at the cost of performance. + + This function is called only by ExtendedInstrumentationRegistry + and unit tests specific to this behavior. + + The _reinstall_default_lookups() function can be called + after this one to re-establish the default functions. 
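+
+    (In practice this is invoked automatically the first time a factory
+    other than the plain ``ClassManager`` is registered; see
+    ``_extended_class_manager()`` above.)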
+ + """ + _install_lookups( + dict( + instance_state=_instrumentation_factory.state_of, + instance_dict=_instrumentation_factory.dict_of, + manager_of_class=_instrumentation_factory.manager_of_class, + opt_manager_of_class=_instrumentation_factory.opt_manager_of_class, + ) + ) + + +def _reinstall_default_lookups(): + """Restore simplified lookups.""" + _install_lookups( + dict( + instance_state=_default_state_getter, + instance_dict=_default_dict_getter, + manager_of_class=_default_manager_getter, + opt_manager_of_class=_default_opt_manager_getter, + ) + ) + _instrumentation_factory._extended = False + + +def _install_lookups(lookups): + global instance_state, instance_dict + global manager_of_class, opt_manager_of_class + instance_state = lookups["instance_state"] + instance_dict = lookups["instance_dict"] + manager_of_class = lookups["manager_of_class"] + opt_manager_of_class = lookups["opt_manager_of_class"] + orm_base.instance_state = attributes.instance_state = ( + orm_instrumentation.instance_state + ) = instance_state + orm_base.instance_dict = attributes.instance_dict = ( + orm_instrumentation.instance_dict + ) = instance_dict + orm_base.manager_of_class = attributes.manager_of_class = ( + orm_instrumentation.manager_of_class + ) = manager_of_class + orm_base.opt_manager_of_class = orm_util.opt_manager_of_class = ( + attributes.opt_manager_of_class + ) = orm_instrumentation.opt_manager_of_class = opt_manager_of_class diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mutable.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mutable.py new file mode 100644 index 00000000..7da5075a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mutable.py @@ -0,0 +1,1073 @@ +# ext/mutable.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +r"""Provide support for tracking of in-place changes to scalar values, +which are propagated into ORM change events on owning parent objects. + +.. _mutable_scalars: + +Establishing Mutability on Scalar Column Values +=============================================== + +A typical example of a "mutable" structure is a Python dictionary. +Following the example introduced in :ref:`types_toplevel`, we +begin with a custom type that marshals Python dictionaries into +JSON strings before being persisted:: + + from sqlalchemy.types import TypeDecorator, VARCHAR + import json + + class JSONEncodedDict(TypeDecorator): + "Represents an immutable structure as a json-encoded string." + + impl = VARCHAR + + def process_bind_param(self, value, dialect): + if value is not None: + value = json.dumps(value) + return value + + def process_result_value(self, value, dialect): + if value is not None: + value = json.loads(value) + return value + +The usage of ``json`` is only for the purposes of example. The +:mod:`sqlalchemy.ext.mutable` extension can be used +with any type whose target Python type may be mutable, including +:class:`.PickleType`, :class:`_postgresql.ARRAY`, etc. + +When using the :mod:`sqlalchemy.ext.mutable` extension, the value itself +tracks all parents which reference it. 
Below, we illustrate a simple
+version of the :class:`.MutableDict` dictionary object, which applies
+the :class:`.Mutable` mixin to a plain Python dictionary::
+
+    from sqlalchemy.ext.mutable import Mutable
+
+    class MutableDict(Mutable, dict):
+        @classmethod
+        def coerce(cls, key, value):
+            "Convert plain dictionaries to MutableDict."
+
+            if not isinstance(value, MutableDict):
+                if isinstance(value, dict):
+                    return MutableDict(value)
+
+                # this call will raise ValueError
+                return Mutable.coerce(key, value)
+            else:
+                return value
+
+        def __setitem__(self, key, value):
+            "Detect dictionary set events and emit change events."
+
+            dict.__setitem__(self, key, value)
+            self.changed()
+
+        def __delitem__(self, key):
+            "Detect dictionary del events and emit change events."
+
+            dict.__delitem__(self, key)
+            self.changed()
+
+The above dictionary class takes the approach of subclassing the Python
+built-in ``dict`` to produce a subclass
+which routes all mutation events through ``__setitem__``.  There are
+variants on this approach, such as subclassing ``UserDict.UserDict`` or
+``collections.MutableMapping``; the part that's important to this example is
+that the :meth:`.Mutable.changed` method is called whenever an in-place
+change to the data structure takes place.
+
+We also redefine the :meth:`.Mutable.coerce` method which will be used to
+convert any values that are not instances of ``MutableDict``, such
+as the plain dictionaries returned by the ``json`` module, into the
+appropriate type.  Defining this method is optional; we could just as well
+have created our ``JSONEncodedDict`` such that it always returns an instance
+of ``MutableDict``, and additionally ensured that all calling code
+uses ``MutableDict`` explicitly.  When :meth:`.Mutable.coerce` is not
+overridden, any values applied to a parent object which are not instances
+of the mutable type will raise a ``ValueError``.
+
+Our new ``MutableDict`` type offers a class method
+:meth:`~.Mutable.as_mutable` which we can use within column metadata
+to associate with types.  This method grabs the given type object or
+class and associates a listener that will detect all future mappings
+of this type, applying event listening instrumentation to the mapped
+attribute.  For example, with classical table metadata::
+
+    from sqlalchemy import Table, Column, Integer
+
+    my_data = Table('my_data', metadata,
+        Column('id', Integer, primary_key=True),
+        Column('data', MutableDict.as_mutable(JSONEncodedDict))
+    )
+
+Above, :meth:`~.Mutable.as_mutable` returns an instance of ``JSONEncodedDict``
+(if the type object was not an instance already), which will intercept any
+attributes which are mapped against this type.  Below we establish a simple
+mapping against the ``my_data`` table::
+
+    from sqlalchemy.orm import DeclarativeBase
+    from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm import mapped_column
+
+    class Base(DeclarativeBase):
+        pass
+
+    class MyDataClass(Base):
+        __tablename__ = 'my_data'
+        id: Mapped[int] = mapped_column(primary_key=True)
+        data: Mapped[dict[str, str]] = mapped_column(MutableDict.as_mutable(JSONEncodedDict))
+
+The ``MyDataClass.data`` member will now be notified of in place changes
+to its value.
+
+Any in-place changes to the ``MyDataClass.data`` member
+will flag the attribute as "dirty" on the parent object::
+
+    >>> from sqlalchemy.orm import Session
+
+    >>> sess = Session(some_engine)
+    >>> m1 = MyDataClass(data={'value1':'foo'})
+    >>> sess.add(m1)
+    >>> sess.commit()
+
+    >>> m1.data['value1'] = 'bar'
+    >>> assert m1 in sess.dirty
+
+The ``MutableDict`` can be associated with all future instances
+of ``JSONEncodedDict`` in one step, using
+:meth:`~.Mutable.associate_with`.  This is similar to
+:meth:`~.Mutable.as_mutable` except it will intercept all occurrences
+of ``JSONEncodedDict`` in all mappings unconditionally, without
+the need to declare it individually::
+
+    from sqlalchemy.orm import DeclarativeBase
+    from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm import mapped_column
+
+    MutableDict.associate_with(JSONEncodedDict)
+
+    class Base(DeclarativeBase):
+        pass
+
+    class MyDataClass(Base):
+        __tablename__ = 'my_data'
+        id: Mapped[int] = mapped_column(primary_key=True)
+        data: Mapped[dict[str, str]] = mapped_column(JSONEncodedDict)
+
+
+Supporting Pickling
+--------------------
+
+The :mod:`sqlalchemy.ext.mutable` extension relies upon the
+placement of a ``weakref.WeakKeyDictionary`` upon the value object, which
+stores a mapping of parent mapped objects keyed to the attribute name under
+which they are associated with this value. ``WeakKeyDictionary`` objects are
+not picklable, due to the fact that they contain weakrefs and function
+callbacks.  In our case, this is a good thing, since if this dictionary were
+picklable, it could lead to an excessively large pickle size for our value
+objects that are pickled by themselves outside of the context of the parent.
+The developer responsibility here is only to provide a ``__getstate__`` method
+that excludes the :meth:`~MutableBase._parents` collection from the pickle
+stream::
+
+    class MyMutableType(Mutable):
+        def __getstate__(self):
+            d = self.__dict__.copy()
+            d.pop('_parents', None)
+            return d
+
+With our dictionary example, we need to return the contents of the dict itself
+(and also restore them on __setstate__)::
+
+    class MutableDict(Mutable, dict):
+        # ....
+
+        def __getstate__(self):
+            return dict(self)
+
+        def __setstate__(self, state):
+            self.update(state)
+
+In the case that our mutable value object is pickled as it is attached to one
+or more parent objects that are also part of the pickle, the :class:`.Mutable`
+mixin will re-establish the :attr:`.Mutable._parents` collection on each value
+object as the owning parents themselves are unpickled.
+
+Receiving Events
+----------------
+
+The :meth:`.AttributeEvents.modified` event handler may be used to receive
+an event when a mutable scalar emits a change event.  This event handler
+is called when the :func:`.attributes.flag_modified` function is called
+from within the mutable extension::
+
+    from sqlalchemy.orm import DeclarativeBase
+    from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm import mapped_column
+    from sqlalchemy import event
+
+    class Base(DeclarativeBase):
+        pass
+
+    class MyDataClass(Base):
+        __tablename__ = 'my_data'
+        id: Mapped[int] = mapped_column(primary_key=True)
+        data: Mapped[dict[str, str]] = mapped_column(MutableDict.as_mutable(JSONEncodedDict))
+
+    @event.listens_for(MyDataClass.data, "modified")
+    def modified_json(instance, initiator):
+        print("json value modified:", instance.data)
+
+.. _mutable_composites:
+
+Establishing Mutability on Composites
+=====================================
+
+Composites are a special ORM feature which allows a single scalar attribute to
+be assigned an object value which represents information "composed" from one
+or more columns from the underlying mapped table.  The usual example is that of
+a geometric "point", and is introduced in :ref:`mapper_composite`.
+
+As is the case with :class:`.Mutable`, the user-defined composite class
+subclasses :class:`.MutableComposite` as a mixin, and detects and delivers
+change events to its parents via the :meth:`.MutableComposite.changed` method.
+In the case of a composite class, the detection is usually handled via the
+special Python method ``__setattr__()``.  In the example below, we expand upon the ``Point``
+class introduced in :ref:`mapper_composite` to include
+:class:`.MutableComposite` in its bases and to route attribute set events via
+``__setattr__`` to the :meth:`.MutableComposite.changed` method::
+
+    import dataclasses
+    from sqlalchemy.ext.mutable import MutableComposite
+
+    @dataclasses.dataclass
+    class Point(MutableComposite):
+        x: int
+        y: int
+
+        def __setattr__(self, key, value):
+            "Intercept set events"
+
+            # set the attribute
+            object.__setattr__(self, key, value)
+
+            # alert all parents to the change
+            self.changed()
+
+
+The :class:`.MutableComposite` class makes use of class mapping events to
+automatically establish listeners for any usage of :func:`_orm.composite` that
+specifies our ``Point`` type.  Below, when ``Point`` is mapped to the ``Vertex``
+class, listeners are established which will route change events from ``Point``
+objects to each of the ``Vertex.start`` and ``Vertex.end`` attributes::
+
+    from sqlalchemy.orm import DeclarativeBase, Mapped
+    from sqlalchemy.orm import composite, mapped_column
+
+    class Base(DeclarativeBase):
+        pass
+
+
+    class Vertex(Base):
+        __tablename__ = "vertices"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+
+        start: Mapped[Point] = composite(mapped_column("x1"), mapped_column("y1"))
+        end: Mapped[Point] = composite(mapped_column("x2"), mapped_column("y2"))
+
+        def __repr__(self):
+            return f"Vertex(start={self.start}, end={self.end})"
+
+Any in-place changes to the ``Vertex.start`` or ``Vertex.end`` members
+will flag the attribute as "dirty" on the parent object:
+
+.. sourcecode:: python+sql
+
+    >>> from sqlalchemy.orm import Session
+    >>> sess = Session(engine)
+    >>> v1 = Vertex(start=Point(3, 4), end=Point(12, 15))
+    >>> sess.add(v1)
+    {sql}>>> sess.flush()
+    BEGIN (implicit)
+    INSERT INTO vertices (x1, y1, x2, y2) VALUES (?, ?, ?, ?)
+    [...] (3, 4, 12, 15)
+
+    {stop}>>> v1.end.x = 8
+    >>> assert v1 in sess.dirty
+    {sql}>>> sess.commit()
+    UPDATE vertices SET x2=? WHERE vertices.id = ?
+    [...] (8, 1)
+    COMMIT
+
+Coercing Mutable Composites
+---------------------------
+
+The :meth:`.MutableBase.coerce` method is also supported on composite types.
+In the case of :class:`.MutableComposite`, the :meth:`.MutableBase.coerce`
+method is only called for attribute set operations, not load operations.
+Overriding the :meth:`.MutableBase.coerce` method is essentially equivalent
+to using a :func:`.validates` validation routine for all attributes which
+make use of the custom composite type::
+
+    @dataclasses.dataclass
+    class Point(MutableComposite):
+        # other Point methods
+        # ...
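+        # NOTE: coerce() is looked up and invoked as a classmethod
+        # (see MutableBase.coerce), so the decorator below is required
+        # for this override to be called correctly.
+        @classmethod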
+ + def coerce(cls, key, value): + if isinstance(value, tuple): + value = Point(*value) + elif not isinstance(value, Point): + raise ValueError("tuple or Point expected") + return value + +Supporting Pickling +-------------------- + +As is the case with :class:`.Mutable`, the :class:`.MutableComposite` helper +class uses a ``weakref.WeakKeyDictionary`` available via the +:meth:`MutableBase._parents` attribute which isn't picklable. If we need to +pickle instances of ``Point`` or its owning class ``Vertex``, we at least need +to define a ``__getstate__`` that doesn't include the ``_parents`` dictionary. +Below we define both a ``__getstate__`` and a ``__setstate__`` that package up +the minimal form of our ``Point`` class:: + + @dataclasses.dataclass + class Point(MutableComposite): + # ... + + def __getstate__(self): + return self.x, self.y + + def __setstate__(self, state): + self.x, self.y = state + +As with :class:`.Mutable`, the :class:`.MutableComposite` augments the +pickling process of the parent's object-relational state so that the +:meth:`MutableBase._parents` collection is restored to all ``Point`` objects. + +""" # noqa: E501 + +from __future__ import annotations + +from collections import defaultdict +from typing import AbstractSet +from typing import Any +from typing import Dict +from typing import Iterable +from typing import List +from typing import Optional +from typing import overload +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import weakref +from weakref import WeakKeyDictionary + +from .. import event +from .. import inspect +from .. import types +from .. import util +from ..orm import Mapper +from ..orm._typing import _ExternalEntityType +from ..orm._typing import _O +from ..orm._typing import _T +from ..orm.attributes import AttributeEventToken +from ..orm.attributes import flag_modified +from ..orm.attributes import InstrumentedAttribute +from ..orm.attributes import QueryableAttribute +from ..orm.context import QueryContext +from ..orm.decl_api import DeclarativeAttributeIntercept +from ..orm.state import InstanceState +from ..orm.unitofwork import UOWTransaction +from ..sql.base import SchemaEventTarget +from ..sql.schema import Column +from ..sql.type_api import TypeEngine +from ..util import memoized_property +from ..util.typing import SupportsIndex +from ..util.typing import TypeGuard + +_KT = TypeVar("_KT") # Key type. +_VT = TypeVar("_VT") # Value type. + + +class MutableBase: + """Common base class to :class:`.Mutable` + and :class:`.MutableComposite`. + + """ + + @memoized_property + def _parents(self) -> WeakKeyDictionary[Any, Any]: + """Dictionary of parent object's :class:`.InstanceState`->attribute + name on the parent. + + This attribute is a so-called "memoized" property. It initializes + itself with a new ``weakref.WeakKeyDictionary`` the first time + it is accessed, returning the same object upon subsequent access. + + .. versionchanged:: 1.4 the :class:`.InstanceState` is now used + as the key in the weak dictionary rather than the instance + itself. + + """ + + return weakref.WeakKeyDictionary() + + @classmethod + def coerce(cls, key: str, value: Any) -> Optional[Any]: + """Given a value, coerce it into the target type. + + Can be overridden by custom subclasses to coerce incoming + data into a particular type. + + By default, raises ``ValueError``. 
+ + This method is called in different scenarios depending on if + the parent class is of type :class:`.Mutable` or of type + :class:`.MutableComposite`. In the case of the former, it is called + for both attribute-set operations as well as during ORM loading + operations. For the latter, it is only called during attribute-set + operations; the mechanics of the :func:`.composite` construct + handle coercion during load operations. + + + :param key: string name of the ORM-mapped attribute being set. + :param value: the incoming value. + :return: the method should return the coerced value, or raise + ``ValueError`` if the coercion cannot be completed. + + """ + if value is None: + return None + msg = "Attribute '%s' does not accept objects of type %s" + raise ValueError(msg % (key, type(value))) + + @classmethod + def _get_listen_keys(cls, attribute: QueryableAttribute[Any]) -> Set[str]: + """Given a descriptor attribute, return a ``set()`` of the attribute + keys which indicate a change in the state of this attribute. + + This is normally just ``set([attribute.key])``, but can be overridden + to provide for additional keys. E.g. a :class:`.MutableComposite` + augments this set with the attribute keys associated with the columns + that comprise the composite value. + + This collection is consulted in the case of intercepting the + :meth:`.InstanceEvents.refresh` and + :meth:`.InstanceEvents.refresh_flush` events, which pass along a list + of attribute names that have been refreshed; the list is compared + against this set to determine if action needs to be taken. + + """ + return {attribute.key} + + @classmethod + def _listen_on_attribute( + cls, + attribute: QueryableAttribute[Any], + coerce: bool, + parent_cls: _ExternalEntityType[Any], + ) -> None: + """Establish this type as a mutation listener for the given + mapped descriptor. + + """ + key = attribute.key + if parent_cls is not attribute.class_: + return + + # rely on "propagate" here + parent_cls = attribute.class_ + + listen_keys = cls._get_listen_keys(attribute) + + def load(state: InstanceState[_O], *args: Any) -> None: + """Listen for objects loaded or refreshed. + + Wrap the target data member's value with + ``Mutable``. + + """ + val = state.dict.get(key, None) + if val is not None: + if coerce: + val = cls.coerce(key, val) + state.dict[key] = val + val._parents[state] = key + + def load_attrs( + state: InstanceState[_O], + ctx: Union[object, QueryContext, UOWTransaction], + attrs: Iterable[Any], + ) -> None: + if not attrs or listen_keys.intersection(attrs): + load(state) + + def set_( + target: InstanceState[_O], + value: MutableBase | None, + oldvalue: MutableBase | None, + initiator: AttributeEventToken, + ) -> MutableBase | None: + """Listen for set/replace events on the target + data member. + + Establish a weak reference to the parent object + on the incoming value, remove it for the one + outgoing. 
+ + """ + if value is oldvalue: + return value + + if not isinstance(value, cls): + value = cls.coerce(key, value) + if value is not None: + value._parents[target] = key + if isinstance(oldvalue, cls): + oldvalue._parents.pop(inspect(target), None) + return value + + def pickle( + state: InstanceState[_O], state_dict: Dict[str, Any] + ) -> None: + val = state.dict.get(key, None) + if val is not None: + if "ext.mutable.values" not in state_dict: + state_dict["ext.mutable.values"] = defaultdict(list) + state_dict["ext.mutable.values"][key].append(val) + + def unpickle( + state: InstanceState[_O], state_dict: Dict[str, Any] + ) -> None: + if "ext.mutable.values" in state_dict: + collection = state_dict["ext.mutable.values"] + if isinstance(collection, list): + # legacy format + for val in collection: + val._parents[state] = key + else: + for val in state_dict["ext.mutable.values"][key]: + val._parents[state] = key + + event.listen( + parent_cls, + "_sa_event_merge_wo_load", + load, + raw=True, + propagate=True, + ) + + event.listen(parent_cls, "load", load, raw=True, propagate=True) + event.listen( + parent_cls, "refresh", load_attrs, raw=True, propagate=True + ) + event.listen( + parent_cls, "refresh_flush", load_attrs, raw=True, propagate=True + ) + event.listen( + attribute, "set", set_, raw=True, retval=True, propagate=True + ) + event.listen(parent_cls, "pickle", pickle, raw=True, propagate=True) + event.listen( + parent_cls, "unpickle", unpickle, raw=True, propagate=True + ) + + +class Mutable(MutableBase): + """Mixin that defines transparent propagation of change + events to a parent object. + + See the example in :ref:`mutable_scalars` for usage information. + + """ + + def changed(self) -> None: + """Subclasses should call this method whenever change events occur.""" + + for parent, key in self._parents.items(): + flag_modified(parent.obj(), key) + + @classmethod + def associate_with_attribute( + cls, attribute: InstrumentedAttribute[_O] + ) -> None: + """Establish this type as a mutation listener for the given + mapped descriptor. + + """ + cls._listen_on_attribute(attribute, True, attribute.class_) + + @classmethod + def associate_with(cls, sqltype: type) -> None: + """Associate this wrapper with all future mapped columns + of the given type. + + This is a convenience method that calls + ``associate_with_attribute`` automatically. + + .. warning:: + + The listeners established by this method are *global* + to all mappers, and are *not* garbage collected. Only use + :meth:`.associate_with` for types that are permanent to an + application, not with ad-hoc types else this will cause unbounded + growth in memory usage. + + """ + + def listen_for_type(mapper: Mapper[_O], class_: type) -> None: + if mapper.non_primary: + return + for prop in mapper.column_attrs: + if isinstance(prop.columns[0].type, sqltype): + cls.associate_with_attribute(getattr(class_, prop.key)) + + event.listen(Mapper, "mapper_configured", listen_for_type) + + @classmethod + def as_mutable(cls, sqltype: TypeEngine[_T]) -> TypeEngine[_T]: + """Associate a SQL type with this mutable Python type. + + This establishes listeners that will detect ORM mappings against + the given type, adding mutation event trackers to those mappings. 
+ + The type is returned, unconditionally as an instance, so that + :meth:`.as_mutable` can be used inline:: + + Table('mytable', metadata, + Column('id', Integer, primary_key=True), + Column('data', MyMutableType.as_mutable(PickleType)) + ) + + Note that the returned type is always an instance, even if a class + is given, and that only columns which are declared specifically with + that type instance receive additional instrumentation. + + To associate a particular mutable type with all occurrences of a + particular type, use the :meth:`.Mutable.associate_with` classmethod + of the particular :class:`.Mutable` subclass to establish a global + association. + + .. warning:: + + The listeners established by this method are *global* + to all mappers, and are *not* garbage collected. Only use + :meth:`.as_mutable` for types that are permanent to an application, + not with ad-hoc types else this will cause unbounded growth + in memory usage. + + """ + sqltype = types.to_instance(sqltype) + + # a SchemaType will be copied when the Column is copied, + # and we'll lose our ability to link that type back to the original. + # so track our original type w/ columns + if isinstance(sqltype, SchemaEventTarget): + + @event.listens_for(sqltype, "before_parent_attach") + def _add_column_memo( + sqltyp: TypeEngine[Any], + parent: Column[_T], + ) -> None: + parent.info["_ext_mutable_orig_type"] = sqltyp + + schema_event_check = True + else: + schema_event_check = False + + def listen_for_type( + mapper: Mapper[_T], + class_: Union[DeclarativeAttributeIntercept, type], + ) -> None: + if mapper.non_primary: + return + _APPLIED_KEY = "_ext_mutable_listener_applied" + + for prop in mapper.column_attrs: + if ( + # all Mutable types refer to a Column that's mapped, + # since this is the only kind of Core target the ORM can + # "mutate" + isinstance(prop.expression, Column) + and ( + ( + schema_event_check + and prop.expression.info.get( + "_ext_mutable_orig_type" + ) + is sqltype + ) + or prop.expression.type is sqltype + ) + ): + if not prop.expression.info.get(_APPLIED_KEY, False): + prop.expression.info[_APPLIED_KEY] = True + cls.associate_with_attribute(getattr(class_, prop.key)) + + event.listen(Mapper, "mapper_configured", listen_for_type) + + return sqltype + + +class MutableComposite(MutableBase): + """Mixin that defines transparent propagation of change + events on a SQLAlchemy "composite" object to its + owning parent or parents. + + See the example in :ref:`mutable_composites` for usage information. 
+ + """ + + @classmethod + def _get_listen_keys(cls, attribute: QueryableAttribute[_O]) -> Set[str]: + return {attribute.key}.union(attribute.property._attribute_keys) + + def changed(self) -> None: + """Subclasses should call this method whenever change events occur.""" + + for parent, key in self._parents.items(): + prop = parent.mapper.get_property(key) + for value, attr_name in zip( + prop._composite_values_from_instance(self), + prop._attribute_keys, + ): + setattr(parent.obj(), attr_name, value) + + +def _setup_composite_listener() -> None: + def _listen_for_type(mapper: Mapper[_T], class_: type) -> None: + for prop in mapper.iterate_properties: + if ( + hasattr(prop, "composite_class") + and isinstance(prop.composite_class, type) + and issubclass(prop.composite_class, MutableComposite) + ): + prop.composite_class._listen_on_attribute( + getattr(class_, prop.key), False, class_ + ) + + if not event.contains(Mapper, "mapper_configured", _listen_for_type): + event.listen(Mapper, "mapper_configured", _listen_for_type) + + +_setup_composite_listener() + + +class MutableDict(Mutable, Dict[_KT, _VT]): + """A dictionary type that implements :class:`.Mutable`. + + The :class:`.MutableDict` object implements a dictionary that will + emit change events to the underlying mapping when the contents of + the dictionary are altered, including when values are added or removed. + + Note that :class:`.MutableDict` does **not** apply mutable tracking to the + *values themselves* inside the dictionary. Therefore it is not a sufficient + solution for the use case of tracking deep changes to a *recursive* + dictionary structure, such as a JSON structure. To support this use case, + build a subclass of :class:`.MutableDict` that provides appropriate + coercion to the values placed in the dictionary so that they too are + "mutable", and emit events up to their parent structure. + + .. seealso:: + + :class:`.MutableList` + + :class:`.MutableSet` + + """ + + def __setitem__(self, key: _KT, value: _VT) -> None: + """Detect dictionary set events and emit change events.""" + super().__setitem__(key, value) + self.changed() + + if TYPE_CHECKING: + # from https://github.com/python/mypy/issues/14858 + + @overload + def setdefault( + self: MutableDict[_KT, Optional[_T]], key: _KT, value: None = None + ) -> Optional[_T]: ... + + @overload + def setdefault(self, key: _KT, value: _VT) -> _VT: ... + + def setdefault(self, key: _KT, value: object = None) -> object: ... + + else: + + def setdefault(self, *arg): # noqa: F811 + result = super().setdefault(*arg) + self.changed() + return result + + def __delitem__(self, key: _KT) -> None: + """Detect dictionary del events and emit change events.""" + super().__delitem__(key) + self.changed() + + def update(self, *a: Any, **kw: _VT) -> None: + super().update(*a, **kw) + self.changed() + + if TYPE_CHECKING: + + @overload + def pop(self, __key: _KT) -> _VT: ... + + @overload + def pop(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + + def pop( + self, __key: _KT, __default: _VT | _T | None = None + ) -> _VT | _T: ... 
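+        # (the overloads above are typing-only; the runtime
+        # implementation follows in the ``else`` branch below)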
+
+    else:
+
+        def pop(self, *arg):  # noqa: F811
+            result = super().pop(*arg)
+            self.changed()
+            return result
+
+    def popitem(self) -> Tuple[_KT, _VT]:
+        result = super().popitem()
+        self.changed()
+        return result
+
+    def clear(self) -> None:
+        super().clear()
+        self.changed()
+
+    @classmethod
+    def coerce(cls, key: str, value: Any) -> MutableDict[_KT, _VT] | None:
+        """Convert plain dictionary to instance of this class."""
+        if not isinstance(value, cls):
+            if isinstance(value, dict):
+                return cls(value)
+            return Mutable.coerce(key, value)
+        else:
+            return value
+
+    def __getstate__(self) -> Dict[_KT, _VT]:
+        return dict(self)
+
+    def __setstate__(
+        self, state: Union[Dict[str, int], Dict[str, str]]
+    ) -> None:
+        self.update(state)
+
+
+class MutableList(Mutable, List[_T]):
+    """A list type that implements :class:`.Mutable`.
+
+    The :class:`.MutableList` object implements a list that will
+    emit change events to the underlying mapping when the contents of
+    the list are altered, including when values are added or removed.
+
+    Note that :class:`.MutableList` does **not** apply mutable tracking to the
+    *values themselves* inside the list. Therefore it is not a sufficient
+    solution for the use case of tracking deep changes to a *recursive*
+    mutable structure, such as a JSON structure.  To support this use case,
+    build a subclass of :class:`.MutableList` that provides appropriate
+    coercion to the values placed in the list so that they too are
+    "mutable", and emit events up to their parent structure.
+
+    .. seealso::
+
+        :class:`.MutableDict`
+
+        :class:`.MutableSet`
+
+    """
+
+    def __reduce_ex__(
+        self, proto: SupportsIndex
+    ) -> Tuple[type, Tuple[List[int]]]:
+        return (self.__class__, (list(self),))
+
+    # needed for backwards compatibility with
+    # older pickles
+    def __setstate__(self, state: Iterable[_T]) -> None:
+        self[:] = state
+
+    def is_scalar(self, value: _T | Iterable[_T]) -> TypeGuard[_T]:
+        return not util.is_non_string_iterable(value)
+
+    def is_iterable(self, value: _T | Iterable[_T]) -> TypeGuard[Iterable[_T]]:
+        return util.is_non_string_iterable(value)
+
+    def __setitem__(
+        self, index: SupportsIndex | slice, value: _T | Iterable[_T]
+    ) -> None:
+        """Detect list set events and emit change events."""
+        if isinstance(index, SupportsIndex) and self.is_scalar(value):
+            super().__setitem__(index, value)
+        elif isinstance(index, slice) and self.is_iterable(value):
+            super().__setitem__(index, value)
+        self.changed()
+
+    def __delitem__(self, index: SupportsIndex | slice) -> None:
+        """Detect list del events and emit change events."""
+        super().__delitem__(index)
+        self.changed()
+
+    def pop(self, *arg: SupportsIndex) -> _T:
+        result = super().pop(*arg)
+        self.changed()
+        return result
+
+    def append(self, x: _T) -> None:
+        super().append(x)
+        self.changed()
+
+    def extend(self, x: Iterable[_T]) -> None:
+        super().extend(x)
+        self.changed()
+
+    def __iadd__(self, x: Iterable[_T]) -> MutableList[_T]:  # type: ignore[override,misc]  # noqa: E501
+        self.extend(x)
+        return self
+
+    def insert(self, i: SupportsIndex, x: _T) -> None:
+        super().insert(i, x)
+        self.changed()
+
+    def remove(self, i: _T) -> None:
+        super().remove(i)
+        self.changed()
+
+    def clear(self) -> None:
+        super().clear()
+        self.changed()
+
+    def sort(self, **kw: Any) -> None:
+        super().sort(**kw)
+        self.changed()
+
+    def reverse(self) -> None:
+        super().reverse()
+        self.changed()
+
+    @classmethod
+    def coerce(
+        cls, key: str, value: MutableList[_T] | _T
+    ) -> Optional[MutableList[_T]]:
+        """Convert plain list to instance of this class."""
+        if not isinstance(value, cls):
+            if isinstance(value, list):
+                return cls(value)
+            return Mutable.coerce(key, value)
+        else:
+            return value
+
+
+class MutableSet(Mutable, Set[_T]):
+    """A set type that implements :class:`.Mutable`.
+
+    The :class:`.MutableSet` object implements a set that will
+    emit change events to the underlying mapping when the contents of
+    the set are altered, including when values are added or removed.
+
+    Note that :class:`.MutableSet` does **not** apply mutable tracking to the
+    *values themselves* inside the set. Therefore it is not a sufficient
+    solution for the use case of tracking deep changes to a *recursive*
+    mutable structure.  To support this use case,
+    build a subclass of :class:`.MutableSet` that provides appropriate
+    coercion to the values placed in the set so that they too are
+    "mutable", and emit events up to their parent structure.
+
+    .. seealso::
+
+        :class:`.MutableDict`
+
+        :class:`.MutableList`
+
+
+    """
+
+    def update(self, *arg: Iterable[_T]) -> None:
+        super().update(*arg)
+        self.changed()
+
+    def intersection_update(self, *arg: Iterable[Any]) -> None:
+        super().intersection_update(*arg)
+        self.changed()
+
+    def difference_update(self, *arg: Iterable[Any]) -> None:
+        super().difference_update(*arg)
+        self.changed()
+
+    def symmetric_difference_update(self, *arg: Iterable[_T]) -> None:
+        super().symmetric_difference_update(*arg)
+        self.changed()
+
+    def __ior__(self, other: AbstractSet[_T]) -> MutableSet[_T]:  # type: ignore[override,misc]  # noqa: E501
+        self.update(other)
+        return self
+
+    def __iand__(self, other: AbstractSet[object]) -> MutableSet[_T]:
+        self.intersection_update(other)
+        return self
+
+    def __ixor__(self, other: AbstractSet[_T]) -> MutableSet[_T]:  # type: ignore[override,misc]  # noqa: E501
+        self.symmetric_difference_update(other)
+        return self
+
+    def __isub__(self, other: AbstractSet[object]) -> MutableSet[_T]:  # type: ignore[misc]  # noqa: E501
+        self.difference_update(other)
+        return self
+
+    def add(self, elem: _T) -> None:
+        super().add(elem)
+        self.changed()
+
+    def remove(self, elem: _T) -> None:
+        super().remove(elem)
+        self.changed()
+
+    def discard(self, elem: _T) -> None:
+        super().discard(elem)
+        self.changed()
+
+    def pop(self, *arg: Any) -> _T:
+        result = super().pop(*arg)
+        self.changed()
+        return result
+
+    def clear(self) -> None:
+        super().clear()
+        self.changed()
+
+    @classmethod
+    def coerce(cls, index: str, value: Any) -> Optional[MutableSet[_T]]:
+        """Convert plain set to instance of this class."""
+        if not isinstance(value, cls):
+            if isinstance(value, set):
+                return cls(value)
+            return Mutable.coerce(index, value)
+        else:
+            return value
+
+    def __getstate__(self) -> Set[_T]:
+        return set(self)
+
+    def __setstate__(self, state: Iterable[_T]) -> None:
+        self.update(state)
+
+    def __reduce_ex__(
+        self, proto: SupportsIndex
+    ) -> Tuple[type, Tuple[List[int]]]:
+        return (self.__class__, (list(self),))
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__init__.py
new file mode 100644
index 00000000..de2c02ee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__init__.py
@@ -0,0 +1,6 @@
+# ext/mypy/__init__.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
diff --git
a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..54d2c421 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/apply.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/apply.cpython-312.pyc new file mode 100644 index 00000000..717c7cfd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/apply.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-312.pyc new file mode 100644 index 00000000..5e25c4ef Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/infer.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/infer.cpython-312.pyc new file mode 100644 index 00000000..df7e9172 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/infer.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/names.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/names.cpython-312.pyc new file mode 100644 index 00000000..d9ad2e44 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/names.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/plugin.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/plugin.cpython-312.pyc new file mode 100644 index 00000000..85fb08d2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/plugin.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/util.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/util.cpython-312.pyc new file mode 100644 index 00000000..5352ea48 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/__pycache__/util.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/apply.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/apply.py new file mode 100644 index 00000000..eb901945 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/apply.py @@ -0,0 +1,320 @@ +# ext/mypy/apply.py +# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +from typing import List +from typing import Optional +from typing import Union + +from mypy.nodes import ARG_NAMED_OPT +from mypy.nodes import Argument +from mypy.nodes import AssignmentStmt +from mypy.nodes import CallExpr +from mypy.nodes import ClassDef +from mypy.nodes import MDEF +from mypy.nodes import MemberExpr +from mypy.nodes import NameExpr +from mypy.nodes import RefExpr +from mypy.nodes import StrExpr +from mypy.nodes 
import SymbolTableNode +from mypy.nodes import TempNode +from mypy.nodes import TypeInfo +from mypy.nodes import Var +from mypy.plugin import SemanticAnalyzerPluginInterface +from mypy.plugins.common import add_method_to_class +from mypy.types import AnyType +from mypy.types import get_proper_type +from mypy.types import Instance +from mypy.types import NoneTyp +from mypy.types import ProperType +from mypy.types import TypeOfAny +from mypy.types import UnboundType +from mypy.types import UnionType + +from . import infer +from . import util +from .names import expr_to_mapped_constructor +from .names import NAMED_TYPE_SQLA_MAPPED + + +def apply_mypy_mapped_attr( + cls: ClassDef, + api: SemanticAnalyzerPluginInterface, + item: Union[NameExpr, StrExpr], + attributes: List[util.SQLAlchemyAttribute], +) -> None: + if isinstance(item, NameExpr): + name = item.name + elif isinstance(item, StrExpr): + name = item.value + else: + return None + + for stmt in cls.defs.body: + if ( + isinstance(stmt, AssignmentStmt) + and isinstance(stmt.lvalues[0], NameExpr) + and stmt.lvalues[0].name == name + ): + break + else: + util.fail(api, f"Can't find mapped attribute {name}", cls) + return None + + if stmt.type is None: + util.fail( + api, + "Statement linked from _mypy_mapped_attrs has no " + "typing information", + stmt, + ) + return None + + left_hand_explicit_type = get_proper_type(stmt.type) + assert isinstance( + left_hand_explicit_type, (Instance, UnionType, UnboundType) + ) + + attributes.append( + util.SQLAlchemyAttribute( + name=name, + line=item.line, + column=item.column, + typ=left_hand_explicit_type, + info=cls.info, + ) + ) + + apply_type_to_mapped_statement( + api, stmt, stmt.lvalues[0], left_hand_explicit_type, None + ) + + +def re_apply_declarative_assignments( + cls: ClassDef, + api: SemanticAnalyzerPluginInterface, + attributes: List[util.SQLAlchemyAttribute], +) -> None: + """For multiple class passes, re-apply our left-hand side types as mypy + seems to reset them in place. + + """ + mapped_attr_lookup = {attr.name: attr for attr in attributes} + update_cls_metadata = False + + for stmt in cls.defs.body: + # for a re-apply, all of our statements are AssignmentStmt; + # @declared_attr calls will have been converted and this + # currently seems to be preserved by mypy (but who knows if this + # will change). 
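+        # [editor's note] the condition below selects plain
+        # "attr = <rvalue>" statements whose left side is a NameExpr
+        # naming one of the attributes collected on the first pass and
+        # whose symbol has been resolved to a Var; all other statements
+        # in the class body are left untouched on re-apply.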
+        if (
+            isinstance(stmt, AssignmentStmt)
+            and isinstance(stmt.lvalues[0], NameExpr)
+            and stmt.lvalues[0].name in mapped_attr_lookup
+            and isinstance(stmt.lvalues[0].node, Var)
+        ):
+            left_node = stmt.lvalues[0].node
+
+            python_type_for_type = mapped_attr_lookup[
+                stmt.lvalues[0].name
+            ].type
+
+            left_node_proper_type = get_proper_type(left_node.type)
+
+            # if we have scanned an UnboundType and now there's a more
+            # specific type than UnboundType, call the re-scan so we
+            # can get that set up correctly
+            if (
+                isinstance(python_type_for_type, UnboundType)
+                and not isinstance(left_node_proper_type, UnboundType)
+                and (
+                    isinstance(stmt.rvalue, CallExpr)
+                    and isinstance(stmt.rvalue.callee, MemberExpr)
+                    and isinstance(stmt.rvalue.callee.expr, NameExpr)
+                    and stmt.rvalue.callee.expr.node is not None
+                    and stmt.rvalue.callee.expr.node.fullname
+                    == NAMED_TYPE_SQLA_MAPPED
+                    and stmt.rvalue.callee.name == "_empty_constructor"
+                    and isinstance(stmt.rvalue.args[0], CallExpr)
+                    and isinstance(stmt.rvalue.args[0].callee, RefExpr)
+                )
+            ):
+                new_python_type_for_type = (
+                    infer.infer_type_from_right_hand_nameexpr(
+                        api,
+                        stmt,
+                        left_node,
+                        left_node_proper_type,
+                        stmt.rvalue.args[0].callee,
+                    )
+                )
+
+                if new_python_type_for_type is not None and not isinstance(
+                    new_python_type_for_type, UnboundType
+                ):
+                    python_type_for_type = new_python_type_for_type
+
+                    # update the SQLAlchemyAttribute with the better
+                    # information
+                    mapped_attr_lookup[stmt.lvalues[0].name].type = (
+                        python_type_for_type
+                    )
+
+                    update_cls_metadata = True
+
+            if (
+                not isinstance(left_node.type, Instance)
+                or left_node.type.type.fullname != NAMED_TYPE_SQLA_MAPPED
+            ):
+                assert python_type_for_type is not None
+                left_node.type = api.named_type(
+                    NAMED_TYPE_SQLA_MAPPED, [python_type_for_type]
+                )
+
+    if update_cls_metadata:
+        util.set_mapped_attributes(cls.info, attributes)
+
+
+def apply_type_to_mapped_statement(
+    api: SemanticAnalyzerPluginInterface,
+    stmt: AssignmentStmt,
+    lvalue: NameExpr,
+    left_hand_explicit_type: Optional[ProperType],
+    python_type_for_type: Optional[ProperType],
+) -> None:
+    """Apply the Mapped[] annotation and right hand object to a
+    declarative assignment statement.
+
+    This converts a Python declarative class statement such as::
+
+        class User(Base):
+            # ...
+
+            attrname = Column(Integer)
+
+    To one that describes the final Python behavior to Mypy::
+
+        class User(Base):
+            # ...
+
+            attrname : Mapped[Optional[int]] = <meaningless temp node>
+
+    """
+    left_node = lvalue.node
+    assert isinstance(left_node, Var)
+
+    # to be completely honest I have no idea what the difference between
+    # left_node.type and stmt.type is, what it means if these are different
+    # vs. the same, or why, in order to get tests to pass, I have to assign
+    # to stmt.type for the second case and not the first; this was arrived
+    # at by trying combinations until the tests passed.
+
+    if left_hand_explicit_type is not None:
+        lvalue.is_inferred_def = False
+        left_node.type = api.named_type(
+            NAMED_TYPE_SQLA_MAPPED, [left_hand_explicit_type]
+        )
+    else:
+        lvalue.is_inferred_def = False
+        left_node.type = api.named_type(
+            NAMED_TYPE_SQLA_MAPPED,
+            (
+                [AnyType(TypeOfAny.special_form)]
+                if python_type_for_type is None
+                else [python_type_for_type]
+            ),
+        )
+
+    # so to have it skip the right side totally, we can do this:
+    # stmt.rvalue = TempNode(AnyType(TypeOfAny.special_form))
+
+    # however, if we instead manufacture a new node that uses the old
+    # one, then we can still get type checking for the call itself,
+    # e.g.
the Column, relationship() call, etc. + + # rewrite the node as: + # : Mapped[] = + # _sa_Mapped._empty_constructor() + # the original right-hand side is maintained so it gets type checked + # internally + stmt.rvalue = expr_to_mapped_constructor(stmt.rvalue) + + if stmt.type is not None and python_type_for_type is not None: + stmt.type = python_type_for_type + + +def add_additional_orm_attributes( + cls: ClassDef, + api: SemanticAnalyzerPluginInterface, + attributes: List[util.SQLAlchemyAttribute], +) -> None: + """Apply __init__, __table__ and other attributes to the mapped class.""" + + info = util.info_for_cls(cls, api) + + if info is None: + return + + is_base = util.get_is_base(info) + + if "__init__" not in info.names and not is_base: + mapped_attr_names = {attr.name: attr.type for attr in attributes} + + for base in info.mro[1:-1]: + if "sqlalchemy" not in info.metadata: + continue + + base_cls_attributes = util.get_mapped_attributes(base, api) + if base_cls_attributes is None: + continue + + for attr in base_cls_attributes: + mapped_attr_names.setdefault(attr.name, attr.type) + + arguments = [] + for name, typ in mapped_attr_names.items(): + if typ is None: + typ = AnyType(TypeOfAny.special_form) + arguments.append( + Argument( + variable=Var(name, typ), + type_annotation=typ, + initializer=TempNode(typ), + kind=ARG_NAMED_OPT, + ) + ) + + add_method_to_class(api, cls, "__init__", arguments, NoneTyp()) + + if "__table__" not in info.names and util.get_has_table(info): + _apply_placeholder_attr_to_class( + api, cls, "sqlalchemy.sql.schema.Table", "__table__" + ) + if not is_base: + _apply_placeholder_attr_to_class( + api, cls, "sqlalchemy.orm.mapper.Mapper", "__mapper__" + ) + + +def _apply_placeholder_attr_to_class( + api: SemanticAnalyzerPluginInterface, + cls: ClassDef, + qualified_name: str, + attrname: str, +) -> None: + sym = api.lookup_fully_qualified_or_none(qualified_name) + if sym: + assert isinstance(sym.node, TypeInfo) + type_: ProperType = Instance(sym.node, []) + else: + type_ = AnyType(TypeOfAny.special_form) + var = Var(attrname) + var._fullname = cls.fullname + "." 
+ attrname + var.info = cls.info + var.type = type_ + cls.info.names[attrname] = SymbolTableNode(MDEF, var) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/decl_class.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/decl_class.py new file mode 100644 index 00000000..3d578b34 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/decl_class.py @@ -0,0 +1,515 @@ +# ext/mypy/decl_class.py +# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +from typing import List +from typing import Optional +from typing import Union + +from mypy.nodes import AssignmentStmt +from mypy.nodes import CallExpr +from mypy.nodes import ClassDef +from mypy.nodes import Decorator +from mypy.nodes import LambdaExpr +from mypy.nodes import ListExpr +from mypy.nodes import MemberExpr +from mypy.nodes import NameExpr +from mypy.nodes import PlaceholderNode +from mypy.nodes import RefExpr +from mypy.nodes import StrExpr +from mypy.nodes import SymbolNode +from mypy.nodes import SymbolTableNode +from mypy.nodes import TempNode +from mypy.nodes import TypeInfo +from mypy.nodes import Var +from mypy.plugin import SemanticAnalyzerPluginInterface +from mypy.types import AnyType +from mypy.types import CallableType +from mypy.types import get_proper_type +from mypy.types import Instance +from mypy.types import NoneType +from mypy.types import ProperType +from mypy.types import Type +from mypy.types import TypeOfAny +from mypy.types import UnboundType +from mypy.types import UnionType + +from . import apply +from . import infer +from . import names +from . import util + + +def scan_declarative_assignments_and_apply_types( + cls: ClassDef, + api: SemanticAnalyzerPluginInterface, + is_mixin_scan: bool = False, +) -> Optional[List[util.SQLAlchemyAttribute]]: + info = util.info_for_cls(cls, api) + + if info is None: + # this can occur during cached passes + return None + elif cls.fullname.startswith("builtins"): + return None + + mapped_attributes: Optional[List[util.SQLAlchemyAttribute]] = ( + util.get_mapped_attributes(info, api) + ) + + # used by assign.add_additional_orm_attributes among others + util.establish_as_sqlalchemy(info) + + if mapped_attributes is not None: + # ensure that a class that's mapped is always picked up by + # its mapped() decorator or declarative metaclass before + # it would be detected as an unmapped mixin class + + if not is_mixin_scan: + # mypy can call us more than once. it then *may* have reset the + # left hand side of everything, but not the right that we removed, + # removing our ability to re-scan. but we have the types + # here, so lets re-apply them, or if we have an UnboundType, + # we can re-scan + + apply.re_apply_declarative_assignments(cls, api, mapped_attributes) + + return mapped_attributes + + mapped_attributes = [] + + if not cls.defs.body: + # when we get a mixin class from another file, the body is + # empty (!) but the names are in the symbol table. so use that. 
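+        # [editor's note] e.g. a hypothetical mixin imported from another
+        # module:
+        #
+        #     class HasId:
+        #         id = Column(Integer, primary_key=True)
+        #
+        # arrives here with an empty cls.defs.body, but "id" is still
+        # visible in info.names, so the symbol-table scan below recovers it.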
+
+        for sym_name, sym in info.names.items():
+            _scan_symbol_table_entry(
+                cls, api, sym_name, sym, mapped_attributes
+            )
+    else:
+        for stmt in util.flatten_typechecking(cls.defs.body):
+            if isinstance(stmt, AssignmentStmt):
+                _scan_declarative_assignment_stmt(
+                    cls, api, stmt, mapped_attributes
+                )
+            elif isinstance(stmt, Decorator):
+                _scan_declarative_decorator_stmt(
+                    cls, api, stmt, mapped_attributes
+                )
+    _scan_for_mapped_bases(cls, api)
+
+    if not is_mixin_scan:
+        apply.add_additional_orm_attributes(cls, api, mapped_attributes)
+
+    util.set_mapped_attributes(info, mapped_attributes)
+
+    return mapped_attributes
+
+
+def _scan_symbol_table_entry(
+    cls: ClassDef,
+    api: SemanticAnalyzerPluginInterface,
+    name: str,
+    value: SymbolTableNode,
+    attributes: List[util.SQLAlchemyAttribute],
+) -> None:
+    """Extract mapping information from a SymbolTableNode that's in the
+    type.names dictionary.
+
+    """
+    value_type = get_proper_type(value.type)
+    if not isinstance(value_type, Instance):
+        return
+
+    left_hand_explicit_type = None
+    type_id = names.type_id_for_named_node(value_type.type)
+    # type_id = names._type_id_for_unbound_type(value.type.type, cls, api)
+
+    err = False
+
+    # TODO: this is nearly the same logic as that of
+    # _scan_declarative_decorator_stmt, likely can be merged
+    if type_id in {
+        names.MAPPED,
+        names.RELATIONSHIP,
+        names.COMPOSITE_PROPERTY,
+        names.MAPPER_PROPERTY,
+        names.SYNONYM_PROPERTY,
+        names.COLUMN_PROPERTY,
+    }:
+        if value_type.args:
+            left_hand_explicit_type = get_proper_type(value_type.args[0])
+        else:
+            err = True
+    elif type_id is names.COLUMN:
+        if not value_type.args:
+            err = True
+        else:
+            typeengine_arg: Union[ProperType, TypeInfo] = get_proper_type(
+                value_type.args[0]
+            )
+            if isinstance(typeengine_arg, Instance):
+                typeengine_arg = typeengine_arg.type
+
+            if isinstance(typeengine_arg, (UnboundType, TypeInfo)):
+                sym = api.lookup_qualified(typeengine_arg.name, typeengine_arg)
+                if sym is not None and isinstance(sym.node, TypeInfo):
+                    if names.has_base_type_id(sym.node, names.TYPEENGINE):
+                        left_hand_explicit_type = UnionType(
+                            [
+                                infer.extract_python_type_from_typeengine(
+                                    api, sym.node, []
+                                ),
+                                NoneType(),
+                            ]
+                        )
+                    else:
+                        util.fail(
+                            api,
+                            "Column type should be a TypeEngine "
+                            "subclass not '{}'".format(sym.node.fullname),
+                            value_type,
+                        )
+
+    if err:
+        msg = (
+            "Can't infer type from attribute {} on class {}. "
+            "Please specify a return type from this function that is "
+            "one of: Mapped[<python type>], relationship[<target class>], "
+            "Column[<TypeEngine type>], MapperProperty[<python type>]"
+        )
+        util.fail(api, msg.format(name, cls.name), cls)
+
+        left_hand_explicit_type = AnyType(TypeOfAny.special_form)
+
+    if left_hand_explicit_type is not None:
+        assert value.node is not None
+        attributes.append(
+            util.SQLAlchemyAttribute(
+                name=name,
+                line=value.node.line,
+                column=value.node.column,
+                typ=left_hand_explicit_type,
+                info=cls.info,
+            )
+        )
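+
+
+# [editor's note] the two scan paths are complementary:
+# _scan_symbol_table_entry (above) recovers attributes of classes whose
+# bodies arrive empty from another module's symbol table, while the
+# statement-level scanners (below) walk a class body that is present in
+# the current file.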
+
+
+def _scan_declarative_decorator_stmt(
+    cls: ClassDef,
+    api: SemanticAnalyzerPluginInterface,
+    stmt: Decorator,
+    attributes: List[util.SQLAlchemyAttribute],
+) -> None:
+    """Extract mapping information from a @declared_attr in a declarative
+    class.
+
+    E.g.::
+
+        @reg.mapped
+        class MyClass:
+            # ...
+
+            @declared_attr
+            def updated_at(cls) -> Column[DateTime]:
+                return Column(DateTime)
+
+    Will resolve in mypy as::
+
+        @reg.mapped
+        class MyClass:
+            # ...
+
+            updated_at: Mapped[Optional[datetime.datetime]]
+
+    """
+    for dec in stmt.decorators:
+        if (
+            isinstance(dec, (NameExpr, MemberExpr, SymbolNode))
+            and names.type_id_for_named_node(dec) is names.DECLARED_ATTR
+        ):
+            break
+    else:
+        return
+
+    dec_index = cls.defs.body.index(stmt)
+
+    left_hand_explicit_type: Optional[ProperType] = None
+
+    if util.name_is_dunder(stmt.name):
+        # for dunder names like __table_args__, __tablename__,
+        # __mapper_args__ etc., rewrite these as simple assignment
+        # statements; otherwise mypy doesn't like if the decorated
+        # function has an annotation like ``cls: Type[Foo]`` because
+        # it isn't @classmethod
+        any_ = AnyType(TypeOfAny.special_form)
+        left_node = NameExpr(stmt.var.name)
+        left_node.node = stmt.var
+        new_stmt = AssignmentStmt([left_node], TempNode(any_))
+        new_stmt.type = left_node.node.type
+        cls.defs.body[dec_index] = new_stmt
+        return
+    elif isinstance(stmt.func.type, CallableType):
+        func_type = stmt.func.type.ret_type
+        if isinstance(func_type, UnboundType):
+            type_id = names.type_id_for_unbound_type(func_type, cls, api)
+        else:
+            # this does not seem to occur unless the type argument is
+            # incorrect
+            return
+
+        if (
+            type_id
+            in {
+                names.MAPPED,
+                names.RELATIONSHIP,
+                names.COMPOSITE_PROPERTY,
+                names.MAPPER_PROPERTY,
+                names.SYNONYM_PROPERTY,
+                names.COLUMN_PROPERTY,
+            }
+            and func_type.args
+        ):
+            left_hand_explicit_type = get_proper_type(func_type.args[0])
+        elif type_id is names.COLUMN and func_type.args:
+            typeengine_arg = func_type.args[0]
+            if isinstance(typeengine_arg, UnboundType):
+                sym = api.lookup_qualified(typeengine_arg.name, typeengine_arg)
+                if sym is not None and isinstance(sym.node, TypeInfo):
+                    if names.has_base_type_id(sym.node, names.TYPEENGINE):
+                        left_hand_explicit_type = UnionType(
+                            [
+                                infer.extract_python_type_from_typeengine(
+                                    api, sym.node, []
+                                ),
+                                NoneType(),
+                            ]
+                        )
+                    else:
+                        util.fail(
+                            api,
+                            "Column type should be a TypeEngine "
+                            "subclass not '{}'".format(sym.node.fullname),
+                            func_type,
+                        )
+
+    if left_hand_explicit_type is None:
+        # no type on the decorated function.  our option here is to
+        # dig into the function body and get the return type, but they
+        # should just have an annotation.
+        msg = (
+            "Can't infer type from @declared_attr on function '{}'; "
+            "please specify a return type from this function that is "
+            "one of: Mapped[<python type>], relationship[<target class>], "
+            "Column[<TypeEngine type>], MapperProperty[<python type>]"
+        )
+        util.fail(api, msg.format(stmt.var.name), stmt)
+
+        left_hand_explicit_type = AnyType(TypeOfAny.special_form)
+
+    left_node = NameExpr(stmt.var.name)
+    left_node.node = stmt.var
+
+    # totally feeling around in the dark here as I don't totally understand
+    # the significance of UnboundType.  It seems to be something that is
+    # not going to do what's expected when it is applied as the type of
+    # an AssignmentStatement.  So do a feeling-around-in-the-dark version
+    # of converting it to the regular Instance/TypeInfo/UnionType structures
+    # we see everywhere else.
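+    # [editor's note] concretely: an annotation such as
+    # "Mapped[Optional[str]]" read off the @declared_attr signature can
+    # still be an UnboundType here; unbound_to_instance() resolves it
+    # through the semantic analyzer into a form that api.named_type()
+    # below can accept as a type argument.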
+ if isinstance(left_hand_explicit_type, UnboundType): + left_hand_explicit_type = get_proper_type( + util.unbound_to_instance(api, left_hand_explicit_type) + ) + + left_node.node.type = api.named_type( + names.NAMED_TYPE_SQLA_MAPPED, [left_hand_explicit_type] + ) + + # this will ignore the rvalue entirely + # rvalue = TempNode(AnyType(TypeOfAny.special_form)) + + # rewrite the node as: + # : Mapped[] = + # _sa_Mapped._empty_constructor(lambda: ) + # the function body is maintained so it gets type checked internally + rvalue = names.expr_to_mapped_constructor( + LambdaExpr(stmt.func.arguments, stmt.func.body) + ) + + new_stmt = AssignmentStmt([left_node], rvalue) + new_stmt.type = left_node.node.type + + attributes.append( + util.SQLAlchemyAttribute( + name=left_node.name, + line=stmt.line, + column=stmt.column, + typ=left_hand_explicit_type, + info=cls.info, + ) + ) + cls.defs.body[dec_index] = new_stmt + + +def _scan_declarative_assignment_stmt( + cls: ClassDef, + api: SemanticAnalyzerPluginInterface, + stmt: AssignmentStmt, + attributes: List[util.SQLAlchemyAttribute], +) -> None: + """Extract mapping information from an assignment statement in a + declarative class. + + """ + lvalue = stmt.lvalues[0] + if not isinstance(lvalue, NameExpr): + return + + sym = cls.info.names.get(lvalue.name) + + # this establishes that semantic analysis has taken place, which + # means the nodes are populated and we are called from an appropriate + # hook. + assert sym is not None + node = sym.node + + if isinstance(node, PlaceholderNode): + return + + assert node is lvalue.node + assert isinstance(node, Var) + + if node.name == "__abstract__": + if api.parse_bool(stmt.rvalue) is True: + util.set_is_base(cls.info) + return + elif node.name == "__tablename__": + util.set_has_table(cls.info) + elif node.name.startswith("__"): + return + elif node.name == "_mypy_mapped_attrs": + if not isinstance(stmt.rvalue, ListExpr): + util.fail(api, "_mypy_mapped_attrs is expected to be a list", stmt) + else: + for item in stmt.rvalue.items: + if isinstance(item, (NameExpr, StrExpr)): + apply.apply_mypy_mapped_attr(cls, api, item, attributes) + + left_hand_mapped_type: Optional[Type] = None + left_hand_explicit_type: Optional[ProperType] = None + + if node.is_inferred or node.type is None: + if isinstance(stmt.type, UnboundType): + # look for an explicit Mapped[] type annotation on the left + # side with nothing on the right + + # print(stmt.type) + # Mapped?[Optional?[A?]] + + left_hand_explicit_type = stmt.type + + if stmt.type.name == "Mapped": + mapped_sym = api.lookup_qualified("Mapped", cls) + if ( + mapped_sym is not None + and mapped_sym.node is not None + and names.type_id_for_named_node(mapped_sym.node) + is names.MAPPED + ): + left_hand_explicit_type = get_proper_type( + stmt.type.args[0] + ) + left_hand_mapped_type = stmt.type + + # TODO: do we need to convert from unbound for this case? 
+ # left_hand_explicit_type = util._unbound_to_instance( + # api, left_hand_explicit_type + # ) + else: + node_type = get_proper_type(node.type) + if ( + isinstance(node_type, Instance) + and names.type_id_for_named_node(node_type.type) is names.MAPPED + ): + # print(node.type) + # sqlalchemy.orm.attributes.Mapped[] + left_hand_explicit_type = get_proper_type(node_type.args[0]) + left_hand_mapped_type = node_type + else: + # print(node.type) + # + left_hand_explicit_type = node_type + left_hand_mapped_type = None + + if isinstance(stmt.rvalue, TempNode) and left_hand_mapped_type is not None: + # annotation without assignment and Mapped is present + # as type annotation + # equivalent to using _infer_type_from_left_hand_type_only. + + python_type_for_type = left_hand_explicit_type + elif isinstance(stmt.rvalue, CallExpr) and isinstance( + stmt.rvalue.callee, RefExpr + ): + python_type_for_type = infer.infer_type_from_right_hand_nameexpr( + api, stmt, node, left_hand_explicit_type, stmt.rvalue.callee + ) + + if python_type_for_type is None: + return + + else: + return + + assert python_type_for_type is not None + + attributes.append( + util.SQLAlchemyAttribute( + name=node.name, + line=stmt.line, + column=stmt.column, + typ=python_type_for_type, + info=cls.info, + ) + ) + + apply.apply_type_to_mapped_statement( + api, + stmt, + lvalue, + left_hand_explicit_type, + python_type_for_type, + ) + + +def _scan_for_mapped_bases( + cls: ClassDef, + api: SemanticAnalyzerPluginInterface, +) -> None: + """Given a class, iterate through its superclass hierarchy to find + all other classes that are considered as ORM-significant. + + Locates non-mapped mixins and scans them for mapped attributes to be + applied to subclasses. + + """ + + info = util.info_for_cls(cls, api) + + if info is None: + return + + for base_info in info.mro[1:-1]: + if base_info.fullname.startswith("builtins"): + continue + + # scan each base for mapped attributes. 
if they are not already + # scanned (but have all their type info), that means they are unmapped + # mixins + scan_declarative_assignments_and_apply_types( + base_info.defn, api, is_mixin_scan=True + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/infer.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/infer.py new file mode 100644 index 00000000..09b3c443 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/infer.py @@ -0,0 +1,590 @@ +# ext/mypy/infer.py +# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +from typing import Optional +from typing import Sequence + +from mypy.maptype import map_instance_to_supertype +from mypy.nodes import AssignmentStmt +from mypy.nodes import CallExpr +from mypy.nodes import Expression +from mypy.nodes import FuncDef +from mypy.nodes import LambdaExpr +from mypy.nodes import MemberExpr +from mypy.nodes import NameExpr +from mypy.nodes import RefExpr +from mypy.nodes import StrExpr +from mypy.nodes import TypeInfo +from mypy.nodes import Var +from mypy.plugin import SemanticAnalyzerPluginInterface +from mypy.subtypes import is_subtype +from mypy.types import AnyType +from mypy.types import CallableType +from mypy.types import get_proper_type +from mypy.types import Instance +from mypy.types import NoneType +from mypy.types import ProperType +from mypy.types import TypeOfAny +from mypy.types import UnionType + +from . import names +from . import util + + +def infer_type_from_right_hand_nameexpr( + api: SemanticAnalyzerPluginInterface, + stmt: AssignmentStmt, + node: Var, + left_hand_explicit_type: Optional[ProperType], + infer_from_right_side: RefExpr, +) -> Optional[ProperType]: + type_id = names.type_id_for_callee(infer_from_right_side) + if type_id is None: + return None + elif type_id is names.MAPPED: + python_type_for_type = _infer_type_from_mapped( + api, stmt, node, left_hand_explicit_type, infer_from_right_side + ) + elif type_id is names.COLUMN: + python_type_for_type = _infer_type_from_decl_column( + api, stmt, node, left_hand_explicit_type + ) + elif type_id is names.RELATIONSHIP: + python_type_for_type = _infer_type_from_relationship( + api, stmt, node, left_hand_explicit_type + ) + elif type_id is names.COLUMN_PROPERTY: + python_type_for_type = _infer_type_from_decl_column_property( + api, stmt, node, left_hand_explicit_type + ) + elif type_id is names.SYNONYM_PROPERTY: + python_type_for_type = infer_type_from_left_hand_type_only( + api, node, left_hand_explicit_type + ) + elif type_id is names.COMPOSITE_PROPERTY: + python_type_for_type = _infer_type_from_decl_composite_property( + api, stmt, node, left_hand_explicit_type + ) + else: + return None + + return python_type_for_type + + +def _infer_type_from_relationship( + api: SemanticAnalyzerPluginInterface, + stmt: AssignmentStmt, + node: Var, + left_hand_explicit_type: Optional[ProperType], +) -> Optional[ProperType]: + """Infer the type of mapping from a relationship. + + E.g.:: + + @reg.mapped + class MyClass: + # ... + + addresses = relationship(Address, uselist=True) + + order: Mapped["Order"] = relationship("Order") + + Will resolve in mypy as:: + + @reg.mapped + class MyClass: + # ... 
+ + addresses: Mapped[List[Address]] + + order: Mapped["Order"] + + """ + + assert isinstance(stmt.rvalue, CallExpr) + target_cls_arg = stmt.rvalue.args[0] + python_type_for_type: Optional[ProperType] = None + + if isinstance(target_cls_arg, NameExpr) and isinstance( + target_cls_arg.node, TypeInfo + ): + # type + related_object_type = target_cls_arg.node + python_type_for_type = Instance(related_object_type, []) + + # other cases not covered - an error message directs the user + # to set an explicit type annotation + # + # node.type == str, it's a string + # if isinstance(target_cls_arg, NameExpr) and isinstance( + # target_cls_arg.node, Var + # ) + # points to a type + # isinstance(target_cls_arg, NameExpr) and isinstance( + # target_cls_arg.node, TypeAlias + # ) + # string expression + # isinstance(target_cls_arg, StrExpr) + + uselist_arg = util.get_callexpr_kwarg(stmt.rvalue, "uselist") + collection_cls_arg: Optional[Expression] = util.get_callexpr_kwarg( + stmt.rvalue, "collection_class" + ) + type_is_a_collection = False + + # this can be used to determine Optional for a many-to-one + # in the same way nullable=False could be used, if we start supporting + # that. + # innerjoin_arg = util.get_callexpr_kwarg(stmt.rvalue, "innerjoin") + + if ( + uselist_arg is not None + and api.parse_bool(uselist_arg) is True + and collection_cls_arg is None + ): + type_is_a_collection = True + if python_type_for_type is not None: + python_type_for_type = api.named_type( + names.NAMED_TYPE_BUILTINS_LIST, [python_type_for_type] + ) + elif ( + uselist_arg is None or api.parse_bool(uselist_arg) is True + ) and collection_cls_arg is not None: + type_is_a_collection = True + if isinstance(collection_cls_arg, CallExpr): + collection_cls_arg = collection_cls_arg.callee + + if isinstance(collection_cls_arg, NameExpr) and isinstance( + collection_cls_arg.node, TypeInfo + ): + if python_type_for_type is not None: + # this can still be overridden by the left hand side + # within _infer_Type_from_left_and_inferred_right + python_type_for_type = Instance( + collection_cls_arg.node, [python_type_for_type] + ) + elif ( + isinstance(collection_cls_arg, NameExpr) + and isinstance(collection_cls_arg.node, FuncDef) + and collection_cls_arg.node.type is not None + ): + if python_type_for_type is not None: + # this can still be overridden by the left hand side + # within _infer_Type_from_left_and_inferred_right + + # TODO: handle mypy.types.Overloaded + if isinstance(collection_cls_arg.node.type, CallableType): + rt = get_proper_type(collection_cls_arg.node.type.ret_type) + + if isinstance(rt, CallableType): + callable_ret_type = get_proper_type(rt.ret_type) + if isinstance(callable_ret_type, Instance): + python_type_for_type = Instance( + callable_ret_type.type, + [python_type_for_type], + ) + else: + util.fail( + api, + "Expected Python collection type for " + "collection_class parameter", + stmt.rvalue, + ) + python_type_for_type = None + elif uselist_arg is not None and api.parse_bool(uselist_arg) is False: + if collection_cls_arg is not None: + util.fail( + api, + "Sending uselist=False and collection_class at the same time " + "does not make sense", + stmt.rvalue, + ) + if python_type_for_type is not None: + python_type_for_type = UnionType( + [python_type_for_type, NoneType()] + ) + + else: + if left_hand_explicit_type is None: + msg = ( + "Can't infer scalar or collection for ORM mapped expression " + "assigned to attribute '{}' if both 'uselist' and " + "'collection_class' arguments are absent from the " + 
"relationship(); please specify a " + "type annotation on the left hand side." + ) + util.fail(api, msg.format(node.name), node) + + if python_type_for_type is None: + return infer_type_from_left_hand_type_only( + api, node, left_hand_explicit_type + ) + elif left_hand_explicit_type is not None: + if type_is_a_collection: + assert isinstance(left_hand_explicit_type, Instance) + assert isinstance(python_type_for_type, Instance) + return _infer_collection_type_from_left_and_inferred_right( + api, node, left_hand_explicit_type, python_type_for_type + ) + else: + return _infer_type_from_left_and_inferred_right( + api, + node, + left_hand_explicit_type, + python_type_for_type, + ) + else: + return python_type_for_type + + +def _infer_type_from_decl_composite_property( + api: SemanticAnalyzerPluginInterface, + stmt: AssignmentStmt, + node: Var, + left_hand_explicit_type: Optional[ProperType], +) -> Optional[ProperType]: + """Infer the type of mapping from a Composite.""" + + assert isinstance(stmt.rvalue, CallExpr) + target_cls_arg = stmt.rvalue.args[0] + python_type_for_type = None + + if isinstance(target_cls_arg, NameExpr) and isinstance( + target_cls_arg.node, TypeInfo + ): + related_object_type = target_cls_arg.node + python_type_for_type = Instance(related_object_type, []) + else: + python_type_for_type = None + + if python_type_for_type is None: + return infer_type_from_left_hand_type_only( + api, node, left_hand_explicit_type + ) + elif left_hand_explicit_type is not None: + return _infer_type_from_left_and_inferred_right( + api, node, left_hand_explicit_type, python_type_for_type + ) + else: + return python_type_for_type + + +def _infer_type_from_mapped( + api: SemanticAnalyzerPluginInterface, + stmt: AssignmentStmt, + node: Var, + left_hand_explicit_type: Optional[ProperType], + infer_from_right_side: RefExpr, +) -> Optional[ProperType]: + """Infer the type of mapping from a right side expression + that returns Mapped. + + + """ + assert isinstance(stmt.rvalue, CallExpr) + + # (Pdb) print(stmt.rvalue.callee) + # NameExpr(query_expression [sqlalchemy.orm._orm_constructors.query_expression]) # noqa: E501 + # (Pdb) stmt.rvalue.callee.node + # + # (Pdb) stmt.rvalue.callee.node.type + # def [_T] (default_expr: sqlalchemy.sql.elements.ColumnElement[_T`-1] =) -> sqlalchemy.orm.base.Mapped[_T`-1] # noqa: E501 + # sqlalchemy.orm.base.Mapped[_T`-1] + # the_mapped_type = stmt.rvalue.callee.node.type.ret_type + + # TODO: look at generic ref and either use that, + # or reconcile w/ what's present, etc. + the_mapped_type = util.type_for_callee(infer_from_right_side) # noqa + + return infer_type_from_left_hand_type_only( + api, node, left_hand_explicit_type + ) + + +def _infer_type_from_decl_column_property( + api: SemanticAnalyzerPluginInterface, + stmt: AssignmentStmt, + node: Var, + left_hand_explicit_type: Optional[ProperType], +) -> Optional[ProperType]: + """Infer the type of mapping from a ColumnProperty. + + This includes mappings against ``column_property()`` as well as the + ``deferred()`` function. 
+ + """ + assert isinstance(stmt.rvalue, CallExpr) + + if stmt.rvalue.args: + first_prop_arg = stmt.rvalue.args[0] + + if isinstance(first_prop_arg, CallExpr): + type_id = names.type_id_for_callee(first_prop_arg.callee) + + # look for column_property() / deferred() etc with Column as first + # argument + if type_id is names.COLUMN: + return _infer_type_from_decl_column( + api, + stmt, + node, + left_hand_explicit_type, + right_hand_expression=first_prop_arg, + ) + + if isinstance(stmt.rvalue, CallExpr): + type_id = names.type_id_for_callee(stmt.rvalue.callee) + # this is probably not strictly necessary as we have to use the left + # hand type for query expression in any case. any other no-arg + # column prop objects would go here also + if type_id is names.QUERY_EXPRESSION: + return _infer_type_from_decl_column( + api, + stmt, + node, + left_hand_explicit_type, + ) + + return infer_type_from_left_hand_type_only( + api, node, left_hand_explicit_type + ) + + +def _infer_type_from_decl_column( + api: SemanticAnalyzerPluginInterface, + stmt: AssignmentStmt, + node: Var, + left_hand_explicit_type: Optional[ProperType], + right_hand_expression: Optional[CallExpr] = None, +) -> Optional[ProperType]: + """Infer the type of mapping from a Column. + + E.g.:: + + @reg.mapped + class MyClass: + # ... + + a = Column(Integer) + + b = Column("b", String) + + c: Mapped[int] = Column(Integer) + + d: bool = Column(Boolean) + + Will resolve in MyPy as:: + + @reg.mapped + class MyClass: + # ... + + a : Mapped[int] + + b : Mapped[str] + + c: Mapped[int] + + d: Mapped[bool] + + """ + assert isinstance(node, Var) + + callee = None + + if right_hand_expression is None: + if not isinstance(stmt.rvalue, CallExpr): + return None + + right_hand_expression = stmt.rvalue + + for column_arg in right_hand_expression.args[0:2]: + if isinstance(column_arg, CallExpr): + if isinstance(column_arg.callee, RefExpr): + # x = Column(String(50)) + callee = column_arg.callee + type_args: Sequence[Expression] = column_arg.args + break + elif isinstance(column_arg, (NameExpr, MemberExpr)): + if isinstance(column_arg.node, TypeInfo): + # x = Column(String) + callee = column_arg + type_args = () + break + else: + # x = Column(some_name, String), go to next argument + continue + elif isinstance(column_arg, (StrExpr,)): + # x = Column("name", String), go to next argument + continue + elif isinstance(column_arg, (LambdaExpr,)): + # x = Column("name", String, default=lambda: uuid.uuid4()) + # go to next argument + continue + else: + assert False + + if callee is None: + return None + + if isinstance(callee.node, TypeInfo) and names.mro_has_id( + callee.node.mro, names.TYPEENGINE + ): + python_type_for_type = extract_python_type_from_typeengine( + api, callee.node, type_args + ) + + if left_hand_explicit_type is not None: + return _infer_type_from_left_and_inferred_right( + api, node, left_hand_explicit_type, python_type_for_type + ) + + else: + return UnionType([python_type_for_type, NoneType()]) + else: + # it's not TypeEngine, it's typically implicitly typed + # like ForeignKey. we can't infer from the right side. 
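+        # [editor's note] e.g. a hypothetical
+        # "x = Column(ForeignKey('other.id'))" supplies no TypeEngine
+        # argument, so the Python type can only come from an explicit
+        # left-hand annotation such as "x: Mapped[int]".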
+        return infer_type_from_left_hand_type_only(
+            api, node, left_hand_explicit_type
+        )
+
+
+def _infer_type_from_left_and_inferred_right(
+    api: SemanticAnalyzerPluginInterface,
+    node: Var,
+    left_hand_explicit_type: ProperType,
+    python_type_for_type: ProperType,
+    orig_left_hand_type: Optional[ProperType] = None,
+    orig_python_type_for_type: Optional[ProperType] = None,
+) -> Optional[ProperType]:
+    """Validate type when a left hand annotation is present and we also
+    could infer the right hand side::
+
+        attrname: SomeType = Column(SomeDBType)
+
+    """
+
+    if orig_left_hand_type is None:
+        orig_left_hand_type = left_hand_explicit_type
+    if orig_python_type_for_type is None:
+        orig_python_type_for_type = python_type_for_type
+
+    if not is_subtype(left_hand_explicit_type, python_type_for_type):
+        effective_type = api.named_type(
+            names.NAMED_TYPE_SQLA_MAPPED, [orig_python_type_for_type]
+        )
+
+        msg = (
+            "Left hand assignment '{}: {}' not compatible "
+            "with ORM mapped expression of type {}"
+        )
+        util.fail(
+            api,
+            msg.format(
+                node.name,
+                util.format_type(orig_left_hand_type, api.options),
+                util.format_type(effective_type, api.options),
+            ),
+            node,
+        )
+
+    return orig_left_hand_type
+
+
+def _infer_collection_type_from_left_and_inferred_right(
+    api: SemanticAnalyzerPluginInterface,
+    node: Var,
+    left_hand_explicit_type: Instance,
+    python_type_for_type: Instance,
+) -> Optional[ProperType]:
+    orig_left_hand_type = left_hand_explicit_type
+    orig_python_type_for_type = python_type_for_type
+
+    if left_hand_explicit_type.args:
+        left_hand_arg = get_proper_type(left_hand_explicit_type.args[0])
+        python_type_arg = get_proper_type(python_type_for_type.args[0])
+    else:
+        left_hand_arg = left_hand_explicit_type
+        python_type_arg = python_type_for_type
+
+    assert isinstance(left_hand_arg, (Instance, UnionType))
+    assert isinstance(python_type_arg, (Instance, UnionType))
+
+    return _infer_type_from_left_and_inferred_right(
+        api,
+        node,
+        left_hand_arg,
+        python_type_arg,
+        orig_left_hand_type=orig_left_hand_type,
+        orig_python_type_for_type=orig_python_type_for_type,
+    )
+
+
+def infer_type_from_left_hand_type_only(
+    api: SemanticAnalyzerPluginInterface,
+    node: Var,
+    left_hand_explicit_type: Optional[ProperType],
+) -> Optional[ProperType]:
+    """Determine the type based on explicit annotation only.
+
+    If no annotation is present, emit an error noting that one is needed
+    in order to know the type.
+
+    """
+    if left_hand_explicit_type is None:
+        msg = (
+            "Can't infer type from ORM mapped expression "
+            "assigned to attribute '{}'; please specify a "
+            "Python type or "
+            "Mapped[<python type>] on the left hand side."
+ ) + util.fail(api, msg.format(node.name), node) + + return api.named_type( + names.NAMED_TYPE_SQLA_MAPPED, [AnyType(TypeOfAny.special_form)] + ) + + else: + # use type from the left hand side + return left_hand_explicit_type + + +def extract_python_type_from_typeengine( + api: SemanticAnalyzerPluginInterface, + node: TypeInfo, + type_args: Sequence[Expression], +) -> ProperType: + if node.fullname == "sqlalchemy.sql.sqltypes.Enum" and type_args: + first_arg = type_args[0] + if isinstance(first_arg, RefExpr) and isinstance( + first_arg.node, TypeInfo + ): + for base_ in first_arg.node.mro: + if base_.fullname == "enum.Enum": + return Instance(first_arg.node, []) + # TODO: support other pep-435 types here + else: + return api.named_type(names.NAMED_TYPE_BUILTINS_STR, []) + + assert node.has_base("sqlalchemy.sql.type_api.TypeEngine"), ( + "could not extract Python type from node: %s" % node + ) + + type_engine_sym = api.lookup_fully_qualified_or_none( + "sqlalchemy.sql.type_api.TypeEngine" + ) + + assert type_engine_sym is not None and isinstance( + type_engine_sym.node, TypeInfo + ) + type_engine = map_instance_to_supertype( + Instance(node, []), + type_engine_sym.node, + ) + return get_proper_type(type_engine.args[-1]) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/names.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/names.py new file mode 100644 index 00000000..fc3d708e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/names.py @@ -0,0 +1,335 @@ +# ext/mypy/names.py +# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +from typing import Dict +from typing import List +from typing import Optional +from typing import Set +from typing import Tuple +from typing import Union + +from mypy.nodes import ARG_POS +from mypy.nodes import CallExpr +from mypy.nodes import ClassDef +from mypy.nodes import Decorator +from mypy.nodes import Expression +from mypy.nodes import FuncDef +from mypy.nodes import MemberExpr +from mypy.nodes import NameExpr +from mypy.nodes import OverloadedFuncDef +from mypy.nodes import SymbolNode +from mypy.nodes import TypeAlias +from mypy.nodes import TypeInfo +from mypy.plugin import SemanticAnalyzerPluginInterface +from mypy.types import CallableType +from mypy.types import get_proper_type +from mypy.types import Instance +from mypy.types import UnboundType + +from ... 
import util + +COLUMN: int = util.symbol("COLUMN") +RELATIONSHIP: int = util.symbol("RELATIONSHIP") +REGISTRY: int = util.symbol("REGISTRY") +COLUMN_PROPERTY: int = util.symbol("COLUMN_PROPERTY") +TYPEENGINE: int = util.symbol("TYPEENGNE") +MAPPED: int = util.symbol("MAPPED") +DECLARATIVE_BASE: int = util.symbol("DECLARATIVE_BASE") +DECLARATIVE_META: int = util.symbol("DECLARATIVE_META") +MAPPED_DECORATOR: int = util.symbol("MAPPED_DECORATOR") +SYNONYM_PROPERTY: int = util.symbol("SYNONYM_PROPERTY") +COMPOSITE_PROPERTY: int = util.symbol("COMPOSITE_PROPERTY") +DECLARED_ATTR: int = util.symbol("DECLARED_ATTR") +MAPPER_PROPERTY: int = util.symbol("MAPPER_PROPERTY") +AS_DECLARATIVE: int = util.symbol("AS_DECLARATIVE") +AS_DECLARATIVE_BASE: int = util.symbol("AS_DECLARATIVE_BASE") +DECLARATIVE_MIXIN: int = util.symbol("DECLARATIVE_MIXIN") +QUERY_EXPRESSION: int = util.symbol("QUERY_EXPRESSION") + +# names that must succeed with mypy.api.named_type +NAMED_TYPE_BUILTINS_OBJECT = "builtins.object" +NAMED_TYPE_BUILTINS_STR = "builtins.str" +NAMED_TYPE_BUILTINS_LIST = "builtins.list" +NAMED_TYPE_SQLA_MAPPED = "sqlalchemy.orm.base.Mapped" + +_RelFullNames = { + "sqlalchemy.orm.relationships.Relationship", + "sqlalchemy.orm.relationships.RelationshipProperty", + "sqlalchemy.orm.relationships._RelationshipDeclared", + "sqlalchemy.orm.Relationship", + "sqlalchemy.orm.RelationshipProperty", +} + +_lookup: Dict[str, Tuple[int, Set[str]]] = { + "Column": ( + COLUMN, + { + "sqlalchemy.sql.schema.Column", + "sqlalchemy.sql.Column", + }, + ), + "Relationship": (RELATIONSHIP, _RelFullNames), + "RelationshipProperty": (RELATIONSHIP, _RelFullNames), + "_RelationshipDeclared": (RELATIONSHIP, _RelFullNames), + "registry": ( + REGISTRY, + { + "sqlalchemy.orm.decl_api.registry", + "sqlalchemy.orm.registry", + }, + ), + "ColumnProperty": ( + COLUMN_PROPERTY, + { + "sqlalchemy.orm.properties.MappedSQLExpression", + "sqlalchemy.orm.MappedSQLExpression", + "sqlalchemy.orm.properties.ColumnProperty", + "sqlalchemy.orm.ColumnProperty", + }, + ), + "MappedSQLExpression": ( + COLUMN_PROPERTY, + { + "sqlalchemy.orm.properties.MappedSQLExpression", + "sqlalchemy.orm.MappedSQLExpression", + "sqlalchemy.orm.properties.ColumnProperty", + "sqlalchemy.orm.ColumnProperty", + }, + ), + "Synonym": ( + SYNONYM_PROPERTY, + { + "sqlalchemy.orm.descriptor_props.Synonym", + "sqlalchemy.orm.Synonym", + "sqlalchemy.orm.descriptor_props.SynonymProperty", + "sqlalchemy.orm.SynonymProperty", + }, + ), + "SynonymProperty": ( + SYNONYM_PROPERTY, + { + "sqlalchemy.orm.descriptor_props.Synonym", + "sqlalchemy.orm.Synonym", + "sqlalchemy.orm.descriptor_props.SynonymProperty", + "sqlalchemy.orm.SynonymProperty", + }, + ), + "Composite": ( + COMPOSITE_PROPERTY, + { + "sqlalchemy.orm.descriptor_props.Composite", + "sqlalchemy.orm.Composite", + "sqlalchemy.orm.descriptor_props.CompositeProperty", + "sqlalchemy.orm.CompositeProperty", + }, + ), + "CompositeProperty": ( + COMPOSITE_PROPERTY, + { + "sqlalchemy.orm.descriptor_props.Composite", + "sqlalchemy.orm.Composite", + "sqlalchemy.orm.descriptor_props.CompositeProperty", + "sqlalchemy.orm.CompositeProperty", + }, + ), + "MapperProperty": ( + MAPPER_PROPERTY, + { + "sqlalchemy.orm.interfaces.MapperProperty", + "sqlalchemy.orm.MapperProperty", + }, + ), + "TypeEngine": (TYPEENGINE, {"sqlalchemy.sql.type_api.TypeEngine"}), + "Mapped": (MAPPED, {NAMED_TYPE_SQLA_MAPPED}), + "declarative_base": ( + DECLARATIVE_BASE, + { + "sqlalchemy.ext.declarative.declarative_base", + "sqlalchemy.orm.declarative_base", 
+ "sqlalchemy.orm.decl_api.declarative_base", + }, + ), + "DeclarativeMeta": ( + DECLARATIVE_META, + { + "sqlalchemy.ext.declarative.DeclarativeMeta", + "sqlalchemy.orm.DeclarativeMeta", + "sqlalchemy.orm.decl_api.DeclarativeMeta", + }, + ), + "mapped": ( + MAPPED_DECORATOR, + { + "sqlalchemy.orm.decl_api.registry.mapped", + "sqlalchemy.orm.registry.mapped", + }, + ), + "as_declarative": ( + AS_DECLARATIVE, + { + "sqlalchemy.ext.declarative.as_declarative", + "sqlalchemy.orm.decl_api.as_declarative", + "sqlalchemy.orm.as_declarative", + }, + ), + "as_declarative_base": ( + AS_DECLARATIVE_BASE, + { + "sqlalchemy.orm.decl_api.registry.as_declarative_base", + "sqlalchemy.orm.registry.as_declarative_base", + }, + ), + "declared_attr": ( + DECLARED_ATTR, + { + "sqlalchemy.orm.decl_api.declared_attr", + "sqlalchemy.orm.declared_attr", + }, + ), + "declarative_mixin": ( + DECLARATIVE_MIXIN, + { + "sqlalchemy.orm.decl_api.declarative_mixin", + "sqlalchemy.orm.declarative_mixin", + }, + ), + "query_expression": ( + QUERY_EXPRESSION, + { + "sqlalchemy.orm.query_expression", + "sqlalchemy.orm._orm_constructors.query_expression", + }, + ), +} + + +def has_base_type_id(info: TypeInfo, type_id: int) -> bool: + for mr in info.mro: + check_type_id, fullnames = _lookup.get(mr.name, (None, None)) + if check_type_id == type_id: + break + else: + return False + + if fullnames is None: + return False + + return mr.fullname in fullnames + + +def mro_has_id(mro: List[TypeInfo], type_id: int) -> bool: + for mr in mro: + check_type_id, fullnames = _lookup.get(mr.name, (None, None)) + if check_type_id == type_id: + break + else: + return False + + if fullnames is None: + return False + + return mr.fullname in fullnames + + +def type_id_for_unbound_type( + type_: UnboundType, cls: ClassDef, api: SemanticAnalyzerPluginInterface +) -> Optional[int]: + sym = api.lookup_qualified(type_.name, type_) + if sym is not None: + if isinstance(sym.node, TypeAlias): + target_type = get_proper_type(sym.node.target) + if isinstance(target_type, Instance): + return type_id_for_named_node(target_type.type) + elif isinstance(sym.node, TypeInfo): + return type_id_for_named_node(sym.node) + + return None + + +def type_id_for_callee(callee: Expression) -> Optional[int]: + if isinstance(callee, (MemberExpr, NameExpr)): + if isinstance(callee.node, Decorator) and isinstance( + callee.node.func, FuncDef + ): + if callee.node.func.type and isinstance( + callee.node.func.type, CallableType + ): + ret_type = get_proper_type(callee.node.func.type.ret_type) + + if isinstance(ret_type, Instance): + return type_id_for_fullname(ret_type.type.fullname) + + return None + + elif isinstance(callee.node, OverloadedFuncDef): + if ( + callee.node.impl + and callee.node.impl.type + and isinstance(callee.node.impl.type, CallableType) + ): + ret_type = get_proper_type(callee.node.impl.type.ret_type) + + if isinstance(ret_type, Instance): + return type_id_for_fullname(ret_type.type.fullname) + + return None + elif isinstance(callee.node, FuncDef): + if callee.node.type and isinstance(callee.node.type, CallableType): + ret_type = get_proper_type(callee.node.type.ret_type) + + if isinstance(ret_type, Instance): + return type_id_for_fullname(ret_type.type.fullname) + + return None + elif isinstance(callee.node, TypeAlias): + target_type = get_proper_type(callee.node.target) + if isinstance(target_type, Instance): + return type_id_for_fullname(target_type.type.fullname) + elif isinstance(callee.node, TypeInfo): + return type_id_for_named_node(callee) + return 
None + + +def type_id_for_named_node( + node: Union[NameExpr, MemberExpr, SymbolNode] +) -> Optional[int]: + type_id, fullnames = _lookup.get(node.name, (None, None)) + + if type_id is None or fullnames is None: + return None + elif node.fullname in fullnames: + return type_id + else: + return None + + +def type_id_for_fullname(fullname: str) -> Optional[int]: + tokens = fullname.split(".") + immediate = tokens[-1] + + type_id, fullnames = _lookup.get(immediate, (None, None)) + + if type_id is None or fullnames is None: + return None + elif fullname in fullnames: + return type_id + else: + return None + + +def expr_to_mapped_constructor(expr: Expression) -> CallExpr: + column_descriptor = NameExpr("__sa_Mapped") + column_descriptor.fullname = NAMED_TYPE_SQLA_MAPPED + member_expr = MemberExpr(column_descriptor, "_empty_constructor") + return CallExpr( + member_expr, + [expr], + [ARG_POS], + ["arg1"], + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/plugin.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/plugin.py new file mode 100644 index 00000000..00eb4d1c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/plugin.py @@ -0,0 +1,303 @@ +# ext/mypy/plugin.py +# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +""" +Mypy plugin for SQLAlchemy ORM. + +""" +from __future__ import annotations + +from typing import Callable +from typing import List +from typing import Optional +from typing import Tuple +from typing import Type as TypingType +from typing import Union + +from mypy import nodes +from mypy.mro import calculate_mro +from mypy.mro import MroError +from mypy.nodes import Block +from mypy.nodes import ClassDef +from mypy.nodes import GDEF +from mypy.nodes import MypyFile +from mypy.nodes import NameExpr +from mypy.nodes import SymbolTable +from mypy.nodes import SymbolTableNode +from mypy.nodes import TypeInfo +from mypy.plugin import AttributeContext +from mypy.plugin import ClassDefContext +from mypy.plugin import DynamicClassDefContext +from mypy.plugin import Plugin +from mypy.plugin import SemanticAnalyzerPluginInterface +from mypy.types import get_proper_type +from mypy.types import Instance +from mypy.types import Type + +from . import decl_class +from . import names +from . import util + +try: + __import__("sqlalchemy-stubs") +except ImportError: + pass +else: + raise ImportError( + "The SQLAlchemy mypy plugin in SQLAlchemy " + "2.0 does not work with sqlalchemy-stubs or " + "sqlalchemy2-stubs installed, as well as with any other third party " + "SQLAlchemy stubs. Please uninstall all SQLAlchemy stubs " + "packages." 
+ ) + + +class SQLAlchemyPlugin(Plugin): + def get_dynamic_class_hook( + self, fullname: str + ) -> Optional[Callable[[DynamicClassDefContext], None]]: + if names.type_id_for_fullname(fullname) is names.DECLARATIVE_BASE: + return _dynamic_class_hook + return None + + def get_customize_class_mro_hook( + self, fullname: str + ) -> Optional[Callable[[ClassDefContext], None]]: + return _fill_in_decorators + + def get_class_decorator_hook( + self, fullname: str + ) -> Optional[Callable[[ClassDefContext], None]]: + sym = self.lookup_fully_qualified(fullname) + + if sym is not None and sym.node is not None: + type_id = names.type_id_for_named_node(sym.node) + if type_id is names.MAPPED_DECORATOR: + return _cls_decorator_hook + elif type_id in ( + names.AS_DECLARATIVE, + names.AS_DECLARATIVE_BASE, + ): + return _base_cls_decorator_hook + elif type_id is names.DECLARATIVE_MIXIN: + return _declarative_mixin_hook + + return None + + def get_metaclass_hook( + self, fullname: str + ) -> Optional[Callable[[ClassDefContext], None]]: + if names.type_id_for_fullname(fullname) is names.DECLARATIVE_META: + # Set any classes that explicitly have metaclass=DeclarativeMeta + # as declarative so the check in `get_base_class_hook()` works + return _metaclass_cls_hook + + return None + + def get_base_class_hook( + self, fullname: str + ) -> Optional[Callable[[ClassDefContext], None]]: + sym = self.lookup_fully_qualified(fullname) + + if ( + sym + and isinstance(sym.node, TypeInfo) + and util.has_declarative_base(sym.node) + ): + return _base_cls_hook + + return None + + def get_attribute_hook( + self, fullname: str + ) -> Optional[Callable[[AttributeContext], Type]]: + if fullname.startswith( + "sqlalchemy.orm.attributes.QueryableAttribute." + ): + return _queryable_getattr_hook + + return None + + def get_additional_deps( + self, file: MypyFile + ) -> List[Tuple[int, str, int]]: + return [ + # + (10, "sqlalchemy.orm", -1), + (10, "sqlalchemy.orm.attributes", -1), + (10, "sqlalchemy.orm.decl_api", -1), + ] + + +def plugin(version: str) -> TypingType[SQLAlchemyPlugin]: + return SQLAlchemyPlugin + + +def _dynamic_class_hook(ctx: DynamicClassDefContext) -> None: + """Generate a declarative Base class when the declarative_base() function + is encountered.""" + + _add_globals(ctx) + + cls = ClassDef(ctx.name, Block([])) + cls.fullname = ctx.api.qualified_name(ctx.name) + + info = TypeInfo(SymbolTable(), cls, ctx.api.cur_mod_id) + cls.info = info + _set_declarative_metaclass(ctx.api, cls) + + cls_arg = util.get_callexpr_kwarg(ctx.call, "cls", expr_types=(NameExpr,)) + if cls_arg is not None and isinstance(cls_arg.node, TypeInfo): + util.set_is_base(cls_arg.node) + decl_class.scan_declarative_assignments_and_apply_types( + cls_arg.node.defn, ctx.api, is_mixin_scan=True + ) + info.bases = [Instance(cls_arg.node, [])] + else: + obj = ctx.api.named_type(names.NAMED_TYPE_BUILTINS_OBJECT) + + info.bases = [obj] + + try: + calculate_mro(info) + except MroError: + util.fail( + ctx.api, "Not able to calculate MRO for declarative base", ctx.call + ) + obj = ctx.api.named_type(names.NAMED_TYPE_BUILTINS_OBJECT) + info.bases = [obj] + info.fallback_to_any = True + + ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info)) + util.set_is_base(info) + + +def _fill_in_decorators(ctx: ClassDefContext) -> None: + for decorator in ctx.cls.decorators: + # set the ".fullname" attribute of a class decorator + # that is a MemberExpr. 
This causes the logic in + # semanal.py->apply_class_plugin_hooks to invoke the + # get_class_decorator_hook for our "registry.map_class()" + # and "registry.as_declarative_base()" methods. + # this seems like a bug in mypy that these decorators are otherwise + # skipped. + + if ( + isinstance(decorator, nodes.CallExpr) + and isinstance(decorator.callee, nodes.MemberExpr) + and decorator.callee.name == "as_declarative_base" + ): + target = decorator.callee + elif ( + isinstance(decorator, nodes.MemberExpr) + and decorator.name == "mapped" + ): + target = decorator + else: + continue + + if isinstance(target.expr, NameExpr): + sym = ctx.api.lookup_qualified( + target.expr.name, target, suppress_errors=True + ) + else: + continue + + if sym and sym.node: + sym_type = get_proper_type(sym.type) + if isinstance(sym_type, Instance): + target.fullname = f"{sym_type.type.fullname}.{target.name}" + else: + # if the registry is in the same file as where the + # decorator is used, it might not have semantic + # symbols applied and we can't get a fully qualified + # name or an inferred type, so we are actually going to + # flag an error in this case that they need to annotate + # it. The "registry" is declared just + # once (or few times), so they have to just not use + # type inference for its assignment in this one case. + util.fail( + ctx.api, + "Class decorator called %s(), but we can't " + "tell if it's from an ORM registry. Please " + "annotate the registry assignment, e.g. " + "my_registry: registry = registry()" % target.name, + sym.node, + ) + + +def _cls_decorator_hook(ctx: ClassDefContext) -> None: + _add_globals(ctx) + assert isinstance(ctx.reason, nodes.MemberExpr) + expr = ctx.reason.expr + + assert isinstance(expr, nodes.RefExpr) and isinstance(expr.node, nodes.Var) + + node_type = get_proper_type(expr.node.type) + + assert ( + isinstance(node_type, Instance) + and names.type_id_for_named_node(node_type.type) is names.REGISTRY + ) + + decl_class.scan_declarative_assignments_and_apply_types(ctx.cls, ctx.api) + + +def _base_cls_decorator_hook(ctx: ClassDefContext) -> None: + _add_globals(ctx) + + cls = ctx.cls + + _set_declarative_metaclass(ctx.api, cls) + + util.set_is_base(ctx.cls.info) + decl_class.scan_declarative_assignments_and_apply_types( + cls, ctx.api, is_mixin_scan=True + ) + + +def _declarative_mixin_hook(ctx: ClassDefContext) -> None: + _add_globals(ctx) + util.set_is_base(ctx.cls.info) + decl_class.scan_declarative_assignments_and_apply_types( + ctx.cls, ctx.api, is_mixin_scan=True + ) + + +def _metaclass_cls_hook(ctx: ClassDefContext) -> None: + util.set_is_base(ctx.cls.info) + + +def _base_cls_hook(ctx: ClassDefContext) -> None: + _add_globals(ctx) + decl_class.scan_declarative_assignments_and_apply_types(ctx.cls, ctx.api) + + +def _queryable_getattr_hook(ctx: AttributeContext) -> Type: + # how do I....tell it it has no attribute of a certain name? 
+ # can't find any Type that seems to match that + return ctx.default_attr_type + + +def _add_globals(ctx: Union[ClassDefContext, DynamicClassDefContext]) -> None: + """Add __sa_DeclarativeMeta and __sa_Mapped symbol to the global space + for all class defs + + """ + + util.add_global(ctx, "sqlalchemy.orm", "Mapped", "__sa_Mapped") + + +def _set_declarative_metaclass( + api: SemanticAnalyzerPluginInterface, target_cls: ClassDef +) -> None: + info = target_cls.info + sym = api.lookup_fully_qualified_or_none( + "sqlalchemy.orm.decl_api.DeclarativeMeta" + ) + assert sym is not None and isinstance(sym.node, TypeInfo) + info.declared_metaclass = info.metaclass_type = Instance(sym.node, []) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/util.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/util.py new file mode 100644 index 00000000..af0882bc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/mypy/util.py @@ -0,0 +1,357 @@ +# ext/mypy/util.py +# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import re +from typing import Any +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Optional +from typing import overload +from typing import Tuple +from typing import Type as TypingType +from typing import TypeVar +from typing import Union + +from mypy import version +from mypy.messages import format_type as _mypy_format_type +from mypy.nodes import CallExpr +from mypy.nodes import ClassDef +from mypy.nodes import CLASSDEF_NO_INFO +from mypy.nodes import Context +from mypy.nodes import Expression +from mypy.nodes import FuncDef +from mypy.nodes import IfStmt +from mypy.nodes import JsonDict +from mypy.nodes import MemberExpr +from mypy.nodes import NameExpr +from mypy.nodes import Statement +from mypy.nodes import SymbolTableNode +from mypy.nodes import TypeAlias +from mypy.nodes import TypeInfo +from mypy.options import Options +from mypy.plugin import ClassDefContext +from mypy.plugin import DynamicClassDefContext +from mypy.plugin import SemanticAnalyzerPluginInterface +from mypy.plugins.common import deserialize_and_fixup_type +from mypy.typeops import map_type_from_supertype +from mypy.types import CallableType +from mypy.types import get_proper_type +from mypy.types import Instance +from mypy.types import NoneType +from mypy.types import Type +from mypy.types import TypeVarType +from mypy.types import UnboundType +from mypy.types import UnionType + +_vers = tuple( + [int(x) for x in version.__version__.split(".") if re.match(r"^\d+$", x)] +) +mypy_14 = _vers >= (1, 4) + + +_TArgType = TypeVar("_TArgType", bound=Union[CallExpr, NameExpr]) + + +class SQLAlchemyAttribute: + def __init__( + self, + name: str, + line: int, + column: int, + typ: Optional[Type], + info: TypeInfo, + ) -> None: + self.name = name + self.line = line + self.column = column + self.type = typ + self.info = info + + def serialize(self) -> JsonDict: + assert self.type + return { + "name": self.name, + "line": self.line, + "column": self.column, + "type": serialize_type(self.type), + } + + def expand_typevar_from_subtype(self, sub_type: TypeInfo) -> None: + """Expands type vars in the context of a subtype when an attribute is + inherited from a generic super type. 
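+
+        For example (a sketch): an attribute declared as ``Mapped[_T]`` on a
+        generic mixin superclass is re-typed here with ``_T`` bound to the
+        concrete type argument supplied by the inheriting mapped class.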
+ """ + if not isinstance(self.type, TypeVarType): + return + + self.type = map_type_from_supertype(self.type, sub_type, self.info) + + @classmethod + def deserialize( + cls, + info: TypeInfo, + data: JsonDict, + api: SemanticAnalyzerPluginInterface, + ) -> SQLAlchemyAttribute: + data = data.copy() + typ = deserialize_and_fixup_type(data.pop("type"), api) + return cls(typ=typ, info=info, **data) + + +def name_is_dunder(name: str) -> bool: + return bool(re.match(r"^__.+?__$", name)) + + +def _set_info_metadata(info: TypeInfo, key: str, data: Any) -> None: + info.metadata.setdefault("sqlalchemy", {})[key] = data + + +def _get_info_metadata(info: TypeInfo, key: str) -> Optional[Any]: + return info.metadata.get("sqlalchemy", {}).get(key, None) + + +def _get_info_mro_metadata(info: TypeInfo, key: str) -> Optional[Any]: + if info.mro: + for base in info.mro: + metadata = _get_info_metadata(base, key) + if metadata is not None: + return metadata + return None + + +def establish_as_sqlalchemy(info: TypeInfo) -> None: + info.metadata.setdefault("sqlalchemy", {}) + + +def set_is_base(info: TypeInfo) -> None: + _set_info_metadata(info, "is_base", True) + + +def get_is_base(info: TypeInfo) -> bool: + is_base = _get_info_metadata(info, "is_base") + return is_base is True + + +def has_declarative_base(info: TypeInfo) -> bool: + is_base = _get_info_mro_metadata(info, "is_base") + return is_base is True + + +def set_has_table(info: TypeInfo) -> None: + _set_info_metadata(info, "has_table", True) + + +def get_has_table(info: TypeInfo) -> bool: + is_base = _get_info_metadata(info, "has_table") + return is_base is True + + +def get_mapped_attributes( + info: TypeInfo, api: SemanticAnalyzerPluginInterface +) -> Optional[List[SQLAlchemyAttribute]]: + mapped_attributes: Optional[List[JsonDict]] = _get_info_metadata( + info, "mapped_attributes" + ) + if mapped_attributes is None: + return None + + attributes: List[SQLAlchemyAttribute] = [] + + for data in mapped_attributes: + attr = SQLAlchemyAttribute.deserialize(info, data, api) + attr.expand_typevar_from_subtype(info) + attributes.append(attr) + + return attributes + + +def format_type(typ_: Type, options: Options) -> str: + if mypy_14: + return _mypy_format_type(typ_, options) + else: + return _mypy_format_type(typ_) # type: ignore + + +def set_mapped_attributes( + info: TypeInfo, attributes: List[SQLAlchemyAttribute] +) -> None: + _set_info_metadata( + info, + "mapped_attributes", + [attribute.serialize() for attribute in attributes], + ) + + +def fail(api: SemanticAnalyzerPluginInterface, msg: str, ctx: Context) -> None: + msg = "[SQLAlchemy Mypy plugin] %s" % msg + return api.fail(msg, ctx) + + +def add_global( + ctx: Union[ClassDefContext, DynamicClassDefContext], + module: str, + symbol_name: str, + asname: str, +) -> None: + module_globals = ctx.api.modules[ctx.api.cur_mod_id].names + + if asname not in module_globals: + lookup_sym: SymbolTableNode = ctx.api.modules[module].names[ + symbol_name + ] + + module_globals[asname] = lookup_sym + + +@overload +def get_callexpr_kwarg( + callexpr: CallExpr, name: str, *, expr_types: None = ... +) -> Optional[Union[CallExpr, NameExpr]]: ... + + +@overload +def get_callexpr_kwarg( + callexpr: CallExpr, + name: str, + *, + expr_types: Tuple[TypingType[_TArgType], ...], +) -> Optional[_TArgType]: ... 
+ + +def get_callexpr_kwarg( + callexpr: CallExpr, + name: str, + *, + expr_types: Optional[Tuple[TypingType[Any], ...]] = None, +) -> Optional[Any]: + try: + arg_idx = callexpr.arg_names.index(name) + except ValueError: + return None + + kwarg = callexpr.args[arg_idx] + if isinstance( + kwarg, expr_types if expr_types is not None else (NameExpr, CallExpr) + ): + return kwarg + + return None + + +def flatten_typechecking(stmts: Iterable[Statement]) -> Iterator[Statement]: + for stmt in stmts: + if ( + isinstance(stmt, IfStmt) + and isinstance(stmt.expr[0], NameExpr) + and stmt.expr[0].fullname == "typing.TYPE_CHECKING" + ): + yield from stmt.body[0].body + else: + yield stmt + + +def type_for_callee(callee: Expression) -> Optional[Union[Instance, TypeInfo]]: + if isinstance(callee, (MemberExpr, NameExpr)): + if isinstance(callee.node, FuncDef): + if callee.node.type and isinstance(callee.node.type, CallableType): + ret_type = get_proper_type(callee.node.type.ret_type) + + if isinstance(ret_type, Instance): + return ret_type + + return None + elif isinstance(callee.node, TypeAlias): + target_type = get_proper_type(callee.node.target) + if isinstance(target_type, Instance): + return target_type + elif isinstance(callee.node, TypeInfo): + return callee.node + return None + + +def unbound_to_instance( + api: SemanticAnalyzerPluginInterface, typ: Type +) -> Type: + """Take the UnboundType that we seem to get as the ret_type from a FuncDef + and convert it into an Instance/TypeInfo kind of structure that seems + to work as the left-hand type of an AssignmentStatement. + + """ + + if not isinstance(typ, UnboundType): + return typ + + # TODO: figure out a more robust way to check this. The node is some + # kind of _SpecialForm, there's a typing.Optional that's _SpecialForm, + # but I can't figure out how to get them to match up + if typ.name == "Optional": + # convert from "Optional?" 
to the more familiar + # UnionType[..., NoneType()] + return unbound_to_instance( + api, + UnionType( + [unbound_to_instance(api, typ_arg) for typ_arg in typ.args] + + [NoneType()] + ), + ) + + node = api.lookup_qualified(typ.name, typ) + + if ( + node is not None + and isinstance(node, SymbolTableNode) + and isinstance(node.node, TypeInfo) + ): + bound_type = node.node + + return Instance( + bound_type, + [ + ( + unbound_to_instance(api, arg) + if isinstance(arg, UnboundType) + else arg + ) + for arg in typ.args + ], + ) + else: + return typ + + +def info_for_cls( + cls: ClassDef, api: SemanticAnalyzerPluginInterface +) -> Optional[TypeInfo]: + if cls.info is CLASSDEF_NO_INFO: + sym = api.lookup_qualified(cls.name, cls) + if sym is None: + return None + assert sym and isinstance(sym.node, TypeInfo) + return sym.node + + return cls.info + + +def serialize_type(typ: Type) -> Union[str, JsonDict]: + try: + return typ.serialize() + except Exception: + pass + if hasattr(typ, "args"): + typ.args = tuple( + ( + a.resolve_string_annotation() + if hasattr(a, "resolve_string_annotation") + else a + ) + for a in typ.args + ) + elif hasattr(typ, "resolve_string_annotation"): + typ = typ.resolve_string_annotation() + return typ.serialize() diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/orderinglist.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/orderinglist.py new file mode 100644 index 00000000..1a12cf38 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/orderinglist.py @@ -0,0 +1,416 @@ +# ext/orderinglist.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +"""A custom list that manages index/position information for contained +elements. + +:author: Jason Kirtland + +``orderinglist`` is a helper for mutable ordered relationships. It will +intercept list operations performed on a :func:`_orm.relationship`-managed +collection and +automatically synchronize changes in list position onto a target scalar +attribute. + +Example: A ``slide`` table, where each row refers to zero or more entries +in a related ``bullet`` table. The bullets within a slide are +displayed in order based on the value of the ``position`` column in the +``bullet`` table. As entries are reordered in memory, the value of the +``position`` attribute should be updated to reflect the new sort order:: + + + Base = declarative_base() + + class Slide(Base): + __tablename__ = 'slide' + + id = Column(Integer, primary_key=True) + name = Column(String) + + bullets = relationship("Bullet", order_by="Bullet.position") + + class Bullet(Base): + __tablename__ = 'bullet' + id = Column(Integer, primary_key=True) + slide_id = Column(Integer, ForeignKey('slide.id')) + position = Column(Integer) + text = Column(String) + +The standard relationship mapping will produce a list-like attribute on each +``Slide`` containing all related ``Bullet`` objects, +but coping with changes in ordering is not handled automatically. +When appending a ``Bullet`` into ``Slide.bullets``, the ``Bullet.position`` +attribute will remain unset until manually assigned. When the ``Bullet`` +is inserted into the middle of the list, the following ``Bullet`` objects +will also need to be renumbered. + +The :class:`.OrderingList` object automates this task, managing the +``position`` attribute on all ``Bullet`` objects in the collection. 
It is +constructed using the :func:`.ordering_list` factory:: + + from sqlalchemy.ext.orderinglist import ordering_list + + Base = declarative_base() + + class Slide(Base): + __tablename__ = 'slide' + + id = Column(Integer, primary_key=True) + name = Column(String) + + bullets = relationship("Bullet", order_by="Bullet.position", + collection_class=ordering_list('position')) + + class Bullet(Base): + __tablename__ = 'bullet' + id = Column(Integer, primary_key=True) + slide_id = Column(Integer, ForeignKey('slide.id')) + position = Column(Integer) + text = Column(String) + +With the above mapping the ``Bullet.position`` attribute is managed:: + + s = Slide() + s.bullets.append(Bullet()) + s.bullets.append(Bullet()) + s.bullets[1].position + >>> 1 + s.bullets.insert(1, Bullet()) + s.bullets[2].position + >>> 2 + +The :class:`.OrderingList` construct only works with **changes** to a +collection, and not the initial load from the database, and requires that the +list be sorted when loaded. Therefore, be sure to specify ``order_by`` on the +:func:`_orm.relationship` against the target ordering attribute, so that the +ordering is correct when first loaded. + +.. warning:: + + :class:`.OrderingList` only provides limited functionality when a primary + key column or unique column is the target of the sort. Operations + that are unsupported or are problematic include: + + * two entries must trade values. This is not supported directly in the + case of a primary key or unique constraint because it means at least + one row would need to be temporarily removed first, or changed to + a third, neutral value while the switch occurs. + + * an entry must be deleted in order to make room for a new entry. + SQLAlchemy's unit of work performs all INSERTs before DELETEs within a + single flush. In the case of a primary key, it will trade + an INSERT/DELETE of the same primary key for an UPDATE statement in order + to lessen the impact of this limitation, however this does not take place + for a UNIQUE column. + A future feature will allow the "DELETE before INSERT" behavior to be + possible, alleviating this limitation, though this feature will require + explicit configuration at the mapper level for sets of columns that + are to be handled in this way. + +:func:`.ordering_list` takes the name of the related object's ordering +attribute as an argument. By default, the zero-based integer index of the +object's position in the :func:`.ordering_list` is synchronized with the +ordering attribute: index 0 will get position 0, index 1 position 1, etc. To +start numbering at 1 or some other integer, provide ``count_from=1``. + + +""" +from __future__ import annotations + +from typing import Callable +from typing import List +from typing import Optional +from typing import Sequence +from typing import TypeVar + +from ..orm.collections import collection +from ..orm.collections import collection_adapter + +_T = TypeVar("_T") +OrderingFunc = Callable[[int, Sequence[_T]], int] + + +__all__ = ["ordering_list"] + + +def ordering_list( + attr: str, + count_from: Optional[int] = None, + ordering_func: Optional[OrderingFunc] = None, + reorder_on_append: bool = False, +) -> Callable[[], OrderingList]: + """Prepares an :class:`OrderingList` factory for use in mapper definitions. + + Returns an object suitable for use as an argument to a Mapper + relationship's ``collection_class`` option. 
e.g.:: + + from sqlalchemy.ext.orderinglist import ordering_list + + class Slide(Base): + __tablename__ = 'slide' + + id = Column(Integer, primary_key=True) + name = Column(String) + + bullets = relationship("Bullet", order_by="Bullet.position", + collection_class=ordering_list('position')) + + :param attr: + Name of the mapped attribute to use for storage and retrieval of + ordering information + + :param count_from: + Set up an integer-based ordering, starting at ``count_from``. For + example, ``ordering_list('pos', count_from=1)`` would create a 1-based + list in SQL, storing the value in the 'pos' column. Ignored if + ``ordering_func`` is supplied. + + Additional arguments are passed to the :class:`.OrderingList` constructor. + + """ + + kw = _unsugar_count_from( + count_from=count_from, + ordering_func=ordering_func, + reorder_on_append=reorder_on_append, + ) + return lambda: OrderingList(attr, **kw) + + +# Ordering utility functions + + +def count_from_0(index, collection): + """Numbering function: consecutive integers starting at 0.""" + + return index + + +def count_from_1(index, collection): + """Numbering function: consecutive integers starting at 1.""" + + return index + 1 + + +def count_from_n_factory(start): + """Numbering function: consecutive integers starting at arbitrary start.""" + + def f(index, collection): + return index + start + + try: + f.__name__ = "count_from_%i" % start + except TypeError: + pass + return f + + +def _unsugar_count_from(**kw): + """Builds counting functions from keyword arguments. + + Keyword argument filter, prepares a simple ``ordering_func`` from a + ``count_from`` argument, otherwise passes ``ordering_func`` on unchanged. + """ + + count_from = kw.pop("count_from", None) + if kw.get("ordering_func", None) is None and count_from is not None: + if count_from == 0: + kw["ordering_func"] = count_from_0 + elif count_from == 1: + kw["ordering_func"] = count_from_1 + else: + kw["ordering_func"] = count_from_n_factory(count_from) + return kw + + +class OrderingList(List[_T]): + """A custom list that manages position information for its children. + + The :class:`.OrderingList` object is normally set up using the + :func:`.ordering_list` factory function, used in conjunction with + the :func:`_orm.relationship` function. + + """ + + ordering_attr: str + ordering_func: OrderingFunc + reorder_on_append: bool + + def __init__( + self, + ordering_attr: Optional[str] = None, + ordering_func: Optional[OrderingFunc] = None, + reorder_on_append: bool = False, + ): + """A custom list that manages position information for its children. + + ``OrderingList`` is a ``collection_class`` list implementation that + syncs position in a Python list with a position attribute on the + mapped objects. + + This implementation relies on the list starting in the proper order, + so be **sure** to put an ``order_by`` on your relationship. + + :param ordering_attr: + Name of the attribute that stores the object's order in the + relationship. + + :param ordering_func: Optional. A function that maps the position in + the Python list to a value to store in the + ``ordering_attr``. Values returned are usually (but need not be!) + integers. + + An ``ordering_func`` is called with two positional parameters: the + index of the element in the list, and the list itself. + + If omitted, Python list indexes are used for the attribute values. + Two basic pre-built numbering functions are provided in this module: + ``count_from_0`` and ``count_from_1``. 
For more exotic examples + like stepped numbering, alphabetical and Fibonacci numbering, see + the unit tests. + + :param reorder_on_append: + Default False. When appending an object with an existing (non-None) + ordering value, that value will be left untouched unless + ``reorder_on_append`` is true. This is an optimization to avoid a + variety of dangerous unexpected database writes. + + SQLAlchemy will add instances to the list via append() when your + object loads. If for some reason the result set from the database + skips a step in the ordering (say, row '1' is missing but you get + '2', '3', and '4'), reorder_on_append=True would immediately + renumber the items to '1', '2', '3'. If you have multiple sessions + making changes, any of whom happen to load this collection even in + passing, all of the sessions would try to "clean up" the numbering + in their commits, possibly causing all but one to fail with a + concurrent modification error. + + Recommend leaving this with the default of False, and just call + ``reorder()`` if you're doing ``append()`` operations with + previously ordered instances or when doing some housekeeping after + manual sql operations. + + """ + self.ordering_attr = ordering_attr + if ordering_func is None: + ordering_func = count_from_0 + self.ordering_func = ordering_func + self.reorder_on_append = reorder_on_append + + # More complex serialization schemes (multi column, e.g.) are possible by + # subclassing and reimplementing these two methods. + def _get_order_value(self, entity): + return getattr(entity, self.ordering_attr) + + def _set_order_value(self, entity, value): + setattr(entity, self.ordering_attr, value) + + def reorder(self) -> None: + """Synchronize ordering for the entire collection. + + Sweeps through the list and ensures that each object has accurate + ordering information set. 
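+
+        For example (a sketch, reusing the ``Slide`` / ``Bullet`` mapping
+        from the module docstring and assuming an active ``session``), after
+        out-of-band changes that may have disturbed ``position`` values::
+
+            slide = session.get(Slide, 1)
+            slide.bullets.reorder()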
+ + """ + for index, entity in enumerate(self): + self._order_entity(index, entity, True) + + # As of 0.5, _reorder is no longer semi-private + _reorder = reorder + + def _order_entity(self, index, entity, reorder=True): + have = self._get_order_value(entity) + + # Don't disturb existing ordering if reorder is False + if have is not None and not reorder: + return + + should_be = self.ordering_func(index, self) + if have != should_be: + self._set_order_value(entity, should_be) + + def append(self, entity): + super().append(entity) + self._order_entity(len(self) - 1, entity, self.reorder_on_append) + + def _raw_append(self, entity): + """Append without any ordering behavior.""" + + super().append(entity) + + _raw_append = collection.adds(1)(_raw_append) + + def insert(self, index, entity): + super().insert(index, entity) + self._reorder() + + def remove(self, entity): + super().remove(entity) + + adapter = collection_adapter(self) + if adapter and adapter._referenced_by_owner: + self._reorder() + + def pop(self, index=-1): + entity = super().pop(index) + self._reorder() + return entity + + def __setitem__(self, index, entity): + if isinstance(index, slice): + step = index.step or 1 + start = index.start or 0 + if start < 0: + start += len(self) + stop = index.stop or len(self) + if stop < 0: + stop += len(self) + + for i in range(start, stop, step): + self.__setitem__(i, entity[i]) + else: + self._order_entity(index, entity, True) + super().__setitem__(index, entity) + + def __delitem__(self, index): + super().__delitem__(index) + self._reorder() + + def __setslice__(self, start, end, values): + super().__setslice__(start, end, values) + self._reorder() + + def __delslice__(self, start, end): + super().__delslice__(start, end) + self._reorder() + + def __reduce__(self): + return _reconstitute, (self.__class__, self.__dict__, list(self)) + + for func_name, func in list(locals().items()): + if ( + callable(func) + and func.__name__ == func_name + and not func.__doc__ + and hasattr(list, func_name) + ): + func.__doc__ = getattr(list, func_name).__doc__ + del func_name, func + + +def _reconstitute(cls, dict_, items): + """Reconstitute an :class:`.OrderingList`. + + This is the adjoint to :meth:`.OrderingList.__reduce__`. It is used for + unpickling :class:`.OrderingList` objects. + + """ + obj = cls.__new__(cls) + obj.__dict__.update(dict_) + list.extend(obj, items) + return obj diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/ext/serializer.py b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/serializer.py new file mode 100644 index 00000000..130d2537 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/ext/serializer.py @@ -0,0 +1,181 @@ +# ext/serializer.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +"""Serializer/Deserializer objects for usage with SQLAlchemy query structures, +allowing "contextual" deserialization. + +.. legacy:: + + The serializer extension is **legacy** and should not be used for + new development. + +Any SQLAlchemy query structure, either based on sqlalchemy.sql.* +or sqlalchemy.orm.* can be used. The mappers, Tables, Columns, Session +etc. which are referenced by the structure are not persisted in serialized +form, but are instead re-associated with the query structure +when it is deserialized. + +.. 
warning:: The serializer extension uses pickle to serialize and
+   deserialize objects, so the same security considerations mentioned
+   in the `python documentation
+   <https://docs.python.org/3/library/pickle.html>`_ apply.
+
+Usage is nearly the same as that of the standard Python pickle module::
+
+    from sqlalchemy.ext.serializer import loads, dumps
+    metadata = MetaData()
+    Session = scoped_session(sessionmaker(bind=some_engine))
+
+    # ... define mappers
+
+    query = (
+        Session.query(MyClass)
+        .filter(MyClass.somedata == "foo")
+        .order_by(MyClass.sortkey)
+    )
+
+    # pickle the query
+    serialized = dumps(query)
+
+    # unpickle.  Pass in metadata + scoped_session
+    query2 = loads(serialized, metadata, Session)
+
+    print(query2.all())
+
+The same restrictions as when using raw pickle apply; mapped classes must
+themselves be pickleable, meaning they are importable from a module-level
+namespace.
+
+The serializer module is only appropriate for query structures.  It is not
+needed for:
+
+* instances of user-defined classes.  These contain no references to engines,
+  sessions or expression constructs in the typical case and can be serialized
+  directly.
+
+* Table metadata that is to be loaded entirely from the serialized structure
+  (i.e. is not already declared in the application).  Regular
+  pickle.loads()/dumps() can be used to fully dump any ``MetaData`` object,
+  typically one which was reflected from an existing database at some previous
+  point in time.  The serializer module is specifically for the opposite case,
+  where the Table metadata is already present in memory.
+
+"""
+
+from io import BytesIO
+import pickle
+import re
+
+from .. import Column
+from .. import Table
+from ..engine import Engine
+from ..orm import class_mapper
+from ..orm.interfaces import MapperProperty
+from ..orm.mapper import Mapper
+from ..orm.session import Session
+from ..util import b64decode
+from ..util import b64encode
+
+
+__all__ = ["Serializer", "Deserializer", "dumps", "loads"]
+
+
+class Serializer(pickle.Pickler):
+
+    def persistent_id(self, obj):
+        # print("serializing:", repr(obj))
+        if isinstance(obj, Mapper) and not obj.non_primary:
+            id_ = "mapper:" + b64encode(pickle.dumps(obj.class_))
+        elif isinstance(obj, MapperProperty) and not obj.parent.non_primary:
+            id_ = (
+                "mapperprop:"
+                + b64encode(pickle.dumps(obj.parent.class_))
+                + ":"
+                + obj.key
+            )
+        elif isinstance(obj, Table):
+            if "parententity" in obj._annotations:
+                id_ = "mapper_selectable:" + b64encode(
+                    pickle.dumps(obj._annotations["parententity"].class_)
+                )
+            else:
+                id_ = f"table:{obj.key}"
+        elif isinstance(obj, Column) and isinstance(obj.table, Table):
+            id_ = f"column:{obj.table.key}:{obj.key}"
+        elif isinstance(obj, Session):
+            id_ = "session:"
+        elif isinstance(obj, Engine):
+            id_ = "engine:"
+        else:
+            return None
+        return id_
+
+
+our_ids = re.compile(
+    r"(mapperprop|mapper|mapper_selectable|table|column|"
+    r"session|attribute|engine):(.*)"
+)
+
+
+class Deserializer(pickle.Unpickler):
+
+    def __init__(self, file, metadata=None, scoped_session=None, engine=None):
+        super().__init__(file)
+        self.metadata = metadata
+        self.scoped_session = scoped_session
+        self.engine = engine
+
+    def get_engine(self):
+        if self.engine:
+            return self.engine
+        elif self.scoped_session and self.scoped_session().bind:
+            return self.scoped_session().bind
+        else:
+            return None
+
+    def persistent_load(self, id_):
+        m = our_ids.match(str(id_))
+        if not m:
+            return None
+        else:
+            type_, args = m.group(1, 2)
+            if type_ == "attribute":
+                key, clsarg = args.split(":")
+                cls = pickle.loads(b64decode(clsarg))
+
return getattr(cls, key) + elif type_ == "mapper": + cls = pickle.loads(b64decode(args)) + return class_mapper(cls) + elif type_ == "mapper_selectable": + cls = pickle.loads(b64decode(args)) + return class_mapper(cls).__clause_element__() + elif type_ == "mapperprop": + mapper, keyname = args.split(":") + cls = pickle.loads(b64decode(mapper)) + return class_mapper(cls).attrs[keyname] + elif type_ == "table": + return self.metadata.tables[args] + elif type_ == "column": + table, colname = args.split(":") + return self.metadata.tables[table].c[colname] + elif type_ == "session": + return self.scoped_session() + elif type_ == "engine": + return self.get_engine() + else: + raise Exception("Unknown token: %s" % type_) + + +def dumps(obj, protocol=pickle.HIGHEST_PROTOCOL): + buf = BytesIO() + pickler = Serializer(buf, protocol) + pickler.dump(obj) + return buf.getvalue() + + +def loads(data, metadata=None, scoped_session=None, engine=None): + buf = BytesIO(data) + unpickler = Deserializer(buf, metadata, scoped_session, engine) + return unpickler.load() diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/future/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/future/__init__.py new file mode 100644 index 00000000..8ce36ccb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/future/__init__.py @@ -0,0 +1,16 @@ +# future/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""2.0 API features. + +this module is legacy as 2.0 APIs are now standard. + +""" +from .engine import Connection as Connection +from .engine import create_engine as create_engine +from .engine import Engine as Engine +from ..sql._selectable_constructors import select as select diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/future/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/future/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..1da6f94a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/future/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/future/__pycache__/engine.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/future/__pycache__/engine.cpython-312.pyc new file mode 100644 index 00000000..903b8c1e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/future/__pycache__/engine.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/future/engine.py b/.venv/lib/python3.12/site-packages/sqlalchemy/future/engine.py new file mode 100644 index 00000000..b55cda08 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/future/engine.py @@ -0,0 +1,15 @@ +# future/engine.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +"""2.0 API features. + +this module is legacy as 2.0 APIs are now standard. 
+ +""" + +from ..engine import Connection as Connection # noqa: F401 +from ..engine import create_engine as create_engine # noqa: F401 +from ..engine import Engine as Engine # noqa: F401 diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/inspection.py b/.venv/lib/python3.12/site-packages/sqlalchemy/inspection.py new file mode 100644 index 00000000..30d53195 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/inspection.py @@ -0,0 +1,174 @@ +# inspection.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""The inspection module provides the :func:`_sa.inspect` function, +which delivers runtime information about a wide variety +of SQLAlchemy objects, both within the Core as well as the +ORM. + +The :func:`_sa.inspect` function is the entry point to SQLAlchemy's +public API for viewing the configuration and construction +of in-memory objects. Depending on the type of object +passed to :func:`_sa.inspect`, the return value will either be +a related object which provides a known interface, or in many +cases it will return the object itself. + +The rationale for :func:`_sa.inspect` is twofold. One is that +it replaces the need to be aware of a large variety of "information +getting" functions in SQLAlchemy, such as +:meth:`_reflection.Inspector.from_engine` (deprecated in 1.4), +:func:`.orm.attributes.instance_state`, :func:`_orm.class_mapper`, +and others. The other is that the return value of :func:`_sa.inspect` +is guaranteed to obey a documented API, thus allowing third party +tools which build on top of SQLAlchemy configurations to be constructed +in a forwards-compatible way. + +""" +from __future__ import annotations + +from typing import Any +from typing import Callable +from typing import Dict +from typing import Generic +from typing import Optional +from typing import overload +from typing import Type +from typing import TypeVar +from typing import Union + +from . import exc +from .util.typing import Literal +from .util.typing import Protocol + +_T = TypeVar("_T", bound=Any) +_TCov = TypeVar("_TCov", bound=Any, covariant=True) +_F = TypeVar("_F", bound=Callable[..., Any]) + +_IN = TypeVar("_IN", bound=Any) + +_registrars: Dict[type, Union[Literal[True], Callable[[Any], Any]]] = {} + + +class Inspectable(Generic[_T]): + """define a class as inspectable. + + This allows typing to set up a linkage between an object that + can be inspected and the type of inspection it returns. + + Unfortunately we cannot at the moment get all classes that are + returned by inspection to suit this interface as we get into + MRO issues. + + """ + + __slots__ = () + + +class _InspectableTypeProtocol(Protocol[_TCov]): + """a protocol defining a method that's used when a type (ie the class + itself) is passed to inspect(). + + """ + + def _sa_inspect_type(self) -> _TCov: ... + + +class _InspectableProtocol(Protocol[_TCov]): + """a protocol defining a method that's used when an instance is + passed to inspect(). + + """ + + def _sa_inspect_instance(self) -> _TCov: ... + + +@overload +def inspect( + subject: Type[_InspectableTypeProtocol[_IN]], raiseerr: bool = True +) -> _IN: ... + + +@overload +def inspect( + subject: _InspectableProtocol[_IN], raiseerr: bool = True +) -> _IN: ... + + +@overload +def inspect(subject: Inspectable[_IN], raiseerr: bool = True) -> _IN: ... 
+ + +@overload +def inspect(subject: Any, raiseerr: Literal[False] = ...) -> Optional[Any]: ... + + +@overload +def inspect(subject: Any, raiseerr: bool = True) -> Any: ... + + +def inspect(subject: Any, raiseerr: bool = True) -> Any: + """Produce an inspection object for the given target. + + The returned value in some cases may be the + same object as the one given, such as if a + :class:`_orm.Mapper` object is passed. In other + cases, it will be an instance of the registered + inspection type for the given object, such as + if an :class:`_engine.Engine` is passed, an + :class:`_reflection.Inspector` object is returned. + + :param subject: the subject to be inspected. + :param raiseerr: When ``True``, if the given subject + does not + correspond to a known SQLAlchemy inspected type, + :class:`sqlalchemy.exc.NoInspectionAvailable` + is raised. If ``False``, ``None`` is returned. + + """ + type_ = type(subject) + for cls in type_.__mro__: + if cls in _registrars: + reg = _registrars.get(cls, None) + if reg is None: + continue + elif reg is True: + return subject + ret = reg(subject) + if ret is not None: + return ret + else: + reg = ret = None + + if raiseerr and (reg is None or ret is None): + raise exc.NoInspectionAvailable( + "No inspection system is " + "available for object of type %s" % type_ + ) + return ret + + +def _inspects( + *types: Type[Any], +) -> Callable[[_F], _F]: + def decorate(fn_or_cls: _F) -> _F: + for type_ in types: + if type_ in _registrars: + raise AssertionError("Type %s is already registered" % type_) + _registrars[type_] = fn_or_cls + return fn_or_cls + + return decorate + + +_TT = TypeVar("_TT", bound="Type[Any]") + + +def _self_inspects(cls: _TT) -> _TT: + if cls in _registrars: + raise AssertionError("Type %s is already registered" % cls) + _registrars[cls] = True + return cls diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/log.py b/.venv/lib/python3.12/site-packages/sqlalchemy/log.py new file mode 100644 index 00000000..e6922b81 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/log.py @@ -0,0 +1,288 @@ +# log.py +# Copyright (C) 2006-2024 the SQLAlchemy authors and contributors +# +# Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Logging control and utilities. + +Control of logging for SA can be performed from the regular python logging +module. The regular dotted module namespace is used, starting at +'sqlalchemy'. For class-level logging, the class name is appended. + +The "echo" keyword parameter, available on SQLA :class:`_engine.Engine` +and :class:`_pool.Pool` objects, corresponds to a logger specific to that +instance only. + +""" +from __future__ import annotations + +import logging +import sys +from typing import Any +from typing import Optional +from typing import overload +from typing import Set +from typing import Type +from typing import TypeVar +from typing import Union + +from .util import py311 +from .util import py38 +from .util.typing import Literal + + +if py38: + STACKLEVEL = True + # needed as of py3.11.0b1 + # #8019 + STACKLEVEL_OFFSET = 2 if py311 else 1 +else: + STACKLEVEL = False + STACKLEVEL_OFFSET = 0 + +_IT = TypeVar("_IT", bound="Identified") + +_EchoFlagType = Union[None, bool, Literal["debug"]] + +# set initial level to WARN. This so that +# log statements don't occur in the absence of explicit +# logging being enabled for 'sqlalchemy'. 
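+#
+# For example (illustrative), an application can opt in to SQL statement
+# logging with the standard logging module:
+#
+#     import logging
+#     logging.basicConfig()
+#     logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)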
+rootlogger = logging.getLogger("sqlalchemy") +if rootlogger.level == logging.NOTSET: + rootlogger.setLevel(logging.WARN) + + +def _add_default_handler(logger: logging.Logger) -> None: + handler = logging.StreamHandler(sys.stdout) + handler.setFormatter( + logging.Formatter("%(asctime)s %(levelname)s %(name)s %(message)s") + ) + logger.addHandler(handler) + + +_logged_classes: Set[Type[Identified]] = set() + + +def _qual_logger_name_for_cls(cls: Type[Identified]) -> str: + return ( + getattr(cls, "_sqla_logger_namespace", None) + or cls.__module__ + "." + cls.__name__ + ) + + +def class_logger(cls: Type[_IT]) -> Type[_IT]: + logger = logging.getLogger(_qual_logger_name_for_cls(cls)) + cls._should_log_debug = lambda self: logger.isEnabledFor( # type: ignore[method-assign] # noqa: E501 + logging.DEBUG + ) + cls._should_log_info = lambda self: logger.isEnabledFor( # type: ignore[method-assign] # noqa: E501 + logging.INFO + ) + cls.logger = logger + _logged_classes.add(cls) + return cls + + +_IdentifiedLoggerType = Union[logging.Logger, "InstanceLogger"] + + +class Identified: + __slots__ = () + + logging_name: Optional[str] = None + + logger: _IdentifiedLoggerType + + _echo: _EchoFlagType + + def _should_log_debug(self) -> bool: + return self.logger.isEnabledFor(logging.DEBUG) + + def _should_log_info(self) -> bool: + return self.logger.isEnabledFor(logging.INFO) + + +class InstanceLogger: + """A logger adapter (wrapper) for :class:`.Identified` subclasses. + + This allows multiple instances (e.g. Engine or Pool instances) + to share a logger, but have its verbosity controlled on a + per-instance basis. + + The basic functionality is to return a logging level + which is based on an instance's echo setting. + + Default implementation is: + + 'debug' -> logging.DEBUG + True -> logging.INFO + False -> Effective level of underlying logger ( + logging.WARNING by default) + None -> same as False + """ + + # Map echo settings to logger levels + _echo_map = { + None: logging.NOTSET, + False: logging.NOTSET, + True: logging.INFO, + "debug": logging.DEBUG, + } + + _echo: _EchoFlagType + + __slots__ = ("echo", "logger") + + def __init__(self, echo: _EchoFlagType, name: str): + self.echo = echo + self.logger = logging.getLogger(name) + + # if echo flag is enabled and no handlers, + # add a handler to the list + if self._echo_map[echo] <= logging.INFO and not self.logger.handlers: + _add_default_handler(self.logger) + + # + # Boilerplate convenience methods + # + def debug(self, msg: str, *args: Any, **kwargs: Any) -> None: + """Delegate a debug call to the underlying logger.""" + + self.log(logging.DEBUG, msg, *args, **kwargs) + + def info(self, msg: str, *args: Any, **kwargs: Any) -> None: + """Delegate an info call to the underlying logger.""" + + self.log(logging.INFO, msg, *args, **kwargs) + + def warning(self, msg: str, *args: Any, **kwargs: Any) -> None: + """Delegate a warning call to the underlying logger.""" + + self.log(logging.WARNING, msg, *args, **kwargs) + + warn = warning + + def error(self, msg: str, *args: Any, **kwargs: Any) -> None: + """ + Delegate an error call to the underlying logger. 
+ """ + self.log(logging.ERROR, msg, *args, **kwargs) + + def exception(self, msg: str, *args: Any, **kwargs: Any) -> None: + """Delegate an exception call to the underlying logger.""" + + kwargs["exc_info"] = 1 + self.log(logging.ERROR, msg, *args, **kwargs) + + def critical(self, msg: str, *args: Any, **kwargs: Any) -> None: + """Delegate a critical call to the underlying logger.""" + + self.log(logging.CRITICAL, msg, *args, **kwargs) + + def log(self, level: int, msg: str, *args: Any, **kwargs: Any) -> None: + """Delegate a log call to the underlying logger. + + The level here is determined by the echo + flag as well as that of the underlying logger, and + logger._log() is called directly. + + """ + + # inline the logic from isEnabledFor(), + # getEffectiveLevel(), to avoid overhead. + + if self.logger.manager.disable >= level: + return + + selected_level = self._echo_map[self.echo] + if selected_level == logging.NOTSET: + selected_level = self.logger.getEffectiveLevel() + + if level >= selected_level: + if STACKLEVEL: + kwargs["stacklevel"] = ( + kwargs.get("stacklevel", 1) + STACKLEVEL_OFFSET + ) + + self.logger._log(level, msg, args, **kwargs) + + def isEnabledFor(self, level: int) -> bool: + """Is this logger enabled for level 'level'?""" + + if self.logger.manager.disable >= level: + return False + return level >= self.getEffectiveLevel() + + def getEffectiveLevel(self) -> int: + """What's the effective level for this logger?""" + + level = self._echo_map[self.echo] + if level == logging.NOTSET: + level = self.logger.getEffectiveLevel() + return level + + +def instance_logger( + instance: Identified, echoflag: _EchoFlagType = None +) -> None: + """create a logger for an instance that implements :class:`.Identified`.""" + + if instance.logging_name: + name = "%s.%s" % ( + _qual_logger_name_for_cls(instance.__class__), + instance.logging_name, + ) + else: + name = _qual_logger_name_for_cls(instance.__class__) + + instance._echo = echoflag # type: ignore + + logger: Union[logging.Logger, InstanceLogger] + + if echoflag in (False, None): + # if no echo setting or False, return a Logger directly, + # avoiding overhead of filtering + logger = logging.getLogger(name) + else: + # if a specified echo flag, return an EchoLogger, + # which checks the flag, overrides normal log + # levels by calling logger._log() + logger = InstanceLogger(echoflag, name) + + instance.logger = logger # type: ignore + + +class echo_property: + __doc__ = """\ + When ``True``, enable log output for this element. + + This has the effect of setting the Python logging level for the namespace + of this element's class and object reference. A value of boolean ``True`` + indicates that the loglevel ``logging.INFO`` will be set for the logger, + whereas the string value ``debug`` will set the loglevel to + ``logging.DEBUG``. + """ + + @overload + def __get__( + self, instance: Literal[None], owner: Type[Identified] + ) -> echo_property: ... + + @overload + def __get__( + self, instance: Identified, owner: Type[Identified] + ) -> _EchoFlagType: ... 
+ + def __get__( + self, instance: Optional[Identified], owner: Type[Identified] + ) -> Union[echo_property, _EchoFlagType]: + if instance is None: + return self + else: + return instance._echo + + def __set__(self, instance: Identified, value: _EchoFlagType) -> None: + instance_logger(instance, echoflag=value) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__init__.py new file mode 100644 index 00000000..70a11294 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__init__.py @@ -0,0 +1,170 @@ +# orm/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +""" +Functional constructs for ORM configuration. + +See the SQLAlchemy object relational tutorial and mapper configuration +documentation for an overview of how this module is used. + +""" + +from __future__ import annotations + +from typing import Any + +from . import exc as exc +from . import mapper as mapperlib +from . import strategy_options as strategy_options +from ._orm_constructors import _mapper_fn as mapper +from ._orm_constructors import aliased as aliased +from ._orm_constructors import backref as backref +from ._orm_constructors import clear_mappers as clear_mappers +from ._orm_constructors import column_property as column_property +from ._orm_constructors import composite as composite +from ._orm_constructors import contains_alias as contains_alias +from ._orm_constructors import create_session as create_session +from ._orm_constructors import deferred as deferred +from ._orm_constructors import dynamic_loader as dynamic_loader +from ._orm_constructors import join as join +from ._orm_constructors import mapped_column as mapped_column +from ._orm_constructors import orm_insert_sentinel as orm_insert_sentinel +from ._orm_constructors import outerjoin as outerjoin +from ._orm_constructors import query_expression as query_expression +from ._orm_constructors import relationship as relationship +from ._orm_constructors import synonym as synonym +from ._orm_constructors import with_loader_criteria as with_loader_criteria +from ._orm_constructors import with_polymorphic as with_polymorphic +from .attributes import AttributeEventToken as AttributeEventToken +from .attributes import InstrumentedAttribute as InstrumentedAttribute +from .attributes import QueryableAttribute as QueryableAttribute +from .base import class_mapper as class_mapper +from .base import DynamicMapped as DynamicMapped +from .base import InspectionAttrExtensionType as InspectionAttrExtensionType +from .base import LoaderCallableStatus as LoaderCallableStatus +from .base import Mapped as Mapped +from .base import NotExtension as NotExtension +from .base import ORMDescriptor as ORMDescriptor +from .base import PassiveFlag as PassiveFlag +from .base import SQLORMExpression as SQLORMExpression +from .base import WriteOnlyMapped as WriteOnlyMapped +from .context import FromStatement as FromStatement +from .context import QueryContext as QueryContext +from .decl_api import add_mapped_attribute as add_mapped_attribute +from .decl_api import as_declarative as as_declarative +from .decl_api import declarative_base as declarative_base +from .decl_api import declarative_mixin as declarative_mixin +from .decl_api import DeclarativeBase as DeclarativeBase +from .decl_api import DeclarativeBaseNoMeta as 
DeclarativeBaseNoMeta +from .decl_api import DeclarativeMeta as DeclarativeMeta +from .decl_api import declared_attr as declared_attr +from .decl_api import has_inherited_table as has_inherited_table +from .decl_api import MappedAsDataclass as MappedAsDataclass +from .decl_api import registry as registry +from .decl_api import synonym_for as synonym_for +from .decl_base import MappedClassProtocol as MappedClassProtocol +from .descriptor_props import Composite as Composite +from .descriptor_props import CompositeProperty as CompositeProperty +from .descriptor_props import Synonym as Synonym +from .descriptor_props import SynonymProperty as SynonymProperty +from .dynamic import AppenderQuery as AppenderQuery +from .events import AttributeEvents as AttributeEvents +from .events import InstanceEvents as InstanceEvents +from .events import InstrumentationEvents as InstrumentationEvents +from .events import MapperEvents as MapperEvents +from .events import QueryEvents as QueryEvents +from .events import SessionEvents as SessionEvents +from .identity import IdentityMap as IdentityMap +from .instrumentation import ClassManager as ClassManager +from .interfaces import EXT_CONTINUE as EXT_CONTINUE +from .interfaces import EXT_SKIP as EXT_SKIP +from .interfaces import EXT_STOP as EXT_STOP +from .interfaces import InspectionAttr as InspectionAttr +from .interfaces import InspectionAttrInfo as InspectionAttrInfo +from .interfaces import MANYTOMANY as MANYTOMANY +from .interfaces import MANYTOONE as MANYTOONE +from .interfaces import MapperProperty as MapperProperty +from .interfaces import NO_KEY as NO_KEY +from .interfaces import NO_VALUE as NO_VALUE +from .interfaces import ONETOMANY as ONETOMANY +from .interfaces import PropComparator as PropComparator +from .interfaces import RelationshipDirection as RelationshipDirection +from .interfaces import UserDefinedOption as UserDefinedOption +from .loading import merge_frozen_result as merge_frozen_result +from .loading import merge_result as merge_result +from .mapped_collection import attribute_keyed_dict as attribute_keyed_dict +from .mapped_collection import ( + attribute_mapped_collection as attribute_mapped_collection, +) +from .mapped_collection import column_keyed_dict as column_keyed_dict +from .mapped_collection import ( + column_mapped_collection as column_mapped_collection, +) +from .mapped_collection import keyfunc_mapping as keyfunc_mapping +from .mapped_collection import KeyFuncDict as KeyFuncDict +from .mapped_collection import mapped_collection as mapped_collection +from .mapped_collection import MappedCollection as MappedCollection +from .mapper import configure_mappers as configure_mappers +from .mapper import Mapper as Mapper +from .mapper import reconstructor as reconstructor +from .mapper import validates as validates +from .properties import ColumnProperty as ColumnProperty +from .properties import MappedColumn as MappedColumn +from .properties import MappedSQLExpression as MappedSQLExpression +from .query import AliasOption as AliasOption +from .query import Query as Query +from .relationships import foreign as foreign +from .relationships import Relationship as Relationship +from .relationships import RelationshipProperty as RelationshipProperty +from .relationships import remote as remote +from .scoping import QueryPropertyDescriptor as QueryPropertyDescriptor +from .scoping import scoped_session as scoped_session +from .session import close_all_sessions as close_all_sessions +from .session import make_transient as make_transient 
+from .session import make_transient_to_detached as make_transient_to_detached +from .session import object_session as object_session +from .session import ORMExecuteState as ORMExecuteState +from .session import Session as Session +from .session import sessionmaker as sessionmaker +from .session import SessionTransaction as SessionTransaction +from .session import SessionTransactionOrigin as SessionTransactionOrigin +from .state import AttributeState as AttributeState +from .state import InstanceState as InstanceState +from .strategy_options import contains_eager as contains_eager +from .strategy_options import defaultload as defaultload +from .strategy_options import defer as defer +from .strategy_options import immediateload as immediateload +from .strategy_options import joinedload as joinedload +from .strategy_options import lazyload as lazyload +from .strategy_options import Load as Load +from .strategy_options import load_only as load_only +from .strategy_options import noload as noload +from .strategy_options import raiseload as raiseload +from .strategy_options import selectin_polymorphic as selectin_polymorphic +from .strategy_options import selectinload as selectinload +from .strategy_options import subqueryload as subqueryload +from .strategy_options import undefer as undefer +from .strategy_options import undefer_group as undefer_group +from .strategy_options import with_expression as with_expression +from .unitofwork import UOWTransaction as UOWTransaction +from .util import Bundle as Bundle +from .util import CascadeOptions as CascadeOptions +from .util import LoaderCriteriaOption as LoaderCriteriaOption +from .util import object_mapper as object_mapper +from .util import polymorphic_union as polymorphic_union +from .util import was_deleted as was_deleted +from .util import with_parent as with_parent +from .writeonly import WriteOnlyCollection as WriteOnlyCollection +from .. 
import util as _sa_util
+
+
+def __go(lcls: Any) -> None:
+    _sa_util.preloaded.import_prefix("sqlalchemy.orm")
+    _sa_util.preloaded.import_prefix("sqlalchemy.ext")
+
+
+__go(locals())
b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/scoping.cpython-312.pyc new file mode 100644 index 00000000..8c6e1a3c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/scoping.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/session.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/session.cpython-312.pyc new file mode 100644 index 00000000..e8ec17ea Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/session.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/state.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/state.cpython-312.pyc new file mode 100644 index 00000000..6e57cfde Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/state.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/state_changes.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/state_changes.cpython-312.pyc new file mode 100644 index 00000000..5d627005 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/state_changes.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/strategies.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/strategies.cpython-312.pyc new file mode 100644 index 00000000..a567755a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/strategies.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/strategy_options.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/strategy_options.cpython-312.pyc new file mode 100644 index 00000000..69d29567 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/strategy_options.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/sync.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/sync.cpython-312.pyc new file mode 100644 index 00000000..53c56473 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/sync.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/unitofwork.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/unitofwork.cpython-312.pyc new file mode 100644 index 00000000..3d96de22 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/unitofwork.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/util.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/util.cpython-312.pyc new file mode 100644 index 00000000..67603295 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/util.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/writeonly.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/writeonly.cpython-312.pyc new file mode 100644 index 00000000..93e97555 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__pycache__/writeonly.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/_orm_constructors.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/_orm_constructors.py new file mode 100644 index 00000000..d9379da4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/_orm_constructors.py @@ -0,0 +1,2512 @@ +# orm/_orm_constructors.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import typing +from typing import Any +from typing import Callable +from typing import Collection +from typing import Iterable +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Type +from typing import TYPE_CHECKING +from typing import Union + +from . import mapperlib as mapperlib +from ._typing import _O +from .descriptor_props import Composite +from .descriptor_props import Synonym +from .interfaces import _AttributeOptions +from .properties import MappedColumn +from .properties import MappedSQLExpression +from .query import AliasOption +from .relationships import _RelationshipArgumentType +from .relationships import _RelationshipDeclared +from .relationships import _RelationshipSecondaryArgument +from .relationships import RelationshipProperty +from .session import Session +from .util import _ORMJoin +from .util import AliasedClass +from .util import AliasedInsp +from .util import LoaderCriteriaOption +from .. import sql +from .. import util +from ..exc import InvalidRequestError +from ..sql._typing import _no_kw +from ..sql.base import _NoArg +from ..sql.base import SchemaEventTarget +from ..sql.schema import _InsertSentinelColumnDefault +from ..sql.schema import SchemaConst +from ..sql.selectable import FromClause +from ..util.typing import Annotated +from ..util.typing import Literal + +if TYPE_CHECKING: + from ._typing import _EntityType + from ._typing import _ORMColumnExprArgument + from .descriptor_props import _CC + from .descriptor_props import _CompositeAttrType + from .interfaces import PropComparator + from .mapper import Mapper + from .query import Query + from .relationships import _LazyLoadArgumentType + from .relationships import _ORMColCollectionArgument + from .relationships import _ORMOrderByArgument + from .relationships import _RelationshipJoinConditionArgument + from .relationships import ORMBackrefArgument + from .session import _SessionBind + from ..sql._typing import _AutoIncrementType + from ..sql._typing import _ColumnExpressionArgument + from ..sql._typing import _FromClauseArgument + from ..sql._typing import _InfoType + from ..sql._typing import _OnClauseArgument + from ..sql._typing import _TypeEngineArgument + from ..sql.elements import ColumnElement + from ..sql.schema import _ServerDefaultArgument + from ..sql.schema import _ServerOnUpdateArgument + from ..sql.selectable import Alias + from ..sql.selectable import Subquery + + +_T = typing.TypeVar("_T") + + +@util.deprecated( + "1.4", + "The :class:`.AliasOption` object is not necessary " + "for entities to be matched up to a query that is established " + "via :meth:`.Query.from_statement` and now does nothing.", + enable_warnings=False, # AliasOption itself warns +) +def contains_alias(alias: Union[Alias, Subquery]) -> AliasOption: + r"""Return a :class:`.MapperOption` that will indicate to the + :class:`_query.Query` + that the main table has been aliased. 
+ + """ + return AliasOption(alias) + + +def mapped_column( + __name_pos: Optional[ + Union[str, _TypeEngineArgument[Any], SchemaEventTarget] + ] = None, + __type_pos: Optional[ + Union[_TypeEngineArgument[Any], SchemaEventTarget] + ] = None, + *args: SchemaEventTarget, + init: Union[_NoArg, bool] = _NoArg.NO_ARG, + repr: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + default: Optional[Any] = _NoArg.NO_ARG, + default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, + compare: Union[_NoArg, bool] = _NoArg.NO_ARG, + kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + nullable: Optional[ + Union[bool, Literal[SchemaConst.NULL_UNSPECIFIED]] + ] = SchemaConst.NULL_UNSPECIFIED, + primary_key: Optional[bool] = False, + deferred: Union[_NoArg, bool] = _NoArg.NO_ARG, + deferred_group: Optional[str] = None, + deferred_raiseload: Optional[bool] = None, + use_existing_column: bool = False, + name: Optional[str] = None, + type_: Optional[_TypeEngineArgument[Any]] = None, + autoincrement: _AutoIncrementType = "auto", + doc: Optional[str] = None, + key: Optional[str] = None, + index: Optional[bool] = None, + unique: Optional[bool] = None, + info: Optional[_InfoType] = None, + onupdate: Optional[Any] = None, + insert_default: Optional[Any] = _NoArg.NO_ARG, + server_default: Optional[_ServerDefaultArgument] = None, + server_onupdate: Optional[_ServerOnUpdateArgument] = None, + active_history: bool = False, + quote: Optional[bool] = None, + system: bool = False, + comment: Optional[str] = None, + sort_order: Union[_NoArg, int] = _NoArg.NO_ARG, + **kw: Any, +) -> MappedColumn[Any]: + r"""declare a new ORM-mapped :class:`_schema.Column` construct + for use within :ref:`Declarative Table ` + configuration. + + The :func:`_orm.mapped_column` function provides an ORM-aware and + Python-typing-compatible construct which is used with + :ref:`declarative ` mappings to indicate an + attribute that's mapped to a Core :class:`_schema.Column` object. It + provides the equivalent feature as mapping an attribute to a + :class:`_schema.Column` object directly when using Declarative, + specifically when using :ref:`Declarative Table ` + configuration. + + .. versionadded:: 2.0 + + :func:`_orm.mapped_column` is normally used with explicit typing along with + the :class:`_orm.Mapped` annotation type, where it can derive the SQL + type and nullability for the column based on what's present within the + :class:`_orm.Mapped` annotation. It also may be used without annotations + as a drop-in replacement for how :class:`_schema.Column` is used in + Declarative mappings in SQLAlchemy 1.x style. + + For usage examples of :func:`_orm.mapped_column`, see the documentation + at :ref:`orm_declarative_table`. + + .. seealso:: + + :ref:`orm_declarative_table` - complete documentation + + :ref:`whatsnew_20_orm_declarative_typing` - migration notes for + Declarative mappings using 1.x style mappings + + :param __name: String name to give to the :class:`_schema.Column`. This + is an optional, positional only argument that if present must be the + first positional argument passed. If omitted, the attribute name to + which the :func:`_orm.mapped_column` is mapped will be used as the SQL + column name. + :param __type: :class:`_types.TypeEngine` type or instance which will + indicate the datatype to be associated with the :class:`_schema.Column`. + This is an optional, positional-only argument that if present must + immediately follow the ``__name`` parameter if present also, or otherwise + be the first positional parameter. 
If omitted, the ultimate type for + the column may be derived either from the annotated type, or if a + :class:`_schema.ForeignKey` is present, from the datatype of the + referenced column. + :param \*args: Additional positional arguments include constructs such + as :class:`_schema.ForeignKey`, :class:`_schema.CheckConstraint`, + and :class:`_schema.Identity`, which are passed through to the constructed + :class:`_schema.Column`. + :param nullable: Optional bool, whether the column should be "NULL" or + "NOT NULL". If omitted, the nullability is derived from the type + annotation based on whether or not ``typing.Optional`` is present. + ``nullable`` defaults to ``True`` otherwise for non-primary key columns, + and ``False`` for primary key columns. + :param primary_key: optional bool, indicates the :class:`_schema.Column` + would be part of the table's primary key or not. + :param deferred: Optional bool - this keyword argument is consumed by the + ORM declarative process, and is not part of the :class:`_schema.Column` + itself; instead, it indicates that this column should be "deferred" for + loading as though mapped by :func:`_orm.deferred`. + + .. seealso:: + + :ref:`orm_queryguide_deferred_declarative` + + :param deferred_group: Implies :paramref:`_orm.mapped_column.deferred` + to ``True``, and set the :paramref:`_orm.deferred.group` parameter. + + .. seealso:: + + :ref:`orm_queryguide_deferred_group` + + :param deferred_raiseload: Implies :paramref:`_orm.mapped_column.deferred` + to ``True``, and set the :paramref:`_orm.deferred.raiseload` parameter. + + .. seealso:: + + :ref:`orm_queryguide_deferred_raiseload` + + :param use_existing_column: if True, will attempt to locate the given + column name on an inherited superclass (typically single inheriting + superclass), and if present, will not produce a new column, mapping + to the superclass column as though it were omitted from this class. + This is used for mixins that add new columns to an inherited superclass. + + .. seealso:: + + :ref:`orm_inheritance_column_conflicts` + + .. versionadded:: 2.0.0b4 + + :param default: Passed directly to the + :paramref:`_schema.Column.default` parameter if the + :paramref:`_orm.mapped_column.insert_default` parameter is not present. + Additionally, when used with :ref:`orm_declarative_native_dataclasses`, + indicates a default Python value that should be applied to the keyword + constructor within the generated ``__init__()`` method. + + Note that in the case of dataclass generation when + :paramref:`_orm.mapped_column.insert_default` is not present, this means + the :paramref:`_orm.mapped_column.default` value is used in **two** + places, both the ``__init__()`` method as well as the + :paramref:`_schema.Column.default` parameter. While this behavior may + change in a future release, for the moment this tends to "work out"; a + default of ``None`` will mean that the :class:`_schema.Column` gets no + default generator, whereas a default that refers to a non-``None`` Python + or SQL expression value will be assigned up front on the object when + ``__init__()`` is called, which is the same value that the Core + :class:`_sql.Insert` construct would use in any case, leading to the same + end result. + + .. 
note:: When using Core level column defaults that are callables to + be interpreted by the underlying :class:`_schema.Column` in conjunction + with :ref:`ORM-mapped dataclasses + `, especially those that are + :ref:`context-aware default functions `, + **the** :paramref:`_orm.mapped_column.insert_default` **parameter must + be used instead**. This is necessary to disambiguate the callable from + being interpreted as a dataclass level default. + + .. seealso:: + + :ref:`defaults_default_factory_insert_default` + + :paramref:`_orm.mapped_column.insert_default` + + :paramref:`_orm.mapped_column.default_factory` + + :param insert_default: Passed directly to the + :paramref:`_schema.Column.default` parameter; will supersede the value + of :paramref:`_orm.mapped_column.default` when present, however + :paramref:`_orm.mapped_column.default` will always apply to the + constructor default for a dataclasses mapping. + + .. seealso:: + + :ref:`defaults_default_factory_insert_default` + + :paramref:`_orm.mapped_column.default` + + :paramref:`_orm.mapped_column.default_factory` + + :param sort_order: An integer that indicates how this mapped column + should be sorted compared to the others when the ORM is creating a + :class:`_schema.Table`. Among mapped columns that have the same + value the default ordering is used, placing first the mapped columns + defined in the main class, then the ones in the super classes. + Defaults to 0. The sort is ascending. + + .. versionadded:: 2.0.4 + + :param active_history=False: + + When ``True``, indicates that the "previous" value for a + scalar attribute should be loaded when replaced, if not + already loaded. Normally, history tracking logic for + simple non-primary-key scalar values only needs to be + aware of the "new" value in order to perform a flush. This + flag is available for applications that make use of + :func:`.attributes.get_history` or :meth:`.Session.is_modified` + which also need to know the "previous" value of the attribute. + + .. versionadded:: 2.0.10 + + + :param init: Specific to :ref:`orm_declarative_native_dataclasses`, + specifies if the mapped attribute should be part of the ``__init__()`` + method as generated by the dataclass process. + :param repr: Specific to :ref:`orm_declarative_native_dataclasses`, + specifies if the mapped attribute should be part of the ``__repr__()`` + method as generated by the dataclass process. + :param default_factory: Specific to + :ref:`orm_declarative_native_dataclasses`, + specifies a default-value generation function that will take place + as part of the ``__init__()`` + method as generated by the dataclass process. + + .. seealso:: + + :ref:`defaults_default_factory_insert_default` + + :paramref:`_orm.mapped_column.default` + + :paramref:`_orm.mapped_column.insert_default` + + :param compare: Specific to + :ref:`orm_declarative_native_dataclasses`, indicates if this field + should be included in comparison operations when generating the + ``__eq__()`` and ``__ne__()`` methods for the mapped class. + + .. versionadded:: 2.0.0b4 + + :param kw_only: Specific to + :ref:`orm_declarative_native_dataclasses`, indicates if this field + should be marked as keyword-only when generating the ``__init__()``. + + :param \**kw: All remaining keyword arguments are passed through to the + constructor for the :class:`_schema.Column`. 
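As a quick illustration of how several of these parameters combine in practice, here is a minimal sketch of a Declarative mapping; the ``Base``, class, table, and attribute names are illustrative assumptions, not part of the original diff::

    from sqlalchemy import String, Text
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class User(Base):
        __tablename__ = "user_account"

        # primary_key=True implies NOT NULL; the Integer type is
        # derived from the Mapped[int] annotation
        id: Mapped[int] = mapped_column(primary_key=True)

        # an explicit type overrides what would be derived from Mapped[str]
        name: Mapped[str] = mapped_column(String(50))

        # deferred=True postpones loading until first attribute access;
        # explicit nullable=True overrides the non-Optional annotation
        bio: Mapped[str] = mapped_column(Text, deferred=True, nullable=True)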
+ + """ + + return MappedColumn( + __name_pos, + __type_pos, + *args, + name=name, + type_=type_, + autoincrement=autoincrement, + insert_default=insert_default, + attribute_options=_AttributeOptions( + init, repr, default, default_factory, compare, kw_only + ), + doc=doc, + key=key, + index=index, + unique=unique, + info=info, + active_history=active_history, + nullable=nullable, + onupdate=onupdate, + primary_key=primary_key, + server_default=server_default, + server_onupdate=server_onupdate, + use_existing_column=use_existing_column, + quote=quote, + comment=comment, + system=system, + deferred=deferred, + deferred_group=deferred_group, + deferred_raiseload=deferred_raiseload, + sort_order=sort_order, + **kw, + ) + + +def orm_insert_sentinel( + name: Optional[str] = None, + type_: Optional[_TypeEngineArgument[Any]] = None, + *, + default: Optional[Any] = None, + omit_from_statements: bool = True, +) -> MappedColumn[Any]: + """Provides a surrogate :func:`_orm.mapped_column` that generates + a so-called :term:`sentinel` column, allowing efficient bulk + inserts with deterministic RETURNING sorting for tables that don't + otherwise have qualifying primary key configurations. + + Use of :func:`_orm.orm_insert_sentinel` is analogous to the use of the + :func:`_schema.insert_sentinel` construct within a Core + :class:`_schema.Table` construct. + + Guidelines for adding this construct to a Declarative mapped class + are the same as that of the :func:`_schema.insert_sentinel` construct; + the database table itself also needs to have a column with this name + present. + + For background on how this object is used, see the section + :ref:`engine_insertmanyvalues_sentinel_columns` as part of the + section :ref:`engine_insertmanyvalues`. + + .. seealso:: + + :func:`_schema.insert_sentinel` + + :ref:`engine_insertmanyvalues` + + :ref:`engine_insertmanyvalues_sentinel_columns` + + + .. versionadded:: 2.0.10 + + """ + + return mapped_column( + name=name, + default=( + default if default is not None else _InsertSentinelColumnDefault() + ), + _omit_from_statements=omit_from_statements, + insert_sentinel=True, + use_existing_column=True, + nullable=True, + ) + + +@util.deprecated_params( + **{ + arg: ( + "2.0", + f"The :paramref:`_orm.column_property.{arg}` parameter is " + "deprecated for :func:`_orm.column_property`. This parameter " + "applies to a writeable-attribute in a Declarative Dataclasses " + "configuration only, and :func:`_orm.column_property` is treated " + "as a read-only attribute in this context.", + ) + for arg in ("init", "kw_only", "default", "default_factory") + } +) +def column_property( + column: _ORMColumnExprArgument[_T], + *additional_columns: _ORMColumnExprArgument[Any], + group: Optional[str] = None, + deferred: bool = False, + raiseload: bool = False, + comparator_factory: Optional[Type[PropComparator[_T]]] = None, + init: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + repr: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + default: Optional[Any] = _NoArg.NO_ARG, + default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, + compare: Union[_NoArg, bool] = _NoArg.NO_ARG, + kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + active_history: bool = False, + expire_on_flush: bool = True, + info: Optional[_InfoType] = None, + doc: Optional[str] = None, +) -> MappedSQLExpression[_T]: + r"""Provide a column-level property for use with a mapping. + + With Declarative mappings, :func:`_orm.column_property` is used to + map read-only SQL expressions to a mapped class. 
+ + When using Imperative mappings, :func:`_orm.column_property` also + takes on the role of mapping table columns with additional features. + When using fully Declarative mappings, the :func:`_orm.mapped_column` + construct should be used for this purpose. + + With Declarative Dataclass mappings, :func:`_orm.column_property` + is considered to be **read only**, and will not be included in the + Dataclass ``__init__()`` constructor. + + The :func:`_orm.column_property` function returns an instance of + :class:`.ColumnProperty`. + + .. seealso:: + + :ref:`mapper_column_property_sql_expressions` - general use of + :func:`_orm.column_property` to map SQL expressions + + :ref:`orm_imperative_table_column_options` - usage of + :func:`_orm.column_property` with Imperative Table mappings to apply + additional options to a plain :class:`_schema.Column` object + + :param \*cols: + list of Column objects to be mapped. + + :param active_history=False: + + Used only for Imperative Table mappings, or legacy-style Declarative + mappings (i.e. which have not been upgraded to + :func:`_orm.mapped_column`), for column-based attributes that are + expected to be writeable; use :func:`_orm.mapped_column` with + :paramref:`_orm.mapped_column.active_history` for Declarative mappings. + See that parameter for functional details. + + :param comparator_factory: a class which extends + :class:`.ColumnProperty.Comparator` which provides custom SQL + clause generation for comparison operations. + + :param group: + a group name for this property when marked as deferred. + + :param deferred: + when True, the column property is "deferred", meaning that + it does not load immediately, and is instead loaded when the + attribute is first accessed on an instance. See also + :func:`~sqlalchemy.orm.deferred`. + + :param doc: + optional string that will be applied as the doc on the + class-bound descriptor. + + :param expire_on_flush=True: + Disable expiry on flush. A column_property() which refers + to a SQL expression (and not a single table-bound column) + is considered to be a "read only" property; populating it + has no effect on the state of data, and it can only return + database state. For this reason a column_property()'s value + is expired whenever the parent object is involved in a + flush, that is, has any kind of "dirty" state within a flush. + Setting this parameter to ``False`` will have the effect of + leaving any existing value present after the flush proceeds. + Note that the :class:`.Session` with default expiration + settings still expires + all attributes after a :meth:`.Session.commit` call, however. + + :param info: Optional data dictionary which will be populated into the + :attr:`.MapperProperty.info` attribute of this object. + + :param raiseload: if True, indicates the column should raise an error + when undeferred, rather than loading the value. This can be + altered at query time by using the :func:`.deferred` option with + raiseload=False. + + .. versionadded:: 1.4 + + .. 
seealso:: + + :ref:`orm_queryguide_deferred_raiseload` + + :param init: + + :param default: + + :param default_factory: + + :param kw_only: + + """ + return MappedSQLExpression( + column, + *additional_columns, + attribute_options=_AttributeOptions( + False if init is _NoArg.NO_ARG else init, + repr, + default, + default_factory, + compare, + kw_only, + ), + group=group, + deferred=deferred, + raiseload=raiseload, + comparator_factory=comparator_factory, + active_history=active_history, + expire_on_flush=expire_on_flush, + info=info, + doc=doc, + _assume_readonly_dc_attributes=True, + ) + + +@overload +def composite( + _class_or_attr: _CompositeAttrType[Any], + *attrs: _CompositeAttrType[Any], + group: Optional[str] = None, + deferred: bool = False, + raiseload: bool = False, + comparator_factory: Optional[Type[Composite.Comparator[_T]]] = None, + active_history: bool = False, + init: Union[_NoArg, bool] = _NoArg.NO_ARG, + repr: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + default: Optional[Any] = _NoArg.NO_ARG, + default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, + compare: Union[_NoArg, bool] = _NoArg.NO_ARG, + kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + info: Optional[_InfoType] = None, + doc: Optional[str] = None, + **__kw: Any, +) -> Composite[Any]: ... + + +@overload +def composite( + _class_or_attr: Type[_CC], + *attrs: _CompositeAttrType[Any], + group: Optional[str] = None, + deferred: bool = False, + raiseload: bool = False, + comparator_factory: Optional[Type[Composite.Comparator[_T]]] = None, + active_history: bool = False, + init: Union[_NoArg, bool] = _NoArg.NO_ARG, + repr: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + default: Optional[Any] = _NoArg.NO_ARG, + default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, + compare: Union[_NoArg, bool] = _NoArg.NO_ARG, + kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + info: Optional[_InfoType] = None, + doc: Optional[str] = None, + **__kw: Any, +) -> Composite[_CC]: ... + + +@overload +def composite( + _class_or_attr: Callable[..., _CC], + *attrs: _CompositeAttrType[Any], + group: Optional[str] = None, + deferred: bool = False, + raiseload: bool = False, + comparator_factory: Optional[Type[Composite.Comparator[_T]]] = None, + active_history: bool = False, + init: Union[_NoArg, bool] = _NoArg.NO_ARG, + repr: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + default: Optional[Any] = _NoArg.NO_ARG, + default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, + compare: Union[_NoArg, bool] = _NoArg.NO_ARG, + kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + info: Optional[_InfoType] = None, + doc: Optional[str] = None, + **__kw: Any, +) -> Composite[_CC]: ... + + +def composite( + _class_or_attr: Union[ + None, Type[_CC], Callable[..., _CC], _CompositeAttrType[Any] + ] = None, + *attrs: _CompositeAttrType[Any], + group: Optional[str] = None, + deferred: bool = False, + raiseload: bool = False, + comparator_factory: Optional[Type[Composite.Comparator[_T]]] = None, + active_history: bool = False, + init: Union[_NoArg, bool] = _NoArg.NO_ARG, + repr: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + default: Optional[Any] = _NoArg.NO_ARG, + default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, + compare: Union[_NoArg, bool] = _NoArg.NO_ARG, + kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + info: Optional[_InfoType] = None, + doc: Optional[str] = None, + **__kw: Any, +) -> Composite[Any]: + r"""Return a composite column-based property for use with a Mapper. 
+ + See the mapping documentation section :ref:`mapper_composite` for a + full usage example. + + The :class:`.MapperProperty` returned by :func:`.composite` + is the :class:`.Composite`. + + :param class\_: + The "composite type" class, or any classmethod or callable which + will produce a new instance of the composite object given the + column values in order. + + :param \*attrs: + List of elements to be mapped, which may include: + + * :class:`_schema.Column` objects + * :func:`_orm.mapped_column` constructs + * string names of other attributes on the mapped class, which may be + any other SQL or object-mapped attribute. This can for + example allow a composite that refers to a many-to-one relationship + + :param active_history=False: + When ``True``, indicates that the "previous" value for a + scalar attribute should be loaded when replaced, if not + already loaded. See the same flag on :func:`.column_property`. + + :param group: + A group name for this property when marked as deferred. + + :param deferred: + When True, the column property is "deferred", meaning that it does + not load immediately, and is instead loaded when the attribute is + first accessed on an instance. See also + :func:`~sqlalchemy.orm.deferred`. + + :param comparator_factory: a class which extends + :class:`.Composite.Comparator` which provides custom SQL + clause generation for comparison operations. + + :param doc: + optional string that will be applied as the doc on the + class-bound descriptor. + + :param info: Optional data dictionary which will be populated into the + :attr:`.MapperProperty.info` attribute of this object. + + :param init: Specific to :ref:`orm_declarative_native_dataclasses`, + specifies if the mapped attribute should be part of the ``__init__()`` + method as generated by the dataclass process. + :param repr: Specific to :ref:`orm_declarative_native_dataclasses`, + specifies if the mapped attribute should be part of the ``__repr__()`` + method as generated by the dataclass process. + :param default_factory: Specific to + :ref:`orm_declarative_native_dataclasses`, + specifies a default-value generation function that will take place + as part of the ``__init__()`` + method as generated by the dataclass process. + + :param compare: Specific to + :ref:`orm_declarative_native_dataclasses`, indicates if this field + should be included in comparison operations when generating the + ``__eq__()`` and ``__ne__()`` methods for the mapped class. + + .. versionadded:: 2.0.0b4 + + :param kw_only: Specific to + :ref:`orm_declarative_native_dataclasses`, indicates if this field + should be marked as keyword-only when generating the ``__init__()``. + + """ + if __kw: + raise _no_kw() + + return Composite( + _class_or_attr, + *attrs, + attribute_options=_AttributeOptions( + init, repr, default, default_factory, compare, kw_only + ), + group=group, + deferred=deferred, + raiseload=raiseload, + comparator_factory=comparator_factory, + active_history=active_history, + info=info, + doc=doc, + ) + + +def with_loader_criteria( + entity_or_base: _EntityType[Any], + where_criteria: Union[ + _ColumnExpressionArgument[bool], + Callable[[Any], _ColumnExpressionArgument[bool]], + ], + loader_only: bool = False, + include_aliases: bool = False, + propagate_to_loaders: bool = True, + track_closure_variables: bool = True, +) -> LoaderCriteriaOption: + """Add additional WHERE criteria to the load for all occurrences of + a particular entity. + + .. 
versionadded:: 1.4 + + The :func:`_orm.with_loader_criteria` option is intended to add + limiting criteria to a particular kind of entity in a query, + **globally**, meaning it will apply to the entity as it appears + in the SELECT query as well as within any subqueries, join + conditions, and relationship loads, including both eager and lazy + loaders, without the need for it to be specified in any particular + part of the query. The rendering logic uses the same system used by + single table inheritance to ensure a certain discriminator is applied + to a table. + + E.g., using :term:`2.0-style` queries, we can limit the way the + ``User.addresses`` collection is loaded, regardless of the kind + of loading used:: + + from sqlalchemy.orm import with_loader_criteria + + stmt = select(User).options( + selectinload(User.addresses), + with_loader_criteria(Address, Address.email_address != 'foo'), + ) + + Above, the "selectinload" for ``User.addresses`` will apply the + given filtering criteria to the WHERE clause. + + Another example, where the filtering will be applied to the + ON clause of the join, in this example using :term:`1.x style` + queries:: + + q = session.query(User).outerjoin(User.addresses).options( + with_loader_criteria(Address, Address.email_address != 'foo'), + ) + + The primary purpose of :func:`_orm.with_loader_criteria` is to use + it in the :meth:`_orm.SessionEvents.do_orm_execute` event handler + to ensure that all occurrences of a particular entity are filtered + in a certain way, such as filtering for access control roles. It + also can be used to apply criteria to relationship loads. In the + example below, we can apply a certain set of rules to all queries + emitted by a particular :class:`_orm.Session`:: + + session = Session(bind=engine) + + @event.listens_for(session, "do_orm_execute") + def _add_filtering_criteria(execute_state): + + if ( + execute_state.is_select + and not execute_state.is_column_load + and not execute_state.is_relationship_load + ): + execute_state.statement = execute_state.statement.options( + with_loader_criteria( + SecurityRole, + lambda cls: cls.role.in_(['some_role']), + include_aliases=True + ) + ) + + In the above example, the :meth:`_orm.SessionEvents.do_orm_execute` + event will intercept all queries emitted using the + :class:`_orm.Session`. For those queries which are SELECT statements + and are not attribute or relationship loads, a custom + :func:`_orm.with_loader_criteria` option is added to the query. The + :func:`_orm.with_loader_criteria` option will be used in the given + statement and will also be automatically propagated to all relationship + loads that descend from this query. + + The criteria argument given is a ``lambda`` that accepts a ``cls`` + argument. The given class will expand to include all mapped subclasses + and need not itself be a mapped class. + + .. tip:: + + When using the :func:`_orm.with_loader_criteria` option in + conjunction with the :func:`_orm.contains_eager` loader option, + it's important to note that :func:`_orm.with_loader_criteria` only + affects the part of the query that determines what SQL is rendered + in terms of the WHERE and FROM clauses. The + :func:`_orm.contains_eager` option does not affect the rendering of + the SELECT statement outside of the columns clause, so does not have + any interaction with the :func:`_orm.with_loader_criteria` option.
+ However, the way things "work" is that :func:`_orm.contains_eager` + is meant to be used with a query that is already selecting from the + additional entities in some way, where + :func:`_orm.with_loader_criteria` can apply its additional + criteria. + + In the example below, assuming a mapping relationship as + ``A -> A.bs -> B``, the given :func:`_orm.with_loader_criteria` + option will affect the way in which the JOIN is rendered:: + + stmt = select(A).join(A.bs).options( + contains_eager(A.bs), + with_loader_criteria(B, B.flag == 1) + ) + + Above, the given :func:`_orm.with_loader_criteria` option will + affect the ON clause of the JOIN that is specified by + ``.join(A.bs)``, so is applied as expected. The + :func:`_orm.contains_eager` option has the effect that columns from + ``B`` are added to the columns clause:: + + SELECT + b.id, b.a_id, b.data, b.flag, + a.id AS id_1, + a.data AS data_1 + FROM a JOIN b ON a.id = b.a_id AND b.flag = :flag_1 + + + The use of the :func:`_orm.contains_eager` option within the above + statement has no effect on the behavior of the + :func:`_orm.with_loader_criteria` option. If the + :func:`_orm.contains_eager` option were omitted, the SQL would be + the same as regards the FROM and WHERE clauses, where + :func:`_orm.with_loader_criteria` continues to add its criteria to + the ON clause of the JOIN. The addition of + :func:`_orm.contains_eager` only affects the columns clause, in that + additional columns against ``b`` are added which are then consumed + by the ORM to produce ``B`` instances. + + .. warning:: A lambda passed to :func:`_orm.with_loader_criteria` is + invoked only **once per unique class**. Custom functions should not + be invoked within this lambda. + See :ref:`engine_lambda_caching` for an overview of the "lambda SQL" + feature, which is for advanced use only. + + :param entity_or_base: a mapped class, or a class that is a super + class of a particular set of mapped classes, to which the rule + will apply. + + :param where_criteria: a Core SQL expression that applies limiting + criteria. This may also be a "lambda:" or Python function that + accepts a target class as an argument, when the given class is + a base with many different mapped subclasses. + + .. note:: To support pickling, use a module-level Python function to + produce the SQL expression instead of a lambda or a fixed SQL + expression, which tend not to be picklable. + + :param include_aliases: if True, apply the rule to :func:`_orm.aliased` + constructs as well. + + :param propagate_to_loaders: defaults to True, apply to relationship + loaders such as lazy loaders. This indicates that the + option object itself, including its SQL expression, is carried along with + each loaded instance. Set to ``False`` to prevent the object from + being assigned to individual instances. + + + .. seealso:: + + :ref:`examples_session_orm_events` - includes examples of using + :func:`_orm.with_loader_criteria`. + + :ref:`do_orm_execute_global_criteria` - basic example on how to + combine :func:`_orm.with_loader_criteria` with the + :meth:`_orm.SessionEvents.do_orm_execute` event. + + :param track_closure_variables: when False, closure variables inside + of a lambda expression will not be used as part of + any cache key. This allows more complex expressions to be used + inside of a lambda expression but requires that the lambda ensures + it returns the identical SQL every time given a particular class. + + ..
versionadded:: 1.4.0b2 + + """ + return LoaderCriteriaOption( + entity_or_base, + where_criteria, + loader_only, + include_aliases, + propagate_to_loaders, + track_closure_variables, + ) + + +def relationship( + argument: Optional[_RelationshipArgumentType[Any]] = None, + secondary: Optional[_RelationshipSecondaryArgument] = None, + *, + uselist: Optional[bool] = None, + collection_class: Optional[ + Union[Type[Collection[Any]], Callable[[], Collection[Any]]] + ] = None, + primaryjoin: Optional[_RelationshipJoinConditionArgument] = None, + secondaryjoin: Optional[_RelationshipJoinConditionArgument] = None, + back_populates: Optional[str] = None, + order_by: _ORMOrderByArgument = False, + backref: Optional[ORMBackrefArgument] = None, + overlaps: Optional[str] = None, + post_update: bool = False, + cascade: str = "save-update, merge", + viewonly: bool = False, + init: Union[_NoArg, bool] = _NoArg.NO_ARG, + repr: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + default: Union[_NoArg, _T] = _NoArg.NO_ARG, + default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, + compare: Union[_NoArg, bool] = _NoArg.NO_ARG, + kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + lazy: _LazyLoadArgumentType = "select", + passive_deletes: Union[Literal["all"], bool] = False, + passive_updates: bool = True, + active_history: bool = False, + enable_typechecks: bool = True, + foreign_keys: Optional[_ORMColCollectionArgument] = None, + remote_side: Optional[_ORMColCollectionArgument] = None, + join_depth: Optional[int] = None, + comparator_factory: Optional[ + Type[RelationshipProperty.Comparator[Any]] + ] = None, + single_parent: bool = False, + innerjoin: bool = False, + distinct_target_key: Optional[bool] = None, + load_on_pending: bool = False, + query_class: Optional[Type[Query[Any]]] = None, + info: Optional[_InfoType] = None, + omit_join: Literal[None, False] = None, + sync_backref: Optional[bool] = None, + **kw: Any, +) -> _RelationshipDeclared[Any]: + """Provide a relationship between two mapped classes. + + This corresponds to a parent-child or associative table relationship. + The constructed class is an instance of :class:`.Relationship`. + + .. seealso:: + + :ref:`tutorial_orm_related_objects` - tutorial introduction + to :func:`_orm.relationship` in the :ref:`unified_tutorial` + + :ref:`relationship_config_toplevel` - narrative documentation + + :param argument: + This parameter refers to the class that is to be related. It + accepts several forms, including a direct reference to the target + class itself, the :class:`_orm.Mapper` instance for the target class, + a Python callable / lambda that will return a reference to the + class or :class:`_orm.Mapper` when called, and finally a string + name for the class, which will be resolved from the + :class:`_orm.registry` in use in order to locate the class, e.g.:: + + class SomeClass(Base): + # ... + + related = relationship("RelatedClass") + + The :paramref:`_orm.relationship.argument` may also be omitted from the + :func:`_orm.relationship` construct entirely, and instead placed inside + a :class:`_orm.Mapped` annotation on the left side, which should + include a Python collection type if the relationship is expected + to be a collection, such as:: + + class SomeClass(Base): + # ... + + related_items: Mapped[List["RelatedItem"]] = relationship() + + Or for a many-to-one or one-to-one relationship:: + + class SomeClass(Base): + # ... + + related_item: Mapped["RelatedItem"] = relationship() + + .. 
seealso:: + + :ref:`orm_declarative_properties` - further detail + on relationship configuration when using Declarative. + + :param secondary: + For a many-to-many relationship, specifies the intermediary + table, and is typically an instance of :class:`_schema.Table`. + In less common circumstances, the argument may also be specified + as an :class:`_expression.Alias` construct, or even a + :class:`_expression.Join` construct. + + :paramref:`_orm.relationship.secondary` may + also be passed as a callable function which is evaluated at + mapper initialization time. When using Declarative, it may also + be a string argument noting the name of a :class:`_schema.Table` + that is + present in the :class:`_schema.MetaData` + collection associated with the + parent-mapped :class:`_schema.Table`. + + .. warning:: When passed as a Python-evaluable string, the + argument is interpreted using Python's ``eval()`` function. + **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**. + See :ref:`declarative_relationship_eval` for details on + declarative evaluation of :func:`_orm.relationship` arguments. + + The :paramref:`_orm.relationship.secondary` keyword argument is + typically applied in the case where the intermediary + :class:`_schema.Table` + is not otherwise expressed in any direct class mapping. If the + "secondary" table is also explicitly mapped elsewhere (e.g. as in + :ref:`association_pattern`), one should consider applying the + :paramref:`_orm.relationship.viewonly` flag so that this + :func:`_orm.relationship` + is not used for persistence operations which + may conflict with those of the association object pattern. + + .. seealso:: + + :ref:`relationships_many_to_many` - Reference example of "many + to many". + + :ref:`self_referential_many_to_many` - Specifics on using + many-to-many in a self-referential case. + + :ref:`declarative_many_to_many` - Additional options when using + Declarative. + + :ref:`association_pattern` - an alternative to + :paramref:`_orm.relationship.secondary` + when composing association + table relationships, allowing additional attributes to be + specified on the association table. + + :ref:`composite_secondary_join` - a lesser-used pattern which + in some cases can enable complex :func:`_orm.relationship` SQL + conditions to be used. + + :param active_history=False: + When ``True``, indicates that the "previous" value for a + many-to-one reference should be loaded when replaced, if + not already loaded. Normally, history tracking logic for + simple many-to-ones only needs to be aware of the "new" + value in order to perform a flush. This flag is available + for applications that make use of + :func:`.attributes.get_history` which also need to know + the "previous" value of the attribute. + + :param backref: + A reference to a string relationship name, or a :func:`_orm.backref` + construct, which will be used to automatically generate a new + :func:`_orm.relationship` on the related class, which then refers to this + one using a bi-directional :paramref:`_orm.relationship.back_populates` + configuration. + + In modern Python, explicit use of :func:`_orm.relationship` + with :paramref:`_orm.relationship.back_populates` should be preferred, + as it is more robust in terms of mapper configuration as well as + more conceptually straightforward. It also integrates with + new :pep:`484` typing features introduced in SQLAlchemy 2.0 which + is not possible with dynamically generated attributes. + + .. 
seealso:: + + :ref:`relationships_backref` - notes on using + :paramref:`_orm.relationship.backref` + + :ref:`tutorial_orm_related_objects` - in the :ref:`unified_tutorial`, + presents an overview of bi-directional relationship configuration + and behaviors using :paramref:`_orm.relationship.back_populates` + + :func:`.backref` - allows control over :func:`_orm.relationship` + configuration when using :paramref:`_orm.relationship.backref`. + + + :param back_populates: + Indicates the name of a :func:`_orm.relationship` on the related + class that will be synchronized with this one. It is usually + expected that the :func:`_orm.relationship` on the related class + also refers to this one. This allows objects on both sides of + each :func:`_orm.relationship` to synchronize in-Python state + changes and also provides directives to the :term:`unit of work` + flush process as to how changes along these relationships should + be persisted. + + .. seealso:: + + :ref:`tutorial_orm_related_objects` - in the :ref:`unified_tutorial`, + presents an overview of bi-directional relationship configuration + and behaviors. + + :ref:`relationship_patterns` - includes many examples of + :paramref:`_orm.relationship.back_populates`. + + :paramref:`_orm.relationship.backref` - legacy form which allows + more succinct configuration, but does not support explicit typing + + :param overlaps: + A string name or comma-delimited set of names of other relationships + on either this mapper, a descendant mapper, or a target mapper with + which this relationship may write to the same foreign keys upon + persistence. The only effect this has is to eliminate the + warning that this relationship will conflict with another upon + persistence. This is used for such relationships that are truly + capable of conflicting with each other on write, but the application + will ensure that no such conflicts occur. + + .. versionadded:: 1.4 + + .. seealso:: + + :ref:`error_qzyx` - usage example + + :param cascade: + A comma-separated list of cascade rules which determines how + Session operations should be "cascaded" from parent to child. + This defaults to ``"save-update, merge"``. + + The available cascades are ``save-update``, ``merge``, + ``expunge``, ``delete``, ``delete-orphan``, and ``refresh-expire``. + An additional option, ``all``, is shorthand for + ``"save-update, merge, refresh-expire, + expunge, delete"``, and is often used as in ``"all, delete-orphan"`` + to indicate that related objects should follow along with the + parent object in all cases, and be deleted when de-associated. + + .. seealso:: + + :ref:`unitofwork_cascades` - Full detail on each of the available + cascade options. + + :param cascade_backrefs=False: + Legacy; this flag is always False. + + .. versionchanged:: 2.0 "cascade_backrefs" functionality has been + removed. + + :param collection_class: + A class or callable that returns a new list-holding object, which + will be used in place of a plain list for storing elements. + + .. seealso:: + + :ref:`custom_collections` - Introductory documentation and + examples. + + :param comparator_factory: + A class which extends :class:`.Relationship.Comparator`, + providing custom SQL clause generation for comparison + operations. + + .. seealso:: + + :class:`.PropComparator` - some detail on redefining comparators + at this level. + + :ref:`custom_comparators` - Brief intro to this feature.
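A minimal sketch of a bi-directional one-to-many configuration using :paramref:`_orm.relationship.back_populates`; the ``Base``, class, and table names below are illustrative assumptions::

    from sqlalchemy import ForeignKey
    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        mapped_column,
        relationship,
    )

    class Base(DeclarativeBase):
        pass

    class Parent(Base):
        __tablename__ = "parent"

        id: Mapped[int] = mapped_column(primary_key=True)
        # one-to-many side; the collection type is taken from the annotation
        children: Mapped[list["Child"]] = relationship(back_populates="parent")

    class Child(Base):
        __tablename__ = "child"

        id: Mapped[int] = mapped_column(primary_key=True)
        parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id"))
        # many-to-one side, kept in sync with Parent.children
        parent: Mapped["Parent"] = relationship(back_populates="children")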
+ + + :param distinct_target_key=None: + Indicate if a "subquery" eager load should apply the DISTINCT + keyword to the innermost SELECT statement. When left as ``None``, + the DISTINCT keyword will be applied in those cases when the target + columns do not comprise the full primary key of the target table. + When set to ``True``, the DISTINCT keyword is applied to the + innermost SELECT unconditionally. + + It may be desirable to set this flag to False when the DISTINCT is + reducing performance of the innermost subquery beyond that of what + duplicate innermost rows may be causing. + + .. seealso:: + + :ref:`loading_toplevel` - includes an introduction to subquery + eager loading. + + :param doc: + Docstring which will be applied to the resulting descriptor. + + :param foreign_keys: + + A list of columns which are to be used as "foreign key" + columns, or columns which refer to the value in a remote + column, within the context of this :func:`_orm.relationship` + object's :paramref:`_orm.relationship.primaryjoin` condition. + That is, if the :paramref:`_orm.relationship.primaryjoin` + condition of this :func:`_orm.relationship` is ``a.id == + b.a_id``, and the values in ``b.a_id`` are required to be + present in ``a.id``, then the "foreign key" column of this + :func:`_orm.relationship` is ``b.a_id``. + + In normal cases, the :paramref:`_orm.relationship.foreign_keys` + parameter is **not required.** :func:`_orm.relationship` will + automatically determine which columns in the + :paramref:`_orm.relationship.primaryjoin` condition are to be + considered "foreign key" columns based on those + :class:`_schema.Column` objects that specify + :class:`_schema.ForeignKey`, + or are otherwise listed as referencing columns in a + :class:`_schema.ForeignKeyConstraint` construct. + :paramref:`_orm.relationship.foreign_keys` is only needed when: + + 1. There is more than one way to construct a join from the local + table to the remote table, as there are multiple foreign key + references present. Setting ``foreign_keys`` will limit the + :func:`_orm.relationship` + to consider just those columns specified + here as "foreign". + + 2. The :class:`_schema.Table` being mapped does not actually have + :class:`_schema.ForeignKey` or + :class:`_schema.ForeignKeyConstraint` + constructs present, often because the table + was reflected from a database that does not support foreign key + reflection (MySQL MyISAM). + + 3. The :paramref:`_orm.relationship.primaryjoin` + argument is used to + construct a non-standard join condition, which makes use of + columns or expressions that do not normally refer to their + "parent" column, such as a join condition expressed by a + complex comparison using a SQL function. + + The :func:`_orm.relationship` construct will raise informative + error messages that suggest the use of the + :paramref:`_orm.relationship.foreign_keys` parameter when + presented with an ambiguous condition. In typical cases, + if :func:`_orm.relationship` doesn't raise any exceptions, the + :paramref:`_orm.relationship.foreign_keys` parameter is usually + not needed. + + :paramref:`_orm.relationship.foreign_keys` may also be passed as a + callable function which is evaluated at mapper initialization time, + and may be passed as a Python-evaluable string when using + Declarative. + + .. warning:: When passed as a Python-evaluable string, the + argument is interpreted using Python's ``eval()`` function. + **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**. 
See :ref:`declarative_relationship_eval` for details on
+ declarative evaluation of :func:`_orm.relationship` arguments.
+
+ .. seealso::
+
+ :ref:`relationship_foreign_keys`
+
+ :ref:`relationship_custom_foreign`
+
+ :func:`.foreign` - allows direct annotation of the "foreign"
+ columns within a :paramref:`_orm.relationship.primaryjoin`
+ condition.
+
+ :param info: Optional data dictionary which will be populated into the
+ :attr:`.MapperProperty.info` attribute of this object.
+
+ :param innerjoin=False:
+ When ``True``, joined eager loads will use an inner join to join
+ against related tables instead of an outer join. The purpose
+ of this option is generally one of performance, as inner joins
+ generally perform better than outer joins.
+
+ This flag can be set to ``True`` when the relationship references an
+ object via many-to-one using local foreign keys that are not
+ nullable, or when the reference is one-to-one or a collection that
+ is guaranteed to have at least one entry.
+
+ The option supports the same "nested" and "unnested" options as
+ that of :paramref:`_orm.joinedload.innerjoin`. See that flag
+ for details on nested / unnested behaviors.
+
+ .. seealso::
+
+ :paramref:`_orm.joinedload.innerjoin` - the option as specified by
+ the loader option, including detail on nesting behavior.
+
+ :ref:`what_kind_of_loading` - Discussion of some details of
+ various loader options.
+
+
+ :param join_depth:
+ When non-``None``, an integer value indicating how many levels
+ deep "eager" loaders should join on a self-referring or cyclical
+ relationship. The number counts how many times the same Mapper
+ shall be present in the loading condition along a particular join
+ branch. When left at its default of ``None``, eager loaders
+ will stop chaining when they encounter the same target mapper
+ which is already higher up in the chain. This option applies
+ both to joined- and subquery- eager loaders.
+
+ .. seealso::
+
+ :ref:`self_referential_eager_loading` - Introductory documentation
+ and examples.
+
+ :param lazy='select': specifies
+ how the related items should be loaded. Default value is
+ ``select``. Values include:
+
+ * ``select`` - items should be loaded lazily when the property is
+ first accessed, using a separate SELECT statement, or identity map
+ fetch for simple many-to-one references.
+
+ * ``immediate`` - items should be loaded as the parents are loaded,
+ using a separate SELECT statement, or identity map fetch for
+ simple many-to-one references.
+
+ * ``joined`` - items should be loaded "eagerly" in the same query as
+ that of the parent, using a JOIN or LEFT OUTER JOIN. Whether
+ the join is "outer" or not is determined by the
+ :paramref:`_orm.relationship.innerjoin` parameter.
+
+ * ``subquery`` - items should be loaded "eagerly" as the parents are
+ loaded, using one additional SQL statement, which issues a JOIN to
+ a subquery of the original statement, for each collection
+ requested.
+
+ * ``selectin`` - items should be loaded "eagerly" as the parents
+ are loaded, using one or more additional SQL statements, which
+ issues a JOIN to the immediate parent object, specifying primary
+ key identifiers using an IN clause.
+
+ * ``noload`` - no loading should occur at any time. The related
+ collection will remain empty. The ``noload`` strategy is not
+ recommended for general use.
For a general use "never load" + approach, see :ref:`write_only_relationship` + + * ``raise`` - lazy loading is disallowed; accessing + the attribute, if its value were not already loaded via eager + loading, will raise an :exc:`~sqlalchemy.exc.InvalidRequestError`. + This strategy can be used when objects are to be detached from + their attached :class:`.Session` after they are loaded. + + * ``raise_on_sql`` - lazy loading that emits SQL is disallowed; + accessing the attribute, if its value were not already loaded via + eager loading, will raise an + :exc:`~sqlalchemy.exc.InvalidRequestError`, **if the lazy load + needs to emit SQL**. If the lazy load can pull the related value + from the identity map or determine that it should be None, the + value is loaded. This strategy can be used when objects will + remain associated with the attached :class:`.Session`, however + additional SELECT statements should be blocked. + + * ``write_only`` - the attribute will be configured with a special + "virtual collection" that may receive + :meth:`_orm.WriteOnlyCollection.add` and + :meth:`_orm.WriteOnlyCollection.remove` commands to add or remove + individual objects, but will not under any circumstances load or + iterate the full set of objects from the database directly. Instead, + methods such as :meth:`_orm.WriteOnlyCollection.select`, + :meth:`_orm.WriteOnlyCollection.insert`, + :meth:`_orm.WriteOnlyCollection.update` and + :meth:`_orm.WriteOnlyCollection.delete` are provided which generate SQL + constructs that may be used to load and modify rows in bulk. Used for + large collections that are never appropriate to load at once into + memory. + + The ``write_only`` loader style is configured automatically when + the :class:`_orm.WriteOnlyMapped` annotation is provided on the + left hand side within a Declarative mapping. See the section + :ref:`write_only_relationship` for examples. + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`write_only_relationship` - in the :ref:`queryguide_toplevel` + + * ``dynamic`` - the attribute will return a pre-configured + :class:`_query.Query` object for all read + operations, onto which further filtering operations can be + applied before iterating the results. + + The ``dynamic`` loader style is configured automatically when + the :class:`_orm.DynamicMapped` annotation is provided on the + left hand side within a Declarative mapping. See the section + :ref:`dynamic_relationship` for examples. + + .. legacy:: The "dynamic" lazy loader strategy is the legacy form of + what is now the "write_only" strategy described in the section + :ref:`write_only_relationship`. + + .. seealso:: + + :ref:`dynamic_relationship` - in the :ref:`queryguide_toplevel` + + :ref:`write_only_relationship` - more generally useful approach + for large collections that should not fully load into memory + + * True - a synonym for 'select' + + * False - a synonym for 'joined' + + * None - a synonym for 'noload' + + .. seealso:: + + :ref:`orm_queryguide_relationship_loaders` - Full documentation on + relationship loader configuration in the :ref:`queryguide_toplevel`. + + + :param load_on_pending=False: + Indicates loading behavior for transient or pending parent objects. + + When set to ``True``, causes the lazy-loader to + issue a query for a parent object that is not persistent, meaning it + has never been flushed. 
This may take effect for a pending object
+ when autoflush is disabled, or for a transient object that has been
+ "attached" to a :class:`.Session` but is not part of its pending
+ collection.
+
+ The :paramref:`_orm.relationship.load_on_pending`
+ flag does not improve
+ behavior when the ORM is used normally - object references should be
+ constructed at the object level, not at the foreign key level, so
+ that they are present in an ordinary way before a flush proceeds.
+ This flag is not intended for general use.
+
+ .. seealso::
+
+ :meth:`.Session.enable_relationship_loading` - this method
+ establishes "load on pending" behavior for the whole object, and
+ also allows loading on objects that remain transient or
+ detached.
+
+ :param order_by:
+ Indicates the ordering that should be applied when loading these
+ items. :paramref:`_orm.relationship.order_by`
+ is expected to refer to
+ one of the :class:`_schema.Column`
+ objects to which the target class is
+ mapped, or the attribute itself bound to the target class which
+ refers to the column.
+
+ :paramref:`_orm.relationship.order_by`
+ may also be passed as a callable
+ function which is evaluated at mapper initialization time, and may
+ be passed as a Python-evaluable string when using Declarative.
+
+ .. warning:: When passed as a Python-evaluable string, the
+ argument is interpreted using Python's ``eval()`` function.
+ **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**.
+ See :ref:`declarative_relationship_eval` for details on
+ declarative evaluation of :func:`_orm.relationship` arguments.
+
+ :param passive_deletes=False:
+ Indicates loading behavior during delete operations.
+
+ A value of True indicates that unloaded child items should not
+ be loaded during a delete operation on the parent. Normally,
+ when a parent item is deleted, all child items are loaded so
+ that they can either be marked as deleted, or have their
+ foreign key to the parent set to NULL. Marking this flag as
+ True usually implies an ON DELETE rule is in
+ place which will handle updating/deleting child rows on the
+ database side.
+
+ Additionally, setting the flag to the string value 'all' will
+ disable the "nulling out" of the child foreign keys, when the parent
+ object is deleted and there is no delete or delete-orphan cascade
+ enabled. This is typically used when a trigger or error-raising
+ scenario is in place on the database side. Note that the foreign
+ key attributes on in-session child objects will not be changed after
+ a flush occurs, so this is a very special use-case setting.
+ Additionally, the "nulling out" will still occur if the child
+ object is de-associated with the parent.
+
+ .. seealso::
+
+ :ref:`passive_deletes` - Introductory documentation
+ and examples.
+
+ :param passive_updates=True:
+ Indicates the persistence behavior to take when a referenced
+ primary key value changes in place, indicating that the referencing
+ foreign key columns will also need their value changed.
+
+ When True, it is assumed that ``ON UPDATE CASCADE`` is configured on
+ the foreign key in the database, and that the database will
+ handle propagation of an UPDATE from a source column to
+ dependent rows. When False, the SQLAlchemy
+ :func:`_orm.relationship`
+ construct will attempt to emit its own UPDATE statements to
+ modify related targets. However, note that SQLAlchemy **cannot**
+ emit an UPDATE for more than one level of cascade.
Also, + setting this flag to False is not compatible in the case where + the database is in fact enforcing referential integrity, unless + those constraints are explicitly "deferred", if the target backend + supports it. + + It is highly advised that an application which is employing + mutable primary keys keeps ``passive_updates`` set to True, + and instead uses the referential integrity features of the database + itself in order to handle the change efficiently and fully. + + .. seealso:: + + :ref:`passive_updates` - Introductory documentation and + examples. + + :paramref:`.mapper.passive_updates` - a similar flag which + takes effect for joined-table inheritance mappings. + + :param post_update: + This indicates that the relationship should be handled by a + second UPDATE statement after an INSERT or before a + DELETE. This flag is used to handle saving bi-directional + dependencies between two individual rows (i.e. each row + references the other), where it would otherwise be impossible to + INSERT or DELETE both rows fully since one row exists before the + other. Use this flag when a particular mapping arrangement will + incur two rows that are dependent on each other, such as a table + that has a one-to-many relationship to a set of child rows, and + also has a column that references a single child row within that + list (i.e. both tables contain a foreign key to each other). If + a flush operation returns an error that a "cyclical + dependency" was detected, this is a cue that you might want to + use :paramref:`_orm.relationship.post_update` to "break" the cycle. + + .. seealso:: + + :ref:`post_update` - Introductory documentation and examples. + + :param primaryjoin: + A SQL expression that will be used as the primary + join of the child object against the parent object, or in a + many-to-many relationship the join of the parent object to the + association table. By default, this value is computed based on the + foreign key relationships of the parent and child tables (or + association table). + + :paramref:`_orm.relationship.primaryjoin` may also be passed as a + callable function which is evaluated at mapper initialization time, + and may be passed as a Python-evaluable string when using + Declarative. + + .. warning:: When passed as a Python-evaluable string, the + argument is interpreted using Python's ``eval()`` function. + **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**. + See :ref:`declarative_relationship_eval` for details on + declarative evaluation of :func:`_orm.relationship` arguments. + + .. seealso:: + + :ref:`relationship_primaryjoin` + + :param remote_side: + Used for self-referential relationships, indicates the column or + list of columns that form the "remote side" of the relationship. + + :paramref:`_orm.relationship.remote_side` may also be passed as a + callable function which is evaluated at mapper initialization time, + and may be passed as a Python-evaluable string when using + Declarative. + + .. warning:: When passed as a Python-evaluable string, the + argument is interpreted using Python's ``eval()`` function. + **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**. + See :ref:`declarative_relationship_eval` for details on + declarative evaluation of :func:`_orm.relationship` arguments. + + .. seealso:: + + :ref:`self_referential` - in-depth explanation of how + :paramref:`_orm.relationship.remote_side` + is used to configure self-referential relationships. 
+
+ :func:`.remote` - an annotation function that accomplishes the
+ same purpose as :paramref:`_orm.relationship.remote_side`,
+ typically
+ when a custom :paramref:`_orm.relationship.primaryjoin` condition
+ is used.
+
+ :param query_class:
+ A :class:`_query.Query`
+ subclass that will be used internally by the
+ ``AppenderQuery`` returned by a "dynamic" relationship, that
+ is, a relationship that specifies ``lazy="dynamic"`` or was
+ otherwise constructed using the :func:`_orm.dynamic_loader`
+ function.
+
+ .. seealso::
+
+ :ref:`dynamic_relationship` - Introduction to "dynamic"
+ relationship loaders.
+
+ :param secondaryjoin:
+ A SQL expression that will be used as the join of
+ an association table to the child object. By default, this value is
+ computed based on the foreign key relationships of the association
+ and child tables.
+
+ :paramref:`_orm.relationship.secondaryjoin` may also be passed as a
+ callable function which is evaluated at mapper initialization time,
+ and may be passed as a Python-evaluable string when using
+ Declarative.
+
+ .. warning:: When passed as a Python-evaluable string, the
+ argument is interpreted using Python's ``eval()`` function.
+ **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**.
+ See :ref:`declarative_relationship_eval` for details on
+ declarative evaluation of :func:`_orm.relationship` arguments.
+
+ .. seealso::
+
+ :ref:`relationship_primaryjoin`
+
+ :param single_parent:
+ When True, installs a validator which will prevent objects
+ from being associated with more than one parent at a time.
+ This is used for many-to-one or many-to-many relationships that
+ should be treated either as one-to-one or one-to-many. Its usage
+ is optional, except for :func:`_orm.relationship` constructs which
+ are many-to-one or many-to-many and also
+ specify the ``delete-orphan`` cascade option. The
+ :func:`_orm.relationship` construct itself will raise an error
+ indicating when this option is required.
+
+ .. seealso::
+
+ :ref:`unitofwork_cascades` - includes detail on when the
+ :paramref:`_orm.relationship.single_parent`
+ flag may be appropriate.
+
+ :param uselist:
+ A boolean that indicates if this property should be loaded as a
+ list or a scalar. In most cases, this value is determined
+ automatically by :func:`_orm.relationship` at mapper configuration
+ time. When using explicit :class:`_orm.Mapped` annotations,
+ :paramref:`_orm.relationship.uselist` may be derived from
+ whether or not the annotation within :class:`_orm.Mapped` contains
+ a collection class.
+ Otherwise, :paramref:`_orm.relationship.uselist` may be derived from
+ the type and direction
+ of the relationship - one to many forms a list, many to one
+ forms a scalar, many to many is a list. If a scalar is desired
+ where normally a list would be present, such as a bi-directional
+ one-to-one relationship, use an appropriate :class:`_orm.Mapped`
+ annotation or set :paramref:`_orm.relationship.uselist` to False.
+
+ The :paramref:`_orm.relationship.uselist`
+ flag is also available on an
+ existing :func:`_orm.relationship`
+ construct as a read-only attribute,
+ which can be used to determine if this :func:`_orm.relationship`
+ deals
+ with collections or scalar attributes::
+
+ >>> User.addresses.property.uselist
+ True
+
+ .. seealso::
+
+ :ref:`relationships_one_to_one` - Introduction to the "one to
+ one" relationship pattern, which typically involves an alternate
+ setting for :paramref:`_orm.relationship.uselist`; a short sketch
+ follows below.
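+
+ Such a one-to-one configuration might look like the following sketch
+ (``Parent`` and ``Child`` are placeholder classes for illustration
+ only)::
+
+ class Parent(Base):
+ __tablename__ = "parent"
+
+ id = Column(Integer, primary_key=True)
+
+ # scalar reference to a single Child rather than a list
+ child = relationship(
+ "Child", uselist=False, back_populates="parent"
+ )
+
+ class Child(Base):
+ __tablename__ = "child"
+
+ id = Column(Integer, primary_key=True)
+ parent_id = Column(ForeignKey("parent.id"))
+
+ # the many-to-one side is a scalar in any case
+ parent = relationship("Parent", back_populates="child")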
+
+ :param viewonly=False:
+ When set to ``True``, the relationship is used only for loading
+ objects, and not for any persistence operation. A
+ :func:`_orm.relationship` which specifies
+ :paramref:`_orm.relationship.viewonly` can work
+ with a wider range of SQL operations within the
+ :paramref:`_orm.relationship.primaryjoin` condition, including
+ operations that feature the use of a variety of comparison operators
+ as well as SQL functions such as :func:`_expression.cast`. The
+ :paramref:`_orm.relationship.viewonly`
+ flag is also of general use when defining any kind of
+ :func:`_orm.relationship` that doesn't represent
+ the full set of related objects, to prevent modifications of the
+ collection from resulting in persistence operations.
+
+ .. seealso::
+
+ :ref:`relationship_viewonly_notes` - more details on best practices
+ when using :paramref:`_orm.relationship.viewonly`.
+
+ :param sync_backref:
+ A boolean that enables the events used to synchronize the in-Python
+ attributes when this relationship is the target of either
+ :paramref:`_orm.relationship.backref` or
+ :paramref:`_orm.relationship.back_populates`.
+
+ Defaults to ``None``, which indicates that an automatic value should
+ be selected based on the value of the
+ :paramref:`_orm.relationship.viewonly` flag. When left at its
+ default, changes in state will be back-populated only if neither
+ side of the relationship is viewonly.
+
+ .. versionadded:: 1.3.17
+
+ .. versionchanged:: 1.4 - A relationship that specifies
+ :paramref:`_orm.relationship.viewonly` automatically implies
+ that :paramref:`_orm.relationship.sync_backref` is ``False``.
+
+ .. seealso::
+
+ :paramref:`_orm.relationship.viewonly`
+
+ :param omit_join:
+ Allows manual control over the "selectin" automatic join
+ optimization. Set to ``False`` to disable the "omit join" feature
+ added in SQLAlchemy 1.3; or leave as ``None`` to leave automatic
+ optimization in place.
+
+ .. note:: This flag may only be set to ``False``. It is not
+ necessary to set it to ``True`` as the "omit_join" optimization is
+ automatically detected; if it is not detected, then the
+ optimization is not supported.
+
+ .. versionchanged:: 1.3.11 setting ``omit_join`` to True will now
+ emit a warning as this was not the intended use of this flag.
+
+ .. versionadded:: 1.3
+
+ :param init: Specific to :ref:`orm_declarative_native_dataclasses`,
+ specifies if the mapped attribute should be part of the ``__init__()``
+ method as generated by the dataclass process.
+ :param repr: Specific to :ref:`orm_declarative_native_dataclasses`,
+ specifies if the mapped attribute should be part of the ``__repr__()``
+ method as generated by the dataclass process.
+ :param default_factory: Specific to
+ :ref:`orm_declarative_native_dataclasses`,
+ specifies a default-value generation function that will take place
+ as part of the ``__init__()``
+ method as generated by the dataclass process.
+ :param compare: Specific to
+ :ref:`orm_declarative_native_dataclasses`, indicates if this field
+ should be included in comparison operations when generating the
+ ``__eq__()`` and ``__ne__()`` methods for the mapped class.
+
+ .. versionadded:: 2.0.0b4
+
+ :param kw_only: Specific to
+ :ref:`orm_declarative_native_dataclasses`, indicates if this field
+ should be marked as keyword-only when generating the ``__init__()``
+ method; a short sketch of these dataclass-related parameters follows
+ below.
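+
+ As a short sketch of these dataclass-related parameters, under
+ :ref:`orm_declarative_native_dataclasses` a collection may be given a
+ dataclass default and excluded from the generated ``__repr__()``
+ (``Parent`` and ``Child`` are placeholder names)::
+
+ class Parent(MappedAsDataclass, Base):
+ __tablename__ = "parent"
+
+ id: Mapped[int] = mapped_column(primary_key=True, init=False)
+
+ # populated as an empty list by the generated __init__(),
+ # and omitted from the generated __repr__()
+ children: Mapped[List["Child"]] = relationship(
+ default_factory=list, repr=False
+ )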
+ + + """ + + return _RelationshipDeclared( + argument, + secondary=secondary, + uselist=uselist, + collection_class=collection_class, + primaryjoin=primaryjoin, + secondaryjoin=secondaryjoin, + back_populates=back_populates, + order_by=order_by, + backref=backref, + overlaps=overlaps, + post_update=post_update, + cascade=cascade, + viewonly=viewonly, + attribute_options=_AttributeOptions( + init, repr, default, default_factory, compare, kw_only + ), + lazy=lazy, + passive_deletes=passive_deletes, + passive_updates=passive_updates, + active_history=active_history, + enable_typechecks=enable_typechecks, + foreign_keys=foreign_keys, + remote_side=remote_side, + join_depth=join_depth, + comparator_factory=comparator_factory, + single_parent=single_parent, + innerjoin=innerjoin, + distinct_target_key=distinct_target_key, + load_on_pending=load_on_pending, + query_class=query_class, + info=info, + omit_join=omit_join, + sync_backref=sync_backref, + **kw, + ) + + +def synonym( + name: str, + *, + map_column: Optional[bool] = None, + descriptor: Optional[Any] = None, + comparator_factory: Optional[Type[PropComparator[_T]]] = None, + init: Union[_NoArg, bool] = _NoArg.NO_ARG, + repr: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + default: Union[_NoArg, _T] = _NoArg.NO_ARG, + default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, + compare: Union[_NoArg, bool] = _NoArg.NO_ARG, + kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + info: Optional[_InfoType] = None, + doc: Optional[str] = None, +) -> Synonym[Any]: + """Denote an attribute name as a synonym to a mapped property, + in that the attribute will mirror the value and expression behavior + of another attribute. + + e.g.:: + + class MyClass(Base): + __tablename__ = 'my_table' + + id = Column(Integer, primary_key=True) + job_status = Column(String(50)) + + status = synonym("job_status") + + + :param name: the name of the existing mapped property. This + can refer to the string name ORM-mapped attribute + configured on the class, including column-bound attributes + and relationships. + + :param descriptor: a Python :term:`descriptor` that will be used + as a getter (and potentially a setter) when this attribute is + accessed at the instance level. + + :param map_column: **For classical mappings and mappings against + an existing Table object only**. if ``True``, the :func:`.synonym` + construct will locate the :class:`_schema.Column` + object upon the mapped + table that would normally be associated with the attribute name of + this synonym, and produce a new :class:`.ColumnProperty` that instead + maps this :class:`_schema.Column` + to the alternate name given as the "name" + argument of the synonym; in this way, the usual step of redefining + the mapping of the :class:`_schema.Column` + to be under a different name is + unnecessary. 
This is usually intended to be used when a
+ :class:`_schema.Column`
+ is to be replaced with an attribute that also uses a
+ descriptor, that is, in conjunction with the
+ :paramref:`.synonym.descriptor` parameter::
+
+ my_table = Table(
+ "my_table", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('job_status', String(50))
+ )
+
+ class MyClass:
+ @property
+ def _job_status_descriptor(self):
+ return "Status: %s" % self._job_status
+
+
+ mapper(
+ MyClass, my_table, properties={
+ "job_status": synonym(
+ "_job_status", map_column=True,
+ descriptor=MyClass._job_status_descriptor)
+ }
+ )
+
+ Above, the attribute named ``_job_status`` is automatically
+ mapped to the ``job_status`` column::
+
+ >>> j1 = MyClass()
+ >>> j1._job_status = "employed"
+ >>> j1.job_status
+ Status: employed
+
+ When using Declarative, in order to provide a descriptor in
+ conjunction with a synonym, use the
+ :func:`sqlalchemy.ext.declarative.synonym_for` helper. However,
+ note that the :ref:`hybrid properties <mapper_hybrids>` feature
+ should usually be preferred, particularly when redefining attribute
+ behavior.
+
+ :param info: Optional data dictionary which will be populated into the
+ :attr:`.InspectionAttr.info` attribute of this object.
+
+ :param comparator_factory: A subclass of :class:`.PropComparator`
+ that will provide custom comparison behavior at the SQL expression
+ level.
+
+ .. note::
+
+ For the use case of providing an attribute which redefines both
+ Python-level and SQL-expression level behavior of an attribute,
+ please refer to the Hybrid attribute introduced at
+ :ref:`mapper_hybrids` for a more effective technique.
+
+ .. seealso::
+
+ :ref:`synonyms` - Overview of synonyms
+
+ :func:`.synonym_for` - a helper oriented towards Declarative
+
+ :ref:`mapper_hybrids` - The Hybrid Attribute extension provides an
+ updated approach to augmenting attribute behavior more flexibly
+ than can be achieved with synonyms.
+
+ """
+ return Synonym(
+ name,
+ map_column=map_column,
+ descriptor=descriptor,
+ comparator_factory=comparator_factory,
+ attribute_options=_AttributeOptions(
+ init, repr, default, default_factory, compare, kw_only
+ ),
+ doc=doc,
+ info=info,
+ )
+
+
+def create_session(
+ bind: Optional[_SessionBind] = None, **kwargs: Any
+) -> Session:
+ r"""Create a new :class:`.Session`
+ with no automation enabled by default.
+
+ This function is used primarily for testing. The usual
+ route to :class:`.Session` creation is via its constructor
+ or the :func:`.sessionmaker` function.
+
+ :param bind: optional, a single Connectable to use for all
+ database access in the created
+ :class:`~sqlalchemy.orm.session.Session`.
+
+ :param \*\*kwargs: optional, passed through to the
+ :class:`.Session` constructor.
+
+ :returns: an :class:`~sqlalchemy.orm.session.Session` instance
+
+ The defaults of create_session() are the opposite of those of
+ :func:`sessionmaker`; ``autoflush`` and ``expire_on_commit`` are
+ False.
+
+ Usage::
+
+ >>> from sqlalchemy.orm import create_session
+ >>> session = create_session()
+
+ It is recommended to use :func:`sessionmaker` instead of
+ create_session().
+
+ """
+
+ kwargs.setdefault("autoflush", False)
+ kwargs.setdefault("expire_on_commit", False)
+ return Session(bind=bind, **kwargs)
+
+
+def _mapper_fn(*arg: Any, **kw: Any) -> NoReturn:
+ """Placeholder for the now-removed ``mapper()`` function.
+
+ Classical mappings should be performed using the
+ :meth:`_orm.registry.map_imperatively` method.
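+
+ For example, a minimal sketch of the imperative form (``MyClass`` and
+ ``my_table`` stand in for an application's own class and
+ :class:`_schema.Table`)::
+
+ from sqlalchemy.orm import registry
+
+ mapper_registry = registry()
+ mapper_registry.map_imperatively(MyClass, my_table)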
+
+ This symbol remains in SQLAlchemy 2.0 to suit the deprecated use case
+ of using the ``mapper()`` function as a target for ORM event listeners,
+ which failed to be marked as deprecated in the 1.4 series.
+
+ Global ORM mapper listeners should instead use the :class:`_orm.Mapper`
+ class as the target.
+
+ .. versionchanged:: 2.0 The ``mapper()`` function was removed; the
+ symbol remains temporarily as a placeholder for the event listening
+ use case.
+
+ """
+ raise InvalidRequestError(
+ "The 'sqlalchemy.orm.mapper()' function is removed as of "
+ "SQLAlchemy 2.0. Use the "
+ "'sqlalchemy.orm.registry.map_imperatively()' "
+ "method of the ``sqlalchemy.orm.registry`` class to perform "
+ "classical mapping."
+ )
+
+
+def dynamic_loader(
+ argument: Optional[_RelationshipArgumentType[Any]] = None, **kw: Any
+) -> RelationshipProperty[Any]:
+ """Construct a dynamically-loading mapper property.
+
+ This is essentially the same as
+ using the ``lazy='dynamic'`` argument with :func:`relationship`::
+
+ dynamic_loader(SomeClass)
+
+ # is the same as
+
+ relationship(SomeClass, lazy="dynamic")
+
+ See the section :ref:`dynamic_relationship` for more details
+ on dynamic loading.
+
+ """
+ kw["lazy"] = "dynamic"
+ return relationship(argument, **kw)
+
+
+def backref(name: str, **kwargs: Any) -> ORMBackrefArgument:
+ """When using the :paramref:`_orm.relationship.backref` parameter,
+ provides specific parameters to be used when the new
+ :func:`_orm.relationship` is generated.
+
+ E.g.::
+
+ 'items': relationship(
+ SomeItem, backref=backref('parent', lazy='subquery'))
+
+ The :paramref:`_orm.relationship.backref` parameter is generally
+ considered to be legacy; for modern applications, using
+ explicit :func:`_orm.relationship` constructs linked together using
+ the :paramref:`_orm.relationship.back_populates` parameter should be
+ preferred.
+
+ .. seealso::
+
+ :ref:`relationships_backref` - background on backrefs
+
+ """
+
+ return (name, kwargs)
+
+
+def deferred(
+ column: _ORMColumnExprArgument[_T],
+ *additional_columns: _ORMColumnExprArgument[Any],
+ group: Optional[str] = None,
+ raiseload: bool = False,
+ comparator_factory: Optional[Type[PropComparator[_T]]] = None,
+ init: Union[_NoArg, bool] = _NoArg.NO_ARG,
+ repr: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002
+ default: Optional[Any] = _NoArg.NO_ARG,
+ default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
+ compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
+ kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+ active_history: bool = False,
+ expire_on_flush: bool = True,
+ info: Optional[_InfoType] = None,
+ doc: Optional[str] = None,
+) -> MappedSQLExpression[_T]:
+ r"""Indicate a column-based mapped attribute that by default will
+ not load unless accessed.
+
+ When using :func:`_orm.mapped_column`, the same functionality as
+ that of the :func:`_orm.deferred` construct is provided by using the
+ :paramref:`_orm.mapped_column.deferred` parameter.
+
+ :param \*columns: columns to be mapped. This is typically a single
+ :class:`_schema.Column` object,
+ however a collection is supported in order
+ to support multiple columns mapped under the same attribute.
+
+ :param raiseload: boolean, if True, indicates an exception should be raised
+ if the load operation is to take place.
+
+ .. versionadded:: 1.4
+
+
+ Additional arguments are the same as those of :func:`_orm.column_property`.
+
+ ..
seealso:: + + :ref:`orm_queryguide_deferred_imperative` + + """ + return MappedSQLExpression( + column, + *additional_columns, + attribute_options=_AttributeOptions( + init, repr, default, default_factory, compare, kw_only + ), + group=group, + deferred=True, + raiseload=raiseload, + comparator_factory=comparator_factory, + active_history=active_history, + expire_on_flush=expire_on_flush, + info=info, + doc=doc, + ) + + +def query_expression( + default_expr: _ORMColumnExprArgument[_T] = sql.null(), + *, + repr: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + compare: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + expire_on_flush: bool = True, + info: Optional[_InfoType] = None, + doc: Optional[str] = None, +) -> MappedSQLExpression[_T]: + """Indicate an attribute that populates from a query-time SQL expression. + + :param default_expr: Optional SQL expression object that will be used in + all cases if not assigned later with :func:`_orm.with_expression`. + + .. versionadded:: 1.2 + + .. seealso:: + + :ref:`orm_queryguide_with_expression` - background and usage examples + + """ + prop = MappedSQLExpression( + default_expr, + attribute_options=_AttributeOptions( + False, + repr, + _NoArg.NO_ARG, + _NoArg.NO_ARG, + compare, + _NoArg.NO_ARG, + ), + expire_on_flush=expire_on_flush, + info=info, + doc=doc, + _assume_readonly_dc_attributes=True, + ) + + prop.strategy_key = (("query_expression", True),) + return prop + + +def clear_mappers() -> None: + """Remove all mappers from all classes. + + .. versionchanged:: 1.4 This function now locates all + :class:`_orm.registry` objects and calls upon the + :meth:`_orm.registry.dispose` method of each. + + This function removes all instrumentation from classes and disposes + of their associated mappers. Once called, the classes are unmapped + and can be later re-mapped with new mappers. + + :func:`.clear_mappers` is *not* for normal use, as there is literally no + valid usage for it outside of very specific testing scenarios. Normally, + mappers are permanent structural components of user-defined classes, and + are never discarded independently of their class. If a mapped class + itself is garbage collected, its mapper is automatically disposed of as + well. As such, :func:`.clear_mappers` is only for usage in test suites + that re-use the same classes with different mappings, which is itself an + extremely rare use case - the only such use case is in fact SQLAlchemy's + own test suite, and possibly the test suites of other ORM extension + libraries which intend to test various combinations of mapper construction + upon a fixed set of classes. + + """ + + mapperlib._dispose_registries(mapperlib._all_registries(), False) + + +# I would really like a way to get the Type[] here that shows up +# in a different way in typing tools, however there is no current method +# that is accepted by mypy (subclass of Type[_O] works in pylance, rejected +# by mypy). +AliasedType = Annotated[Type[_O], "aliased"] + + +@overload +def aliased( + element: Type[_O], + alias: Optional[FromClause] = None, + name: Optional[str] = None, + flat: bool = False, + adapt_on_names: bool = False, +) -> AliasedType[_O]: ... + + +@overload +def aliased( + element: Union[AliasedClass[_O], Mapper[_O], AliasedInsp[_O]], + alias: Optional[FromClause] = None, + name: Optional[str] = None, + flat: bool = False, + adapt_on_names: bool = False, +) -> AliasedClass[_O]: ... 
+
+
+@overload
+def aliased(
+ element: FromClause,
+ alias: None = None,
+ name: Optional[str] = None,
+ flat: bool = False,
+ adapt_on_names: bool = False,
+) -> FromClause: ...
+
+
+def aliased(
+ element: Union[_EntityType[_O], FromClause],
+ alias: Optional[FromClause] = None,
+ name: Optional[str] = None,
+ flat: bool = False,
+ adapt_on_names: bool = False,
+) -> Union[AliasedClass[_O], FromClause, AliasedType[_O]]:
+ """Produce an alias of the given element, usually an :class:`.AliasedClass`
+ instance.
+
+ E.g.::
+
+ my_alias = aliased(MyClass)
+
+ stmt = select(MyClass, my_alias).filter(MyClass.id > my_alias.id)
+ result = session.execute(stmt)
+
+ The :func:`.aliased` function is used to create an ad-hoc mapping of a
+ mapped class to a new selectable. By default, a selectable is generated
+ from the normally mapped selectable (typically a :class:`_schema.Table`)
+ using the
+ :meth:`_expression.FromClause.alias` method. However, :func:`.aliased`
+ can also be
+ used to link the class to a new :func:`_expression.select` statement.
+ Also, the :func:`.with_polymorphic` function is a variant of
+ :func:`.aliased` that is intended to specify a so-called "polymorphic
+ selectable", that corresponds to the union of several joined-inheritance
+ subclasses at once.
+
+ For convenience, the :func:`.aliased` function also accepts plain
+ :class:`_expression.FromClause` constructs, such as a
+ :class:`_schema.Table` or
+ :func:`_expression.select` construct. In those cases, the
+ :meth:`_expression.FromClause.alias`
+ method is called on the object and the new
+ :class:`_expression.Alias` object is returned. The returned
+ :class:`_expression.Alias` is not
+ ORM-mapped in this case.
+
+ .. seealso::
+
+ :ref:`tutorial_orm_entity_aliases` - in the :ref:`unified_tutorial`
+
+ :ref:`orm_queryguide_orm_aliases` - in the :ref:`queryguide_toplevel`
+
+ :param element: element to be aliased. Is normally a mapped class,
+ but for convenience can also be a :class:`_expression.FromClause`
+ element.
+
+ :param alias: Optional selectable unit to map the element to. This is
+ usually used to link the object to a subquery, and should be an aliased
+ select construct as one would produce from the
+ :meth:`_query.Query.subquery` method or
+ the :meth:`_expression.Select.subquery` or
+ :meth:`_expression.Select.alias` methods of the :func:`_expression.select`
+ construct.
+
+ :param name: optional string name to use for the alias, if not specified
+ by the ``alias`` parameter. The name, among other things, forms the
+ attribute name that will be accessible via tuples returned by a
+ :class:`_query.Query` object. Not supported when creating aliases
+ of :class:`_sql.Join` objects.
+
+ :param flat: Boolean, will be passed through to the
+ :meth:`_expression.FromClause.alias` call so that aliases of
+ :class:`_expression.Join` objects will alias the individual tables
+ inside the join, rather than creating a subquery. This is generally
+ supported by all modern databases with regards to right-nested joins
+ and generally produces more efficient queries.
+
+ When :paramref:`_orm.aliased.flat` is combined with
+ :paramref:`_orm.aliased.name`, the resulting joins will alias individual
+ tables using a naming scheme similar to ``<name>_<tablename>``. This
+ naming scheme is for visibility / debugging purposes only and the
+ specific scheme is subject to change without notice.
+
+ .. versionadded:: 2.0.32 added support for combining
+ :paramref:`_orm.aliased.name` with :paramref:`_orm.aliased.flat`.
+ Previously, this would raise ``NotImplementedError``. + + :param adapt_on_names: if True, more liberal "matching" will be used when + mapping the mapped columns of the ORM entity to those of the + given selectable - a name-based match will be performed if the + given selectable doesn't otherwise have a column that corresponds + to one on the entity. The use case for this is when associating + an entity with some derived selectable such as one that uses + aggregate functions:: + + class UnitPrice(Base): + __tablename__ = 'unit_price' + ... + unit_id = Column(Integer) + price = Column(Numeric) + + aggregated_unit_price = Session.query( + func.sum(UnitPrice.price).label('price') + ).group_by(UnitPrice.unit_id).subquery() + + aggregated_unit_price = aliased(UnitPrice, + alias=aggregated_unit_price, adapt_on_names=True) + + Above, functions on ``aggregated_unit_price`` which refer to + ``.price`` will return the + ``func.sum(UnitPrice.price).label('price')`` column, as it is + matched on the name "price". Ordinarily, the "price" function + wouldn't have any "column correspondence" to the actual + ``UnitPrice.price`` column as it is not a proxy of the original. + + """ + return AliasedInsp._alias_factory( + element, + alias=alias, + name=name, + flat=flat, + adapt_on_names=adapt_on_names, + ) + + +def with_polymorphic( + base: Union[Type[_O], Mapper[_O]], + classes: Union[Literal["*"], Iterable[Type[Any]]], + selectable: Union[Literal[False, None], FromClause] = False, + flat: bool = False, + polymorphic_on: Optional[ColumnElement[Any]] = None, + aliased: bool = False, + innerjoin: bool = False, + adapt_on_names: bool = False, + name: Optional[str] = None, + _use_mapper_path: bool = False, +) -> AliasedClass[_O]: + """Produce an :class:`.AliasedClass` construct which specifies + columns for descendant mappers of the given base. + + Using this method will ensure that each descendant mapper's + tables are included in the FROM clause, and will allow filter() + criterion to be used against those tables. The resulting + instances will also have those columns already loaded so that + no "post fetch" of those columns will be required. + + .. seealso:: + + :ref:`with_polymorphic` - full discussion of + :func:`_orm.with_polymorphic`. + + :param base: Base class to be aliased. + + :param classes: a single class or mapper, or list of + class/mappers, which inherit from the base class. + Alternatively, it may also be the string ``'*'``, in which case + all descending mapped classes will be added to the FROM clause. + + :param aliased: when True, the selectable will be aliased. For a + JOIN, this means the JOIN will be SELECTed from inside of a subquery + unless the :paramref:`_orm.with_polymorphic.flat` flag is set to + True, which is recommended for simpler use cases. + + :param flat: Boolean, will be passed through to the + :meth:`_expression.FromClause.alias` call so that aliases of + :class:`_expression.Join` objects will alias the individual tables + inside the join, rather than creating a subquery. This is generally + supported by all modern databases with regards to right-nested joins + and generally produces more efficient queries. Setting this flag is + recommended as long as the resulting SQL is functional. + + :param selectable: a table or subquery that will + be used in place of the generated FROM clause. This argument is + required if any of the desired classes use concrete table + inheritance, since SQLAlchemy currently cannot generate UNIONs + among tables automatically. 
If used, the ``selectable`` argument
+ must represent the full set of tables and columns mapped by every
+ mapped class. Otherwise, the unaccounted mapped columns will
+ result in their table being appended directly to the FROM clause,
+ which will usually lead to incorrect results.
+
+ When left at its default value of ``False``, the polymorphic
+ selectable assigned to the base mapper is used for selecting rows.
+ However, it may also be passed as ``None``, which will bypass the
+ configured polymorphic selectable and instead construct an ad-hoc
+ selectable for the target classes given; for joined table inheritance
+ this will be a join that includes all target mappers and their
+ subclasses.
+
+ :param polymorphic_on: a column to be used as the "discriminator"
+ column for the given selectable. If not given, the polymorphic_on
+ attribute of the base class's mapper will be used, if any. This
+ is useful for mappings that don't have polymorphic loading
+ behavior by default.
+
+ :param innerjoin: if True, an INNER JOIN will be used. This should
+ only be specified if querying for one specific subtype.
+
+ :param adapt_on_names: Passes through the
+ :paramref:`_orm.aliased.adapt_on_names`
+ parameter to the aliased object. This may be useful in situations where
+ the given selectable is not directly related to the existing mapped
+ selectable.
+
+ .. versionadded:: 1.4.33
+
+ :param name: Name given to the generated :class:`.AliasedClass`.
+
+ .. versionadded:: 2.0.31
+
+ """
+ return AliasedInsp._with_polymorphic_factory(
+ base,
+ classes,
+ selectable=selectable,
+ flat=flat,
+ polymorphic_on=polymorphic_on,
+ adapt_on_names=adapt_on_names,
+ aliased=aliased,
+ innerjoin=innerjoin,
+ name=name,
+ _use_mapper_path=_use_mapper_path,
+ )
+
+
+def join(
+ left: _FromClauseArgument,
+ right: _FromClauseArgument,
+ onclause: Optional[_OnClauseArgument] = None,
+ isouter: bool = False,
+ full: bool = False,
+) -> _ORMJoin:
+ r"""Produce an inner join between left and right clauses.
+
+ :func:`_orm.join` is an extension to the core join interface
+ provided by :func:`_expression.join()`, where the
+ left and right selectable may be not only core selectable
+ objects such as :class:`_schema.Table`, but also mapped classes or
+ :class:`.AliasedClass` instances. The "on" clause can
+ be a SQL expression or an ORM mapped attribute
+ referencing a configured :func:`_orm.relationship`.
+
+ :func:`_orm.join` is not commonly needed in modern usage,
+ as its functionality is encapsulated within that of the
+ :meth:`_sql.Select.join` and :meth:`_query.Query.join`
+ methods, which feature a
+ significant amount of automation beyond :func:`_orm.join`
+ by itself. Explicit use of :func:`_orm.join`
+ with ORM-enabled SELECT statements involves use of the
+ :meth:`_sql.Select.select_from` method, as in::
+
+ from sqlalchemy.orm import join
+ stmt = select(User).\
+ select_from(join(User, Address, User.addresses)).\
+ filter(Address.email_address=='foo@bar.com')
+
+ In modern SQLAlchemy the above join can be written more
+ succinctly as::
+
+ stmt = select(User).\
+ join(User.addresses).\
+ filter(Address.email_address=='foo@bar.com')
+
+ .. warning:: using :func:`_orm.join` directly may not work properly
+ with modern ORM options such as :func:`_orm.with_loader_criteria`.
+ It is strongly recommended to use the idiomatic join patterns
+ provided by methods such as :meth:`.Select.join` and
+ :meth:`.Select.join_from` when creating ORM joins.
+
+ ..
seealso:: + + :ref:`orm_queryguide_joins` - in the :ref:`queryguide_toplevel` for + background on idiomatic ORM join patterns + + """ + return _ORMJoin(left, right, onclause, isouter, full) + + +def outerjoin( + left: _FromClauseArgument, + right: _FromClauseArgument, + onclause: Optional[_OnClauseArgument] = None, + full: bool = False, +) -> _ORMJoin: + """Produce a left outer join between left and right clauses. + + This is the "outer join" version of the :func:`_orm.join` function, + featuring the same behavior except that an OUTER JOIN is generated. + See that function's documentation for other usage details. + + """ + return _ORMJoin(left, right, onclause, True, full) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/_typing.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/_typing.py new file mode 100644 index 00000000..f8ac0590 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/_typing.py @@ -0,0 +1,179 @@ +# orm/_typing.py +# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import operator +from typing import Any +from typing import Dict +from typing import Mapping +from typing import Optional +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from ..engine.interfaces import _CoreKnownExecutionOptions +from ..sql import roles +from ..sql._orm_types import DMLStrategyArgument as DMLStrategyArgument +from ..sql._orm_types import ( + SynchronizeSessionArgument as SynchronizeSessionArgument, +) +from ..sql._typing import _HasClauseElement +from ..sql.elements import ColumnElement +from ..util.typing import Protocol +from ..util.typing import TypeGuard + +if TYPE_CHECKING: + from .attributes import AttributeImpl + from .attributes import CollectionAttributeImpl + from .attributes import HasCollectionAdapter + from .attributes import QueryableAttribute + from .base import PassiveFlag + from .decl_api import registry as _registry_type + from .interfaces import InspectionAttr + from .interfaces import MapperProperty + from .interfaces import ORMOption + from .interfaces import UserDefinedOption + from .mapper import Mapper + from .relationships import RelationshipProperty + from .state import InstanceState + from .util import AliasedClass + from .util import AliasedInsp + from ..sql._typing import _CE + from ..sql.base import ExecutableOption + +_T = TypeVar("_T", bound=Any) + + +_T_co = TypeVar("_T_co", bound=Any, covariant=True) + +_O = TypeVar("_O", bound=object) +"""The 'ORM mapped object' type. 
+ +""" + + +if TYPE_CHECKING: + _RegistryType = _registry_type + +_InternalEntityType = Union["Mapper[_T]", "AliasedInsp[_T]"] + +_ExternalEntityType = Union[Type[_T], "AliasedClass[_T]"] + +_EntityType = Union[ + Type[_T], "AliasedClass[_T]", "Mapper[_T]", "AliasedInsp[_T]" +] + + +_ClassDict = Mapping[str, Any] +_InstanceDict = Dict[str, Any] + +_IdentityKeyType = Tuple[Type[_T], Tuple[Any, ...], Optional[Any]] + +_ORMColumnExprArgument = Union[ + ColumnElement[_T], + _HasClauseElement[_T], + roles.ExpressionElementRole[_T], +] + + +_ORMCOLEXPR = TypeVar("_ORMCOLEXPR", bound=ColumnElement[Any]) + + +class _OrmKnownExecutionOptions(_CoreKnownExecutionOptions, total=False): + populate_existing: bool + autoflush: bool + synchronize_session: SynchronizeSessionArgument + dml_strategy: DMLStrategyArgument + is_delete_using: bool + is_update_from: bool + render_nulls: bool + + +OrmExecuteOptionsParameter = Union[ + _OrmKnownExecutionOptions, Mapping[str, Any] +] + + +class _ORMAdapterProto(Protocol): + """protocol for the :class:`.AliasedInsp._orm_adapt_element` method + which is a synonym for :class:`.AliasedInsp._adapt_element`. + + + """ + + def __call__(self, obj: _CE, key: Optional[str] = None) -> _CE: ... + + +class _LoaderCallable(Protocol): + def __call__( + self, state: InstanceState[Any], passive: PassiveFlag + ) -> Any: ... + + +def is_orm_option( + opt: ExecutableOption, +) -> TypeGuard[ORMOption]: + return not opt._is_core + + +def is_user_defined_option( + opt: ExecutableOption, +) -> TypeGuard[UserDefinedOption]: + return not opt._is_core and opt._is_user_defined # type: ignore + + +def is_composite_class(obj: Any) -> bool: + # inlining is_dataclass(obj) + return hasattr(obj, "__composite_values__") or hasattr( + obj, "__dataclass_fields__" + ) + + +if TYPE_CHECKING: + + def insp_is_mapper_property( + obj: Any, + ) -> TypeGuard[MapperProperty[Any]]: ... + + def insp_is_mapper(obj: Any) -> TypeGuard[Mapper[Any]]: ... + + def insp_is_aliased_class(obj: Any) -> TypeGuard[AliasedInsp[Any]]: ... + + def insp_is_attribute( + obj: InspectionAttr, + ) -> TypeGuard[QueryableAttribute[Any]]: ... + + def attr_is_internal_proxy( + obj: InspectionAttr, + ) -> TypeGuard[QueryableAttribute[Any]]: ... + + def prop_is_relationship( + prop: MapperProperty[Any], + ) -> TypeGuard[RelationshipProperty[Any]]: ... + + def is_collection_impl( + impl: AttributeImpl, + ) -> TypeGuard[CollectionAttributeImpl]: ... + + def is_has_collection_adapter( + impl: AttributeImpl, + ) -> TypeGuard[HasCollectionAdapter]: ... 
+ +else: + insp_is_mapper_property = operator.attrgetter("is_property") + insp_is_mapper = operator.attrgetter("is_mapper") + insp_is_aliased_class = operator.attrgetter("is_aliased_class") + insp_is_attribute = operator.attrgetter("is_attribute") + attr_is_internal_proxy = operator.attrgetter("_is_internal_proxy") + is_collection_impl = operator.attrgetter("collection") + prop_is_relationship = operator.attrgetter("_is_relationship") + is_has_collection_adapter = operator.attrgetter( + "_is_has_collection_adapter" + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/attributes.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/attributes.py new file mode 100644 index 00000000..33cca564 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/attributes.py @@ -0,0 +1,2835 @@ +# orm/attributes.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +"""Defines instrumentation for class attributes and their interaction +with instances. + +This module is usually not directly visible to user applications, but +defines a large part of the ORM's interactivity. + + +""" + +from __future__ import annotations + +import dataclasses +import operator +from typing import Any +from typing import Callable +from typing import cast +from typing import ClassVar +from typing import Dict +from typing import Iterable +from typing import List +from typing import NamedTuple +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import collections +from . import exc as orm_exc +from . import interfaces +from ._typing import insp_is_aliased_class +from .base import _DeclarativeMapped +from .base import ATTR_EMPTY +from .base import ATTR_WAS_SET +from .base import CALLABLES_OK +from .base import DEFERRED_HISTORY_LOAD +from .base import INCLUDE_PENDING_MUTATIONS # noqa +from .base import INIT_OK +from .base import instance_dict as instance_dict +from .base import instance_state as instance_state +from .base import instance_str +from .base import LOAD_AGAINST_COMMITTED +from .base import LoaderCallableStatus +from .base import manager_of_class as manager_of_class +from .base import Mapped as Mapped # noqa +from .base import NEVER_SET # noqa +from .base import NO_AUTOFLUSH +from .base import NO_CHANGE # noqa +from .base import NO_KEY +from .base import NO_RAISE +from .base import NO_VALUE +from .base import NON_PERSISTENT_OK # noqa +from .base import opt_manager_of_class as opt_manager_of_class +from .base import PASSIVE_CLASS_MISMATCH # noqa +from .base import PASSIVE_NO_FETCH +from .base import PASSIVE_NO_FETCH_RELATED # noqa +from .base import PASSIVE_NO_INITIALIZE +from .base import PASSIVE_NO_RESULT +from .base import PASSIVE_OFF +from .base import PASSIVE_ONLY_PERSISTENT +from .base import PASSIVE_RETURN_NO_VALUE +from .base import PassiveFlag +from .base import RELATED_OBJECT_OK # noqa +from .base import SQL_OK # noqa +from .base import SQLORMExpression +from .base import state_str +from .. import event +from .. import exc +from .. import inspection +from .. 
import util +from ..event import dispatcher +from ..event import EventTarget +from ..sql import base as sql_base +from ..sql import cache_key +from ..sql import coercions +from ..sql import roles +from ..sql import visitors +from ..sql.cache_key import HasCacheKey +from ..sql.visitors import _TraverseInternalsType +from ..sql.visitors import InternalTraversal +from ..util.typing import Literal +from ..util.typing import Self +from ..util.typing import TypeGuard + +if TYPE_CHECKING: + from ._typing import _EntityType + from ._typing import _ExternalEntityType + from ._typing import _InstanceDict + from ._typing import _InternalEntityType + from ._typing import _LoaderCallable + from ._typing import _O + from .collections import _AdaptedCollectionProtocol + from .collections import CollectionAdapter + from .interfaces import MapperProperty + from .relationships import RelationshipProperty + from .state import InstanceState + from .util import AliasedInsp + from .writeonly import WriteOnlyAttributeImpl + from ..event.base import _Dispatch + from ..sql._typing import _ColumnExpressionArgument + from ..sql._typing import _DMLColumnArgument + from ..sql._typing import _InfoType + from ..sql._typing import _PropagateAttrsType + from ..sql.annotation import _AnnotationDict + from ..sql.elements import ColumnElement + from ..sql.elements import Label + from ..sql.operators import OperatorType + from ..sql.selectable import FromClause + + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", bound=Any, covariant=True) + + +_AllPendingType = Sequence[ + Tuple[Optional["InstanceState[Any]"], Optional[object]] +] + + +_UNKNOWN_ATTR_KEY = object() + + +@inspection._self_inspects +class QueryableAttribute( + _DeclarativeMapped[_T_co], + SQLORMExpression[_T_co], + interfaces.InspectionAttr, + interfaces.PropComparator[_T_co], + roles.JoinTargetRole, + roles.OnClauseRole, + sql_base.Immutable, + cache_key.SlotsMemoizedHasCacheKey, + util.MemoizedSlots, + EventTarget, +): + """Base class for :term:`descriptor` objects that intercept + attribute events on behalf of a :class:`.MapperProperty` + object. The actual :class:`.MapperProperty` is accessible + via the :attr:`.QueryableAttribute.property` + attribute. + + + .. seealso:: + + :class:`.InstrumentedAttribute` + + :class:`.MapperProperty` + + :attr:`_orm.Mapper.all_orm_descriptors` + + :attr:`_orm.Mapper.attrs` + """ + + __slots__ = ( + "class_", + "key", + "impl", + "comparator", + "property", + "parent", + "expression", + "_of_type", + "_extra_criteria", + "_slots_dispatch", + "_propagate_attrs", + "_doc", + ) + + is_attribute = True + + dispatch: dispatcher[QueryableAttribute[_T_co]] + + class_: _ExternalEntityType[Any] + key: str + parententity: _InternalEntityType[Any] + impl: AttributeImpl + comparator: interfaces.PropComparator[_T_co] + _of_type: Optional[_InternalEntityType[Any]] + _extra_criteria: Tuple[ColumnElement[bool], ...] + _doc: Optional[str] + + # PropComparator has a __visit_name__ to participate within + # traversals. Disambiguate the attribute vs. a comparator. + __visit_name__ = "orm_instrumented_attribute" + + def __init__( + self, + class_: _ExternalEntityType[_O], + key: str, + parententity: _InternalEntityType[_O], + comparator: interfaces.PropComparator[_T_co], + impl: Optional[AttributeImpl] = None, + of_type: Optional[_InternalEntityType[Any]] = None, + extra_criteria: Tuple[ColumnElement[bool], ...] 
= (), + ): + self.class_ = class_ + self.key = key + + self._parententity = self.parent = parententity + + # this attribute is non-None after mappers are set up, however in the + # interim class manager setup, there's a check for None to see if it + # needs to be populated, so we assign None here leaving the attribute + # in a temporarily not-type-correct state + self.impl = impl # type: ignore + + assert comparator is not None + self.comparator = comparator + self._of_type = of_type + self._extra_criteria = extra_criteria + self._doc = None + + manager = opt_manager_of_class(class_) + # manager is None in the case of AliasedClass + if manager: + # propagate existing event listeners from + # immediate superclass + for base in manager._bases: + if key in base: + self.dispatch._update(base[key].dispatch) + if base[key].dispatch._active_history: + self.dispatch._active_history = True # type: ignore + + _cache_key_traversal = [ + ("key", visitors.ExtendedInternalTraversal.dp_string), + ("_parententity", visitors.ExtendedInternalTraversal.dp_multi), + ("_of_type", visitors.ExtendedInternalTraversal.dp_multi), + ("_extra_criteria", visitors.InternalTraversal.dp_clauseelement_list), + ] + + def __reduce__(self) -> Any: + # this method is only used in terms of the + # sqlalchemy.ext.serializer extension + return ( + _queryable_attribute_unreduce, + ( + self.key, + self._parententity.mapper.class_, + self._parententity, + self._parententity.entity, + ), + ) + + @property + def _impl_uses_objects(self) -> bool: + return self.impl.uses_objects + + def get_history( + self, instance: Any, passive: PassiveFlag = PASSIVE_OFF + ) -> History: + return self.impl.get_history( + instance_state(instance), instance_dict(instance), passive + ) + + @property + def info(self) -> _InfoType: + """Return the 'info' dictionary for the underlying SQL element. + + The behavior here is as follows: + + * If the attribute is a column-mapped property, i.e. + :class:`.ColumnProperty`, which is mapped directly + to a schema-level :class:`_schema.Column` object, this attribute + will return the :attr:`.SchemaItem.info` dictionary associated + with the core-level :class:`_schema.Column` object. + + * If the attribute is a :class:`.ColumnProperty` but is mapped to + any other kind of SQL expression other than a + :class:`_schema.Column`, + the attribute will refer to the :attr:`.MapperProperty.info` + dictionary associated directly with the :class:`.ColumnProperty`, + assuming the SQL expression itself does not have its own ``.info`` + attribute (which should be the case, unless a user-defined SQL + construct has defined one). + + * If the attribute refers to any other kind of + :class:`.MapperProperty`, including :class:`.Relationship`, + the attribute will refer to the :attr:`.MapperProperty.info` + dictionary associated with that :class:`.MapperProperty`. + + * To access the :attr:`.MapperProperty.info` dictionary of the + :class:`.MapperProperty` unconditionally, including for a + :class:`.ColumnProperty` that's associated directly with a + :class:`_schema.Column`, the attribute can be referred to using + :attr:`.QueryableAttribute.property` attribute, as + ``MyClass.someattribute.property.info``. + + .. seealso:: + + :attr:`.SchemaItem.info` + + :attr:`.MapperProperty.info` + + """ + return self.comparator.info + + parent: _InternalEntityType[Any] + """Return an inspection instance representing the parent. 
+ + This will be either an instance of :class:`_orm.Mapper` + or :class:`.AliasedInsp`, depending upon the nature + of the parent entity which this attribute is associated + with. + + """ + + expression: ColumnElement[_T_co] + """The SQL expression object represented by this + :class:`.QueryableAttribute`. + + This will typically be an instance of a :class:`_sql.ColumnElement` + subclass representing a column expression. + + """ + + def _memoized_attr_expression(self) -> ColumnElement[_T]: + annotations: _AnnotationDict + + # applies only to Proxy() as used by hybrid. + # currently is an exception to typing rather than feeding through + # non-string keys. + # ideally Proxy() would have a separate set of methods to deal + # with this case. + entity_namespace = self._entity_namespace + assert isinstance(entity_namespace, HasCacheKey) + + if self.key is _UNKNOWN_ATTR_KEY: + annotations = {"entity_namespace": entity_namespace} + else: + annotations = { + "proxy_key": self.key, + "proxy_owner": self._parententity, + "entity_namespace": entity_namespace, + } + + ce = self.comparator.__clause_element__() + try: + if TYPE_CHECKING: + assert isinstance(ce, ColumnElement) + anno = ce._annotate + except AttributeError as ae: + raise exc.InvalidRequestError( + 'When interpreting attribute "%s" as a SQL expression, ' + "expected __clause_element__() to return " + "a ClauseElement object, got: %r" % (self, ce) + ) from ae + else: + return anno(annotations) + + def _memoized_attr__propagate_attrs(self) -> _PropagateAttrsType: + # this suits the case in coercions where we don't actually + # call ``__clause_element__()`` but still need to get + # resolved._propagate_attrs. See #6558. + return util.immutabledict( + { + "compile_state_plugin": "orm", + "plugin_subject": self._parentmapper, + } + ) + + @property + def _entity_namespace(self) -> _InternalEntityType[Any]: + return self._parententity + + @property + def _annotations(self) -> _AnnotationDict: + return self.__clause_element__()._annotations + + def __clause_element__(self) -> ColumnElement[_T_co]: + return self.expression + + @property + def _from_objects(self) -> List[FromClause]: + return self.expression._from_objects + + def _bulk_update_tuples( + self, value: Any + ) -> Sequence[Tuple[_DMLColumnArgument, Any]]: + """Return setter tuples for a bulk UPDATE.""" + + return self.comparator._bulk_update_tuples(value) + + def adapt_to_entity(self, adapt_to_entity: AliasedInsp[Any]) -> Self: + assert not self._of_type + return self.__class__( + adapt_to_entity.entity, + self.key, + impl=self.impl, + comparator=self.comparator.adapt_to_entity(adapt_to_entity), + parententity=adapt_to_entity, + ) + + def of_type(self, entity: _EntityType[_T]) -> QueryableAttribute[_T]: + return QueryableAttribute( + self.class_, + self.key, + self._parententity, + impl=self.impl, + comparator=self.comparator.of_type(entity), + of_type=inspection.inspect(entity), + extra_criteria=self._extra_criteria, + ) + + def and_( + self, *clauses: _ColumnExpressionArgument[bool] + ) -> QueryableAttribute[bool]: + if TYPE_CHECKING: + assert isinstance(self.comparator, RelationshipProperty.Comparator) + + exprs = tuple( + coercions.expect(roles.WhereHavingRole, clause) + for clause in util.coerce_generator_arg(clauses) + ) + + return QueryableAttribute( + self.class_, + self.key, + self._parententity, + impl=self.impl, + comparator=self.comparator.and_(*exprs), + of_type=self._of_type, + extra_criteria=self._extra_criteria + exprs, + ) + + def _clone(self, **kw: Any) -> 
QueryableAttribute[_T]: + return QueryableAttribute( + self.class_, + self.key, + self._parententity, + impl=self.impl, + comparator=self.comparator, + of_type=self._of_type, + extra_criteria=self._extra_criteria, + ) + + def label(self, name: Optional[str]) -> Label[_T_co]: + return self.__clause_element__().label(name) + + def operate( + self, op: OperatorType, *other: Any, **kwargs: Any + ) -> ColumnElement[Any]: + return op(self.comparator, *other, **kwargs) # type: ignore[no-any-return] # noqa: E501 + + def reverse_operate( + self, op: OperatorType, other: Any, **kwargs: Any + ) -> ColumnElement[Any]: + return op(other, self.comparator, **kwargs) # type: ignore[no-any-return] # noqa: E501 + + def hasparent( + self, state: InstanceState[Any], optimistic: bool = False + ) -> bool: + return self.impl.hasparent(state, optimistic=optimistic) is not False + + def __getattr__(self, key: str) -> Any: + try: + return util.MemoizedSlots.__getattr__(self, key) + except AttributeError: + pass + + try: + return getattr(self.comparator, key) + except AttributeError as err: + raise AttributeError( + "Neither %r object nor %r object associated with %s " + "has an attribute %r" + % ( + type(self).__name__, + type(self.comparator).__name__, + self, + key, + ) + ) from err + + def __str__(self) -> str: + return f"{self.class_.__name__}.{self.key}" + + def _memoized_attr_property(self) -> Optional[MapperProperty[Any]]: + return self.comparator.property + + +def _queryable_attribute_unreduce( + key: str, + mapped_class: Type[_O], + parententity: _InternalEntityType[_O], + entity: _ExternalEntityType[Any], +) -> Any: + # this method is only used in terms of the + # sqlalchemy.ext.serializer extension + if insp_is_aliased_class(parententity): + return entity._get_from_serialized(key, mapped_class, parententity) + else: + return getattr(entity, key) + + +class InstrumentedAttribute(QueryableAttribute[_T_co]): + """Class bound instrumented attribute which adds basic + :term:`descriptor` methods. + + See :class:`.QueryableAttribute` for a description of most features. + + + """ + + __slots__ = () + + inherit_cache = True + """:meta private:""" + + # hack to make __doc__ writeable on instances of + # InstrumentedAttribute, while still keeping classlevel + # __doc__ correct + + @util.rw_hybridproperty + def __doc__(self) -> Optional[str]: + return self._doc + + @__doc__.setter # type: ignore + def __doc__(self, value: Optional[str]) -> None: + self._doc = value + + @__doc__.classlevel # type: ignore + def __doc__(cls) -> Optional[str]: + return super().__doc__ + + def __set__(self, instance: object, value: Any) -> None: + self.impl.set( + instance_state(instance), instance_dict(instance), value, None + ) + + def __delete__(self, instance: object) -> None: + self.impl.delete(instance_state(instance), instance_dict(instance)) + + @overload + def __get__( + self, instance: None, owner: Any + ) -> InstrumentedAttribute[_T_co]: ... + + @overload + def __get__(self, instance: object, owner: Any) -> _T_co: ... 
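+    # [editorial sketch -- not part of upstream SQLAlchemy; ``User`` and
+    # ``some_user`` are hypothetical names] the overloads above encode
+    # standard descriptor behavior:
+    #
+    #     User.name        # class access    -> InstrumentedAttribute[str]
+    #     some_user.name   # instance access -> the value (may emit SQL
+    #                      # via a loader callable if not yet present)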
+ + def __get__( + self, instance: Optional[object], owner: Any + ) -> Union[InstrumentedAttribute[_T_co], _T_co]: + if instance is None: + return self + + dict_ = instance_dict(instance) + if self.impl.supports_population and self.key in dict_: + return dict_[self.key] # type: ignore[no-any-return] + else: + try: + state = instance_state(instance) + except AttributeError as err: + raise orm_exc.UnmappedInstanceError(instance) from err + return self.impl.get(state, dict_) # type: ignore[no-any-return] + + +@dataclasses.dataclass(frozen=True) +class AdHocHasEntityNamespace(HasCacheKey): + _traverse_internals: ClassVar[_TraverseInternalsType] = [ + ("_entity_namespace", InternalTraversal.dp_has_cache_key), + ] + + # py37 compat, no slots=True on dataclass + __slots__ = ("_entity_namespace",) + _entity_namespace: _InternalEntityType[Any] + is_mapper: ClassVar[bool] = False + is_aliased_class: ClassVar[bool] = False + + @property + def entity_namespace(self): + return self._entity_namespace.entity_namespace + + +def create_proxied_attribute( + descriptor: Any, +) -> Callable[..., QueryableAttribute[Any]]: + """Create an QueryableAttribute / user descriptor hybrid. + + Returns a new QueryableAttribute type that delegates descriptor + behavior and getattr() to the given descriptor. + """ + + # TODO: can move this to descriptor_props if the need for this + # function is removed from ext/hybrid.py + + class Proxy(QueryableAttribute[Any]): + """Presents the :class:`.QueryableAttribute` interface as a + proxy on top of a Python descriptor / :class:`.PropComparator` + combination. + + """ + + _extra_criteria = () + + # the attribute error catches inside of __getattr__ basically create a + # singularity if you try putting slots on this too + # __slots__ = ("descriptor", "original_property", "_comparator") + + def __init__( + self, + class_, + key, + descriptor, + comparator, + adapt_to_entity=None, + doc=None, + original_property=None, + ): + self.class_ = class_ + self.key = key + self.descriptor = descriptor + self.original_property = original_property + self._comparator = comparator + self._adapt_to_entity = adapt_to_entity + self._doc = self.__doc__ = doc + + @property + def _parententity(self): + return inspection.inspect(self.class_, raiseerr=False) + + @property + def parent(self): + return inspection.inspect(self.class_, raiseerr=False) + + _is_internal_proxy = True + + _cache_key_traversal = [ + ("key", visitors.ExtendedInternalTraversal.dp_string), + ("_parententity", visitors.ExtendedInternalTraversal.dp_multi), + ] + + @property + def _impl_uses_objects(self): + return ( + self.original_property is not None + and getattr(self.class_, self.key).impl.uses_objects + ) + + @property + def _entity_namespace(self): + if hasattr(self._comparator, "_parententity"): + return self._comparator._parententity + else: + # used by hybrid attributes which try to remain + # agnostic of any ORM concepts like mappers + return AdHocHasEntityNamespace(self._parententity) + + @property + def property(self): + return self.comparator.property + + @util.memoized_property + def comparator(self): + if callable(self._comparator): + self._comparator = self._comparator() + if self._adapt_to_entity: + self._comparator = self._comparator.adapt_to_entity( + self._adapt_to_entity + ) + return self._comparator + + def adapt_to_entity(self, adapt_to_entity): + return self.__class__( + adapt_to_entity.entity, + self.key, + self.descriptor, + self._comparator, + adapt_to_entity, + ) + + def _clone(self, **kw): + return 
self.__class__( + self.class_, + self.key, + self.descriptor, + self._comparator, + adapt_to_entity=self._adapt_to_entity, + original_property=self.original_property, + ) + + def __get__(self, instance, owner): + retval = self.descriptor.__get__(instance, owner) + # detect if this is a plain Python @property, which just returns + # itself for class level access. If so, then return us. + # Otherwise, return the object returned by the descriptor. + if retval is self.descriptor and instance is None: + return self + else: + return retval + + def __str__(self) -> str: + return f"{self.class_.__name__}.{self.key}" + + def __getattr__(self, attribute): + """Delegate __getattr__ to the original descriptor and/or + comparator.""" + + # this is unfortunately very complicated, and is easily prone + # to recursion overflows when implementations of related + # __getattr__ schemes are changed + + try: + return util.MemoizedSlots.__getattr__(self, attribute) + except AttributeError: + pass + + try: + return getattr(descriptor, attribute) + except AttributeError as err: + if attribute == "comparator": + raise AttributeError("comparator") from err + try: + # comparator itself might be unreachable + comparator = self.comparator + except AttributeError as err2: + raise AttributeError( + "Neither %r object nor unconfigured comparator " + "object associated with %s has an attribute %r" + % (type(descriptor).__name__, self, attribute) + ) from err2 + else: + try: + return getattr(comparator, attribute) + except AttributeError as err3: + raise AttributeError( + "Neither %r object nor %r object " + "associated with %s has an attribute %r" + % ( + type(descriptor).__name__, + type(comparator).__name__, + self, + attribute, + ) + ) from err3 + + Proxy.__name__ = type(descriptor).__name__ + "Proxy" + + util.monkeypatch_proxied_specials( + Proxy, type(descriptor), name="descriptor", from_instance=descriptor + ) + return Proxy + + +OP_REMOVE = util.symbol("REMOVE") +OP_APPEND = util.symbol("APPEND") +OP_REPLACE = util.symbol("REPLACE") +OP_BULK_REPLACE = util.symbol("BULK_REPLACE") +OP_MODIFIED = util.symbol("MODIFIED") + + +class AttributeEventToken: + """A token propagated throughout the course of a chain of attribute + events. + + Serves as an indicator of the source of the event and also provides + a means of controlling propagation across a chain of attribute + operations. + + The :class:`.Event` object is sent as the ``initiator`` argument + when dealing with events such as :meth:`.AttributeEvents.append`, + :meth:`.AttributeEvents.set`, + and :meth:`.AttributeEvents.remove`. + + The :class:`.Event` object is currently interpreted by the backref + event handlers, and is used to control the propagation of operations + across two mutually-dependent attributes. + + .. versionchanged:: 2.0 Changed the name from ``AttributeEvent`` + to ``AttributeEventToken``. + + :attribute impl: The :class:`.AttributeImpl` which is the current event + initiator. + + :attribute op: The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE`, + :attr:`.OP_REPLACE`, or :attr:`.OP_BULK_REPLACE`, indicating the + source operation. 
+ + """ + + __slots__ = "impl", "op", "parent_token" + + def __init__(self, attribute_impl: AttributeImpl, op: util.symbol): + self.impl = attribute_impl + self.op = op + self.parent_token = self.impl.parent_token + + def __eq__(self, other): + return ( + isinstance(other, AttributeEventToken) + and other.impl is self.impl + and other.op == self.op + ) + + @property + def key(self): + return self.impl.key + + def hasparent(self, state): + return self.impl.hasparent(state) + + +AttributeEvent = AttributeEventToken # legacy +Event = AttributeEventToken # legacy + + +class AttributeImpl: + """internal implementation for instrumented attributes.""" + + collection: bool + default_accepts_scalar_loader: bool + uses_objects: bool + supports_population: bool + dynamic: bool + + _is_has_collection_adapter = False + + _replace_token: AttributeEventToken + _remove_token: AttributeEventToken + _append_token: AttributeEventToken + + def __init__( + self, + class_: _ExternalEntityType[_O], + key: str, + callable_: Optional[_LoaderCallable], + dispatch: _Dispatch[QueryableAttribute[Any]], + trackparent: bool = False, + compare_function: Optional[Callable[..., bool]] = None, + active_history: bool = False, + parent_token: Optional[AttributeEventToken] = None, + load_on_unexpire: bool = True, + send_modified_events: bool = True, + accepts_scalar_loader: Optional[bool] = None, + **kwargs: Any, + ): + r"""Construct an AttributeImpl. + + :param \class_: associated class + + :param key: string name of the attribute + + :param \callable_: + optional function which generates a callable based on a parent + instance, which produces the "default" values for a scalar or + collection attribute when it's first accessed, if not present + already. + + :param trackparent: + if True, attempt to track if an instance has a parent attached + to it via this attribute. + + :param compare_function: + a function that compares two values which are normally + assignable to this attribute. + + :param active_history: + indicates that get_history() should always return the "old" value, + even if it means executing a lazy callable upon attribute change. + + :param parent_token: + Usually references the MapperProperty, used as a key for + the hasparent() function to identify an "owning" attribute. + Allows multiple AttributeImpls to all match a single + owner attribute. + + :param load_on_unexpire: + if False, don't include this attribute in a load-on-expired + operation, i.e. the "expired_attribute_loader" process. + The attribute can still be in the "expired" list and be + considered to be "expired". Previously, this flag was called + "expire_missing" and is only used by a deferred column + attribute. + + :param send_modified_events: + if False, the InstanceState._modified_event method will have no + effect; this means the attribute will never show up as changed in a + history entry. 
+ + """ + self.class_ = class_ + self.key = key + self.callable_ = callable_ + self.dispatch = dispatch + self.trackparent = trackparent + self.parent_token = parent_token or self + self.send_modified_events = send_modified_events + if compare_function is None: + self.is_equal = operator.eq + else: + self.is_equal = compare_function + + if accepts_scalar_loader is not None: + self.accepts_scalar_loader = accepts_scalar_loader + else: + self.accepts_scalar_loader = self.default_accepts_scalar_loader + + _deferred_history = kwargs.pop("_deferred_history", False) + self._deferred_history = _deferred_history + + if active_history: + self.dispatch._active_history = True + + self.load_on_unexpire = load_on_unexpire + self._modified_token = AttributeEventToken(self, OP_MODIFIED) + + __slots__ = ( + "class_", + "key", + "callable_", + "dispatch", + "trackparent", + "parent_token", + "send_modified_events", + "is_equal", + "load_on_unexpire", + "_modified_token", + "accepts_scalar_loader", + "_deferred_history", + ) + + def __str__(self) -> str: + return f"{self.class_.__name__}.{self.key}" + + def _get_active_history(self): + """Backwards compat for impl.active_history""" + + return self.dispatch._active_history + + def _set_active_history(self, value): + self.dispatch._active_history = value + + active_history = property(_get_active_history, _set_active_history) + + def hasparent( + self, state: InstanceState[Any], optimistic: bool = False + ) -> bool: + """Return the boolean value of a `hasparent` flag attached to + the given state. + + The `optimistic` flag determines what the default return value + should be if no `hasparent` flag can be located. + + As this function is used to determine if an instance is an + *orphan*, instances that were loaded from storage should be + assumed to not be orphans, until a True/False value for this + flag is set. + + An instance attribute that is loaded by a callable function + will also not have a `hasparent` flag. + + """ + msg = "This AttributeImpl is not configured to track parents." + assert self.trackparent, msg + + return ( + state.parents.get(id(self.parent_token), optimistic) is not False + ) + + def sethasparent( + self, + state: InstanceState[Any], + parent_state: InstanceState[Any], + value: bool, + ) -> None: + """Set a boolean flag on the given item corresponding to + whether or not it is attached to a parent object via the + attribute represented by this ``InstrumentedAttribute``. + + """ + msg = "This AttributeImpl is not configured to track parents." + assert self.trackparent, msg + + id_ = id(self.parent_token) + if value: + state.parents[id_] = parent_state + else: + if id_ in state.parents: + last_parent = state.parents[id_] + + if ( + last_parent is not False + and last_parent.key != parent_state.key + ): + if last_parent.obj() is None: + raise orm_exc.StaleDataError( + "Removing state %s from parent " + "state %s along attribute '%s', " + "but the parent record " + "has gone stale, can't be sure this " + "is the most recent parent." 
+ % ( + state_str(state), + state_str(parent_state), + self.key, + ) + ) + + return + + state.parents[id_] = False + + def get_history( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PASSIVE_OFF, + ) -> History: + raise NotImplementedError() + + def get_all_pending( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PASSIVE_NO_INITIALIZE, + ) -> _AllPendingType: + """Return a list of tuples of (state, obj) + for all objects in this attribute's current state + + history. + + Only applies to object-based attributes. + + This is an inlining of existing functionality + which roughly corresponds to: + + get_state_history( + state, + key, + passive=PASSIVE_NO_INITIALIZE).sum() + + """ + raise NotImplementedError() + + def _default_value( + self, state: InstanceState[Any], dict_: _InstanceDict + ) -> Any: + """Produce an empty value for an uninitialized scalar attribute.""" + + assert self.key not in dict_, ( + "_default_value should only be invoked for an " + "uninitialized or expired attribute" + ) + + value = None + for fn in self.dispatch.init_scalar: + ret = fn(state, value, dict_) + if ret is not ATTR_EMPTY: + value = ret + + return value + + def get( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PASSIVE_OFF, + ) -> Any: + """Retrieve a value from the given object. + If a callable is assembled on this object's attribute, and + passive is False, the callable will be executed and the + resulting value will be set as the new value for this attribute. + """ + if self.key in dict_: + return dict_[self.key] + else: + # if history present, don't load + key = self.key + if ( + key not in state.committed_state + or state.committed_state[key] is NO_VALUE + ): + if not passive & CALLABLES_OK: + return PASSIVE_NO_RESULT + + value = self._fire_loader_callables(state, key, passive) + + if value is PASSIVE_NO_RESULT or value is NO_VALUE: + return value + elif value is ATTR_WAS_SET: + try: + return dict_[key] + except KeyError as err: + # TODO: no test coverage here. 
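+                        # [editorial note, not upstream] ATTR_WAS_SET
+                        # signals that a loader callable populated the
+                        # attribute directly; the KeyError below guards
+                        # against a loader that reported success without
+                        # actually writing dict_[key].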
+ raise KeyError( + "Deferred loader for attribute " + "%r failed to populate " + "correctly" % key + ) from err + elif value is not ATTR_EMPTY: + return self.set_committed_value(state, dict_, value) + + if not passive & INIT_OK: + return NO_VALUE + else: + return self._default_value(state, dict_) + + def _fire_loader_callables( + self, state: InstanceState[Any], key: str, passive: PassiveFlag + ) -> Any: + if ( + self.accepts_scalar_loader + and self.load_on_unexpire + and key in state.expired_attributes + ): + return state._load_expired(state, passive) + elif key in state.callables: + callable_ = state.callables[key] + return callable_(state, passive) + elif self.callable_: + return self.callable_(state, passive) + else: + return ATTR_EMPTY + + def append( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + passive: PassiveFlag = PASSIVE_OFF, + ) -> None: + self.set(state, dict_, value, initiator, passive=passive) + + def remove( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + passive: PassiveFlag = PASSIVE_OFF, + ) -> None: + self.set( + state, dict_, None, initiator, passive=passive, check_old=value + ) + + def pop( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + passive: PassiveFlag = PASSIVE_OFF, + ) -> None: + self.set( + state, + dict_, + None, + initiator, + passive=passive, + check_old=value, + pop=True, + ) + + def set( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken] = None, + passive: PassiveFlag = PASSIVE_OFF, + check_old: Any = None, + pop: bool = False, + ) -> None: + raise NotImplementedError() + + def delete(self, state: InstanceState[Any], dict_: _InstanceDict) -> None: + raise NotImplementedError() + + def get_committed_value( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PASSIVE_OFF, + ) -> Any: + """return the unchanged value of this attribute""" + + if self.key in state.committed_state: + value = state.committed_state[self.key] + if value is NO_VALUE: + return None + else: + return value + else: + return self.get(state, dict_, passive=passive) + + def set_committed_value(self, state, dict_, value): + """set an attribute value on the given instance and 'commit' it.""" + + dict_[self.key] = value + state._commit(dict_, [self.key]) + return value + + +class ScalarAttributeImpl(AttributeImpl): + """represents a scalar value-holding InstrumentedAttribute.""" + + default_accepts_scalar_loader = True + uses_objects = False + supports_population = True + collection = False + dynamic = False + + __slots__ = "_replace_token", "_append_token", "_remove_token" + + def __init__(self, *arg, **kw): + super().__init__(*arg, **kw) + self._replace_token = self._append_token = AttributeEventToken( + self, OP_REPLACE + ) + self._remove_token = AttributeEventToken(self, OP_REMOVE) + + def delete(self, state: InstanceState[Any], dict_: _InstanceDict) -> None: + if self.dispatch._active_history: + old = self.get(state, dict_, PASSIVE_RETURN_NO_VALUE) + else: + old = dict_.get(self.key, NO_VALUE) + + if self.dispatch.remove: + self.fire_remove_event(state, dict_, old, self._remove_token) + state._modified_event(dict_, self, old) + + existing = dict_.pop(self.key, NO_VALUE) + if ( + existing is NO_VALUE + and old is NO_VALUE + and not state.expired + and self.key not in 
state.expired_attributes + ): + raise AttributeError("%s object does not have a value" % self) + + def get_history( + self, + state: InstanceState[Any], + dict_: Dict[str, Any], + passive: PassiveFlag = PASSIVE_OFF, + ) -> History: + if self.key in dict_: + return History.from_scalar_attribute(self, state, dict_[self.key]) + elif self.key in state.committed_state: + return History.from_scalar_attribute(self, state, NO_VALUE) + else: + if passive & INIT_OK: + passive ^= INIT_OK + current = self.get(state, dict_, passive=passive) + if current is PASSIVE_NO_RESULT: + return HISTORY_BLANK + else: + return History.from_scalar_attribute(self, state, current) + + def set( + self, + state: InstanceState[Any], + dict_: Dict[str, Any], + value: Any, + initiator: Optional[AttributeEventToken] = None, + passive: PassiveFlag = PASSIVE_OFF, + check_old: Optional[object] = None, + pop: bool = False, + ) -> None: + if self.dispatch._active_history: + old = self.get(state, dict_, PASSIVE_RETURN_NO_VALUE) + else: + old = dict_.get(self.key, NO_VALUE) + + if self.dispatch.set: + value = self.fire_replace_event( + state, dict_, value, old, initiator + ) + state._modified_event(dict_, self, old) + dict_[self.key] = value + + def fire_replace_event( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: _T, + previous: Any, + initiator: Optional[AttributeEventToken], + ) -> _T: + for fn in self.dispatch.set: + value = fn( + state, value, previous, initiator or self._replace_token + ) + return value + + def fire_remove_event( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + ) -> None: + for fn in self.dispatch.remove: + fn(state, value, initiator or self._remove_token) + + +class ScalarObjectAttributeImpl(ScalarAttributeImpl): + """represents a scalar-holding InstrumentedAttribute, + where the target object is also instrumented. + + Adds events to delete/set operations. + + """ + + default_accepts_scalar_loader = False + uses_objects = True + supports_population = True + collection = False + + __slots__ = () + + def delete(self, state: InstanceState[Any], dict_: _InstanceDict) -> None: + if self.dispatch._active_history: + old = self.get( + state, + dict_, + passive=PASSIVE_ONLY_PERSISTENT + | NO_AUTOFLUSH + | LOAD_AGAINST_COMMITTED, + ) + else: + old = self.get( + state, + dict_, + passive=PASSIVE_NO_FETCH ^ INIT_OK + | LOAD_AGAINST_COMMITTED + | NO_RAISE, + ) + + self.fire_remove_event(state, dict_, old, self._remove_token) + + existing = dict_.pop(self.key, NO_VALUE) + + # if the attribute is expired, we currently have no way to tell + # that an object-attribute was expired vs. not loaded. So + # for this test, we look to see if the object has a DB identity. 
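+        # [editorial note, not upstream] ``state.key`` is the identity-map
+        # key, which is None for transient/pending objects that were never
+        # flushed; the AttributeError below therefore fires only when the
+        # object has neither a loaded value nor a database identity.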
+ if ( + existing is NO_VALUE + and old is not PASSIVE_NO_RESULT + and state.key is None + ): + raise AttributeError("%s object does not have a value" % self) + + def get_history( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PASSIVE_OFF, + ) -> History: + if self.key in dict_: + current = dict_[self.key] + else: + if passive & INIT_OK: + passive ^= INIT_OK + current = self.get(state, dict_, passive=passive) + if current is PASSIVE_NO_RESULT: + return HISTORY_BLANK + + if not self._deferred_history: + return History.from_object_attribute(self, state, current) + else: + original = state.committed_state.get(self.key, _NO_HISTORY) + if original is PASSIVE_NO_RESULT: + loader_passive = passive | ( + PASSIVE_ONLY_PERSISTENT + | NO_AUTOFLUSH + | LOAD_AGAINST_COMMITTED + | NO_RAISE + | DEFERRED_HISTORY_LOAD + ) + original = self._fire_loader_callables( + state, self.key, loader_passive + ) + return History.from_object_attribute( + self, state, current, original=original + ) + + def get_all_pending( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PASSIVE_NO_INITIALIZE, + ) -> _AllPendingType: + if self.key in dict_: + current = dict_[self.key] + elif passive & CALLABLES_OK: + current = self.get(state, dict_, passive=passive) + else: + return [] + + ret: _AllPendingType + + # can't use __hash__(), can't use __eq__() here + if ( + current is not None + and current is not PASSIVE_NO_RESULT + and current is not NO_VALUE + ): + ret = [(instance_state(current), current)] + else: + ret = [(None, None)] + + if self.key in state.committed_state: + original = state.committed_state[self.key] + if ( + original is not None + and original is not PASSIVE_NO_RESULT + and original is not NO_VALUE + and original is not current + ): + ret.append((instance_state(original), original)) + return ret + + def set( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken] = None, + passive: PassiveFlag = PASSIVE_OFF, + check_old: Any = None, + pop: bool = False, + ) -> None: + """Set a value on the given InstanceState.""" + + if self.dispatch._active_history: + old = self.get( + state, + dict_, + passive=PASSIVE_ONLY_PERSISTENT + | NO_AUTOFLUSH + | LOAD_AGAINST_COMMITTED, + ) + else: + old = self.get( + state, + dict_, + passive=PASSIVE_NO_FETCH ^ INIT_OK + | LOAD_AGAINST_COMMITTED + | NO_RAISE, + ) + + if ( + check_old is not None + and old is not PASSIVE_NO_RESULT + and check_old is not old + ): + if pop: + return + else: + raise ValueError( + "Object %s not associated with %s on attribute '%s'" + % (instance_str(check_old), state_str(state), self.key) + ) + + value = self.fire_replace_event(state, dict_, value, old, initiator) + dict_[self.key] = value + + def fire_remove_event( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + ) -> None: + if self.trackparent and value not in ( + None, + PASSIVE_NO_RESULT, + NO_VALUE, + ): + self.sethasparent(instance_state(value), state, False) + + for fn in self.dispatch.remove: + fn(state, value, initiator or self._remove_token) + + state._modified_event(dict_, self, value) + + def fire_replace_event( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: _T, + previous: Any, + initiator: Optional[AttributeEventToken], + ) -> _T: + if self.trackparent: + if previous is not value and previous not in ( + None, + PASSIVE_NO_RESULT, + NO_VALUE, + ): + 
self.sethasparent(instance_state(previous), state, False) + + for fn in self.dispatch.set: + value = fn( + state, value, previous, initiator or self._replace_token + ) + + state._modified_event(dict_, self, previous) + + if self.trackparent: + if value is not None: + self.sethasparent(instance_state(value), state, True) + + return value + + +class HasCollectionAdapter: + __slots__ = () + + collection: bool + _is_has_collection_adapter = True + + def _dispose_previous_collection( + self, + state: InstanceState[Any], + collection: _AdaptedCollectionProtocol, + adapter: CollectionAdapter, + fire_event: bool, + ) -> None: + raise NotImplementedError() + + @overload + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: Literal[None] = ..., + passive: Literal[PassiveFlag.PASSIVE_OFF] = ..., + ) -> CollectionAdapter: ... + + @overload + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: _AdaptedCollectionProtocol = ..., + passive: PassiveFlag = ..., + ) -> CollectionAdapter: ... + + @overload + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: Optional[_AdaptedCollectionProtocol] = ..., + passive: PassiveFlag = ..., + ) -> Union[ + Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter + ]: ... + + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: Optional[_AdaptedCollectionProtocol] = None, + passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, + ) -> Union[ + Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter + ]: + raise NotImplementedError() + + def set( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken] = None, + passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, + check_old: Any = None, + pop: bool = False, + _adapt: bool = True, + ) -> None: + raise NotImplementedError() + + +if TYPE_CHECKING: + + def _is_collection_attribute_impl( + impl: AttributeImpl, + ) -> TypeGuard[CollectionAttributeImpl]: ... + +else: + _is_collection_attribute_impl = operator.attrgetter("collection") + + +class CollectionAttributeImpl(HasCollectionAdapter, AttributeImpl): + """A collection-holding attribute that instruments changes in membership. + + Only handles collections of instrumented objects. + + InstrumentedCollectionAttribute holds an arbitrary, user-specified + container object (defaulting to a list) and brokers access to the + CollectionAdapter, a "view" onto that object that presents consistent bag + semantics to the orm layer independent of the user data implementation. 
+ + """ + + uses_objects = True + collection = True + default_accepts_scalar_loader = False + supports_population = True + dynamic = False + + _bulk_replace_token: AttributeEventToken + + __slots__ = ( + "copy", + "collection_factory", + "_append_token", + "_remove_token", + "_bulk_replace_token", + "_duck_typed_as", + ) + + def __init__( + self, + class_, + key, + callable_, + dispatch, + typecallable=None, + trackparent=False, + copy_function=None, + compare_function=None, + **kwargs, + ): + super().__init__( + class_, + key, + callable_, + dispatch, + trackparent=trackparent, + compare_function=compare_function, + **kwargs, + ) + + if copy_function is None: + copy_function = self.__copy + self.copy = copy_function + self.collection_factory = typecallable + self._append_token = AttributeEventToken(self, OP_APPEND) + self._remove_token = AttributeEventToken(self, OP_REMOVE) + self._bulk_replace_token = AttributeEventToken(self, OP_BULK_REPLACE) + self._duck_typed_as = util.duck_type_collection( + self.collection_factory() + ) + + if getattr(self.collection_factory, "_sa_linker", None): + + @event.listens_for(self, "init_collection") + def link(target, collection, collection_adapter): + collection._sa_linker(collection_adapter) + + @event.listens_for(self, "dispose_collection") + def unlink(target, collection, collection_adapter): + collection._sa_linker(None) + + def __copy(self, item): + return [y for y in collections.collection_adapter(item)] + + def get_history( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PASSIVE_OFF, + ) -> History: + current = self.get(state, dict_, passive=passive) + + if current is PASSIVE_NO_RESULT: + if ( + passive & PassiveFlag.INCLUDE_PENDING_MUTATIONS + and self.key in state._pending_mutations + ): + pending = state._pending_mutations[self.key] + return pending.merge_with_history(HISTORY_BLANK) + else: + return HISTORY_BLANK + else: + if passive & PassiveFlag.INCLUDE_PENDING_MUTATIONS: + # this collection is loaded / present. 
should not be any + # pending mutations + assert self.key not in state._pending_mutations + + return History.from_collection(self, state, current) + + def get_all_pending( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PASSIVE_NO_INITIALIZE, + ) -> _AllPendingType: + # NOTE: passive is ignored here at the moment + + if self.key not in dict_: + return [] + + current = dict_[self.key] + current = getattr(current, "_sa_adapter") + + if self.key in state.committed_state: + original = state.committed_state[self.key] + if original is not NO_VALUE: + current_states = [ + ((c is not None) and instance_state(c) or None, c) + for c in current + ] + original_states = [ + ((c is not None) and instance_state(c) or None, c) + for c in original + ] + + current_set = dict(current_states) + original_set = dict(original_states) + + return ( + [ + (s, o) + for s, o in current_states + if s not in original_set + ] + + [(s, o) for s, o in current_states if s in original_set] + + [ + (s, o) + for s, o in original_states + if s not in current_set + ] + ) + + return [(instance_state(o), o) for o in current] + + def fire_append_event( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: _T, + initiator: Optional[AttributeEventToken], + key: Optional[Any], + ) -> _T: + for fn in self.dispatch.append: + value = fn(state, value, initiator or self._append_token, key=key) + + state._modified_event(dict_, self, NO_VALUE, True) + + if self.trackparent and value is not None: + self.sethasparent(instance_state(value), state, True) + + return value + + def fire_append_wo_mutation_event( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: _T, + initiator: Optional[AttributeEventToken], + key: Optional[Any], + ) -> _T: + for fn in self.dispatch.append_wo_mutation: + value = fn(state, value, initiator or self._append_token, key=key) + + return value + + def fire_pre_remove_event( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + initiator: Optional[AttributeEventToken], + key: Optional[Any], + ) -> None: + """A special event used for pop() operations. + + The "remove" event needs to have the item to be removed passed to + it, which in the case of pop from a set, we don't have a way to access + the item before the operation. the event is used for all pop() + operations (even though set.pop is the one where it is really needed). + + """ + state._modified_event(dict_, self, NO_VALUE, True) + + def fire_remove_event( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + key: Optional[Any], + ) -> None: + if self.trackparent and value is not None: + self.sethasparent(instance_state(value), state, False) + + for fn in self.dispatch.remove: + fn(state, value, initiator or self._remove_token, key=key) + + state._modified_event(dict_, self, NO_VALUE, True) + + def delete(self, state: InstanceState[Any], dict_: _InstanceDict) -> None: + if self.key not in dict_: + return + + state._modified_event(dict_, self, NO_VALUE, True) + + collection = self.get_collection(state, state.dict) + collection.clear_with_event() + + # key is always present because we checked above. e.g. + # del is a no-op if collection not present. 
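+        # [editorial note, not upstream] reached via e.g.
+        # ``del parent.children`` (hypothetical mapping); the collection
+        # was cleared with per-item remove events above, so dropping the
+        # dict entry here does not bypass event tracking.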
+ del dict_[self.key] + + def _default_value( + self, state: InstanceState[Any], dict_: _InstanceDict + ) -> _AdaptedCollectionProtocol: + """Produce an empty collection for an un-initialized attribute""" + + assert self.key not in dict_, ( + "_default_value should only be invoked for an " + "uninitialized or expired attribute" + ) + + if self.key in state._empty_collections: + return state._empty_collections[self.key] + + adapter, user_data = self._initialize_collection(state) + adapter._set_empty(user_data) + return user_data + + def _initialize_collection( + self, state: InstanceState[Any] + ) -> Tuple[CollectionAdapter, _AdaptedCollectionProtocol]: + adapter, collection = state.manager.initialize_collection( + self.key, state, self.collection_factory + ) + + self.dispatch.init_collection(state, collection, adapter) + + return adapter, collection + + def append( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + passive: PassiveFlag = PASSIVE_OFF, + ) -> None: + collection = self.get_collection( + state, dict_, user_data=None, passive=passive + ) + if collection is PASSIVE_NO_RESULT: + value = self.fire_append_event( + state, dict_, value, initiator, key=NO_KEY + ) + assert ( + self.key not in dict_ + ), "Collection was loaded during event handling." + state._get_pending_mutation(self.key).append(value) + else: + if TYPE_CHECKING: + assert isinstance(collection, CollectionAdapter) + collection.append_with_event(value, initiator) + + def remove( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + passive: PassiveFlag = PASSIVE_OFF, + ) -> None: + collection = self.get_collection( + state, state.dict, user_data=None, passive=passive + ) + if collection is PASSIVE_NO_RESULT: + self.fire_remove_event(state, dict_, value, initiator, key=NO_KEY) + assert ( + self.key not in dict_ + ), "Collection was loaded during event handling." + state._get_pending_mutation(self.key).remove(value) + else: + if TYPE_CHECKING: + assert isinstance(collection, CollectionAdapter) + collection.remove_with_event(value, initiator) + + def pop( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + passive: PassiveFlag = PASSIVE_OFF, + ) -> None: + try: + # TODO: better solution here would be to add + # a "popper" role to collections.py to complement + # "remover". + self.remove(state, dict_, value, initiator, passive=passive) + except (ValueError, KeyError, IndexError): + pass + + def set( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken] = None, + passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, + check_old: Any = None, + pop: bool = False, + _adapt: bool = True, + ) -> None: + iterable = orig_iterable = value + new_keys = None + + # pulling a new collection first so that an adaptation exception does + # not trigger a lazy load of the old collection. 
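+        # [editorial sketch, illustrative only; ``parent`` / ``child_a``
+        # / ``child_b`` are hypothetical] this is the plain-assignment
+        # path for a collection, e.g.:
+        #
+        #     parent.children = [child_a, child_b]
+        #
+        # the new members are diffed against the old collection further
+        # below via collections.bulk_replace().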
+ new_collection, user_data = self._initialize_collection(state) + if _adapt: + if new_collection._converter is not None: + iterable = new_collection._converter(iterable) + else: + setting_type = util.duck_type_collection(iterable) + receiving_type = self._duck_typed_as + + if setting_type is not receiving_type: + given = ( + iterable is None + and "None" + or iterable.__class__.__name__ + ) + wanted = self._duck_typed_as.__name__ + raise TypeError( + "Incompatible collection type: %s is not %s-like" + % (given, wanted) + ) + + # If the object is an adapted collection, return the (iterable) + # adapter. + if hasattr(iterable, "_sa_iterator"): + iterable = iterable._sa_iterator() + elif setting_type is dict: + new_keys = list(iterable) + iterable = iterable.values() + else: + iterable = iter(iterable) + elif util.duck_type_collection(iterable) is dict: + new_keys = list(value) + + new_values = list(iterable) + + evt = self._bulk_replace_token + + self.dispatch.bulk_replace(state, new_values, evt, keys=new_keys) + + # propagate NO_RAISE in passive through to the get() for the + # existing object (ticket #8862) + old = self.get( + state, + dict_, + passive=PASSIVE_ONLY_PERSISTENT ^ (passive & PassiveFlag.NO_RAISE), + ) + if old is PASSIVE_NO_RESULT: + old = self._default_value(state, dict_) + elif old is orig_iterable: + # ignore re-assignment of the current collection, as happens + # implicitly with in-place operators (foo.collection |= other) + return + + # place a copy of "old" in state.committed_state + state._modified_event(dict_, self, old, True) + + old_collection = old._sa_adapter + + dict_[self.key] = user_data + + collections.bulk_replace( + new_values, old_collection, new_collection, initiator=evt + ) + + self._dispose_previous_collection(state, old, old_collection, True) + + def _dispose_previous_collection( + self, + state: InstanceState[Any], + collection: _AdaptedCollectionProtocol, + adapter: CollectionAdapter, + fire_event: bool, + ) -> None: + del collection._sa_adapter + + # discarding old collection make sure it is not referenced in empty + # collections. + state._empty_collections.pop(self.key, None) + if fire_event: + self.dispatch.dispose_collection(state, collection, adapter) + + def _invalidate_collection( + self, collection: _AdaptedCollectionProtocol + ) -> None: + adapter = getattr(collection, "_sa_adapter") + adapter.invalidated = True + + def set_committed_value( + self, state: InstanceState[Any], dict_: _InstanceDict, value: Any + ) -> _AdaptedCollectionProtocol: + """Set an attribute value on the given instance and 'commit' it.""" + + collection, user_data = self._initialize_collection(state) + + if value: + collection.append_multiple_without_event(value) + + state.dict[self.key] = user_data + + state._commit(dict_, [self.key]) + + if self.key in state._pending_mutations: + # pending items exist. issue a modified event, + # add/remove new items. + state._modified_event(dict_, self, user_data, True) + + pending = state._pending_mutations.pop(self.key) + added = pending.added_items + removed = pending.deleted_items + for item in added: + collection.append_without_event(item) + for item in removed: + collection.remove_without_event(item) + + return user_data + + @overload + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: Literal[None] = ..., + passive: Literal[PassiveFlag.PASSIVE_OFF] = ..., + ) -> CollectionAdapter: ... 
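+    # [editorial sketch -- hypothetical usage of an internal API, not
+    # upstream code; ``Parent`` / ``some_parent`` / ``some_child`` are
+    # assumed names] the adapter returned below is the event-aware "view"
+    # over the user's collection:
+    #
+    #     state = instance_state(some_parent)
+    #     adapter = Parent.children.impl.get_collection(state, state.dict)
+    #     adapter.append_with_event(some_child)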
+ + @overload + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: _AdaptedCollectionProtocol = ..., + passive: PassiveFlag = ..., + ) -> CollectionAdapter: ... + + @overload + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: Optional[_AdaptedCollectionProtocol] = ..., + passive: PassiveFlag = PASSIVE_OFF, + ) -> Union[ + Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter + ]: ... + + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: Optional[_AdaptedCollectionProtocol] = None, + passive: PassiveFlag = PASSIVE_OFF, + ) -> Union[ + Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter + ]: + """Retrieve the CollectionAdapter associated with the given state. + + if user_data is None, retrieves it from the state using normal + "get()" rules, which will fire lazy callables or return the "empty" + collection value. + + """ + if user_data is None: + fetch_user_data = self.get(state, dict_, passive=passive) + if fetch_user_data is LoaderCallableStatus.PASSIVE_NO_RESULT: + return fetch_user_data + else: + user_data = cast("_AdaptedCollectionProtocol", fetch_user_data) + + return user_data._sa_adapter + + +def backref_listeners( + attribute: QueryableAttribute[Any], key: str, uselist: bool +) -> None: + """Apply listeners to synchronize a two-way relationship.""" + + # use easily recognizable names for stack traces. + + # in the sections marked "tokens to test for a recursive loop", + # this is somewhat brittle and very performance-sensitive logic + # that is specific to how we might arrive at each event. a marker + # that can target us directly to arguments being invoked against + # the impl might be simpler, but could interfere with other systems. + + parent_token = attribute.impl.parent_token + parent_impl = attribute.impl + + def _acceptable_key_err(child_state, initiator, child_impl): + raise ValueError( + "Bidirectional attribute conflict detected: " + 'Passing object %s to attribute "%s" ' + 'triggers a modify event on attribute "%s" ' + 'via the backref "%s".' + % ( + state_str(child_state), + initiator.parent_token, + child_impl.parent_token, + attribute.impl.parent_token, + ) + ) + + def emit_backref_from_scalar_set_event( + state, child, oldchild, initiator, **kw + ): + if oldchild is child: + return child + if ( + oldchild is not None + and oldchild is not PASSIVE_NO_RESULT + and oldchild is not NO_VALUE + ): + # With lazy=None, there's no guarantee that the full collection is + # present when updating via a backref. + old_state, old_dict = ( + instance_state(oldchild), + instance_dict(oldchild), + ) + impl = old_state.manager[key].impl + + # tokens to test for a recursive loop. + if not impl.collection and not impl.dynamic: + check_recursive_token = impl._replace_token + else: + check_recursive_token = impl._remove_token + + if initiator is not check_recursive_token: + impl.pop( + old_state, + old_dict, + state.obj(), + parent_impl._append_token, + passive=PASSIVE_NO_FETCH, + ) + + if child is not None: + child_state, child_dict = ( + instance_state(child), + instance_dict(child), + ) + child_impl = child_state.manager[key].impl + + if ( + initiator.parent_token is not parent_token + and initiator.parent_token is not child_impl.parent_token + ): + _acceptable_key_err(state, initiator, child_impl) + + # tokens to test for a recursive loop. 
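+            # [editorial note, not upstream] the comparisons below ask
+            # whether the incoming initiator token was emitted by the
+            # child attribute itself; if so, this mutation originated on
+            # the other side of the backref and appending again would
+            # recurse.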
+ check_append_token = child_impl._append_token + check_bulk_replace_token = ( + child_impl._bulk_replace_token + if _is_collection_attribute_impl(child_impl) + else None + ) + + if ( + initiator is not check_append_token + and initiator is not check_bulk_replace_token + ): + child_impl.append( + child_state, + child_dict, + state.obj(), + initiator, + passive=PASSIVE_NO_FETCH, + ) + return child + + def emit_backref_from_collection_append_event( + state, child, initiator, **kw + ): + if child is None: + return + + child_state, child_dict = instance_state(child), instance_dict(child) + child_impl = child_state.manager[key].impl + + if ( + initiator.parent_token is not parent_token + and initiator.parent_token is not child_impl.parent_token + ): + _acceptable_key_err(state, initiator, child_impl) + + # tokens to test for a recursive loop. + check_append_token = child_impl._append_token + check_bulk_replace_token = ( + child_impl._bulk_replace_token + if _is_collection_attribute_impl(child_impl) + else None + ) + + if ( + initiator is not check_append_token + and initiator is not check_bulk_replace_token + ): + child_impl.append( + child_state, + child_dict, + state.obj(), + initiator, + passive=PASSIVE_NO_FETCH, + ) + return child + + def emit_backref_from_collection_remove_event( + state, child, initiator, **kw + ): + if ( + child is not None + and child is not PASSIVE_NO_RESULT + and child is not NO_VALUE + ): + child_state, child_dict = ( + instance_state(child), + instance_dict(child), + ) + child_impl = child_state.manager[key].impl + + check_replace_token: Optional[AttributeEventToken] + + # tokens to test for a recursive loop. + if not child_impl.collection and not child_impl.dynamic: + check_remove_token = child_impl._remove_token + check_replace_token = child_impl._replace_token + check_for_dupes_on_remove = uselist and not parent_impl.dynamic + else: + check_remove_token = child_impl._remove_token + check_replace_token = ( + child_impl._bulk_replace_token + if _is_collection_attribute_impl(child_impl) + else None + ) + check_for_dupes_on_remove = False + + if ( + initiator is not check_remove_token + and initiator is not check_replace_token + ): + if not check_for_dupes_on_remove or not util.has_dupes( + # when this event is called, the item is usually + # present in the list, except for a pop() operation. + state.dict[parent_impl.key], + child, + ): + child_impl.pop( + child_state, + child_dict, + state.obj(), + initiator, + passive=PASSIVE_NO_FETCH, + ) + + if uselist: + event.listen( + attribute, + "append", + emit_backref_from_collection_append_event, + retval=True, + raw=True, + include_key=True, + ) + else: + event.listen( + attribute, + "set", + emit_backref_from_scalar_set_event, + retval=True, + raw=True, + include_key=True, + ) + # TODO: need coverage in test/orm/ of remove event + event.listen( + attribute, + "remove", + emit_backref_from_collection_remove_event, + retval=True, + raw=True, + include_key=True, + ) + + +_NO_HISTORY = util.symbol("NO_HISTORY") +_NO_STATE_SYMBOLS = frozenset([id(PASSIVE_NO_RESULT), id(NO_VALUE)]) + + +class History(NamedTuple): + """A 3-tuple of added, unchanged and deleted values, + representing the changes which have occurred on an instrumented + attribute. 
+ + The easiest way to get a :class:`.History` object for a particular + attribute on an object is to use the :func:`_sa.inspect` function:: + + from sqlalchemy import inspect + + hist = inspect(myobject).attrs.myattribute.history + + Each tuple member is an iterable sequence: + + * ``added`` - the collection of items added to the attribute (the first + tuple element). + + * ``unchanged`` - the collection of items that have not changed on the + attribute (the second tuple element). + + * ``deleted`` - the collection of items that have been removed from the + attribute (the third tuple element). + + """ + + added: Union[Tuple[()], List[Any]] + unchanged: Union[Tuple[()], List[Any]] + deleted: Union[Tuple[()], List[Any]] + + def __bool__(self) -> bool: + return self != HISTORY_BLANK + + def empty(self) -> bool: + """Return True if this :class:`.History` has no changes + and no existing, unchanged state. + + """ + + return not bool((self.added or self.deleted) or self.unchanged) + + def sum(self) -> Sequence[Any]: + """Return a collection of added + unchanged + deleted.""" + + return ( + (self.added or []) + (self.unchanged or []) + (self.deleted or []) + ) + + def non_deleted(self) -> Sequence[Any]: + """Return a collection of added + unchanged.""" + + return (self.added or []) + (self.unchanged or []) + + def non_added(self) -> Sequence[Any]: + """Return a collection of unchanged + deleted.""" + + return (self.unchanged or []) + (self.deleted or []) + + def has_changes(self) -> bool: + """Return True if this :class:`.History` has changes.""" + + return bool(self.added or self.deleted) + + def _merge(self, added: Iterable[Any], deleted: Iterable[Any]) -> History: + return History( + list(self.added) + list(added), + self.unchanged, + list(self.deleted) + list(deleted), + ) + + def as_state(self) -> History: + return History( + [ + (c is not None) and instance_state(c) or None + for c in self.added + ], + [ + (c is not None) and instance_state(c) or None + for c in self.unchanged + ], + [ + (c is not None) and instance_state(c) or None + for c in self.deleted + ], + ) + + @classmethod + def from_scalar_attribute( + cls, + attribute: ScalarAttributeImpl, + state: InstanceState[Any], + current: Any, + ) -> History: + original = state.committed_state.get(attribute.key, _NO_HISTORY) + + deleted: Union[Tuple[()], List[Any]] + + if original is _NO_HISTORY: + if current is NO_VALUE: + return cls((), (), ()) + else: + return cls((), [current], ()) + # don't let ClauseElement expressions here trip things up + elif ( + current is not NO_VALUE + and attribute.is_equal(current, original) is True + ): + return cls((), [current], ()) + else: + # current convention on native scalars is to not + # include information + # about missing previous value in "deleted", but + # we do include None, which helps in some primary + # key situations + if id(original) in _NO_STATE_SYMBOLS: + deleted = () + # indicate a "del" operation occurred when we don't have + # the previous value as: ([None], (), ()) + if id(current) in _NO_STATE_SYMBOLS: + current = None + else: + deleted = [original] + if current is NO_VALUE: + return cls((), (), deleted) + else: + return cls([current], (), deleted) + + @classmethod + def from_object_attribute( + cls, + attribute: ScalarObjectAttributeImpl, + state: InstanceState[Any], + current: Any, + original: Any = _NO_HISTORY, + ) -> History: + deleted: Union[Tuple[()], List[Any]] + + if original is _NO_HISTORY: + original = state.committed_state.get(attribute.key, _NO_HISTORY) + + if original 
is _NO_HISTORY: + if current is NO_VALUE: + return cls((), (), ()) + else: + return cls((), [current], ()) + elif current is original and current is not NO_VALUE: + return cls((), [current], ()) + else: + # current convention on related objects is to not + # include information + # about missing previous value in "deleted", and + # to also not include None - the dependency.py rules + # ignore the None in any case. + if id(original) in _NO_STATE_SYMBOLS or original is None: + deleted = () + # indicate a "del" operation occurred when we don't have + # the previous value as: ([None], (), ()) + if id(current) in _NO_STATE_SYMBOLS: + current = None + else: + deleted = [original] + if current is NO_VALUE: + return cls((), (), deleted) + else: + return cls([current], (), deleted) + + @classmethod + def from_collection( + cls, + attribute: CollectionAttributeImpl, + state: InstanceState[Any], + current: Any, + ) -> History: + original = state.committed_state.get(attribute.key, _NO_HISTORY) + if current is NO_VALUE: + return cls((), (), ()) + + current = getattr(current, "_sa_adapter") + if original is NO_VALUE: + return cls(list(current), (), ()) + elif original is _NO_HISTORY: + return cls((), list(current), ()) + else: + current_states = [ + ((c is not None) and instance_state(c) or None, c) + for c in current + ] + original_states = [ + ((c is not None) and instance_state(c) or None, c) + for c in original + ] + + current_set = dict(current_states) + original_set = dict(original_states) + + return cls( + [o for s, o in current_states if s not in original_set], + [o for s, o in current_states if s in original_set], + [o for s, o in original_states if s not in current_set], + ) + + +HISTORY_BLANK = History((), (), ()) + + +def get_history( + obj: object, key: str, passive: PassiveFlag = PASSIVE_OFF +) -> History: + """Return a :class:`.History` record for the given object + and attribute key. + + This is the **pre-flush** history for a given attribute, which is + reset each time the :class:`.Session` flushes changes to the + current database transaction. + + .. note:: + + Prefer to use the :attr:`.AttributeState.history` and + :meth:`.AttributeState.load_history` accessors to retrieve the + :class:`.History` for instance attributes. + + + :param obj: an object whose class is instrumented by the + attributes package. + + :param key: string attribute name. + + :param passive: indicates loading behavior for the attribute + if the value is not already present. This is a + bitflag attribute, which defaults to the symbol + :attr:`.PASSIVE_OFF` indicating all necessary SQL + should be emitted. + + .. seealso:: + + :attr:`.AttributeState.history` + + :meth:`.AttributeState.load_history` - retrieve history + using loader callables if the value is not locally present. 
+ + """ + + return get_state_history(instance_state(obj), key, passive) + + +def get_state_history( + state: InstanceState[Any], key: str, passive: PassiveFlag = PASSIVE_OFF +) -> History: + return state.get_history(key, passive) + + +def has_parent( + cls: Type[_O], obj: _O, key: str, optimistic: bool = False +) -> bool: + """TODO""" + manager = manager_of_class(cls) + state = instance_state(obj) + return manager.has_parent(state, key, optimistic) + + +def register_attribute( + class_: Type[_O], + key: str, + *, + comparator: interfaces.PropComparator[_T], + parententity: _InternalEntityType[_O], + doc: Optional[str] = None, + **kw: Any, +) -> InstrumentedAttribute[_T]: + desc = register_descriptor( + class_, key, comparator=comparator, parententity=parententity, doc=doc + ) + register_attribute_impl(class_, key, **kw) + return desc + + +def register_attribute_impl( + class_: Type[_O], + key: str, + uselist: bool = False, + callable_: Optional[_LoaderCallable] = None, + useobject: bool = False, + impl_class: Optional[Type[AttributeImpl]] = None, + backref: Optional[str] = None, + **kw: Any, +) -> QueryableAttribute[Any]: + manager = manager_of_class(class_) + if uselist: + factory = kw.pop("typecallable", None) + typecallable = manager.instrument_collection_class( + key, factory or list + ) + else: + typecallable = kw.pop("typecallable", None) + + dispatch = cast( + "_Dispatch[QueryableAttribute[Any]]", manager[key].dispatch + ) # noqa: E501 + + impl: AttributeImpl + + if impl_class: + # TODO: this appears to be the WriteOnlyAttributeImpl / + # DynamicAttributeImpl constructor which is hardcoded + impl = cast("Type[WriteOnlyAttributeImpl]", impl_class)( + class_, key, dispatch, **kw + ) + elif uselist: + impl = CollectionAttributeImpl( + class_, key, callable_, dispatch, typecallable=typecallable, **kw + ) + elif useobject: + impl = ScalarObjectAttributeImpl( + class_, key, callable_, dispatch, **kw + ) + else: + impl = ScalarAttributeImpl(class_, key, callable_, dispatch, **kw) + + manager[key].impl = impl + + if backref: + backref_listeners(manager[key], backref, uselist) + + manager.post_configure_attribute(key) + return manager[key] + + +def register_descriptor( + class_: Type[Any], + key: str, + *, + comparator: interfaces.PropComparator[_T], + parententity: _InternalEntityType[Any], + doc: Optional[str] = None, +) -> InstrumentedAttribute[_T]: + manager = manager_of_class(class_) + + descriptor = InstrumentedAttribute( + class_, key, comparator=comparator, parententity=parententity + ) + + descriptor.__doc__ = doc # type: ignore + + manager.instrument_attribute(key, descriptor) + return descriptor + + +def unregister_attribute(class_: Type[Any], key: str) -> None: + manager_of_class(class_).uninstrument_attribute(key) + + +def init_collection(obj: object, key: str) -> CollectionAdapter: + """Initialize a collection attribute and return the collection adapter. + + This function is used to provide direct access to collection internals + for a previously unloaded attribute. e.g.:: + + collection_adapter = init_collection(someobject, 'elements') + for elem in values: + collection_adapter.append_without_event(elem) + + For an easier way to do the above, see + :func:`~sqlalchemy.orm.attributes.set_committed_value`. + + :param obj: a mapped object + + :param key: string attribute name where the collection is located. 
+ + """ + state = instance_state(obj) + dict_ = state.dict + return init_state_collection(state, dict_, key) + + +def init_state_collection( + state: InstanceState[Any], dict_: _InstanceDict, key: str +) -> CollectionAdapter: + """Initialize a collection attribute and return the collection adapter. + + Discards any existing collection which may be there. + + """ + attr = state.manager[key].impl + + if TYPE_CHECKING: + assert isinstance(attr, HasCollectionAdapter) + + old = dict_.pop(key, None) # discard old collection + if old is not None: + old_collection = old._sa_adapter + attr._dispose_previous_collection(state, old, old_collection, False) + + user_data = attr._default_value(state, dict_) + adapter: CollectionAdapter = attr.get_collection( + state, dict_, user_data, passive=PassiveFlag.PASSIVE_NO_FETCH + ) + adapter._reset_empty() + + return adapter + + +def set_committed_value(instance, key, value): + """Set the value of an attribute with no history events. + + Cancels any previous history present. The value should be + a scalar value for scalar-holding attributes, or + an iterable for any collection-holding attribute. + + This is the same underlying method used when a lazy loader + fires off and loads additional data from the database. + In particular, this method can be used by application code + which has loaded additional attributes or collections through + separate queries, which can then be attached to an instance + as though it were part of its original loaded state. + + """ + state, dict_ = instance_state(instance), instance_dict(instance) + state.manager[key].impl.set_committed_value(state, dict_, value) + + +def set_attribute( + instance: object, + key: str, + value: Any, + initiator: Optional[AttributeEventToken] = None, +) -> None: + """Set the value of an attribute, firing history events. + + This function may be used regardless of instrumentation + applied directly to the class, i.e. no descriptors are required. + Custom attribute management schemes will need to make usage + of this method to establish attribute state as understood + by SQLAlchemy. + + :param instance: the object that will be modified + + :param key: string name of the attribute + + :param value: value to assign + + :param initiator: an instance of :class:`.Event` that would have + been propagated from a previous event listener. This argument + is used when the :func:`.set_attribute` function is being used within + an existing event listening function where an :class:`.Event` object + is being supplied; the object may be used to track the origin of the + chain of events. + + .. versionadded:: 1.2.3 + + """ + state, dict_ = instance_state(instance), instance_dict(instance) + state.manager[key].impl.set(state, dict_, value, initiator) + + +def get_attribute(instance: object, key: str) -> Any: + """Get the value of an attribute, firing any callables required. + + This function may be used regardless of instrumentation + applied directly to the class, i.e. no descriptors are required. + Custom attribute management schemes will need to make usage + of this method to make usage of attribute state as understood + by SQLAlchemy. + + """ + state, dict_ = instance_state(instance), instance_dict(instance) + return state.manager[key].impl.get(state, dict_) + + +def del_attribute(instance: object, key: str) -> None: + """Delete the value of an attribute, firing history events. + + This function may be used regardless of instrumentation + applied directly to the class, i.e. no descriptors are required. 
+ Custom attribute management schemes will need to make usage + of this method to establish attribute state as understood + by SQLAlchemy. + + """ + state, dict_ = instance_state(instance), instance_dict(instance) + state.manager[key].impl.delete(state, dict_) + + +def flag_modified(instance: object, key: str) -> None: + """Mark an attribute on an instance as 'modified'. + + This sets the 'modified' flag on the instance and + establishes an unconditional change event for the given attribute. + The attribute must have a value present, else an + :class:`.InvalidRequestError` is raised. + + To mark an object "dirty" without referring to any specific attribute + so that it is considered within a flush, use the + :func:`.attributes.flag_dirty` call. + + .. seealso:: + + :func:`.attributes.flag_dirty` + + """ + state, dict_ = instance_state(instance), instance_dict(instance) + impl = state.manager[key].impl + impl.dispatch.modified(state, impl._modified_token) + state._modified_event(dict_, impl, NO_VALUE, is_userland=True) + + +def flag_dirty(instance: object) -> None: + """Mark an instance as 'dirty' without any specific attribute mentioned. + + This is a special operation that will allow the object to travel through + the flush process for interception by events such as + :meth:`.SessionEvents.before_flush`. Note that no SQL will be emitted in + the flush process for an object that has no changes, even if marked dirty + via this method. However, a :meth:`.SessionEvents.before_flush` handler + will be able to see the object in the :attr:`.Session.dirty` collection and + may establish changes on it, which will then be included in the SQL + emitted. + + .. versionadded:: 1.2 + + .. seealso:: + + :func:`.attributes.flag_modified` + + """ + + state, dict_ = instance_state(instance), instance_dict(instance) + state._modified_event(dict_, None, NO_VALUE, is_userland=True) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/base.py new file mode 100644 index 00000000..c9005298 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/base.py @@ -0,0 +1,971 @@ +# orm/base.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Constants and rudimental functions used throughout the ORM. + +""" + +from __future__ import annotations + +from enum import Enum +import operator +import typing +from typing import Any +from typing import Callable +from typing import Dict +from typing import Generic +from typing import no_type_check +from typing import Optional +from typing import overload +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import exc +from ._typing import insp_is_mapper +from .. import exc as sa_exc +from .. import inspection +from .. 
import util +from ..sql import roles +from ..sql.elements import SQLColumnExpression +from ..sql.elements import SQLCoreOperations +from ..util import FastIntFlag +from ..util.langhelpers import TypingOnly +from ..util.typing import Literal + +if typing.TYPE_CHECKING: + from ._typing import _EntityType + from ._typing import _ExternalEntityType + from ._typing import _InternalEntityType + from .attributes import InstrumentedAttribute + from .dynamic import AppenderQuery + from .instrumentation import ClassManager + from .interfaces import PropComparator + from .mapper import Mapper + from .state import InstanceState + from .util import AliasedClass + from .writeonly import WriteOnlyCollection + from ..sql._typing import _ColumnExpressionArgument + from ..sql._typing import _InfoType + from ..sql.elements import ColumnElement + from ..sql.operators import OperatorType + +_T = TypeVar("_T", bound=Any) +_T_co = TypeVar("_T_co", bound=Any, covariant=True) + +_O = TypeVar("_O", bound=object) + + +class LoaderCallableStatus(Enum): + PASSIVE_NO_RESULT = 0 + """Symbol returned by a loader callable or other attribute/history + retrieval operation when a value could not be determined, based + on loader callable flags. + """ + + PASSIVE_CLASS_MISMATCH = 1 + """Symbol indicating that an object is locally present for a given + primary key identity but it is not of the requested class. The + return value is therefore None and no SQL should be emitted.""" + + ATTR_WAS_SET = 2 + """Symbol returned by a loader callable to indicate the + retrieved value, or values, were assigned to their attributes + on the target object. + """ + + ATTR_EMPTY = 3 + """Symbol used internally to indicate an attribute had no callable.""" + + NO_VALUE = 4 + """Symbol which may be placed as the 'previous' value of an attribute, + indicating no value was loaded for an attribute when it was modified, + and flags indicated we were not to load it. + """ + + NEVER_SET = NO_VALUE + """ + Synonymous with NO_VALUE + + .. versionchanged:: 1.4 NEVER_SET was merged with NO_VALUE + + """ + + +( + PASSIVE_NO_RESULT, + PASSIVE_CLASS_MISMATCH, + ATTR_WAS_SET, + ATTR_EMPTY, + NO_VALUE, +) = tuple(LoaderCallableStatus) + +NEVER_SET = NO_VALUE + + +class PassiveFlag(FastIntFlag): + """Bitflag interface that passes options onto loader callables""" + + NO_CHANGE = 0 + """No callables or SQL should be emitted on attribute access + and no state should change + """ + + CALLABLES_OK = 1 + """Loader callables can be fired off if a value + is not present. + """ + + SQL_OK = 2 + """Loader callables can emit SQL at least on scalar value attributes.""" + + RELATED_OBJECT_OK = 4 + """Callables can use SQL to load related objects as well + as scalar value attributes. + """ + + INIT_OK = 8 + """Attributes should be initialized with a blank + value (None or an empty collection) upon get, if no other + value can be obtained. + """ + + NON_PERSISTENT_OK = 16 + """Callables can be emitted if the parent is not persistent.""" + + LOAD_AGAINST_COMMITTED = 32 + """Callables should use committed values as primary/foreign keys during a + load. 
+ """ + + NO_AUTOFLUSH = 64 + """Loader callables should disable autoflush.""", + + NO_RAISE = 128 + """Loader callables should not raise any assertions""" + + DEFERRED_HISTORY_LOAD = 256 + """indicates special load of the previous value of an attribute""" + + INCLUDE_PENDING_MUTATIONS = 512 + + # pre-packaged sets of flags used as inputs + PASSIVE_OFF = ( + RELATED_OBJECT_OK | NON_PERSISTENT_OK | INIT_OK | CALLABLES_OK | SQL_OK + ) + "Callables can be emitted in all cases." + + PASSIVE_RETURN_NO_VALUE = PASSIVE_OFF ^ INIT_OK + """PASSIVE_OFF ^ INIT_OK""" + + PASSIVE_NO_INITIALIZE = PASSIVE_RETURN_NO_VALUE ^ CALLABLES_OK + "PASSIVE_RETURN_NO_VALUE ^ CALLABLES_OK" + + PASSIVE_NO_FETCH = PASSIVE_OFF ^ SQL_OK + "PASSIVE_OFF ^ SQL_OK" + + PASSIVE_NO_FETCH_RELATED = PASSIVE_OFF ^ RELATED_OBJECT_OK + "PASSIVE_OFF ^ RELATED_OBJECT_OK" + + PASSIVE_ONLY_PERSISTENT = PASSIVE_OFF ^ NON_PERSISTENT_OK + "PASSIVE_OFF ^ NON_PERSISTENT_OK" + + PASSIVE_MERGE = PASSIVE_OFF | NO_RAISE + """PASSIVE_OFF | NO_RAISE + + Symbol used specifically for session.merge() and similar cases + + """ + + +( + NO_CHANGE, + CALLABLES_OK, + SQL_OK, + RELATED_OBJECT_OK, + INIT_OK, + NON_PERSISTENT_OK, + LOAD_AGAINST_COMMITTED, + NO_AUTOFLUSH, + NO_RAISE, + DEFERRED_HISTORY_LOAD, + INCLUDE_PENDING_MUTATIONS, + PASSIVE_OFF, + PASSIVE_RETURN_NO_VALUE, + PASSIVE_NO_INITIALIZE, + PASSIVE_NO_FETCH, + PASSIVE_NO_FETCH_RELATED, + PASSIVE_ONLY_PERSISTENT, + PASSIVE_MERGE, +) = PassiveFlag.__members__.values() + +DEFAULT_MANAGER_ATTR = "_sa_class_manager" +DEFAULT_STATE_ATTR = "_sa_instance_state" + + +class EventConstants(Enum): + EXT_CONTINUE = 1 + EXT_STOP = 2 + EXT_SKIP = 3 + NO_KEY = 4 + """indicates an :class:`.AttributeEvent` event that did not have any + key argument. + + .. versionadded:: 2.0 + + """ + + +EXT_CONTINUE, EXT_STOP, EXT_SKIP, NO_KEY = tuple(EventConstants) + + +class RelationshipDirection(Enum): + """enumeration which indicates the 'direction' of a + :class:`_orm.RelationshipProperty`. + + :class:`.RelationshipDirection` is accessible from the + :attr:`_orm.Relationship.direction` attribute of + :class:`_orm.RelationshipProperty`. + + """ + + ONETOMANY = 1 + """Indicates the one-to-many direction for a :func:`_orm.relationship`. + + This symbol is typically used by the internals but may be exposed within + certain API features. + + """ + + MANYTOONE = 2 + """Indicates the many-to-one direction for a :func:`_orm.relationship`. + + This symbol is typically used by the internals but may be exposed within + certain API features. + + """ + + MANYTOMANY = 3 + """Indicates the many-to-many direction for a :func:`_orm.relationship`. + + This symbol is typically used by the internals but may be exposed within + certain API features. + + """ + + +ONETOMANY, MANYTOONE, MANYTOMANY = tuple(RelationshipDirection) + + +class InspectionAttrExtensionType(Enum): + """Symbols indicating the type of extension that a + :class:`.InspectionAttr` is part of.""" + + +class NotExtension(InspectionAttrExtensionType): + NOT_EXTENSION = "not_extension" + """Symbol indicating an :class:`InspectionAttr` that's + not part of sqlalchemy.ext. + + Is assigned to the :attr:`.InspectionAttr.extension_type` + attribute. 
+ + """ + + +_never_set = frozenset([NEVER_SET]) + +_none_set = frozenset([None, NEVER_SET, PASSIVE_NO_RESULT]) + +_SET_DEFERRED_EXPIRED = util.symbol("SET_DEFERRED_EXPIRED") + +_DEFER_FOR_STATE = util.symbol("DEFER_FOR_STATE") + +_RAISE_FOR_STATE = util.symbol("RAISE_FOR_STATE") + + +_F = TypeVar("_F", bound=Callable[..., Any]) +_Self = TypeVar("_Self") + + +def _assertions( + *assertions: Any, +) -> Callable[[_F], _F]: + @util.decorator + def generate(fn: _F, self: _Self, *args: Any, **kw: Any) -> _Self: + for assertion in assertions: + assertion(self, fn.__name__) + fn(self, *args, **kw) + return self + + return generate + + +if TYPE_CHECKING: + + def manager_of_class(cls: Type[_O]) -> ClassManager[_O]: ... + + @overload + def opt_manager_of_class(cls: AliasedClass[Any]) -> None: ... + + @overload + def opt_manager_of_class( + cls: _ExternalEntityType[_O], + ) -> Optional[ClassManager[_O]]: ... + + def opt_manager_of_class( + cls: _ExternalEntityType[_O], + ) -> Optional[ClassManager[_O]]: ... + + def instance_state(instance: _O) -> InstanceState[_O]: ... + + def instance_dict(instance: object) -> Dict[str, Any]: ... + +else: + # these can be replaced by sqlalchemy.ext.instrumentation + # if augmented class instrumentation is enabled. + + def manager_of_class(cls): + try: + return cls.__dict__[DEFAULT_MANAGER_ATTR] + except KeyError as ke: + raise exc.UnmappedClassError( + cls, f"Can't locate an instrumentation manager for class {cls}" + ) from ke + + def opt_manager_of_class(cls): + return cls.__dict__.get(DEFAULT_MANAGER_ATTR) + + instance_state = operator.attrgetter(DEFAULT_STATE_ATTR) + + instance_dict = operator.attrgetter("__dict__") + + +def instance_str(instance: object) -> str: + """Return a string describing an instance.""" + + return state_str(instance_state(instance)) + + +def state_str(state: InstanceState[Any]) -> str: + """Return a string describing an instance via its InstanceState.""" + + if state is None: + return "None" + else: + return "<%s at 0x%x>" % (state.class_.__name__, id(state.obj())) + + +def state_class_str(state: InstanceState[Any]) -> str: + """Return a string describing an instance's class via its + InstanceState. + """ + + if state is None: + return "None" + else: + return "<%s>" % (state.class_.__name__,) + + +def attribute_str(instance: object, attribute: str) -> str: + return instance_str(instance) + "." + attribute + + +def state_attribute_str(state: InstanceState[Any], attribute: str) -> str: + return state_str(state) + "." + attribute + + +def object_mapper(instance: _T) -> Mapper[_T]: + """Given an object, return the primary Mapper associated with the object + instance. + + Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError` + if no mapping is configured. + + This function is available via the inspection system as:: + + inspect(instance).mapper + + Using the inspection system will raise + :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is + not part of a mapping. + + """ + return object_state(instance).mapper + + +def object_state(instance: _T) -> InstanceState[_T]: + """Given an object, return the :class:`.InstanceState` + associated with the object. + + Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError` + if no mapping is configured. + + Equivalent functionality is available via the :func:`_sa.inspect` + function as:: + + inspect(instance) + + Using the inspection system will raise + :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is + not part of a mapping. 
+ + """ + state = _inspect_mapped_object(instance) + if state is None: + raise exc.UnmappedInstanceError(instance) + else: + return state + + +@inspection._inspects(object) +def _inspect_mapped_object(instance: _T) -> Optional[InstanceState[_T]]: + try: + return instance_state(instance) + except (exc.UnmappedClassError,) + exc.NO_STATE: + return None + + +def _class_to_mapper( + class_or_mapper: Union[Mapper[_T], Type[_T]] +) -> Mapper[_T]: + # can't get mypy to see an overload for this + insp = inspection.inspect(class_or_mapper, False) + if insp is not None: + return insp.mapper # type: ignore + else: + assert isinstance(class_or_mapper, type) + raise exc.UnmappedClassError(class_or_mapper) + + +def _mapper_or_none( + entity: Union[Type[_T], _InternalEntityType[_T]] +) -> Optional[Mapper[_T]]: + """Return the :class:`_orm.Mapper` for the given class or None if the + class is not mapped. + """ + + # can't get mypy to see an overload for this + insp = inspection.inspect(entity, False) + if insp is not None: + return insp.mapper # type: ignore + else: + return None + + +def _is_mapped_class(entity: Any) -> bool: + """Return True if the given object is a mapped class, + :class:`_orm.Mapper`, or :class:`.AliasedClass`. + """ + + insp = inspection.inspect(entity, False) + return ( + insp is not None + and not insp.is_clause_element + and (insp.is_mapper or insp.is_aliased_class) + ) + + +def _is_aliased_class(entity: Any) -> bool: + insp = inspection.inspect(entity, False) + return insp is not None and getattr(insp, "is_aliased_class", False) + + +@no_type_check +def _entity_descriptor(entity: _EntityType[Any], key: str) -> Any: + """Return a class attribute given an entity and string name. + + May return :class:`.InstrumentedAttribute` or user-defined + attribute. + + """ + insp = inspection.inspect(entity) + if insp.is_selectable: + description = entity + entity = insp.c + elif insp.is_aliased_class: + entity = insp.entity + description = entity + elif hasattr(insp, "mapper"): + description = entity = insp.mapper.class_ + else: + description = entity + + try: + return getattr(entity, key) + except AttributeError as err: + raise sa_exc.InvalidRequestError( + "Entity '%s' has no property '%s'" % (description, key) + ) from err + + +if TYPE_CHECKING: + + def _state_mapper(state: InstanceState[_O]) -> Mapper[_O]: ... + +else: + _state_mapper = util.dottedgetter("manager.mapper") + + +def _inspect_mapped_class( + class_: Type[_O], configure: bool = False +) -> Optional[Mapper[_O]]: + try: + class_manager = opt_manager_of_class(class_) + if class_manager is None or not class_manager.is_mapped: + return None + mapper = class_manager.mapper + except exc.NO_STATE: + return None + else: + if configure: + mapper._check_configure() + return mapper + + +def _parse_mapper_argument(arg: Union[Mapper[_O], Type[_O]]) -> Mapper[_O]: + insp = inspection.inspect(arg, raiseerr=False) + if insp_is_mapper(insp): + return insp + + raise sa_exc.ArgumentError(f"Mapper or mapped class expected, got {arg!r}") + + +def class_mapper(class_: Type[_O], configure: bool = True) -> Mapper[_O]: + """Given a class, return the primary :class:`_orm.Mapper` associated + with the key. + + Raises :exc:`.UnmappedClassError` if no mapping is configured + on the given class, or :exc:`.ArgumentError` if a non-class + object is passed. 
+ + Equivalent functionality is available via the :func:`_sa.inspect` + function as:: + + inspect(some_mapped_class) + + Using the inspection system will raise + :class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped. + + """ + mapper = _inspect_mapped_class(class_, configure=configure) + if mapper is None: + if not isinstance(class_, type): + raise sa_exc.ArgumentError( + "Class object expected, got '%r'." % (class_,) + ) + raise exc.UnmappedClassError(class_) + else: + return mapper + + +class InspectionAttr: + """A base class applied to all ORM objects and attributes that are + related to things that can be returned by the :func:`_sa.inspect` function. + + The attributes defined here allow the usage of simple boolean + checks to test basic facts about the object returned. + + While the boolean checks here are basically the same as using + the Python isinstance() function, the flags here can be used without + the need to import all of these classes, and also such that + the SQLAlchemy class system can change while leaving the flags + here intact for forwards-compatibility. + + """ + + __slots__: Tuple[str, ...] = () + + is_selectable = False + """Return True if this object is an instance of + :class:`_expression.Selectable`.""" + + is_aliased_class = False + """True if this object is an instance of :class:`.AliasedClass`.""" + + is_instance = False + """True if this object is an instance of :class:`.InstanceState`.""" + + is_mapper = False + """True if this object is an instance of :class:`_orm.Mapper`.""" + + is_bundle = False + """True if this object is an instance of :class:`.Bundle`.""" + + is_property = False + """True if this object is an instance of :class:`.MapperProperty`.""" + + is_attribute = False + """True if this object is a Python :term:`descriptor`. + + This can refer to one of many types. Usually a + :class:`.QueryableAttribute` which handles attributes events on behalf + of a :class:`.MapperProperty`. But can also be an extension type + such as :class:`.AssociationProxy` or :class:`.hybrid_property`. + The :attr:`.InspectionAttr.extension_type` will refer to a constant + identifying the specific subtype. + + .. seealso:: + + :attr:`_orm.Mapper.all_orm_descriptors` + + """ + + _is_internal_proxy = False + """True if this object is an internal proxy object. + + .. versionadded:: 1.2.12 + + """ + + is_clause_element = False + """True if this object is an instance of + :class:`_expression.ClauseElement`.""" + + extension_type: InspectionAttrExtensionType = NotExtension.NOT_EXTENSION + """The extension type, if any. + Defaults to :attr:`.interfaces.NotExtension.NOT_EXTENSION` + + .. seealso:: + + :class:`.HybridExtensionType` + + :class:`.AssociationProxyExtensionType` + + """ + + +class InspectionAttrInfo(InspectionAttr): + """Adds the ``.info`` attribute to :class:`.InspectionAttr`. + + The rationale for :class:`.InspectionAttr` vs. :class:`.InspectionAttrInfo` + is that the former is compatible as a mixin for classes that specify + ``__slots__``; this is essentially an implementation artifact. + + """ + + __slots__ = () + + @util.ro_memoized_property + def info(self) -> _InfoType: + """Info dictionary associated with the object, allowing user-defined + data to be associated with this :class:`.InspectionAttr`. + + The dictionary is generated when first accessed. Alternatively, + it can be specified as a constructor argument to the + :func:`.column_property`, :func:`_orm.relationship`, or + :func:`.composite` + functions. + + .. 
seealso::
+
+            :attr:`.QueryableAttribute.info`
+
+            :attr:`.SchemaItem.info`
+
+        """
+        return {}
+
+
+class SQLORMOperations(SQLCoreOperations[_T_co], TypingOnly):
+    __slots__ = ()
+
+    if typing.TYPE_CHECKING:
+
+        def of_type(
+            self, class_: _EntityType[Any]
+        ) -> PropComparator[_T_co]: ...
+
+        def and_(
+            self, *criteria: _ColumnExpressionArgument[bool]
+        ) -> PropComparator[bool]: ...
+
+        def any(  # noqa: A001
+            self,
+            criterion: Optional[_ColumnExpressionArgument[bool]] = None,
+            **kwargs: Any,
+        ) -> ColumnElement[bool]: ...
+
+        def has(
+            self,
+            criterion: Optional[_ColumnExpressionArgument[bool]] = None,
+            **kwargs: Any,
+        ) -> ColumnElement[bool]: ...
+
+
+class ORMDescriptor(Generic[_T_co], TypingOnly):
+    """Represent any Python descriptor that provides a SQL expression
+    construct at the class level."""
+
+    __slots__ = ()
+
+    if typing.TYPE_CHECKING:
+
+        @overload
+        def __get__(
+            self, instance: Any, owner: Literal[None]
+        ) -> ORMDescriptor[_T_co]: ...
+
+        @overload
+        def __get__(
+            self, instance: Literal[None], owner: Any
+        ) -> SQLCoreOperations[_T_co]: ...
+
+        @overload
+        def __get__(self, instance: object, owner: Any) -> _T_co: ...
+
+        def __get__(
+            self, instance: object, owner: Any
+        ) -> Union[ORMDescriptor[_T_co], SQLCoreOperations[_T_co], _T_co]: ...
+
+
+class _MappedAnnotationBase(Generic[_T_co], TypingOnly):
+    """common class for Mapped and similar ORM container classes.
+
+    these are classes that can appear on the left side of an ORM declarative
+    mapping, containing a mapped class or in some cases a collection
+    surrounding a mapped class.
+
+    """
+
+    __slots__ = ()
+
+
+class SQLORMExpression(
+    SQLORMOperations[_T_co], SQLColumnExpression[_T_co], TypingOnly
+):
+    """A type that may be used to indicate any ORM-level attribute or
+    object that acts in place of one, in the context of SQL expression
+    construction.
+
+    :class:`.SQLORMExpression` extends from the Core
+    :class:`.SQLColumnExpression` to add additional SQL methods that are ORM
+    specific, such as :meth:`.PropComparator.of_type`, and is part of the bases
+    for :class:`.InstrumentedAttribute`. It may be used in :pep:`484` typing to
+    indicate arguments or return values that should behave as ORM-level
+    attribute expressions.
+
+    .. versionadded:: 2.0.0b4
+
+
+    """
+
+    __slots__ = ()
+
+
+class Mapped(
+    SQLORMExpression[_T_co],
+    ORMDescriptor[_T_co],
+    _MappedAnnotationBase[_T_co],
+    roles.DDLConstraintColumnRole,
+):
+    """Represent an ORM mapped attribute on a mapped class.
+
+    This class represents the complete descriptor interface for any class
+    attribute that will have been :term:`instrumented` by the ORM
+    :class:`_orm.Mapper` class. Provides appropriate information to type
+    checkers such as pylance and mypy so that ORM-mapped attributes
+    are correctly typed.
+
+    The most prominent use of :class:`_orm.Mapped` is in
+    the :ref:`Declarative Mapping ` form
+    of :class:`_orm.Mapper` configuration, where used explicitly it drives
+    the configuration of ORM attributes such as :func:`_orm.mapped_column`
+    and :func:`_orm.relationship`.
+
+    .. seealso::
+
+        :ref:`orm_explicit_declarative_base`
+
+        :ref:`orm_declarative_table`
+
+    .. tip::
+
+        The :class:`_orm.Mapped` class represents attributes that are handled
+        directly by the :class:`_orm.Mapper` class. It does not include other
+        Python descriptor classes that are provided as extensions, including
+        :ref:`hybrids_toplevel` and the :ref:`associationproxy_toplevel`.
+ While these systems still make use of ORM-specific superclasses + and structures, they are not :term:`instrumented` by the + :class:`_orm.Mapper` and instead provide their own functionality + when they are accessed on a class. + + .. versionadded:: 1.4 + + + """ + + __slots__ = () + + if typing.TYPE_CHECKING: + + @overload + def __get__( + self, instance: None, owner: Any + ) -> InstrumentedAttribute[_T_co]: ... + + @overload + def __get__(self, instance: object, owner: Any) -> _T_co: ... + + def __get__( + self, instance: Optional[object], owner: Any + ) -> Union[InstrumentedAttribute[_T_co], _T_co]: ... + + @classmethod + def _empty_constructor(cls, arg1: Any) -> Mapped[_T_co]: ... + + def __set__( + self, instance: Any, value: Union[SQLCoreOperations[_T_co], _T_co] + ) -> None: ... + + def __delete__(self, instance: Any) -> None: ... + + +class _MappedAttribute(Generic[_T_co], TypingOnly): + """Mixin for attributes which should be replaced by mapper-assigned + attributes. + + """ + + __slots__ = () + + +class _DeclarativeMapped(Mapped[_T_co], _MappedAttribute[_T_co]): + """Mixin for :class:`.MapperProperty` subclasses that allows them to + be compatible with ORM-annotated declarative mappings. + + """ + + __slots__ = () + + # MappedSQLExpression, Relationship, Composite etc. dont actually do + # SQL expression behavior. yet there is code that compares them with + # __eq__(), __ne__(), etc. Since #8847 made Mapped even more full + # featured including ColumnOperators, we need to have those methods + # be no-ops for these objects, so return NotImplemented to fall back + # to normal comparison behavior. + def operate(self, op: OperatorType, *other: Any, **kwargs: Any) -> Any: + return NotImplemented + + __sa_operate__ = operate + + def reverse_operate( + self, op: OperatorType, other: Any, **kwargs: Any + ) -> Any: + return NotImplemented + + +class DynamicMapped(_MappedAnnotationBase[_T_co]): + """Represent the ORM mapped attribute type for a "dynamic" relationship. + + The :class:`_orm.DynamicMapped` type annotation may be used in an + :ref:`Annotated Declarative Table ` mapping + to indicate that the ``lazy="dynamic"`` loader strategy should be used + for a particular :func:`_orm.relationship`. + + .. legacy:: The "dynamic" lazy loader strategy is the legacy form of what + is now the "write_only" strategy described in the section + :ref:`write_only_relationship`. + + E.g.:: + + class User(Base): + __tablename__ = "user" + id: Mapped[int] = mapped_column(primary_key=True) + addresses: DynamicMapped[Address] = relationship( + cascade="all,delete-orphan" + ) + + See the section :ref:`dynamic_relationship` for background. + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`dynamic_relationship` - complete background + + :class:`.WriteOnlyMapped` - fully 2.0 style version + + """ + + __slots__ = () + + if TYPE_CHECKING: + + @overload + def __get__( + self, instance: None, owner: Any + ) -> InstrumentedAttribute[_T_co]: ... + + @overload + def __get__( + self, instance: object, owner: Any + ) -> AppenderQuery[_T_co]: ... + + def __get__( + self, instance: Optional[object], owner: Any + ) -> Union[InstrumentedAttribute[_T_co], AppenderQuery[_T_co]]: ... + + def __set__( + self, instance: Any, value: typing.Collection[_T_co] + ) -> None: ... + + +class WriteOnlyMapped(_MappedAnnotationBase[_T_co]): + """Represent the ORM mapped attribute type for a "write only" relationship. 
+ + The :class:`_orm.WriteOnlyMapped` type annotation may be used in an + :ref:`Annotated Declarative Table ` mapping + to indicate that the ``lazy="write_only"`` loader strategy should be used + for a particular :func:`_orm.relationship`. + + E.g.:: + + class User(Base): + __tablename__ = "user" + id: Mapped[int] = mapped_column(primary_key=True) + addresses: WriteOnlyMapped[Address] = relationship( + cascade="all,delete-orphan" + ) + + See the section :ref:`write_only_relationship` for background. + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`write_only_relationship` - complete background + + :class:`.DynamicMapped` - includes legacy :class:`_orm.Query` support + + """ + + __slots__ = () + + if TYPE_CHECKING: + + @overload + def __get__( + self, instance: None, owner: Any + ) -> InstrumentedAttribute[_T_co]: ... + + @overload + def __get__( + self, instance: object, owner: Any + ) -> WriteOnlyCollection[_T_co]: ... + + def __get__( + self, instance: Optional[object], owner: Any + ) -> Union[ + InstrumentedAttribute[_T_co], WriteOnlyCollection[_T_co] + ]: ... + + def __set__( + self, instance: Any, value: typing.Collection[_T_co] + ) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/bulk_persistence.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/bulk_persistence.py new file mode 100644 index 00000000..2a23caad --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/bulk_persistence.py @@ -0,0 +1,2084 @@ +# orm/bulk_persistence.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +"""additional ORM persistence classes related to "bulk" operations, +specifically outside of the flush() process. + +""" + +from __future__ import annotations + +from typing import Any +from typing import cast +from typing import Dict +from typing import Iterable +from typing import Optional +from typing import overload +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import attributes +from . import context +from . import evaluator +from . import exc as orm_exc +from . import loading +from . import persistence +from .base import NO_VALUE +from .context import AbstractORMCompileState +from .context import FromStatement +from .context import ORMFromStatementCompileState +from .context import QueryContext +from .. import exc as sa_exc +from .. 
import util +from ..engine import Dialect +from ..engine import result as _result +from ..sql import coercions +from ..sql import dml +from ..sql import expression +from ..sql import roles +from ..sql import select +from ..sql import sqltypes +from ..sql.base import _entity_namespace_key +from ..sql.base import CompileState +from ..sql.base import Options +from ..sql.dml import DeleteDMLState +from ..sql.dml import InsertDMLState +from ..sql.dml import UpdateDMLState +from ..util import EMPTY_DICT +from ..util.typing import Literal + +if TYPE_CHECKING: + from ._typing import DMLStrategyArgument + from ._typing import OrmExecuteOptionsParameter + from ._typing import SynchronizeSessionArgument + from .mapper import Mapper + from .session import _BindArguments + from .session import ORMExecuteState + from .session import Session + from .session import SessionTransaction + from .state import InstanceState + from ..engine import Connection + from ..engine import cursor + from ..engine.interfaces import _CoreAnyExecuteParams + +_O = TypeVar("_O", bound=object) + + +@overload +def _bulk_insert( + mapper: Mapper[_O], + mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], + session_transaction: SessionTransaction, + *, + isstates: bool, + return_defaults: bool, + render_nulls: bool, + use_orm_insert_stmt: Literal[None] = ..., + execution_options: Optional[OrmExecuteOptionsParameter] = ..., +) -> None: ... + + +@overload +def _bulk_insert( + mapper: Mapper[_O], + mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], + session_transaction: SessionTransaction, + *, + isstates: bool, + return_defaults: bool, + render_nulls: bool, + use_orm_insert_stmt: Optional[dml.Insert] = ..., + execution_options: Optional[OrmExecuteOptionsParameter] = ..., +) -> cursor.CursorResult[Any]: ... + + +def _bulk_insert( + mapper: Mapper[_O], + mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], + session_transaction: SessionTransaction, + *, + isstates: bool, + return_defaults: bool, + render_nulls: bool, + use_orm_insert_stmt: Optional[dml.Insert] = None, + execution_options: Optional[OrmExecuteOptionsParameter] = None, +) -> Optional[cursor.CursorResult[Any]]: + base_mapper = mapper.base_mapper + + if session_transaction.session.connection_callable: + raise NotImplementedError( + "connection_callable / per-instance sharding " + "not supported in bulk_insert()" + ) + + if isstates: + if TYPE_CHECKING: + mappings = cast(Iterable[InstanceState[_O]], mappings) + + if return_defaults: + # list of states allows us to attach .key for return_defaults case + states = [(state, state.dict) for state in mappings] + mappings = [dict_ for (state, dict_) in states] + else: + mappings = [state.dict for state in mappings] + else: + if TYPE_CHECKING: + mappings = cast(Iterable[Dict[str, Any]], mappings) + + if return_defaults: + # use dictionaries given, so that newly populated defaults + # can be delivered back to the caller (see #11661). This is **not** + # compatible with other use cases such as a session-executed + # insert() construct, as this will confuse the case of + # insert-per-subclass for joined inheritance cases (see + # test_bulk_statements.py::BulkDMLReturningJoinedInhTest). 
+ # + # So in this conditional, we have **only** called + # session.bulk_insert_mappings() which does not have this + # requirement + mappings = list(mappings) + else: + # for all other cases we need to establish a local dictionary + # so that the incoming dictionaries aren't mutated + mappings = [dict(m) for m in mappings] + _expand_composites(mapper, mappings) + + connection = session_transaction.connection(base_mapper) + + return_result: Optional[cursor.CursorResult[Any]] = None + + mappers_to_run = [ + (table, mp) + for table, mp in base_mapper._sorted_tables.items() + if table in mapper._pks_by_table + ] + + if return_defaults: + # not used by new-style bulk inserts, only used for legacy + bookkeeping = True + elif len(mappers_to_run) > 1: + # if we have more than one table, mapper to run where we will be + # either horizontally splicing, or copying values between tables, + # we need the "bookkeeping" / deterministic returning order + bookkeeping = True + else: + bookkeeping = False + + for table, super_mapper in mappers_to_run: + # find bindparams in the statement. For bulk, we don't really know if + # a key in the params applies to a different table since we are + # potentially inserting for multiple tables here; looking at the + # bindparam() is a lot more direct. in most cases this will + # use _generate_cache_key() which is memoized, although in practice + # the ultimate statement that's executed is probably not the same + # object so that memoization might not matter much. + extra_bp_names = ( + [ + b.key + for b in use_orm_insert_stmt._get_embedded_bindparams() + if b.key in mappings[0] + ] + if use_orm_insert_stmt is not None + else () + ) + + records = ( + ( + None, + state_dict, + params, + mapper, + connection, + value_params, + has_all_pks, + has_all_defaults, + ) + for ( + state, + state_dict, + params, + mp, + conn, + value_params, + has_all_pks, + has_all_defaults, + ) in persistence._collect_insert_commands( + table, + ((None, mapping, mapper, connection) for mapping in mappings), + bulk=True, + return_defaults=bookkeeping, + render_nulls=render_nulls, + include_bulk_keys=extra_bp_names, + ) + ) + + result = persistence._emit_insert_statements( + base_mapper, + None, + super_mapper, + table, + records, + bookkeeping=bookkeeping, + use_orm_insert_stmt=use_orm_insert_stmt, + execution_options=execution_options, + ) + if use_orm_insert_stmt is not None: + if not use_orm_insert_stmt._returning or return_result is None: + return_result = result + elif result.returns_rows: + assert bookkeeping + return_result = return_result.splice_horizontally(result) + + if return_defaults and isstates: + identity_cls = mapper._identity_class + identity_props = [p.key for p in mapper._identity_key_props] + for state, dict_ in states: + state.key = ( + identity_cls, + tuple([dict_[key] for key in identity_props]), + None, + ) + + if use_orm_insert_stmt is not None: + assert return_result is not None + return return_result + + +@overload +def _bulk_update( + mapper: Mapper[Any], + mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], + session_transaction: SessionTransaction, + *, + isstates: bool, + update_changed_only: bool, + use_orm_update_stmt: Literal[None] = ..., + enable_check_rowcount: bool = True, +) -> None: ... 
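+
+# Illustrative note (not part of the library source): _bulk_update() is the
+# backend for the legacy Session.bulk_update_mappings() API, e.g.::
+#
+#     session.bulk_update_mappings(
+#         User,  # hypothetical mapped class
+#         [{"id": 1, "name": "ed"}, {"id": 2, "name": "wendy"}],
+#     )
+#
+# each dictionary must include the primary key values; the remaining keys
+# become the SET clause of the UPDATE for that row.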
+ + +@overload +def _bulk_update( + mapper: Mapper[Any], + mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], + session_transaction: SessionTransaction, + *, + isstates: bool, + update_changed_only: bool, + use_orm_update_stmt: Optional[dml.Update] = ..., + enable_check_rowcount: bool = True, +) -> _result.Result[Any]: ... + + +def _bulk_update( + mapper: Mapper[Any], + mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], + session_transaction: SessionTransaction, + *, + isstates: bool, + update_changed_only: bool, + use_orm_update_stmt: Optional[dml.Update] = None, + enable_check_rowcount: bool = True, +) -> Optional[_result.Result[Any]]: + base_mapper = mapper.base_mapper + + search_keys = mapper._primary_key_propkeys + if mapper._version_id_prop: + search_keys = {mapper._version_id_prop.key}.union(search_keys) + + def _changed_dict(mapper, state): + return { + k: v + for k, v in state.dict.items() + if k in state.committed_state or k in search_keys + } + + if isstates: + if update_changed_only: + mappings = [_changed_dict(mapper, state) for state in mappings] + else: + mappings = [state.dict for state in mappings] + else: + mappings = [dict(m) for m in mappings] + _expand_composites(mapper, mappings) + + if session_transaction.session.connection_callable: + raise NotImplementedError( + "connection_callable / per-instance sharding " + "not supported in bulk_update()" + ) + + connection = session_transaction.connection(base_mapper) + + # find bindparams in the statement. see _bulk_insert for similar + # notes for the insert case + extra_bp_names = ( + [ + b.key + for b in use_orm_update_stmt._get_embedded_bindparams() + if b.key in mappings[0] + ] + if use_orm_update_stmt is not None + else () + ) + + for table, super_mapper in base_mapper._sorted_tables.items(): + if not mapper.isa(super_mapper) or table not in mapper._pks_by_table: + continue + + records = persistence._collect_update_commands( + None, + table, + ( + ( + None, + mapping, + mapper, + connection, + ( + mapping[mapper._version_id_prop.key] + if mapper._version_id_prop + else None + ), + ) + for mapping in mappings + ), + bulk=True, + use_orm_update_stmt=use_orm_update_stmt, + include_bulk_keys=extra_bp_names, + ) + persistence._emit_update_statements( + base_mapper, + None, + super_mapper, + table, + records, + bookkeeping=False, + use_orm_update_stmt=use_orm_update_stmt, + enable_check_rowcount=enable_check_rowcount, + ) + + if use_orm_update_stmt is not None: + return _result.null_result() + + +def _expand_composites(mapper, mappings): + composite_attrs = mapper.composites + if not composite_attrs: + return + + composite_keys = set(composite_attrs.keys()) + populators = { + key: composite_attrs[key]._populate_composite_bulk_save_mappings_fn() + for key in composite_keys + } + for mapping in mappings: + for key in composite_keys.intersection(mapping): + populators[key](mapping) + + +class ORMDMLState(AbstractORMCompileState): + is_dml_returning = True + from_statement_ctx: Optional[ORMFromStatementCompileState] = None + + @classmethod + def _get_orm_crud_kv_pairs( + cls, mapper, statement, kv_iterator, needs_to_be_cacheable + ): + core_get_crud_kv_pairs = UpdateDMLState._get_crud_kv_pairs + + for k, v in kv_iterator: + k = coercions.expect(roles.DMLColumnRole, k) + + if isinstance(k, str): + desc = _entity_namespace_key(mapper, k, default=NO_VALUE) + if desc is NO_VALUE: + yield ( + coercions.expect(roles.DMLColumnRole, k), + ( + coercions.expect( + roles.ExpressionElementRole, + v, 
+ type_=sqltypes.NullType(), + is_crud=True, + ) + if needs_to_be_cacheable + else v + ), + ) + else: + yield from core_get_crud_kv_pairs( + statement, + desc._bulk_update_tuples(v), + needs_to_be_cacheable, + ) + elif "entity_namespace" in k._annotations: + k_anno = k._annotations + attr = _entity_namespace_key( + k_anno["entity_namespace"], k_anno["proxy_key"] + ) + yield from core_get_crud_kv_pairs( + statement, + attr._bulk_update_tuples(v), + needs_to_be_cacheable, + ) + else: + yield ( + k, + ( + v + if not needs_to_be_cacheable + else coercions.expect( + roles.ExpressionElementRole, + v, + type_=sqltypes.NullType(), + is_crud=True, + ) + ), + ) + + @classmethod + def _get_multi_crud_kv_pairs(cls, statement, kv_iterator): + plugin_subject = statement._propagate_attrs["plugin_subject"] + + if not plugin_subject or not plugin_subject.mapper: + return UpdateDMLState._get_multi_crud_kv_pairs( + statement, kv_iterator + ) + + return [ + dict( + cls._get_orm_crud_kv_pairs( + plugin_subject.mapper, statement, value_dict.items(), False + ) + ) + for value_dict in kv_iterator + ] + + @classmethod + def _get_crud_kv_pairs(cls, statement, kv_iterator, needs_to_be_cacheable): + assert ( + needs_to_be_cacheable + ), "no test coverage for needs_to_be_cacheable=False" + + plugin_subject = statement._propagate_attrs["plugin_subject"] + + if not plugin_subject or not plugin_subject.mapper: + return UpdateDMLState._get_crud_kv_pairs( + statement, kv_iterator, needs_to_be_cacheable + ) + + return list( + cls._get_orm_crud_kv_pairs( + plugin_subject.mapper, + statement, + kv_iterator, + needs_to_be_cacheable, + ) + ) + + @classmethod + def get_entity_description(cls, statement): + ext_info = statement.table._annotations["parententity"] + mapper = ext_info.mapper + if ext_info.is_aliased_class: + _label_name = ext_info.name + else: + _label_name = mapper.class_.__name__ + + return { + "name": _label_name, + "type": mapper.class_, + "expr": ext_info.entity, + "entity": ext_info.entity, + "table": mapper.local_table, + } + + @classmethod + def get_returning_column_descriptions(cls, statement): + def _ent_for_col(c): + return c._annotations.get("parententity", None) + + def _attr_for_col(c, ent): + if ent is None: + return c + proxy_key = c._annotations.get("proxy_key", None) + if not proxy_key: + return c + else: + return getattr(ent.entity, proxy_key, c) + + return [ + { + "name": c.key, + "type": c.type, + "expr": _attr_for_col(c, ent), + "aliased": ent.is_aliased_class, + "entity": ent.entity, + } + for c, ent in [ + (c, _ent_for_col(c)) for c in statement._all_selected_columns + ] + ] + + def _setup_orm_returning( + self, + compiler, + orm_level_statement, + dml_level_statement, + dml_mapper, + *, + use_supplemental_cols=True, + ): + """establish ORM column handlers for an INSERT, UPDATE, or DELETE + which uses explicit returning(). + + called within compilation level create_for_statement. + + The _return_orm_returning() method then receives the Result + after the statement was executed, and applies ORM loading to the + state that we first established here. 
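+
+        For illustration, the kind of statement handled here looks like
+        (a sketch; ``User`` is a hypothetical mapped class)::
+
+            stmt = update(User).where(User.id == 5).returning(User)
+            for user in session.scalars(stmt):
+                ...  # ORM-loaded User objects built from RETURNING rows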
+ + """ + + if orm_level_statement._returning: + fs = FromStatement( + orm_level_statement._returning, + dml_level_statement, + _adapt_on_names=False, + ) + fs = fs.execution_options(**orm_level_statement._execution_options) + fs = fs.options(*orm_level_statement._with_options) + self.select_statement = fs + self.from_statement_ctx = fsc = ( + ORMFromStatementCompileState.create_for_statement(fs, compiler) + ) + fsc.setup_dml_returning_compile_state(dml_mapper) + + dml_level_statement = dml_level_statement._generate() + dml_level_statement._returning = () + + cols_to_return = [c for c in fsc.primary_columns if c is not None] + + # since we are splicing result sets together, make sure there + # are columns of some kind returned in each result set + if not cols_to_return: + cols_to_return.extend(dml_mapper.primary_key) + + if use_supplemental_cols: + dml_level_statement = dml_level_statement.return_defaults( + # this is a little weird looking, but by passing + # primary key as the main list of cols, this tells + # return_defaults to omit server-default cols (and + # actually all cols, due to some weird thing we should + # clean up in crud.py). + # Since we have cols_to_return, just return what we asked + # for (plus primary key, which ORM persistence needs since + # we likely set bookkeeping=True here, which is another + # whole thing...). We dont want to clutter the + # statement up with lots of other cols the user didn't + # ask for. see #9685 + *dml_mapper.primary_key, + supplemental_cols=cols_to_return, + ) + else: + dml_level_statement = dml_level_statement.returning( + *cols_to_return + ) + + return dml_level_statement + + @classmethod + def _return_orm_returning( + cls, + session, + statement, + params, + execution_options, + bind_arguments, + result, + ): + execution_context = result.context + compile_state = execution_context.compiled.compile_state + + if ( + compile_state.from_statement_ctx + and not compile_state.from_statement_ctx.compile_options._is_star + ): + load_options = execution_options.get( + "_sa_orm_load_options", QueryContext.default_load_options + ) + + querycontext = QueryContext( + compile_state.from_statement_ctx, + compile_state.select_statement, + statement, + params, + session, + load_options, + execution_options, + bind_arguments, + ) + return loading.instances(result, querycontext) + else: + return result + + +class BulkUDCompileState(ORMDMLState): + class default_update_options(Options): + _dml_strategy: DMLStrategyArgument = "auto" + _synchronize_session: SynchronizeSessionArgument = "auto" + _can_use_returning: bool = False + _is_delete_using: bool = False + _is_update_from: bool = False + _autoflush: bool = True + _subject_mapper: Optional[Mapper[Any]] = None + _resolved_values = EMPTY_DICT + _eval_condition = None + _matched_rows = None + _identity_token = None + + @classmethod + def can_use_returning( + cls, + dialect: Dialect, + mapper: Mapper[Any], + *, + is_multitable: bool = False, + is_update_from: bool = False, + is_delete_using: bool = False, + is_executemany: bool = False, + ) -> bool: + raise NotImplementedError() + + @classmethod + def orm_pre_session_exec( + cls, + session, + statement, + params, + execution_options, + bind_arguments, + is_pre_event, + ): + ( + update_options, + execution_options, + ) = BulkUDCompileState.default_update_options.from_execution_options( + "_sa_orm_update_options", + { + "synchronize_session", + "autoflush", + "identity_token", + "is_delete_using", + "is_update_from", + "dml_strategy", + }, + execution_options, 
+ statement._execution_options, + ) + bind_arguments["clause"] = statement + try: + plugin_subject = statement._propagate_attrs["plugin_subject"] + except KeyError: + assert False, "statement had 'orm' plugin but no plugin_subject" + else: + if plugin_subject: + bind_arguments["mapper"] = plugin_subject.mapper + update_options += {"_subject_mapper": plugin_subject.mapper} + + if "parententity" not in statement.table._annotations: + update_options += {"_dml_strategy": "core_only"} + elif not isinstance(params, list): + if update_options._dml_strategy == "auto": + update_options += {"_dml_strategy": "orm"} + elif update_options._dml_strategy == "bulk": + raise sa_exc.InvalidRequestError( + 'Can\'t use "bulk" ORM insert strategy without ' + "passing separate parameters" + ) + else: + if update_options._dml_strategy == "auto": + update_options += {"_dml_strategy": "bulk"} + + sync = update_options._synchronize_session + if sync is not None: + if sync not in ("auto", "evaluate", "fetch", False): + raise sa_exc.ArgumentError( + "Valid strategies for session synchronization " + "are 'auto', 'evaluate', 'fetch', False" + ) + if update_options._dml_strategy == "bulk" and sync == "fetch": + raise sa_exc.InvalidRequestError( + "The 'fetch' synchronization strategy is not available " + "for 'bulk' ORM updates (i.e. multiple parameter sets)" + ) + + if not is_pre_event: + if update_options._autoflush: + session._autoflush() + + if update_options._dml_strategy == "orm": + if update_options._synchronize_session == "auto": + update_options = cls._do_pre_synchronize_auto( + session, + statement, + params, + execution_options, + bind_arguments, + update_options, + ) + elif update_options._synchronize_session == "evaluate": + update_options = cls._do_pre_synchronize_evaluate( + session, + statement, + params, + execution_options, + bind_arguments, + update_options, + ) + elif update_options._synchronize_session == "fetch": + update_options = cls._do_pre_synchronize_fetch( + session, + statement, + params, + execution_options, + bind_arguments, + update_options, + ) + elif update_options._dml_strategy == "bulk": + if update_options._synchronize_session == "auto": + update_options += {"_synchronize_session": "evaluate"} + + # indicators from the "pre exec" step that are then + # added to the DML statement, which will also be part of the cache + # key. The compile level create_for_statement() method will then + # consume these at compiler time. + statement = statement._annotate( + { + "synchronize_session": update_options._synchronize_session, + "is_delete_using": update_options._is_delete_using, + "is_update_from": update_options._is_update_from, + "dml_strategy": update_options._dml_strategy, + "can_use_returning": update_options._can_use_returning, + } + ) + + return ( + statement, + util.immutabledict(execution_options).union( + {"_sa_orm_update_options": update_options} + ), + ) + + @classmethod + def orm_setup_cursor_result( + cls, + session, + statement, + params, + execution_options, + bind_arguments, + result, + ): + # this stage of the execution is called after the + # do_orm_execute event hook. meaning for an extension like + # horizontal sharding, this step happens *within* the horizontal + # sharding event handler which calls session.execute() re-entrantly + # and will occur for each backend individually. + # the sharding extension then returns its own merged result from the + # individual ones we return here. 
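+        #
+        # an illustrative sketch of such a hook (not library code)::
+        #
+        #     @event.listens_for(Session, "do_orm_execute")
+        #     def _route_to_shards(orm_execute_state):
+        #         if orm_execute_state.is_update:
+        #             # re-entrant execution; this method then runs once
+        #             # per backend on the way back out
+        #             return orm_execute_state.invoke_statement()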
+ + update_options = execution_options["_sa_orm_update_options"] + if update_options._dml_strategy == "orm": + if update_options._synchronize_session == "evaluate": + cls._do_post_synchronize_evaluate( + session, statement, result, update_options + ) + elif update_options._synchronize_session == "fetch": + cls._do_post_synchronize_fetch( + session, statement, result, update_options + ) + elif update_options._dml_strategy == "bulk": + if update_options._synchronize_session == "evaluate": + cls._do_post_synchronize_bulk_evaluate( + session, params, result, update_options + ) + return result + + return cls._return_orm_returning( + session, + statement, + params, + execution_options, + bind_arguments, + result, + ) + + @classmethod + def _adjust_for_extra_criteria(cls, global_attributes, ext_info): + """Apply extra criteria filtering. + + For all distinct single-table-inheritance mappers represented in the + table being updated or deleted, produce additional WHERE criteria such + that only the appropriate subtypes are selected from the total results. + + Additionally, add WHERE criteria originating from LoaderCriteriaOptions + collected from the statement. + + """ + + return_crit = () + + adapter = ext_info._adapter if ext_info.is_aliased_class else None + + if ( + "additional_entity_criteria", + ext_info.mapper, + ) in global_attributes: + return_crit += tuple( + ae._resolve_where_criteria(ext_info) + for ae in global_attributes[ + ("additional_entity_criteria", ext_info.mapper) + ] + if ae.include_aliases or ae.entity is ext_info + ) + + if ext_info.mapper._single_table_criterion is not None: + return_crit += (ext_info.mapper._single_table_criterion,) + + if adapter: + return_crit = tuple(adapter.traverse(crit) for crit in return_crit) + + return return_crit + + @classmethod + def _interpret_returning_rows(cls, mapper, rows): + """translate from local inherited table columns to base mapper + primary key columns. + + Joined inheritance mappers always establish the primary key in terms of + the base table. When we UPDATE a sub-table, we can only get + RETURNING for the sub-table's columns. + + Here, we create a lookup from the local sub table's primary key + columns to the base table PK columns so that we can get identity + key values from RETURNING that's against the joined inheritance + sub-table. + + the complexity here is to support more than one level deep of + inheritance, where we have to link columns to each other across + the inheritance hierarchy. + + """ + + if mapper.local_table is not mapper.base_mapper.local_table: + return rows + + # this starts as a mapping of + # local_pk_col: local_pk_col. 
+ # we will then iteratively rewrite the "value" of the dict with + # each successive superclass column + local_pk_to_base_pk = {pk: pk for pk in mapper.local_table.primary_key} + + for mp in mapper.iterate_to_root(): + if mp.inherits is None: + break + elif mp.local_table is mp.inherits.local_table: + continue + + t_to_e = dict(mp._table_to_equated[mp.inherits.local_table]) + col_to_col = {sub_pk: super_pk for super_pk, sub_pk in t_to_e[mp]} + for pk, super_ in local_pk_to_base_pk.items(): + local_pk_to_base_pk[pk] = col_to_col[super_] + + lookup = { + local_pk_to_base_pk[lpk]: idx + for idx, lpk in enumerate(mapper.local_table.primary_key) + } + primary_key_convert = [ + lookup[bpk] for bpk in mapper.base_mapper.primary_key + ] + return [tuple(row[idx] for idx in primary_key_convert) for row in rows] + + @classmethod + def _get_matched_objects_on_criteria(cls, update_options, states): + mapper = update_options._subject_mapper + eval_condition = update_options._eval_condition + + raw_data = [ + (state.obj(), state, state.dict) + for state in states + if state.mapper.isa(mapper) and not state.expired + ] + + identity_token = update_options._identity_token + if identity_token is not None: + raw_data = [ + (obj, state, dict_) + for obj, state, dict_ in raw_data + if state.identity_token == identity_token + ] + + result = [] + for obj, state, dict_ in raw_data: + evaled_condition = eval_condition(obj) + + # caution: don't use "in ()" or == here, _EXPIRE_OBJECT + # evaluates as True for all comparisons + if ( + evaled_condition is True + or evaled_condition is evaluator._EXPIRED_OBJECT + ): + result.append( + ( + obj, + state, + dict_, + evaled_condition is evaluator._EXPIRED_OBJECT, + ) + ) + return result + + @classmethod + def _eval_condition_from_statement(cls, update_options, statement): + mapper = update_options._subject_mapper + target_cls = mapper.class_ + + evaluator_compiler = evaluator._EvaluatorCompiler(target_cls) + crit = () + if statement._where_criteria: + crit += statement._where_criteria + + global_attributes = {} + for opt in statement._with_options: + if opt._is_criteria_option: + opt.get_global_criteria(global_attributes) + + if global_attributes: + crit += cls._adjust_for_extra_criteria(global_attributes, mapper) + + if crit: + eval_condition = evaluator_compiler.process(*crit) + else: + # workaround for mypy https://github.com/python/mypy/issues/14027 + def _eval_condition(obj): + return True + + eval_condition = _eval_condition + + return eval_condition + + @classmethod + def _do_pre_synchronize_auto( + cls, + session, + statement, + params, + execution_options, + bind_arguments, + update_options, + ): + """setup auto sync strategy + + + "auto" checks if we can use "evaluate" first, then falls back + to "fetch" + + evaluate is vastly more efficient for the common case + where session is empty, only has a few objects, and the UPDATE + statement can potentially match thousands/millions of rows. + + OTOH more complex criteria that fails to work with "evaluate" + we would hope usually correlates with fewer net rows. 
+
+        """
+
+        try:
+            eval_condition = cls._eval_condition_from_statement(
+                update_options, statement
+            )
+
+        except evaluator.UnevaluatableError:
+            pass
+        else:
+            return update_options + {
+                "_eval_condition": eval_condition,
+                "_synchronize_session": "evaluate",
+            }
+
+        update_options += {"_synchronize_session": "fetch"}
+        return cls._do_pre_synchronize_fetch(
+            session,
+            statement,
+            params,
+            execution_options,
+            bind_arguments,
+            update_options,
+        )
+
+    @classmethod
+    def _do_pre_synchronize_evaluate(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        update_options,
+    ):
+        try:
+            eval_condition = cls._eval_condition_from_statement(
+                update_options, statement
+            )
+
+        except evaluator.UnevaluatableError as err:
+            raise sa_exc.InvalidRequestError(
+                'Could not evaluate current criteria in Python: "%s". '
+                "Specify 'fetch' or False for the "
+                "synchronize_session execution option." % err
+            ) from err
+
+        return update_options + {
+            "_eval_condition": eval_condition,
+        }
+
+    @classmethod
+    def _get_resolved_values(cls, mapper, statement):
+        if statement._multi_values:
+            return []
+        elif statement._ordered_values:
+            return list(statement._ordered_values)
+        elif statement._values:
+            return list(statement._values.items())
+        else:
+            return []
+
+    @classmethod
+    def _resolved_keys_as_propnames(cls, mapper, resolved_values):
+        values = []
+        for k, v in resolved_values:
+            if mapper and isinstance(k, expression.ColumnElement):
+                try:
+                    attr = mapper._columntoproperty[k]
+                except orm_exc.UnmappedColumnError:
+                    pass
+                else:
+                    values.append((attr.key, v))
+            else:
+                raise sa_exc.InvalidRequestError(
+                    "Attribute name not found, can't be "
+                    "synchronized back to objects: %r" % k
+                )
+        return values
+
+    @classmethod
+    def _do_pre_synchronize_fetch(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        update_options,
+    ):
+        mapper = update_options._subject_mapper
+
+        select_stmt = (
+            select(*(mapper.primary_key + (mapper.select_identity_token,)))
+            .select_from(mapper)
+            .options(*statement._with_options)
+        )
+        select_stmt._where_criteria = statement._where_criteria
+
+        # conditionally run the SELECT statement for pre-fetch, testing the
+        # "bind" for if we can use RETURNING or not using the do_orm_execute
+        # event. If RETURNING is available, the do_orm_execute event
+        # will cancel the SELECT from being actually run.
+        #
+        # The way this is organized seems strange: why don't we just
+        # call can_use_returning() before invoking the statement and get
+        # the answer, instead of going through the whole execute phase using
+        # an event? Answer: because we are integrating with extensions such
+        # as the horizontal sharding extension that "multiplexes" an
+        # individual statement run through multiple engines, and it uses
+        # do_orm_execute() to do that.
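+        # As a rough illustration (hypothetical ``User`` mapped class, on a
+        # backend without UPDATE..RETURNING), the pre-fetch SELECT emitted
+        # below is approximately
+        #
+        #   SELECT users.id FROM users WHERE users.name = :name_1
+        #
+        # followed by the UPDATE itself; on a RETURNING-capable backend the
+        # event handler cancels this SELECT and the UPDATE carries
+        # RETURNING users.id instead.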
+ + can_use_returning = None + + def skip_for_returning(orm_context: ORMExecuteState) -> Any: + bind = orm_context.session.get_bind(**orm_context.bind_arguments) + nonlocal can_use_returning + + per_bind_result = cls.can_use_returning( + bind.dialect, + mapper, + is_update_from=update_options._is_update_from, + is_delete_using=update_options._is_delete_using, + is_executemany=orm_context.is_executemany, + ) + + if can_use_returning is not None: + if can_use_returning != per_bind_result: + raise sa_exc.InvalidRequestError( + "For synchronize_session='fetch', can't mix multiple " + "backends where some support RETURNING and others " + "don't" + ) + elif orm_context.is_executemany and not per_bind_result: + raise sa_exc.InvalidRequestError( + "For synchronize_session='fetch', can't use multiple " + "parameter sets in ORM mode, which this backend does not " + "support with RETURNING" + ) + else: + can_use_returning = per_bind_result + + if per_bind_result: + return _result.null_result() + else: + return None + + result = session.execute( + select_stmt, + params, + execution_options=execution_options, + bind_arguments=bind_arguments, + _add_event=skip_for_returning, + ) + matched_rows = result.fetchall() + + return update_options + { + "_matched_rows": matched_rows, + "_can_use_returning": can_use_returning, + } + + +@CompileState.plugin_for("orm", "insert") +class BulkORMInsert(ORMDMLState, InsertDMLState): + class default_insert_options(Options): + _dml_strategy: DMLStrategyArgument = "auto" + _render_nulls: bool = False + _return_defaults: bool = False + _subject_mapper: Optional[Mapper[Any]] = None + _autoflush: bool = True + _populate_existing: bool = False + + select_statement: Optional[FromStatement] = None + + @classmethod + def orm_pre_session_exec( + cls, + session, + statement, + params, + execution_options, + bind_arguments, + is_pre_event, + ): + ( + insert_options, + execution_options, + ) = BulkORMInsert.default_insert_options.from_execution_options( + "_sa_orm_insert_options", + {"dml_strategy", "autoflush", "populate_existing", "render_nulls"}, + execution_options, + statement._execution_options, + ) + bind_arguments["clause"] = statement + try: + plugin_subject = statement._propagate_attrs["plugin_subject"] + except KeyError: + assert False, "statement had 'orm' plugin but no plugin_subject" + else: + if plugin_subject: + bind_arguments["mapper"] = plugin_subject.mapper + insert_options += {"_subject_mapper": plugin_subject.mapper} + + if not params: + if insert_options._dml_strategy == "auto": + insert_options += {"_dml_strategy": "orm"} + elif insert_options._dml_strategy == "bulk": + raise sa_exc.InvalidRequestError( + 'Can\'t use "bulk" ORM insert strategy without ' + "passing separate parameters" + ) + else: + if insert_options._dml_strategy == "auto": + insert_options += {"_dml_strategy": "bulk"} + + if insert_options._dml_strategy != "raw": + # for ORM object loading, like ORMContext, we have to disable + # result set adapt_to_context, because we will be generating a + # new statement with specific columns that's cached inside of + # an ORMFromStatementCompileState, which we will re-use for + # each result. 
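+            # (for example, a hypothetical statement such as
+            #   insert(User).returning(User)
+            # loads ORM entities from the INSERT's RETURNING rows, re-using
+            # that cached compile state for each result.)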
+            if not execution_options:
+                execution_options = context._orm_load_exec_options
+            else:
+                execution_options = execution_options.union(
+                    context._orm_load_exec_options
+                )
+
+        if not is_pre_event and insert_options._autoflush:
+            session._autoflush()
+
+        statement = statement._annotate(
+            {"dml_strategy": insert_options._dml_strategy}
+        )
+
+        return (
+            statement,
+            util.immutabledict(execution_options).union(
+                {"_sa_orm_insert_options": insert_options}
+            ),
+        )
+
+    @classmethod
+    def orm_execute_statement(
+        cls,
+        session: Session,
+        statement: dml.Insert,
+        params: _CoreAnyExecuteParams,
+        execution_options: OrmExecuteOptionsParameter,
+        bind_arguments: _BindArguments,
+        conn: Connection,
+    ) -> _result.Result:
+        insert_options = execution_options.get(
+            "_sa_orm_insert_options", cls.default_insert_options
+        )
+
+        if insert_options._dml_strategy not in (
+            "raw",
+            "bulk",
+            "orm",
+            "auto",
+        ):
+            raise sa_exc.ArgumentError(
+                "Valid strategies for ORM insert strategy "
+                "are 'raw', 'orm', 'bulk', 'auto'"
+            )
+
+        result: _result.Result[Any]
+
+        if insert_options._dml_strategy == "raw":
+            result = conn.execute(
+                statement, params or {}, execution_options=execution_options
+            )
+            return result
+
+        if insert_options._dml_strategy == "bulk":
+            mapper = insert_options._subject_mapper
+
+            if (
+                statement._post_values_clause is not None
+                and mapper._multiple_persistence_tables
+            ):
+                raise sa_exc.InvalidRequestError(
+                    "bulk INSERT with a 'post values' clause "
+                    "(typically upsert) not supported for multi-table "
+                    f"mapper {mapper}"
+                )
+
+            assert mapper is not None
+            assert session._transaction is not None
+            result = _bulk_insert(
+                mapper,
+                cast(
+                    "Iterable[Dict[str, Any]]",
+                    [params] if isinstance(params, dict) else params,
+                ),
+                session._transaction,
+                isstates=False,
+                return_defaults=insert_options._return_defaults,
+                render_nulls=insert_options._render_nulls,
+                use_orm_insert_stmt=statement,
+                execution_options=execution_options,
+            )
+        elif insert_options._dml_strategy == "orm":
+            result = conn.execute(
+                statement, params or {}, execution_options=execution_options
+            )
+        else:
+            raise AssertionError()
+
+        if not bool(statement._returning):
+            return result
+
+        if insert_options._populate_existing:
+            load_options = execution_options.get(
+                "_sa_orm_load_options", QueryContext.default_load_options
+            )
+            load_options += {"_populate_existing": True}
+            execution_options = execution_options.union(
+                {"_sa_orm_load_options": load_options}
+            )
+
+        return cls._return_orm_returning(
+            session,
+            statement,
+            params,
+            execution_options,
+            bind_arguments,
+            result,
+        )
+
+    @classmethod
+    def create_for_statement(cls, statement, compiler, **kw) -> BulkORMInsert:
+        self = cast(
+            BulkORMInsert,
+            super().create_for_statement(statement, compiler, **kw),
+        )
+
+        if compiler is not None:
+            toplevel = not compiler.stack
+        else:
+            toplevel = True
+        if not toplevel:
+            return self
+
+        mapper = statement._propagate_attrs["plugin_subject"]
+        dml_strategy = statement._annotations.get("dml_strategy", "raw")
+        if dml_strategy == "bulk":
+            self._setup_for_bulk_insert(compiler)
+        elif dml_strategy == "orm":
+            self._setup_for_orm_insert(compiler, mapper)
+
+        return self
+
+    @classmethod
+    def _resolved_keys_as_col_keys(cls, mapper, resolved_value_dict):
+        return {
+            col.key if col is not None else k: v
+            for col, k, v in (
+                (mapper.c.get(k), k, v) for k, v in resolved_value_dict.items()
+            )
+        }
+
+    def _setup_for_orm_insert(self, compiler, mapper):
+        statement = orm_level_statement = cast(dml.Insert,
self.statement) + + statement = self._setup_orm_returning( + compiler, + orm_level_statement, + statement, + dml_mapper=mapper, + use_supplemental_cols=False, + ) + self.statement = statement + + def _setup_for_bulk_insert(self, compiler): + """establish an INSERT statement within the context of + bulk insert. + + This method will be within the "conn.execute()" call that is invoked + by persistence._emit_insert_statement(). + + """ + statement = orm_level_statement = cast(dml.Insert, self.statement) + an = statement._annotations + + emit_insert_table, emit_insert_mapper = ( + an["_emit_insert_table"], + an["_emit_insert_mapper"], + ) + + statement = statement._clone() + + statement.table = emit_insert_table + if self._dict_parameters: + self._dict_parameters = { + col: val + for col, val in self._dict_parameters.items() + if col.table is emit_insert_table + } + + statement = self._setup_orm_returning( + compiler, + orm_level_statement, + statement, + dml_mapper=emit_insert_mapper, + use_supplemental_cols=True, + ) + + if ( + self.from_statement_ctx is not None + and self.from_statement_ctx.compile_options._is_star + ): + raise sa_exc.CompileError( + "Can't use RETURNING * with bulk ORM INSERT. " + "Please use a different INSERT form, such as INSERT..VALUES " + "or INSERT with a Core Connection" + ) + + self.statement = statement + + +@CompileState.plugin_for("orm", "update") +class BulkORMUpdate(BulkUDCompileState, UpdateDMLState): + @classmethod + def create_for_statement(cls, statement, compiler, **kw): + self = cls.__new__(cls) + + dml_strategy = statement._annotations.get( + "dml_strategy", "unspecified" + ) + + toplevel = not compiler.stack + + if toplevel and dml_strategy == "bulk": + self._setup_for_bulk_update(statement, compiler) + elif ( + dml_strategy == "core_only" + or dml_strategy == "unspecified" + and "parententity" not in statement.table._annotations + ): + UpdateDMLState.__init__(self, statement, compiler, **kw) + elif not toplevel or dml_strategy in ("orm", "unspecified"): + self._setup_for_orm_update(statement, compiler) + + return self + + def _setup_for_orm_update(self, statement, compiler, **kw): + orm_level_statement = statement + + toplevel = not compiler.stack + + ext_info = statement.table._annotations["parententity"] + + self.mapper = mapper = ext_info.mapper + + self._resolved_values = self._get_resolved_values(mapper, statement) + + self._init_global_attributes( + statement, + compiler, + toplevel=toplevel, + process_criteria_for_toplevel=toplevel, + ) + + if statement._values: + self._resolved_values = dict(self._resolved_values) + + new_stmt = statement._clone() + + if new_stmt.table._annotations["parententity"] is mapper: + new_stmt.table = mapper.local_table + + # note if the statement has _multi_values, these + # are passed through to the new statement, which will then raise + # InvalidRequestError because UPDATE doesn't support multi_values + # right now. 
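+        # (illustrative: a hypothetical
+        #   update(User).ordered_values((User.x, 1), (User.y, User.x + 1))
+        # takes the first branch below, while a plain .values({...})
+        # mapping takes the second; ``User`` is a hypothetical mapped
+        # class.)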
+        if statement._ordered_values:
+            new_stmt._ordered_values = self._resolved_values
+        elif statement._values:
+            new_stmt._values = self._resolved_values
+
+        new_crit = self._adjust_for_extra_criteria(
+            self.global_attributes, mapper
+        )
+        if new_crit:
+            new_stmt = new_stmt.where(*new_crit)
+
+        # if we are against a lambda statement we might not be the
+        # topmost object that received per-execute annotations
+
+        # do this first as we need to determine if there is
+        # UPDATE..FROM
+
+        UpdateDMLState.__init__(self, new_stmt, compiler, **kw)
+
+        use_supplemental_cols = False
+
+        if not toplevel:
+            synchronize_session = None
+        else:
+            synchronize_session = compiler._annotations.get(
+                "synchronize_session", None
+            )
+        can_use_returning = compiler._annotations.get(
+            "can_use_returning", None
+        )
+        if can_use_returning is not False:
+            # even though pre_exec has determined basic
+            # can_use_returning for the dialect, if we are to use
+            # RETURNING we need to run can_use_returning() at this level
+            # unconditionally because is_delete_using was not known
+            # at the pre_exec level
+            can_use_returning = (
+                synchronize_session == "fetch"
+                and self.can_use_returning(
+                    compiler.dialect, mapper, is_multitable=self.is_multitable
+                )
+            )
+
+        if synchronize_session == "fetch" and can_use_returning:
+            use_supplemental_cols = True
+
+            # NOTE: we might want to RETURNING the actual columns to be
+            # synchronized also. however this is complicated and difficult
+            # to align against the behavior of "evaluate". Additionally,
+            # in a large number (if not the majority) of cases, we have the
+            # "evaluate" answer, usually a fixed value, in memory already and
+            # there's no need to re-fetch the same value
+            # over and over again. so perhaps if it could be RETURNING just
+            # the elements that were based on a SQL expression and not
+            # a constant. For now it doesn't quite seem worth it
+            new_stmt = new_stmt.return_defaults(*new_stmt.table.primary_key)
+
+        if toplevel:
+            new_stmt = self._setup_orm_returning(
+                compiler,
+                orm_level_statement,
+                new_stmt,
+                dml_mapper=mapper,
+                use_supplemental_cols=use_supplemental_cols,
+            )
+
+        self.statement = new_stmt
+
+    def _setup_for_bulk_update(self, statement, compiler, **kw):
+        """establish an UPDATE statement within the context of
+        bulk UPDATE.
+
+        This method will be within the "conn.execute()" call that is invoked
+        by persistence._emit_update_statement().
+
+        """
+        statement = cast(dml.Update, statement)
+        an = statement._annotations
+
+        emit_update_table, _ = (
+            an["_emit_update_table"],
+            an["_emit_update_mapper"],
+        )
+
+        statement = statement._clone()
+        statement.table = emit_update_table
+
+        UpdateDMLState.__init__(self, statement, compiler, **kw)
+
+        if self._ordered_values:
+            raise sa_exc.InvalidRequestError(
+                "bulk ORM UPDATE does not support ordered_values() for "
+                "custom UPDATE statements with bulk parameter sets. Use a "
+                "non-bulk UPDATE statement or use values()."
+ ) + + if self._dict_parameters: + self._dict_parameters = { + col: val + for col, val in self._dict_parameters.items() + if col.table is emit_update_table + } + self.statement = statement + + @classmethod + def orm_execute_statement( + cls, + session: Session, + statement: dml.Update, + params: _CoreAnyExecuteParams, + execution_options: OrmExecuteOptionsParameter, + bind_arguments: _BindArguments, + conn: Connection, + ) -> _result.Result: + update_options = execution_options.get( + "_sa_orm_update_options", cls.default_update_options + ) + + if update_options._dml_strategy not in ( + "orm", + "auto", + "bulk", + "core_only", + ): + raise sa_exc.ArgumentError( + "Valid strategies for ORM UPDATE strategy " + "are 'orm', 'auto', 'bulk', 'core_only'" + ) + + result: _result.Result[Any] + + if update_options._dml_strategy == "bulk": + enable_check_rowcount = not statement._where_criteria + + assert update_options._synchronize_session != "fetch" + + if ( + statement._where_criteria + and update_options._synchronize_session == "evaluate" + ): + raise sa_exc.InvalidRequestError( + "bulk synchronize of persistent objects not supported " + "when using bulk update with additional WHERE " + "criteria right now. add synchronize_session=None " + "execution option to bypass synchronize of persistent " + "objects." + ) + mapper = update_options._subject_mapper + assert mapper is not None + assert session._transaction is not None + result = _bulk_update( + mapper, + cast( + "Iterable[Dict[str, Any]]", + [params] if isinstance(params, dict) else params, + ), + session._transaction, + isstates=False, + update_changed_only=False, + use_orm_update_stmt=statement, + enable_check_rowcount=enable_check_rowcount, + ) + return cls.orm_setup_cursor_result( + session, + statement, + params, + execution_options, + bind_arguments, + result, + ) + else: + return super().orm_execute_statement( + session, + statement, + params, + execution_options, + bind_arguments, + conn, + ) + + @classmethod + def can_use_returning( + cls, + dialect: Dialect, + mapper: Mapper[Any], + *, + is_multitable: bool = False, + is_update_from: bool = False, + is_delete_using: bool = False, + is_executemany: bool = False, + ) -> bool: + # normal answer for "should we use RETURNING" at all. + normal_answer = ( + dialect.update_returning and mapper.local_table.implicit_returning + ) + if not normal_answer: + return False + + if is_executemany: + return dialect.update_executemany_returning + + # these workarounds are currently hypothetical for UPDATE, + # unlike DELETE where they impact MariaDB + if is_update_from: + return dialect.update_returning_multifrom + + elif is_multitable and not dialect.update_returning_multifrom: + raise sa_exc.CompileError( + f'Dialect "{dialect.name}" does not support RETURNING ' + "with UPDATE..FROM; for synchronize_session='fetch', " + "please add the additional execution option " + "'is_update_from=True' to the statement to indicate that " + "a separate SELECT should be used for this backend." 
+ ) + + return True + + @classmethod + def _do_post_synchronize_bulk_evaluate( + cls, session, params, result, update_options + ): + if not params: + return + + mapper = update_options._subject_mapper + pk_keys = [prop.key for prop in mapper._identity_key_props] + + identity_map = session.identity_map + + for param in params: + identity_key = mapper.identity_key_from_primary_key( + (param[key] for key in pk_keys), + update_options._identity_token, + ) + state = identity_map.fast_get_state(identity_key) + if not state: + continue + + evaluated_keys = set(param).difference(pk_keys) + + dict_ = state.dict + # only evaluate unmodified attributes + to_evaluate = state.unmodified.intersection(evaluated_keys) + for key in to_evaluate: + if key in dict_: + dict_[key] = param[key] + + state.manager.dispatch.refresh(state, None, to_evaluate) + + state._commit(dict_, list(to_evaluate)) + + # attributes that were formerly modified instead get expired. + # this only gets hit if the session had pending changes + # and autoflush were set to False. + to_expire = evaluated_keys.intersection(dict_).difference( + to_evaluate + ) + if to_expire: + state._expire_attributes(dict_, to_expire) + + @classmethod + def _do_post_synchronize_evaluate( + cls, session, statement, result, update_options + ): + matched_objects = cls._get_matched_objects_on_criteria( + update_options, + session.identity_map.all_states(), + ) + + cls._apply_update_set_values_to_objects( + session, + update_options, + statement, + [(obj, state, dict_) for obj, state, dict_, _ in matched_objects], + ) + + @classmethod + def _do_post_synchronize_fetch( + cls, session, statement, result, update_options + ): + target_mapper = update_options._subject_mapper + + returned_defaults_rows = result.returned_defaults_rows + if returned_defaults_rows: + pk_rows = cls._interpret_returning_rows( + target_mapper, returned_defaults_rows + ) + + matched_rows = [ + tuple(row) + (update_options._identity_token,) + for row in pk_rows + ] + else: + matched_rows = update_options._matched_rows + + objs = [ + session.identity_map[identity_key] + for identity_key in [ + target_mapper.identity_key_from_primary_key( + list(primary_key), + identity_token=identity_token, + ) + for primary_key, identity_token in [ + (row[0:-1], row[-1]) for row in matched_rows + ] + if update_options._identity_token is None + or identity_token == update_options._identity_token + ] + if identity_key in session.identity_map + ] + + if not objs: + return + + cls._apply_update_set_values_to_objects( + session, + update_options, + statement, + [ + ( + obj, + attributes.instance_state(obj), + attributes.instance_dict(obj), + ) + for obj in objs + ], + ) + + @classmethod + def _apply_update_set_values_to_objects( + cls, session, update_options, statement, matched_objects + ): + """apply values to objects derived from an update statement, e.g. 
+ UPDATE..SET + + """ + mapper = update_options._subject_mapper + target_cls = mapper.class_ + evaluator_compiler = evaluator._EvaluatorCompiler(target_cls) + resolved_values = cls._get_resolved_values(mapper, statement) + resolved_keys_as_propnames = cls._resolved_keys_as_propnames( + mapper, resolved_values + ) + value_evaluators = {} + for key, value in resolved_keys_as_propnames: + try: + _evaluator = evaluator_compiler.process( + coercions.expect(roles.ExpressionElementRole, value) + ) + except evaluator.UnevaluatableError: + pass + else: + value_evaluators[key] = _evaluator + + evaluated_keys = list(value_evaluators.keys()) + attrib = {k for k, v in resolved_keys_as_propnames} + + states = set() + for obj, state, dict_ in matched_objects: + to_evaluate = state.unmodified.intersection(evaluated_keys) + + for key in to_evaluate: + if key in dict_: + # only run eval for attributes that are present. + dict_[key] = value_evaluators[key](obj) + + state.manager.dispatch.refresh(state, None, to_evaluate) + + state._commit(dict_, list(to_evaluate)) + + # attributes that were formerly modified instead get expired. + # this only gets hit if the session had pending changes + # and autoflush were set to False. + to_expire = attrib.intersection(dict_).difference(to_evaluate) + if to_expire: + state._expire_attributes(dict_, to_expire) + + states.add(state) + session._register_altered(states) + + +@CompileState.plugin_for("orm", "delete") +class BulkORMDelete(BulkUDCompileState, DeleteDMLState): + @classmethod + def create_for_statement(cls, statement, compiler, **kw): + self = cls.__new__(cls) + + dml_strategy = statement._annotations.get( + "dml_strategy", "unspecified" + ) + + if ( + dml_strategy == "core_only" + or dml_strategy == "unspecified" + and "parententity" not in statement.table._annotations + ): + DeleteDMLState.__init__(self, statement, compiler, **kw) + return self + + toplevel = not compiler.stack + + orm_level_statement = statement + + ext_info = statement.table._annotations["parententity"] + self.mapper = mapper = ext_info.mapper + + self._init_global_attributes( + statement, + compiler, + toplevel=toplevel, + process_criteria_for_toplevel=toplevel, + ) + + new_stmt = statement._clone() + + if new_stmt.table._annotations["parententity"] is mapper: + new_stmt.table = mapper.local_table + + new_crit = cls._adjust_for_extra_criteria( + self.global_attributes, mapper + ) + if new_crit: + new_stmt = new_stmt.where(*new_crit) + + # do this first as we need to determine if there is + # DELETE..FROM + DeleteDMLState.__init__(self, new_stmt, compiler, **kw) + + use_supplemental_cols = False + + if not toplevel: + synchronize_session = None + else: + synchronize_session = compiler._annotations.get( + "synchronize_session", None + ) + can_use_returning = compiler._annotations.get( + "can_use_returning", None + ) + if can_use_returning is not False: + # even though pre_exec has determined basic + # can_use_returning for the dialect, if we are to use + # RETURNING we need to run can_use_returning() at this level + # unconditionally because is_delete_using was not known + # at the pre_exec level + can_use_returning = ( + synchronize_session == "fetch" + and self.can_use_returning( + compiler.dialect, + mapper, + is_multitable=self.is_multitable, + is_delete_using=compiler._annotations.get( + "is_delete_using", False + ), + ) + ) + + if can_use_returning: + use_supplemental_cols = True + + new_stmt = new_stmt.return_defaults(*new_stmt.table.primary_key) + + if toplevel: + new_stmt = 
self._setup_orm_returning( + compiler, + orm_level_statement, + new_stmt, + dml_mapper=mapper, + use_supplemental_cols=use_supplemental_cols, + ) + + self.statement = new_stmt + + return self + + @classmethod + def orm_execute_statement( + cls, + session: Session, + statement: dml.Delete, + params: _CoreAnyExecuteParams, + execution_options: OrmExecuteOptionsParameter, + bind_arguments: _BindArguments, + conn: Connection, + ) -> _result.Result: + update_options = execution_options.get( + "_sa_orm_update_options", cls.default_update_options + ) + + if update_options._dml_strategy == "bulk": + raise sa_exc.InvalidRequestError( + "Bulk ORM DELETE not supported right now. " + "Statement may be invoked at the " + "Core level using " + "session.connection().execute(stmt, parameters)" + ) + + if update_options._dml_strategy not in ("orm", "auto", "core_only"): + raise sa_exc.ArgumentError( + "Valid strategies for ORM DELETE strategy are 'orm', 'auto', " + "'core_only'" + ) + + return super().orm_execute_statement( + session, statement, params, execution_options, bind_arguments, conn + ) + + @classmethod + def can_use_returning( + cls, + dialect: Dialect, + mapper: Mapper[Any], + *, + is_multitable: bool = False, + is_update_from: bool = False, + is_delete_using: bool = False, + is_executemany: bool = False, + ) -> bool: + # normal answer for "should we use RETURNING" at all. + normal_answer = ( + dialect.delete_returning and mapper.local_table.implicit_returning + ) + if not normal_answer: + return False + + # now get into special workarounds because MariaDB supports + # DELETE...RETURNING but not DELETE...USING...RETURNING. + if is_delete_using: + # is_delete_using hint was passed. use + # additional dialect feature (True for PG, False for MariaDB) + return dialect.delete_returning_multifrom + + elif is_multitable and not dialect.delete_returning_multifrom: + # is_delete_using hint was not passed, but we determined + # at compile time that this is in fact a DELETE..USING. + # it's too late to continue since we did not pre-SELECT. + # raise that we need that hint up front. + + raise sa_exc.CompileError( + f'Dialect "{dialect.name}" does not support RETURNING ' + "with DELETE..USING; for synchronize_session='fetch', " + "please add the additional execution option " + "'is_delete_using=True' to the statement to indicate that " + "a separate SELECT should be used for this backend." 
+ ) + + return True + + @classmethod + def _do_post_synchronize_evaluate( + cls, session, statement, result, update_options + ): + matched_objects = cls._get_matched_objects_on_criteria( + update_options, + session.identity_map.all_states(), + ) + + to_delete = [] + + for _, state, dict_, is_partially_expired in matched_objects: + if is_partially_expired: + state._expire(dict_, session.identity_map._modified) + else: + to_delete.append(state) + + if to_delete: + session._remove_newly_deleted(to_delete) + + @classmethod + def _do_post_synchronize_fetch( + cls, session, statement, result, update_options + ): + target_mapper = update_options._subject_mapper + + returned_defaults_rows = result.returned_defaults_rows + + if returned_defaults_rows: + pk_rows = cls._interpret_returning_rows( + target_mapper, returned_defaults_rows + ) + + matched_rows = [ + tuple(row) + (update_options._identity_token,) + for row in pk_rows + ] + else: + matched_rows = update_options._matched_rows + + for row in matched_rows: + primary_key = row[0:-1] + identity_token = row[-1] + + # TODO: inline this and call remove_newly_deleted + # once + identity_key = target_mapper.identity_key_from_primary_key( + list(primary_key), + identity_token=identity_token, + ) + if identity_key in session.identity_map: + session._remove_newly_deleted( + [ + attributes.instance_state( + session.identity_map[identity_key] + ) + ] + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/clsregistry.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/clsregistry.py new file mode 100644 index 00000000..382d6aef --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/clsregistry.py @@ -0,0 +1,571 @@ +# orm/clsregistry.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Routines to handle the string class registry used by declarative. + +This system allows specification of classes and expressions used in +:func:`_orm.relationship` using strings. + +""" + +from __future__ import annotations + +import re +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import Generator +from typing import Iterable +from typing import List +from typing import Mapping +from typing import MutableMapping +from typing import NoReturn +from typing import Optional +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import weakref + +from . import attributes +from . import interfaces +from .descriptor_props import SynonymProperty +from .properties import ColumnProperty +from .util import class_mapper +from .. import exc +from .. import inspection +from .. import util +from ..sql.schema import _get_table_key +from ..util.typing import CallableReference + +if TYPE_CHECKING: + from .relationships import RelationshipProperty + from ..sql.schema import MetaData + from ..sql.schema import Table + +_T = TypeVar("_T", bound=Any) + +_ClsRegistryType = MutableMapping[str, Union[type, "ClsRegistryToken"]] + +# strong references to registries which we place in +# the _decl_class_registry, which is usually weak referencing. +# the internal registries here link to classes with weakrefs and remove +# themselves when all references to contained classes are removed. 
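+# (so, illustratively: once a hypothetical declarative class ``User`` is
+# garbage collected, its "User" entry drops out of the registry as well.)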
+_registries: Set[ClsRegistryToken] = set() + + +def add_class( + classname: str, cls: Type[_T], decl_class_registry: _ClsRegistryType +) -> None: + """Add a class to the _decl_class_registry associated with the + given declarative class. + + """ + if classname in decl_class_registry: + # class already exists. + existing = decl_class_registry[classname] + if not isinstance(existing, _MultipleClassMarker): + existing = decl_class_registry[classname] = _MultipleClassMarker( + [cls, cast("Type[Any]", existing)] + ) + else: + decl_class_registry[classname] = cls + + try: + root_module = cast( + _ModuleMarker, decl_class_registry["_sa_module_registry"] + ) + except KeyError: + decl_class_registry["_sa_module_registry"] = root_module = ( + _ModuleMarker("_sa_module_registry", None) + ) + + tokens = cls.__module__.split(".") + + # build up a tree like this: + # modulename: myapp.snacks.nuts + # + # myapp->snack->nuts->(classes) + # snack->nuts->(classes) + # nuts->(classes) + # + # this allows partial token paths to be used. + while tokens: + token = tokens.pop(0) + module = root_module.get_module(token) + for token in tokens: + module = module.get_module(token) + + try: + module.add_class(classname, cls) + except AttributeError as ae: + if not isinstance(module, _ModuleMarker): + raise exc.InvalidRequestError( + f'name "{classname}" matches both a ' + "class name and a module name" + ) from ae + else: + raise + + +def remove_class( + classname: str, cls: Type[Any], decl_class_registry: _ClsRegistryType +) -> None: + if classname in decl_class_registry: + existing = decl_class_registry[classname] + if isinstance(existing, _MultipleClassMarker): + existing.remove_item(cls) + else: + del decl_class_registry[classname] + + try: + root_module = cast( + _ModuleMarker, decl_class_registry["_sa_module_registry"] + ) + except KeyError: + return + + tokens = cls.__module__.split(".") + + while tokens: + token = tokens.pop(0) + module = root_module.get_module(token) + for token in tokens: + module = module.get_module(token) + try: + module.remove_class(classname, cls) + except AttributeError: + if not isinstance(module, _ModuleMarker): + pass + else: + raise + + +def _key_is_empty( + key: str, + decl_class_registry: _ClsRegistryType, + test: Callable[[Any], bool], +) -> bool: + """test if a key is empty of a certain object. + + used for unit tests against the registry to see if garbage collection + is working. + + "test" is a callable that will be passed an object should return True + if the given object is the one we were looking for. + + We can't pass the actual object itself b.c. this is for testing garbage + collection; the caller will have to have removed references to the + object itself. + + """ + if key not in decl_class_registry: + return True + + thing = decl_class_registry[key] + if isinstance(thing, _MultipleClassMarker): + for sub_thing in thing.contents: + if test(sub_thing): + return False + else: + raise NotImplementedError("unknown codepath") + else: + return not test(thing) + + +class ClsRegistryToken: + """an object that can be in the registry._class_registry as a value.""" + + __slots__ = () + + +class _MultipleClassMarker(ClsRegistryToken): + """refers to multiple classes of the same name + within _decl_class_registry. 
+ + """ + + __slots__ = "on_remove", "contents", "__weakref__" + + contents: Set[weakref.ref[Type[Any]]] + on_remove: CallableReference[Optional[Callable[[], None]]] + + def __init__( + self, + classes: Iterable[Type[Any]], + on_remove: Optional[Callable[[], None]] = None, + ): + self.on_remove = on_remove + self.contents = { + weakref.ref(item, self._remove_item) for item in classes + } + _registries.add(self) + + def remove_item(self, cls: Type[Any]) -> None: + self._remove_item(weakref.ref(cls)) + + def __iter__(self) -> Generator[Optional[Type[Any]], None, None]: + return (ref() for ref in self.contents) + + def attempt_get(self, path: List[str], key: str) -> Type[Any]: + if len(self.contents) > 1: + raise exc.InvalidRequestError( + 'Multiple classes found for path "%s" ' + "in the registry of this declarative " + "base. Please use a fully module-qualified path." + % (".".join(path + [key])) + ) + else: + ref = list(self.contents)[0] + cls = ref() + if cls is None: + raise NameError(key) + return cls + + def _remove_item(self, ref: weakref.ref[Type[Any]]) -> None: + self.contents.discard(ref) + if not self.contents: + _registries.discard(self) + if self.on_remove: + self.on_remove() + + def add_item(self, item: Type[Any]) -> None: + # protect against class registration race condition against + # asynchronous garbage collection calling _remove_item, + # [ticket:3208] and [ticket:10782] + modules = { + cls.__module__ + for cls in [ref() for ref in list(self.contents)] + if cls is not None + } + if item.__module__ in modules: + util.warn( + "This declarative base already contains a class with the " + "same class name and module name as %s.%s, and will " + "be replaced in the string-lookup table." + % (item.__module__, item.__name__) + ) + self.contents.add(weakref.ref(item, self._remove_item)) + + +class _ModuleMarker(ClsRegistryToken): + """Refers to a module name within + _decl_class_registry. 
+ + """ + + __slots__ = "parent", "name", "contents", "mod_ns", "path", "__weakref__" + + parent: Optional[_ModuleMarker] + contents: Dict[str, Union[_ModuleMarker, _MultipleClassMarker]] + mod_ns: _ModNS + path: List[str] + + def __init__(self, name: str, parent: Optional[_ModuleMarker]): + self.parent = parent + self.name = name + self.contents = {} + self.mod_ns = _ModNS(self) + if self.parent: + self.path = self.parent.path + [self.name] + else: + self.path = [] + _registries.add(self) + + def __contains__(self, name: str) -> bool: + return name in self.contents + + def __getitem__(self, name: str) -> ClsRegistryToken: + return self.contents[name] + + def _remove_item(self, name: str) -> None: + self.contents.pop(name, None) + if not self.contents: + if self.parent is not None: + self.parent._remove_item(self.name) + _registries.discard(self) + + def resolve_attr(self, key: str) -> Union[_ModNS, Type[Any]]: + return self.mod_ns.__getattr__(key) + + def get_module(self, name: str) -> _ModuleMarker: + if name not in self.contents: + marker = _ModuleMarker(name, self) + self.contents[name] = marker + else: + marker = cast(_ModuleMarker, self.contents[name]) + return marker + + def add_class(self, name: str, cls: Type[Any]) -> None: + if name in self.contents: + existing = cast(_MultipleClassMarker, self.contents[name]) + try: + existing.add_item(cls) + except AttributeError as ae: + if not isinstance(existing, _MultipleClassMarker): + raise exc.InvalidRequestError( + f'name "{name}" matches both a ' + "class name and a module name" + ) from ae + else: + raise + else: + existing = self.contents[name] = _MultipleClassMarker( + [cls], on_remove=lambda: self._remove_item(name) + ) + + def remove_class(self, name: str, cls: Type[Any]) -> None: + if name in self.contents: + existing = cast(_MultipleClassMarker, self.contents[name]) + existing.remove_item(cls) + + +class _ModNS: + __slots__ = ("__parent",) + + __parent: _ModuleMarker + + def __init__(self, parent: _ModuleMarker): + self.__parent = parent + + def __getattr__(self, key: str) -> Union[_ModNS, Type[Any]]: + try: + value = self.__parent.contents[key] + except KeyError: + pass + else: + if value is not None: + if isinstance(value, _ModuleMarker): + return value.mod_ns + else: + assert isinstance(value, _MultipleClassMarker) + return value.attempt_get(self.__parent.path, key) + raise NameError( + "Module %r has no mapped classes " + "registered under the name %r" % (self.__parent.name, key) + ) + + +class _GetColumns: + __slots__ = ("cls",) + + cls: Type[Any] + + def __init__(self, cls: Type[Any]): + self.cls = cls + + def __getattr__(self, key: str) -> Any: + mp = class_mapper(self.cls, configure=False) + if mp: + if key not in mp.all_orm_descriptors: + raise AttributeError( + "Class %r does not have a mapped column named %r" + % (self.cls, key) + ) + + desc = mp.all_orm_descriptors[key] + if desc.extension_type is interfaces.NotExtension.NOT_EXTENSION: + assert isinstance(desc, attributes.QueryableAttribute) + prop = desc.property + if isinstance(prop, SynonymProperty): + key = prop.name + elif not isinstance(prop, ColumnProperty): + raise exc.InvalidRequestError( + "Property %r is not an instance of" + " ColumnProperty (i.e. does not correspond" + " directly to a Column)." 
% key + ) + return getattr(self.cls, key) + + +inspection._inspects(_GetColumns)( + lambda target: inspection.inspect(target.cls) +) + + +class _GetTable: + __slots__ = "key", "metadata" + + key: str + metadata: MetaData + + def __init__(self, key: str, metadata: MetaData): + self.key = key + self.metadata = metadata + + def __getattr__(self, key: str) -> Table: + return self.metadata.tables[_get_table_key(key, self.key)] + + +def _determine_container(key: str, value: Any) -> _GetColumns: + if isinstance(value, _MultipleClassMarker): + value = value.attempt_get([], key) + return _GetColumns(value) + + +class _class_resolver: + __slots__ = ( + "cls", + "prop", + "arg", + "fallback", + "_dict", + "_resolvers", + "favor_tables", + ) + + cls: Type[Any] + prop: RelationshipProperty[Any] + fallback: Mapping[str, Any] + arg: str + favor_tables: bool + _resolvers: Tuple[Callable[[str], Any], ...] + + def __init__( + self, + cls: Type[Any], + prop: RelationshipProperty[Any], + fallback: Mapping[str, Any], + arg: str, + favor_tables: bool = False, + ): + self.cls = cls + self.prop = prop + self.arg = arg + self.fallback = fallback + self._dict = util.PopulateDict(self._access_cls) + self._resolvers = () + self.favor_tables = favor_tables + + def _access_cls(self, key: str) -> Any: + cls = self.cls + + manager = attributes.manager_of_class(cls) + decl_base = manager.registry + assert decl_base is not None + decl_class_registry = decl_base._class_registry + metadata = decl_base.metadata + + if self.favor_tables: + if key in metadata.tables: + return metadata.tables[key] + elif key in metadata._schemas: + return _GetTable(key, getattr(cls, "metadata", metadata)) + + if key in decl_class_registry: + return _determine_container(key, decl_class_registry[key]) + + if not self.favor_tables: + if key in metadata.tables: + return metadata.tables[key] + elif key in metadata._schemas: + return _GetTable(key, getattr(cls, "metadata", metadata)) + + if "_sa_module_registry" in decl_class_registry and key in cast( + _ModuleMarker, decl_class_registry["_sa_module_registry"] + ): + registry = cast( + _ModuleMarker, decl_class_registry["_sa_module_registry"] + ) + return registry.resolve_attr(key) + elif self._resolvers: + for resolv in self._resolvers: + value = resolv(key) + if value is not None: + return value + + return self.fallback[key] + + def _raise_for_name(self, name: str, err: Exception) -> NoReturn: + generic_match = re.match(r"(.+)\[(.+)\]", name) + + if generic_match: + clsarg = generic_match.group(2).strip("'") + raise exc.InvalidRequestError( + f"When initializing mapper {self.prop.parent}, " + f'expression "relationship({self.arg!r})" seems to be ' + "using a generic class as the argument to relationship(); " + "please state the generic argument " + "using an annotation, e.g. " + f'"{self.prop.key}: Mapped[{generic_match.group(1)}' + f"['{clsarg}']] = relationship()\"" + ) from err + else: + raise exc.InvalidRequestError( + "When initializing mapper %s, expression %r failed to " + "locate a name (%r). If this is a class name, consider " + "adding this relationship() to the %r class after " + "both dependent classes have been defined." 
+ % (self.prop.parent, self.arg, name, self.cls) + ) from err + + def _resolve_name(self) -> Union[Table, Type[Any], _ModNS]: + name = self.arg + d = self._dict + rval = None + try: + for token in name.split("."): + if rval is None: + rval = d[token] + else: + rval = getattr(rval, token) + except KeyError as err: + self._raise_for_name(name, err) + except NameError as n: + self._raise_for_name(n.args[0], n) + else: + if isinstance(rval, _GetColumns): + return rval.cls + else: + if TYPE_CHECKING: + assert isinstance(rval, (type, Table, _ModNS)) + return rval + + def __call__(self) -> Any: + try: + x = eval(self.arg, globals(), self._dict) + + if isinstance(x, _GetColumns): + return x.cls + else: + return x + except NameError as n: + self._raise_for_name(n.args[0], n) + + +_fallback_dict: Mapping[str, Any] = None # type: ignore + + +def _resolver(cls: Type[Any], prop: RelationshipProperty[Any]) -> Tuple[ + Callable[[str], Callable[[], Union[Type[Any], Table, _ModNS]]], + Callable[[str, bool], _class_resolver], +]: + global _fallback_dict + + if _fallback_dict is None: + import sqlalchemy + from . import foreign + from . import remote + + _fallback_dict = util.immutabledict(sqlalchemy.__dict__).union( + {"foreign": foreign, "remote": remote} + ) + + def resolve_arg(arg: str, favor_tables: bool = False) -> _class_resolver: + return _class_resolver( + cls, prop, _fallback_dict, arg, favor_tables=favor_tables + ) + + def resolve_name( + arg: str, + ) -> Callable[[], Union[Type[Any], Table, _ModNS]]: + return _class_resolver(cls, prop, _fallback_dict, arg)._resolve_name + + return resolve_name, resolve_arg diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/collections.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/collections.py new file mode 100644 index 00000000..d713abb0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/collections.py @@ -0,0 +1,1620 @@ +# orm/collections.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +"""Support for collections of mapped entities. + +The collections package supplies the machinery used to inform the ORM of +collection membership changes. An instrumentation via decoration approach is +used, allowing arbitrary types (including built-ins) to be used as entity +collections without requiring inheritance from a base class. + +Instrumentation decoration relays membership change events to the +:class:`.CollectionAttributeImpl` that is currently managing the collection. +The decorators observe function call arguments and return values, tracking +entities entering or leaving the collection. Two decorator approaches are +provided. One is a bundle of generic decorators that map function arguments +and return values to events:: + + from sqlalchemy.orm.collections import collection + class MyClass: + # ... + + @collection.adds(1) + def store(self, item): + self.data.append(item) + + @collection.removes_return() + def pop(self): + return self.data.pop() + + +The second approach is a bundle of targeted decorators that wrap appropriate +append and remove notifiers around the mutation methods present in the +standard Python ``list``, ``set`` and ``dict`` interfaces. These could be +specified in terms of generic decorator recipes, but are instead hand-tooled +for increased efficiency. 
The targeted decorators occasionally implement
+adapter-like behavior, such as mapping bulk-set methods (``extend``,
+``update``, ``__setslice__``, etc.) into the series of atomic mutation events
+that the ORM requires.
+
+The targeted decorators are used internally for automatic instrumentation of
+entity collection classes. Every collection class goes through a
+transformation process roughly like so:
+
+1. If the class is a built-in, substitute a trivial sub-class
+2. Is this class already instrumented?
+3. Add in generic decorators
+4. Sniff out the collection interface through duck-typing
+5. Add targeted decoration to any undecorated interface method
+
+This process modifies the class at runtime, decorating methods and adding some
+bookkeeping properties. This isn't possible (or desirable) for built-in
+classes like ``list``, so trivial sub-classes are substituted to hold
+decoration::
+
+    class InstrumentedList(list):
+        pass
+
+Collection classes can be specified in ``relationship(collection_class=)`` as
+types or a function that returns an instance. Collection classes are
+inspected and instrumented during the mapper compilation phase. The
+collection_class callable will be executed once to produce a specimen
+instance, and the type of that specimen will be instrumented. Functions that
+return built-in types like ``lists`` will be adapted to produce instrumented
+instances.
+
+When extending a known type like ``list``, additional decorations are
+generally not needed. Odds are, the extension method will delegate to a
+method that's already instrumented. For example::
+
+    class QueueIsh(list):
+        def push(self, item):
+            self.append(item)
+        def shift(self):
+            return self.pop(0)
+
+There's no need to decorate these methods. ``append`` and ``pop`` are already
+instrumented as part of the ``list`` interface. Decorating them would fire
+duplicate events, which should be avoided.
+
+The targeted decoration tries not to rely on other methods in the underlying
+collection class, but some are unavoidable. Many depend on 'read' methods
+being present to properly instrument a 'write', for example, ``__setitem__``
+needs ``__getitem__``. "Bulk" methods like ``update`` and ``extend`` may also
+be reimplemented in terms of atomic appends and removes, so the ``extend``
+decoration will actually perform many ``append`` operations and not call the
+underlying method at all.
+
+Tight control over bulk operation and the firing of events is also possible by
+implementing the instrumentation internally in your methods. The basic
+instrumentation package works under the general assumption that collection
+mutation will not raise unusual exceptions. If you want to closely
+orchestrate append and remove events with exception management, internal
+instrumentation may be the answer. Within your method,
+``collection_adapter(self)`` will retrieve an object that you can use for
+explicit control over triggering append and remove events.
+
+The owning object and :class:`.CollectionAttributeImpl` are also reachable
+through the adapter, allowing for some very sophisticated behavior.
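+
+As a rough sketch of that internal-instrumentation approach (a hypothetical
+``ControlledList`` with a hypothetical ``replace_all`` method; ``_sa_initiator``
+bookkeeping is simplified for brevity)::
+
+    from sqlalchemy.orm.collections import collection, collection_adapter
+
+    class ControlledList(list):
+        @collection.internally_instrumented
+        def replace_all(self, items, _sa_initiator=None):
+            # fire remove/append events by hand, then mutate the plain
+            # list without re-triggering instrumentation
+            adapter = collection_adapter(self)
+            for item in list(self):
+                adapter.fire_remove_event(item, _sa_initiator)
+            list.clear(self)
+            for item in items:
+                item = adapter.fire_append_event(item, _sa_initiator)
+                list.append(self, item)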
+ +""" +from __future__ import annotations + +import operator +import threading +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import Collection +from typing import Dict +from typing import Iterable +from typing import List +from typing import NoReturn +from typing import Optional +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import weakref + +from .base import NO_KEY +from .. import exc as sa_exc +from .. import util +from ..sql.base import NO_ARG +from ..util.compat import inspect_getfullargspec +from ..util.typing import Protocol + +if typing.TYPE_CHECKING: + from .attributes import AttributeEventToken + from .attributes import CollectionAttributeImpl + from .mapped_collection import attribute_keyed_dict + from .mapped_collection import column_keyed_dict + from .mapped_collection import keyfunc_mapping + from .mapped_collection import KeyFuncDict # noqa: F401 + from .state import InstanceState + + +__all__ = [ + "collection", + "collection_adapter", + "keyfunc_mapping", + "column_keyed_dict", + "attribute_keyed_dict", + "KeyFuncDict", + # old names in < 2.0 + "mapped_collection", + "column_mapped_collection", + "attribute_mapped_collection", + "MappedCollection", +] + +__instrumentation_mutex = threading.Lock() + + +_CollectionFactoryType = Callable[[], "_AdaptedCollectionProtocol"] + +_T = TypeVar("_T", bound=Any) +_KT = TypeVar("_KT", bound=Any) +_VT = TypeVar("_VT", bound=Any) +_COL = TypeVar("_COL", bound="Collection[Any]") +_FN = TypeVar("_FN", bound="Callable[..., Any]") + + +class _CollectionConverterProtocol(Protocol): + def __call__(self, collection: _COL) -> _COL: ... + + +class _AdaptedCollectionProtocol(Protocol): + _sa_adapter: CollectionAdapter + _sa_appender: Callable[..., Any] + _sa_remover: Callable[..., Any] + _sa_iterator: Callable[..., Iterable[Any]] + _sa_converter: _CollectionConverterProtocol + + +class collection: + """Decorators for entity collection classes. + + The decorators fall into two groups: annotations and interception recipes. + + The annotating decorators (appender, remover, iterator, converter, + internally_instrumented) indicate the method's purpose and take no + arguments. They are not written with parens:: + + @collection.appender + def append(self, append): ... + + The recipe decorators all require parens, even those that take no + arguments:: + + @collection.adds('entity') + def insert(self, position, entity): ... + + @collection.removes_return() + def popitem(self): ... + + """ + + # Bundled as a class solely for ease of use: packaging, doc strings, + # importability. + + @staticmethod + def appender(fn): + """Tag the method as the collection appender. + + The appender method is called with one positional argument: the value + to append. The method will be automatically decorated with 'adds(1)' + if not already decorated:: + + @collection.appender + def add(self, append): ... + + # or, equivalently + @collection.appender + @collection.adds(1) + def add(self, append): ... + + # for mapping type, an 'append' may kick out a previous value + # that occupies that slot. consider d['a'] = 'foo'- any previous + # value in d['a'] is discarded. 
+            @collection.appender
+            @collection.replaces(1)
+            def add(self, entity):
+                key = some_key_func(entity)
+                previous = None
+                if key in self:
+                    previous = self[key]
+                self[key] = entity
+                return previous
+
+        If the value to append is not allowed in the collection, you may
+        raise an exception. Something to remember is that the appender
+        will be called for each object mapped by a database query. If the
+        database contains rows that violate your collection semantics, you
+        will need to get creative to fix the problem, as access via the
+        collection will not work.
+
+        If the appender method is internally instrumented, you must also
+        receive the keyword argument '_sa_initiator' and ensure its
+        promulgation to collection events.
+
+        """
+        fn._sa_instrument_role = "appender"
+        return fn
+
+    @staticmethod
+    def remover(fn):
+        """Tag the method as the collection remover.
+
+        The remover method is called with one positional argument: the value
+        to remove. The method will be automatically decorated with
+        :meth:`removes_return` if not already decorated::
+
+            @collection.remover
+            def zap(self, entity): ...
+
+            # or, equivalently
+            @collection.remover
+            @collection.removes_return()
+            def zap(self): ...
+
+        If the value to remove is not present in the collection, you may
+        raise an exception or return None to ignore the error.
+
+        If the remove method is internally instrumented, you must also
+        receive the keyword argument '_sa_initiator' and ensure its
+        promulgation to collection events.
+
+        """
+        fn._sa_instrument_role = "remover"
+        return fn
+
+    @staticmethod
+    def iterator(fn):
+        """Tag the method as the collection iterator.
+
+        The iterator method is called with no arguments. It is expected to
+        return an iterator over all collection members::
+
+            @collection.iterator
+            def __iter__(self): ...
+
+        """
+        fn._sa_instrument_role = "iterator"
+        return fn
+
+    @staticmethod
+    def internally_instrumented(fn):
+        """Tag the method as instrumented.
+
+        This tag will prevent any decoration from being applied to the
+        method. Use this if you are orchestrating your own calls to
+        :func:`.collection_adapter` in one of the basic SQLAlchemy
+        interface methods, or to prevent an automatic ABC method
+        decoration from wrapping your implementation::
+
+            # normally an 'extend' method on a list-like class would be
+            # automatically intercepted and re-implemented in terms of
+            # SQLAlchemy events and append().  your implementation will
+            # never be called, unless:
+            @collection.internally_instrumented
+            def extend(self, items): ...
+
+        """
+        fn._sa_instrumented = True
+        return fn
+
+    @staticmethod
+    @util.deprecated(
+        "1.3",
+        "The :meth:`.collection.converter` handler is deprecated and will "
+        "be removed in a future release. Please refer to the "
+        ":class:`.AttributeEvents.bulk_replace` listener interface in "
+        "conjunction with the :func:`.event.listen` function.",
+    )
+    def converter(fn):
+        """Tag the method as the collection converter.
+
+        This optional method will be called when a collection is being
+        replaced entirely, as in::
+
+            myobj.acollection = [newvalue1, newvalue2]
+
+        The converter method will receive the object being assigned and should
+        return an iterable of values suitable for use by the ``appender``
+        method. A converter must not assign values or mutate the collection;
+        its sole job is to adapt the value the user provides into an iterable
+        of values for the ORM's use.
+
+        The default converter implementation will use duck-typing to do the
+        conversion. A dict-like collection will be converted into an iterable
A dict-like collection will be convert into an iterable + of dictionary values, and other types will simply be iterated:: + + @collection.converter + def convert(self, other): ... + + If the duck-typing of the object does not match the type of this + collection, a TypeError is raised. + + Supply an implementation of this method if you want to expand the + range of possible types that can be assigned in bulk or perform + validation on the values about to be assigned. + + """ + fn._sa_instrument_role = "converter" + return fn + + @staticmethod + def adds(arg): + """Mark the method as adding an entity to the collection. + + Adds "add to collection" handling to the method. The decorator + argument indicates which method argument holds the SQLAlchemy-relevant + value. Arguments can be specified positionally (i.e. integer) or by + name:: + + @collection.adds(1) + def push(self, item): ... + + @collection.adds('entity') + def do_stuff(self, thing, entity=None): ... + + """ + + def decorator(fn): + fn._sa_instrument_before = ("fire_append_event", arg) + return fn + + return decorator + + @staticmethod + def replaces(arg): + """Mark the method as replacing an entity in the collection. + + Adds "add to collection" and "remove from collection" handling to + the method. The decorator argument indicates which method argument + holds the SQLAlchemy-relevant value to be added, and return value, if + any will be considered the value to remove. + + Arguments can be specified positionally (i.e. integer) or by name:: + + @collection.replaces(2) + def __setitem__(self, index, item): ... + + """ + + def decorator(fn): + fn._sa_instrument_before = ("fire_append_event", arg) + fn._sa_instrument_after = "fire_remove_event" + return fn + + return decorator + + @staticmethod + def removes(arg): + """Mark the method as removing an entity in the collection. + + Adds "remove from collection" handling to the method. The decorator + argument indicates which method argument holds the SQLAlchemy-relevant + value to be removed. Arguments can be specified positionally (i.e. + integer) or by name:: + + @collection.removes(1) + def zap(self, item): ... + + For methods where the value to remove is not known at call-time, use + collection.removes_return. + + """ + + def decorator(fn): + fn._sa_instrument_before = ("fire_remove_event", arg) + return fn + + return decorator + + @staticmethod + def removes_return(): + """Mark the method as removing an entity in the collection. + + Adds "remove from collection" handling to the method. The return + value of the method, if any, is considered the value to remove. The + method arguments are not inspected:: + + @collection.removes_return() + def pop(self): ... + + For methods where the value to remove is known at call-time, use + collection.remove. + + """ + + def decorator(fn): + fn._sa_instrument_after = "fire_remove_event" + return fn + + return decorator + + +if TYPE_CHECKING: + + def collection_adapter(collection: Collection[Any]) -> CollectionAdapter: + """Fetch the :class:`.CollectionAdapter` for a collection.""" + +else: + collection_adapter = operator.attrgetter("_sa_adapter") + + +class CollectionAdapter: + """Bridges between the ORM and arbitrary Python collections. + + Proxies base-level collection operations (append, remove, iterate) + to the underlying Python collection, and emits add/remove events for + entities entering or leaving the collection. + + The ORM uses :class:`.CollectionAdapter` exclusively for interaction with + entity collections. 
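+
+    For illustration only (assuming a hypothetical mapped ``User.addresses``
+    relationship), the adapter for an instrumented collection instance can
+    be retrieved with :func:`.collection_adapter` and used to fire events::
+
+        adapter = collection_adapter(some_user.addresses)
+        adapter.append_with_event(Address())  # fires mutation events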
+ + + """ + + __slots__ = ( + "attr", + "_key", + "_data", + "owner_state", + "_converter", + "invalidated", + "empty", + ) + + attr: CollectionAttributeImpl + _key: str + + # this is actually a weakref; see note in constructor + _data: Callable[..., _AdaptedCollectionProtocol] + + owner_state: InstanceState[Any] + _converter: _CollectionConverterProtocol + invalidated: bool + empty: bool + + def __init__( + self, + attr: CollectionAttributeImpl, + owner_state: InstanceState[Any], + data: _AdaptedCollectionProtocol, + ): + self.attr = attr + self._key = attr.key + + # this weakref stays referenced throughout the lifespan of + # CollectionAdapter. so while the weakref can return None, this + # is realistically only during garbage collection of this object, so + # we type this as a callable that returns _AdaptedCollectionProtocol + # in all cases. + self._data = weakref.ref(data) # type: ignore + + self.owner_state = owner_state + data._sa_adapter = self + self._converter = data._sa_converter + self.invalidated = False + self.empty = False + + def _warn_invalidated(self) -> None: + util.warn("This collection has been invalidated.") + + @property + def data(self) -> _AdaptedCollectionProtocol: + "The entity collection being adapted." + return self._data() + + @property + def _referenced_by_owner(self) -> bool: + """return True if the owner state still refers to this collection. + + This will return False within a bulk replace operation, + where this collection is the one being replaced. + + """ + return self.owner_state.dict[self._key] is self._data() + + def bulk_appender(self): + return self._data()._sa_appender + + def append_with_event( + self, item: Any, initiator: Optional[AttributeEventToken] = None + ) -> None: + """Add an entity to the collection, firing mutation events.""" + + self._data()._sa_appender(item, _sa_initiator=initiator) + + def _set_empty(self, user_data): + assert ( + not self.empty + ), "This collection adapter is already in the 'empty' state" + self.empty = True + self.owner_state._empty_collections[self._key] = user_data + + def _reset_empty(self) -> None: + assert ( + self.empty + ), "This collection adapter is not in the 'empty' state" + self.empty = False + self.owner_state.dict[self._key] = ( + self.owner_state._empty_collections.pop(self._key) + ) + + def _refuse_empty(self) -> NoReturn: + raise sa_exc.InvalidRequestError( + "This is a special 'empty' collection which cannot accommodate " + "internal mutation operations" + ) + + def append_without_event(self, item: Any) -> None: + """Add or restore an entity to the collection, firing no events.""" + + if self.empty: + self._refuse_empty() + self._data()._sa_appender(item, _sa_initiator=False) + + def append_multiple_without_event(self, items: Iterable[Any]) -> None: + """Add or restore an entity to the collection, firing no events.""" + if self.empty: + self._refuse_empty() + appender = self._data()._sa_appender + for item in items: + appender(item, _sa_initiator=False) + + def bulk_remover(self): + return self._data()._sa_remover + + def remove_with_event( + self, item: Any, initiator: Optional[AttributeEventToken] = None + ) -> None: + """Remove an entity from the collection, firing mutation events.""" + self._data()._sa_remover(item, _sa_initiator=initiator) + + def remove_without_event(self, item: Any) -> None: + """Remove an entity from the collection, firing no events.""" + if self.empty: + self._refuse_empty() + self._data()._sa_remover(item, _sa_initiator=False) + + def clear_with_event( + self, 
initiator: Optional[AttributeEventToken] = None
+    ) -> None:
+        """Empty the collection, firing a mutation event for each entity."""
+
+        if self.empty:
+            self._refuse_empty()
+        remover = self._data()._sa_remover
+        for item in list(self):
+            remover(item, _sa_initiator=initiator)
+
+    def clear_without_event(self) -> None:
+        """Empty the collection, firing no events."""
+
+        if self.empty:
+            self._refuse_empty()
+        remover = self._data()._sa_remover
+        for item in list(self):
+            remover(item, _sa_initiator=False)
+
+    def __iter__(self):
+        """Iterate over entities in the collection."""
+
+        return iter(self._data()._sa_iterator())
+
+    def __len__(self):
+        """Count entities in the collection."""
+        return len(list(self._data()._sa_iterator()))
+
+    def __bool__(self):
+        return True
+
+    def _fire_append_wo_mutation_event_bulk(
+        self, items, initiator=None, key=NO_KEY
+    ):
+        if not items:
+            return
+
+        if initiator is not False:
+            if self.invalidated:
+                self._warn_invalidated()
+
+            if self.empty:
+                self._reset_empty()
+
+            for item in items:
+                self.attr.fire_append_wo_mutation_event(
+                    self.owner_state,
+                    self.owner_state.dict,
+                    item,
+                    initiator,
+                    key,
+                )
+
+    def fire_append_wo_mutation_event(self, item, initiator=None, key=NO_KEY):
+        """Notify that an entity is entering the collection but is already
+        present.
+
+        Initiator is a token owned by the InstrumentedAttribute that
+        initiated the membership mutation, and should be left as None
+        unless you are passing along an initiator value from a chained
+        operation.
+
+        .. versionadded:: 1.4.15
+
+        """
+        if initiator is not False:
+            if self.invalidated:
+                self._warn_invalidated()
+
+            if self.empty:
+                self._reset_empty()
+
+            return self.attr.fire_append_wo_mutation_event(
+                self.owner_state, self.owner_state.dict, item, initiator, key
+            )
+        else:
+            return item
+
+    def fire_append_event(self, item, initiator=None, key=NO_KEY):
+        """Notify that an entity has entered the collection.
+
+        Initiator is a token owned by the InstrumentedAttribute that
+        initiated the membership mutation, and should be left as None
+        unless you are passing along an initiator value from a chained
+        operation.
+
+        """
+        if initiator is not False:
+            if self.invalidated:
+                self._warn_invalidated()
+
+            if self.empty:
+                self._reset_empty()
+
+            return self.attr.fire_append_event(
+                self.owner_state, self.owner_state.dict, item, initiator, key
+            )
+        else:
+            return item
+
+    def _fire_remove_event_bulk(self, items, initiator=None, key=NO_KEY):
+        if not items:
+            return
+
+        if initiator is not False:
+            if self.invalidated:
+                self._warn_invalidated()
+
+            if self.empty:
+                self._reset_empty()
+
+            for item in items:
+                self.attr.fire_remove_event(
+                    self.owner_state,
+                    self.owner_state.dict,
+                    item,
+                    initiator,
+                    key,
+                )
+
+    def fire_remove_event(self, item, initiator=None, key=NO_KEY):
+        """Notify that an entity has been removed from the collection.
+
+        Initiator is the InstrumentedAttribute that initiated the membership
+        mutation, and should be left as None unless you are passing along
+        an initiator value from a chained operation.
+
+        """
+        if initiator is not False:
+            if self.invalidated:
+                self._warn_invalidated()
+
+            if self.empty:
+                self._reset_empty()
+
+            self.attr.fire_remove_event(
+                self.owner_state, self.owner_state.dict, item, initiator, key
+            )
+
+    def fire_pre_remove_event(self, initiator=None, key=NO_KEY):
+        """Notify that an entity is about to be removed from the collection.
+
+        Only called if the entity cannot be removed after calling
+        fire_remove_event().
+ + """ + if self.invalidated: + self._warn_invalidated() + self.attr.fire_pre_remove_event( + self.owner_state, + self.owner_state.dict, + initiator=initiator, + key=key, + ) + + def __getstate__(self): + return { + "key": self._key, + "owner_state": self.owner_state, + "owner_cls": self.owner_state.class_, + "data": self.data, + "invalidated": self.invalidated, + "empty": self.empty, + } + + def __setstate__(self, d): + self._key = d["key"] + self.owner_state = d["owner_state"] + + # see note in constructor regarding this type: ignore + self._data = weakref.ref(d["data"]) # type: ignore + + self._converter = d["data"]._sa_converter + d["data"]._sa_adapter = self + self.invalidated = d["invalidated"] + self.attr = getattr(d["owner_cls"], self._key).impl + self.empty = d.get("empty", False) + + +def bulk_replace(values, existing_adapter, new_adapter, initiator=None): + """Load a new collection, firing events based on prior like membership. + + Appends instances in ``values`` onto the ``new_adapter``. Events will be + fired for any instance not present in the ``existing_adapter``. Any + instances in ``existing_adapter`` not present in ``values`` will have + remove events fired upon them. + + :param values: An iterable of collection member instances + + :param existing_adapter: A :class:`.CollectionAdapter` of + instances to be replaced + + :param new_adapter: An empty :class:`.CollectionAdapter` + to load with ``values`` + + + """ + + assert isinstance(values, list) + + idset = util.IdentitySet + existing_idset = idset(existing_adapter or ()) + constants = existing_idset.intersection(values or ()) + additions = idset(values or ()).difference(constants) + removals = existing_idset.difference(constants) + + appender = new_adapter.bulk_appender() + + for member in values or (): + if member in additions: + appender(member, _sa_initiator=initiator) + elif member in constants: + appender(member, _sa_initiator=False) + + if existing_adapter: + existing_adapter._fire_append_wo_mutation_event_bulk( + constants, initiator=initiator + ) + existing_adapter._fire_remove_event_bulk(removals, initiator=initiator) + + +def prepare_instrumentation( + factory: Union[Type[Collection[Any]], _CollectionFactoryType], +) -> _CollectionFactoryType: + """Prepare a callable for future use as a collection class factory. + + Given a collection class factory (either a type or no-arg callable), + return another factory that will produce compatible instances when + called. + + This function is responsible for converting collection_class=list + into the run-time behavior of collection_class=InstrumentedList. + + """ + + impl_factory: _CollectionFactoryType + + # Convert a builtin to 'Instrumented*' + if factory in __canned_instrumentation: + impl_factory = __canned_instrumentation[factory] + else: + impl_factory = cast(_CollectionFactoryType, factory) + + cls: Union[_CollectionFactoryType, Type[Collection[Any]]] + + # Create a specimen + cls = type(impl_factory()) + + # Did factory callable return a builtin? + if cls in __canned_instrumentation: + # if so, just convert. + # in previous major releases, this codepath wasn't working and was + # not covered by tests. prior to that it supplied a "wrapper" + # function that would return the class, though the rationale for this + # case is not known + impl_factory = __canned_instrumentation[cls] + cls = type(impl_factory()) + + # Instrument the class if needed. 
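+    # e.g. (illustrative sketch): prepare_instrumentation(list) resolves to
+    # InstrumentedList via __canned_instrumentation above, while a
+    # user-defined list subclass passes through unchanged and is
+    # instrumented in place below.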
+ if __instrumentation_mutex.acquire(): + try: + if getattr(cls, "_sa_instrumented", None) != id(cls): + _instrument_class(cls) + finally: + __instrumentation_mutex.release() + + return impl_factory + + +def _instrument_class(cls): + """Modify methods in a class and install instrumentation.""" + + # In the normal call flow, a request for any of the 3 basic collection + # types is transformed into one of our trivial subclasses + # (e.g. InstrumentedList). Catch anything else that sneaks in here... + if cls.__module__ == "__builtin__": + raise sa_exc.ArgumentError( + "Can not instrument a built-in type. Use a " + "subclass, even a trivial one." + ) + + roles, methods = _locate_roles_and_methods(cls) + + _setup_canned_roles(cls, roles, methods) + + _assert_required_roles(cls, roles, methods) + + _set_collection_attributes(cls, roles, methods) + + +def _locate_roles_and_methods(cls): + """search for _sa_instrument_role-decorated methods in + method resolution order, assign to roles. + + """ + + roles: Dict[str, str] = {} + methods: Dict[str, Tuple[Optional[str], Optional[int], Optional[str]]] = {} + + for supercls in cls.__mro__: + for name, method in vars(supercls).items(): + if not callable(method): + continue + + # note role declarations + if hasattr(method, "_sa_instrument_role"): + role = method._sa_instrument_role + assert role in ( + "appender", + "remover", + "iterator", + "converter", + ) + roles.setdefault(role, name) + + # transfer instrumentation requests from decorated function + # to the combined queue + before: Optional[Tuple[str, int]] = None + after: Optional[str] = None + + if hasattr(method, "_sa_instrument_before"): + op, argument = method._sa_instrument_before + assert op in ("fire_append_event", "fire_remove_event") + before = op, argument + if hasattr(method, "_sa_instrument_after"): + op = method._sa_instrument_after + assert op in ("fire_append_event", "fire_remove_event") + after = op + if before: + methods[name] = before + (after,) + elif after: + methods[name] = None, None, after + return roles, methods + + +def _setup_canned_roles(cls, roles, methods): + """see if this class has "canned" roles based on a known + collection type (dict, set, list). 
Apply those roles + as needed to the "roles" dictionary, and also + prepare "decorator" methods + + """ + collection_type = util.duck_type_collection(cls) + if collection_type in __interfaces: + assert collection_type is not None + canned_roles, decorators = __interfaces[collection_type] + for role, name in canned_roles.items(): + roles.setdefault(role, name) + + # apply ABC auto-decoration to methods that need it + for method, decorator in decorators.items(): + fn = getattr(cls, method, None) + if ( + fn + and method not in methods + and not hasattr(fn, "_sa_instrumented") + ): + setattr(cls, method, decorator(fn)) + + +def _assert_required_roles(cls, roles, methods): + """ensure all roles are present, and apply implicit instrumentation if + needed + + """ + if "appender" not in roles or not hasattr(cls, roles["appender"]): + raise sa_exc.ArgumentError( + "Type %s must elect an appender method to be " + "a collection class" % cls.__name__ + ) + elif roles["appender"] not in methods and not hasattr( + getattr(cls, roles["appender"]), "_sa_instrumented" + ): + methods[roles["appender"]] = ("fire_append_event", 1, None) + + if "remover" not in roles or not hasattr(cls, roles["remover"]): + raise sa_exc.ArgumentError( + "Type %s must elect a remover method to be " + "a collection class" % cls.__name__ + ) + elif roles["remover"] not in methods and not hasattr( + getattr(cls, roles["remover"]), "_sa_instrumented" + ): + methods[roles["remover"]] = ("fire_remove_event", 1, None) + + if "iterator" not in roles or not hasattr(cls, roles["iterator"]): + raise sa_exc.ArgumentError( + "Type %s must elect an iterator method to be " + "a collection class" % cls.__name__ + ) + + +def _set_collection_attributes(cls, roles, methods): + """apply ad-hoc instrumentation from decorators, class-level defaults + and implicit role declarations + + """ + for method_name, (before, argument, after) in methods.items(): + setattr( + cls, + method_name, + _instrument_membership_mutator( + getattr(cls, method_name), before, argument, after + ), + ) + # intern the role map + for role, method_name in roles.items(): + setattr(cls, "_sa_%s" % role, getattr(cls, method_name)) + + cls._sa_adapter = None + + if not hasattr(cls, "_sa_converter"): + cls._sa_converter = None + cls._sa_instrumented = id(cls) + + +def _instrument_membership_mutator(method, before, argument, after): + """Route method args and/or return value through the collection + adapter.""" + # This isn't smart enough to handle @adds(1) for 'def fn(self, (a, b))' + if before: + fn_args = list( + util.flatten_iterator(inspect_getfullargspec(method)[0]) + ) + if isinstance(argument, int): + pos_arg = argument + named_arg = len(fn_args) > argument and fn_args[argument] or None + else: + if argument in fn_args: + pos_arg = fn_args.index(argument) + else: + pos_arg = None + named_arg = argument + del fn_args + + def wrapper(*args, **kw): + if before: + if pos_arg is None: + if named_arg not in kw: + raise sa_exc.ArgumentError( + "Missing argument %s" % argument + ) + value = kw[named_arg] + else: + if len(args) > pos_arg: + value = args[pos_arg] + elif named_arg in kw: + value = kw[named_arg] + else: + raise sa_exc.ArgumentError( + "Missing argument %s" % argument + ) + + initiator = kw.pop("_sa_initiator", None) + if initiator is False: + executor = None + else: + executor = args[0]._sa_adapter + + if before and executor: + getattr(executor, before)(value, initiator) + + if not after or not executor: + return method(*args, **kw) + else: + res = method(*args, **kw) 
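+            # descriptive note (sketch): "after" is an event name such as
+            # "fire_remove_event" as installed by @collection.removes_return();
+            # it receives the method's return value, and a None return is
+            # taken to mean there is nothing to remove, so no event fires.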
+                if res is not None:
+                    getattr(executor, after)(res, initiator)
+                return res
+
+    wrapper._sa_instrumented = True  # type: ignore[attr-defined]
+    if hasattr(method, "_sa_instrument_role"):
+        wrapper._sa_instrument_role = method._sa_instrument_role  # type: ignore[attr-defined]  # noqa: E501
+    wrapper.__name__ = method.__name__
+    wrapper.__doc__ = method.__doc__
+    return wrapper
+
+
+def __set_wo_mutation(collection, item, _sa_initiator=None):
+    """Run set wo mutation events.
+
+    The collection is not mutated.
+
+    """
+    if _sa_initiator is not False:
+        executor = collection._sa_adapter
+        if executor:
+            executor.fire_append_wo_mutation_event(
+                item, _sa_initiator, key=None
+            )
+
+
+def __set(collection, item, _sa_initiator, key):
+    """Run set events.
+
+    This event always occurs before the collection is actually mutated.
+
+    """
+
+    if _sa_initiator is not False:
+        executor = collection._sa_adapter
+        if executor:
+            item = executor.fire_append_event(item, _sa_initiator, key=key)
+    return item
+
+
+def __del(collection, item, _sa_initiator, key):
+    """Run del events.
+
+    This event occurs before the collection is actually mutated, *except*
+    in the case of a pop operation, in which case it occurs afterwards.
+    For pop operations, the __before_pop hook is called before the
+    operation occurs.
+
+    """
+    if _sa_initiator is not False:
+        executor = collection._sa_adapter
+        if executor:
+            executor.fire_remove_event(item, _sa_initiator, key=key)
+
+
+def __before_pop(collection, _sa_initiator=None):
+    """An event which occurs before a pop() operation occurs."""
+    executor = collection._sa_adapter
+    if executor:
+        executor.fire_pre_remove_event(_sa_initiator)
+
+
+def _list_decorators() -> Dict[str, Callable[[_FN], _FN]]:
+    """Tailored instrumentation wrappers for any list-like class."""
+
+    def _tidy(fn):
+        fn._sa_instrumented = True
+        fn.__doc__ = getattr(list, fn.__name__).__doc__
+
+    def append(fn):
+        def append(self, item, _sa_initiator=None):
+            item = __set(self, item, _sa_initiator, NO_KEY)
+            fn(self, item)
+
+        _tidy(append)
+        return append
+
+    def remove(fn):
+        def remove(self, value, _sa_initiator=None):
+            __del(self, value, _sa_initiator, NO_KEY)
+            # testlib.pragma exempt:__eq__
+            fn(self, value)
+
+        _tidy(remove)
+        return remove
+
+    def insert(fn):
+        def insert(self, index, value):
+            value = __set(self, value, None, index)
+            fn(self, index, value)
+
+        _tidy(insert)
+        return insert
+
+    def __setitem__(fn):
+        def __setitem__(self, index, value):
+            if not isinstance(index, slice):
+                existing = self[index]
+                if existing is not None:
+                    __del(self, existing, None, index)
+                value = __set(self, value, None, index)
+                fn(self, index, value)
+            else:
+                # slice assignment requires __delitem__, insert, __len__
+                step = index.step or 1
+                start = index.start or 0
+                if start < 0:
+                    start += len(self)
+                if index.stop is not None:
+                    stop = index.stop
+                else:
+                    stop = len(self)
+                if stop < 0:
+                    stop += len(self)
+
+                if step == 1:
+                    if value is self:
+                        return
+                    for i in range(start, stop, step):
+                        if len(self) > start:
+                            del self[start]
+
+                    for i, item in enumerate(value):
+                        self.insert(i + start, item)
+                else:
+                    rng = list(range(start, stop, step))
+                    if len(value) != len(rng):
+                        raise ValueError(
+                            "attempt to assign sequence of size %s to "
+                            "extended slice of size %s"
+                            % (len(value), len(rng))
+                        )
+                    for i, item in zip(rng, value):
+                        self.__setitem__(i, item)
+
+        _tidy(__setitem__)
+        return __setitem__
+
+    def __delitem__(fn):
+        def __delitem__(self, index):
+            if not isinstance(index, slice):
+                item =
self[index] + __del(self, item, None, index) + fn(self, index) + else: + # slice deletion requires __getslice__ and a slice-groking + # __getitem__ for stepped deletion + # note: not breaking this into atomic dels + for item in self[index]: + __del(self, item, None, index) + fn(self, index) + + _tidy(__delitem__) + return __delitem__ + + def extend(fn): + def extend(self, iterable): + for value in list(iterable): + self.append(value) + + _tidy(extend) + return extend + + def __iadd__(fn): + def __iadd__(self, iterable): + # list.__iadd__ takes any iterable and seems to let TypeError + # raise as-is instead of returning NotImplemented + for value in list(iterable): + self.append(value) + return self + + _tidy(__iadd__) + return __iadd__ + + def pop(fn): + def pop(self, index=-1): + __before_pop(self) + item = fn(self, index) + __del(self, item, None, index) + return item + + _tidy(pop) + return pop + + def clear(fn): + def clear(self, index=-1): + for item in self: + __del(self, item, None, index) + fn(self) + + _tidy(clear) + return clear + + # __imul__ : not wrapping this. all members of the collection are already + # present, so no need to fire appends... wrapping it with an explicit + # decorator is still possible, so events on *= can be had if they're + # desired. hard to imagine a use case for __imul__, though. + + l = locals().copy() + l.pop("_tidy") + return l + + +def _dict_decorators() -> Dict[str, Callable[[_FN], _FN]]: + """Tailored instrumentation wrappers for any dict-like mapping class.""" + + def _tidy(fn): + fn._sa_instrumented = True + fn.__doc__ = getattr(dict, fn.__name__).__doc__ + + def __setitem__(fn): + def __setitem__(self, key, value, _sa_initiator=None): + if key in self: + __del(self, self[key], _sa_initiator, key) + value = __set(self, value, _sa_initiator, key) + fn(self, key, value) + + _tidy(__setitem__) + return __setitem__ + + def __delitem__(fn): + def __delitem__(self, key, _sa_initiator=None): + if key in self: + __del(self, self[key], _sa_initiator, key) + fn(self, key) + + _tidy(__delitem__) + return __delitem__ + + def clear(fn): + def clear(self): + for key in self: + __del(self, self[key], None, key) + fn(self) + + _tidy(clear) + return clear + + def pop(fn): + def pop(self, key, default=NO_ARG): + __before_pop(self) + _to_del = key in self + if default is NO_ARG: + item = fn(self, key) + else: + item = fn(self, key, default) + if _to_del: + __del(self, item, None, key) + return item + + _tidy(pop) + return pop + + def popitem(fn): + def popitem(self): + __before_pop(self) + item = fn(self) + __del(self, item[1], None, 1) + return item + + _tidy(popitem) + return popitem + + def setdefault(fn): + def setdefault(self, key, default=None): + if key not in self: + self.__setitem__(key, default) + return default + else: + value = self.__getitem__(key) + if value is default: + __set_wo_mutation(self, value, None) + + return value + + _tidy(setdefault) + return setdefault + + def update(fn): + def update(self, __other=NO_ARG, **kw): + if __other is not NO_ARG: + if hasattr(__other, "keys"): + for key in list(__other): + if key not in self or self[key] is not __other[key]: + self[key] = __other[key] + else: + __set_wo_mutation(self, __other[key], None) + else: + for key, value in __other: + if key not in self or self[key] is not value: + self[key] = value + else: + __set_wo_mutation(self, value, None) + for key in kw: + if key not in self or self[key] is not kw[key]: + self[key] = kw[key] + else: + __set_wo_mutation(self, kw[key], None) + + _tidy(update) + 
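+        # design note: the "is not" identity checks above fire only the
+        # cheaper "append without mutation" event when an incoming value is
+        # already present under the same key, instead of routing through
+        # __setitem__ (which would fire remove and append events).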
return update + + l = locals().copy() + l.pop("_tidy") + return l + + +_set_binop_bases = (set, frozenset) + + +def _set_binops_check_strict(self: Any, obj: Any) -> bool: + """Allow only set, frozenset and self.__class__-derived + objects in binops.""" + return isinstance(obj, _set_binop_bases + (self.__class__,)) + + +def _set_binops_check_loose(self: Any, obj: Any) -> bool: + """Allow anything set-like to participate in set binops.""" + return ( + isinstance(obj, _set_binop_bases + (self.__class__,)) + or util.duck_type_collection(obj) == set + ) + + +def _set_decorators() -> Dict[str, Callable[[_FN], _FN]]: + """Tailored instrumentation wrappers for any set-like class.""" + + def _tidy(fn): + fn._sa_instrumented = True + fn.__doc__ = getattr(set, fn.__name__).__doc__ + + def add(fn): + def add(self, value, _sa_initiator=None): + if value not in self: + value = __set(self, value, _sa_initiator, NO_KEY) + else: + __set_wo_mutation(self, value, _sa_initiator) + # testlib.pragma exempt:__hash__ + fn(self, value) + + _tidy(add) + return add + + def discard(fn): + def discard(self, value, _sa_initiator=None): + # testlib.pragma exempt:__hash__ + if value in self: + __del(self, value, _sa_initiator, NO_KEY) + # testlib.pragma exempt:__hash__ + fn(self, value) + + _tidy(discard) + return discard + + def remove(fn): + def remove(self, value, _sa_initiator=None): + # testlib.pragma exempt:__hash__ + if value in self: + __del(self, value, _sa_initiator, NO_KEY) + # testlib.pragma exempt:__hash__ + fn(self, value) + + _tidy(remove) + return remove + + def pop(fn): + def pop(self): + __before_pop(self) + item = fn(self) + # for set in particular, we have no way to access the item + # that will be popped before pop is called. + __del(self, item, None, NO_KEY) + return item + + _tidy(pop) + return pop + + def clear(fn): + def clear(self): + for item in list(self): + self.remove(item) + + _tidy(clear) + return clear + + def update(fn): + def update(self, value): + for item in value: + self.add(item) + + _tidy(update) + return update + + def __ior__(fn): + def __ior__(self, value): + if not _set_binops_check_strict(self, value): + return NotImplemented + for item in value: + self.add(item) + return self + + _tidy(__ior__) + return __ior__ + + def difference_update(fn): + def difference_update(self, value): + for item in value: + self.discard(item) + + _tidy(difference_update) + return difference_update + + def __isub__(fn): + def __isub__(self, value): + if not _set_binops_check_strict(self, value): + return NotImplemented + for item in value: + self.discard(item) + return self + + _tidy(__isub__) + return __isub__ + + def intersection_update(fn): + def intersection_update(self, other): + want, have = self.intersection(other), set(self) + remove, add = have - want, want - have + + for item in remove: + self.remove(item) + for item in add: + self.add(item) + + _tidy(intersection_update) + return intersection_update + + def __iand__(fn): + def __iand__(self, other): + if not _set_binops_check_strict(self, other): + return NotImplemented + want, have = self.intersection(other), set(self) + remove, add = have - want, want - have + + for item in remove: + self.remove(item) + for item in add: + self.add(item) + return self + + _tidy(__iand__) + return __iand__ + + def symmetric_difference_update(fn): + def symmetric_difference_update(self, other): + want, have = self.symmetric_difference(other), set(self) + remove, add = have - want, want - have + + for item in remove: + self.remove(item) + for item in add: + 
self.add(item) + + _tidy(symmetric_difference_update) + return symmetric_difference_update + + def __ixor__(fn): + def __ixor__(self, other): + if not _set_binops_check_strict(self, other): + return NotImplemented + want, have = self.symmetric_difference(other), set(self) + remove, add = have - want, want - have + + for item in remove: + self.remove(item) + for item in add: + self.add(item) + return self + + _tidy(__ixor__) + return __ixor__ + + l = locals().copy() + l.pop("_tidy") + return l + + +class InstrumentedList(List[_T]): + """An instrumented version of the built-in list.""" + + +class InstrumentedSet(Set[_T]): + """An instrumented version of the built-in set.""" + + +class InstrumentedDict(Dict[_KT, _VT]): + """An instrumented version of the built-in dict.""" + + +__canned_instrumentation: util.immutabledict[Any, _CollectionFactoryType] = ( + util.immutabledict( + { + list: InstrumentedList, + set: InstrumentedSet, + dict: InstrumentedDict, + } + ) +) + +__interfaces: util.immutabledict[ + Any, + Tuple[ + Dict[str, str], + Dict[str, Callable[..., Any]], + ], +] = util.immutabledict( + { + list: ( + { + "appender": "append", + "remover": "remove", + "iterator": "__iter__", + }, + _list_decorators(), + ), + set: ( + {"appender": "add", "remover": "remove", "iterator": "__iter__"}, + _set_decorators(), + ), + # decorators are required for dicts and object collections. + dict: ({"iterator": "values"}, _dict_decorators()), + } +) + + +def __go(lcls): + global keyfunc_mapping, mapped_collection + global column_keyed_dict, column_mapped_collection + global MappedCollection, KeyFuncDict + global attribute_keyed_dict, attribute_mapped_collection + + from .mapped_collection import keyfunc_mapping + from .mapped_collection import column_keyed_dict + from .mapped_collection import attribute_keyed_dict + from .mapped_collection import KeyFuncDict + + from .mapped_collection import mapped_collection + from .mapped_collection import column_mapped_collection + from .mapped_collection import attribute_mapped_collection + from .mapped_collection import MappedCollection + + # ensure instrumentation is associated with + # these built-in classes; if a user-defined class + # subclasses these and uses @internally_instrumented, + # the superclass is otherwise not instrumented. + # see [ticket:2406]. + _instrument_class(InstrumentedList) + _instrument_class(InstrumentedSet) + _instrument_class(KeyFuncDict) + + +__go(locals()) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/context.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/context.py new file mode 100644 index 00000000..6bacd77e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/context.py @@ -0,0 +1,3268 @@ +# orm/context.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +from __future__ import annotations + +import itertools +from typing import Any +from typing import cast +from typing import Dict +from typing import Iterable +from typing import List +from typing import Optional +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import attributes +from . import interfaces +from . 
import loading +from .base import _is_aliased_class +from .interfaces import ORMColumnDescription +from .interfaces import ORMColumnsClauseRole +from .path_registry import PathRegistry +from .util import _entity_corresponds_to +from .util import _ORMJoin +from .util import _TraceAdaptRole +from .util import AliasedClass +from .util import Bundle +from .util import ORMAdapter +from .util import ORMStatementAdapter +from .. import exc as sa_exc +from .. import future +from .. import inspect +from .. import sql +from .. import util +from ..sql import coercions +from ..sql import expression +from ..sql import roles +from ..sql import util as sql_util +from ..sql import visitors +from ..sql._typing import _TP +from ..sql._typing import is_dml +from ..sql._typing import is_insert_update +from ..sql._typing import is_select_base +from ..sql.base import _select_iterables +from ..sql.base import CacheableOptions +from ..sql.base import CompileState +from ..sql.base import Executable +from ..sql.base import Generative +from ..sql.base import Options +from ..sql.dml import UpdateBase +from ..sql.elements import GroupedElement +from ..sql.elements import TextClause +from ..sql.selectable import CompoundSelectState +from ..sql.selectable import LABEL_STYLE_DISAMBIGUATE_ONLY +from ..sql.selectable import LABEL_STYLE_NONE +from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL +from ..sql.selectable import Select +from ..sql.selectable import SelectLabelStyle +from ..sql.selectable import SelectState +from ..sql.selectable import TypedReturnsRows +from ..sql.visitors import InternalTraversal + +if TYPE_CHECKING: + from ._typing import _InternalEntityType + from ._typing import OrmExecuteOptionsParameter + from .loading import PostLoad + from .mapper import Mapper + from .query import Query + from .session import _BindArguments + from .session import Session + from ..engine import Result + from ..engine.interfaces import _CoreSingleExecuteParams + from ..sql._typing import _ColumnsClauseArgument + from ..sql.compiler import SQLCompiler + from ..sql.dml import _DMLTableElement + from ..sql.elements import ColumnElement + from ..sql.selectable import _JoinTargetElement + from ..sql.selectable import _LabelConventionCallable + from ..sql.selectable import _SetupJoinsElement + from ..sql.selectable import ExecutableReturnsRows + from ..sql.selectable import SelectBase + from ..sql.type_api import TypeEngine + +_T = TypeVar("_T", bound=Any) +_path_registry = PathRegistry.root + +_EMPTY_DICT = util.immutabledict() + + +LABEL_STYLE_LEGACY_ORM = SelectLabelStyle.LABEL_STYLE_LEGACY_ORM + + +class QueryContext: + __slots__ = ( + "top_level_context", + "compile_state", + "query", + "user_passed_query", + "params", + "load_options", + "bind_arguments", + "execution_options", + "session", + "autoflush", + "populate_existing", + "invoke_all_eagers", + "version_check", + "refresh_state", + "create_eager_joins", + "propagated_loader_options", + "attributes", + "runid", + "partials", + "post_load_paths", + "identity_token", + "yield_per", + "loaders_require_buffering", + "loaders_require_uniquing", + ) + + runid: int + post_load_paths: Dict[PathRegistry, PostLoad] + compile_state: ORMCompileState + + class default_load_options(Options): + _only_return_tuples = False + _populate_existing = False + _version_check = False + _invoke_all_eagers = True + _autoflush = True + _identity_token = None + _yield_per = None + _refresh_state = None + _lazy_loaded_from = None + _legacy_uniquing = False + _sa_top_level_orm_context = 
None + _is_user_refresh = False + + def __init__( + self, + compile_state: CompileState, + statement: Union[Select[Any], FromStatement[Any]], + user_passed_query: Union[ + Select[Any], + FromStatement[Any], + ], + params: _CoreSingleExecuteParams, + session: Session, + load_options: Union[ + Type[QueryContext.default_load_options], + QueryContext.default_load_options, + ], + execution_options: Optional[OrmExecuteOptionsParameter] = None, + bind_arguments: Optional[_BindArguments] = None, + ): + self.load_options = load_options + self.execution_options = execution_options or _EMPTY_DICT + self.bind_arguments = bind_arguments or _EMPTY_DICT + self.compile_state = compile_state + self.query = statement + + # the query that the end user passed to Session.execute() or similar. + # this is usually the same as .query, except in the bulk_persistence + # routines where a separate FromStatement is manufactured in the + # compile stage; this allows differentiation in that case. + self.user_passed_query = user_passed_query + + self.session = session + self.loaders_require_buffering = False + self.loaders_require_uniquing = False + self.params = params + self.top_level_context = load_options._sa_top_level_orm_context + + cached_options = compile_state.select_statement._with_options + uncached_options = user_passed_query._with_options + + # see issue #7447 , #8399 for some background + # propagated loader options will be present on loaded InstanceState + # objects under state.load_options and are typically used by + # LazyLoader to apply options to the SELECT statement it emits. + # For compile state options (i.e. loader strategy options), these + # need to line up with the ".load_path" attribute which in + # loader.py is pulled from context.compile_state.current_path. + # so, this means these options have to be the ones from the + # *cached* statement that's travelling with compile_state, not the + # *current* statement which won't match up for an ad-hoc + # AliasedClass + self.propagated_loader_options = tuple( + opt._adapt_cached_option_to_uncached_option(self, uncached_opt) + for opt, uncached_opt in zip(cached_options, uncached_options) + if opt.propagate_to_loaders + ) + + self.attributes = dict(compile_state.attributes) + + self.autoflush = load_options._autoflush + self.populate_existing = load_options._populate_existing + self.invoke_all_eagers = load_options._invoke_all_eagers + self.version_check = load_options._version_check + self.refresh_state = load_options._refresh_state + self.yield_per = load_options._yield_per + self.identity_token = load_options._identity_token + + def _get_top_level_context(self) -> QueryContext: + return self.top_level_context or self + + +_orm_load_exec_options = util.immutabledict( + {"_result_disable_adapt_to_context": True} +) + + +class AbstractORMCompileState(CompileState): + is_dml_returning = False + + def _init_global_attributes( + self, statement, compiler, *, toplevel, process_criteria_for_toplevel + ): + self.attributes = {} + + if compiler is None: + # this is the legacy / testing only ORM _compile_state() use case. + # there is no need to apply criteria options for this. 
+ self.global_attributes = ga = {} + assert toplevel + return + else: + self.global_attributes = ga = compiler._global_attributes + + if toplevel: + ga["toplevel_orm"] = True + + if process_criteria_for_toplevel: + for opt in statement._with_options: + if opt._is_criteria_option: + opt.process_compile_state(self) + + return + elif ga.get("toplevel_orm", False): + return + + stack_0 = compiler.stack[0] + + try: + toplevel_stmt = stack_0["selectable"] + except KeyError: + pass + else: + for opt in toplevel_stmt._with_options: + if opt._is_compile_state and opt._is_criteria_option: + opt.process_compile_state(self) + + ga["toplevel_orm"] = True + + @classmethod + def create_for_statement( + cls, + statement: Union[Select, FromStatement], + compiler: Optional[SQLCompiler], + **kw: Any, + ) -> AbstractORMCompileState: + """Create a context for a statement given a :class:`.Compiler`. + + This method is always invoked in the context of SQLCompiler.process(). + + For a Select object, this would be invoked from + SQLCompiler.visit_select(). For the special FromStatement object used + by Query to indicate "Query.from_statement()", this is called by + FromStatement._compiler_dispatch() that would be called by + SQLCompiler.process(). + """ + return super().create_for_statement(statement, compiler, **kw) + + @classmethod + def orm_pre_session_exec( + cls, + session, + statement, + params, + execution_options, + bind_arguments, + is_pre_event, + ): + raise NotImplementedError() + + @classmethod + def orm_execute_statement( + cls, + session, + statement, + params, + execution_options, + bind_arguments, + conn, + ) -> Result: + result = conn.execute( + statement, params or {}, execution_options=execution_options + ) + return cls.orm_setup_cursor_result( + session, + statement, + params, + execution_options, + bind_arguments, + result, + ) + + @classmethod + def orm_setup_cursor_result( + cls, + session, + statement, + params, + execution_options, + bind_arguments, + result, + ): + raise NotImplementedError() + + +class AutoflushOnlyORMCompileState(AbstractORMCompileState): + """ORM compile state that is a passthrough, except for autoflush.""" + + @classmethod + def orm_pre_session_exec( + cls, + session, + statement, + params, + execution_options, + bind_arguments, + is_pre_event, + ): + # consume result-level load_options. 
These may have been set up + # in an ORMExecuteState hook + ( + load_options, + execution_options, + ) = QueryContext.default_load_options.from_execution_options( + "_sa_orm_load_options", + { + "autoflush", + }, + execution_options, + statement._execution_options, + ) + + if not is_pre_event and load_options._autoflush: + session._autoflush() + + return statement, execution_options + + @classmethod + def orm_setup_cursor_result( + cls, + session, + statement, + params, + execution_options, + bind_arguments, + result, + ): + return result + + +class ORMCompileState(AbstractORMCompileState): + class default_compile_options(CacheableOptions): + _cache_key_traversal = [ + ("_use_legacy_query_style", InternalTraversal.dp_boolean), + ("_for_statement", InternalTraversal.dp_boolean), + ("_bake_ok", InternalTraversal.dp_boolean), + ("_current_path", InternalTraversal.dp_has_cache_key), + ("_enable_single_crit", InternalTraversal.dp_boolean), + ("_enable_eagerloads", InternalTraversal.dp_boolean), + ("_only_load_props", InternalTraversal.dp_plain_obj), + ("_set_base_alias", InternalTraversal.dp_boolean), + ("_for_refresh_state", InternalTraversal.dp_boolean), + ("_render_for_subquery", InternalTraversal.dp_boolean), + ("_is_star", InternalTraversal.dp_boolean), + ] + + # set to True by default from Query._statement_20(), to indicate + # the rendered query should look like a legacy ORM query. right + # now this basically indicates we should use tablename_columnname + # style labels. Generally indicates the statement originated + # from a Query object. + _use_legacy_query_style = False + + # set *only* when we are coming from the Query.statement + # accessor, or a Query-level equivalent such as + # query.subquery(). this supersedes "toplevel". + _for_statement = False + + _bake_ok = True + _current_path = _path_registry + _enable_single_crit = True + _enable_eagerloads = True + _only_load_props = None + _set_base_alias = False + _for_refresh_state = False + _render_for_subquery = False + _is_star = False + + attributes: Dict[Any, Any] + global_attributes: Dict[Any, Any] + + statement: Union[Select[Any], FromStatement[Any]] + select_statement: Union[Select[Any], FromStatement[Any]] + _entities: List[_QueryEntity] + _polymorphic_adapters: Dict[_InternalEntityType, ORMAdapter] + compile_options: Union[ + Type[default_compile_options], default_compile_options + ] + _primary_entity: Optional[_QueryEntity] + use_legacy_query_style: bool + _label_convention: _LabelConventionCallable + primary_columns: List[ColumnElement[Any]] + secondary_columns: List[ColumnElement[Any]] + dedupe_columns: Set[ColumnElement[Any]] + create_eager_joins: List[ + # TODO: this structure is set up by JoinedLoader + Tuple[Any, ...] + ] + current_path: PathRegistry = _path_registry + _has_mapper_entities = False + + def __init__(self, *arg, **kw): + raise NotImplementedError() + + if TYPE_CHECKING: + + @classmethod + def create_for_statement( + cls, + statement: Union[Select, FromStatement], + compiler: Optional[SQLCompiler], + **kw: Any, + ) -> ORMCompileState: ... 
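+    # descriptive note: _append_dedupe_col_collection (below) appends a
+    # column expression to the given collection only if it has not been
+    # seen before, using self.dedupe_columns as the seen-set, so the
+    # primary/secondary column lists stay ordered and unique.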
+ + def _append_dedupe_col_collection(self, obj, col_collection): + dedupe = self.dedupe_columns + if obj not in dedupe: + dedupe.add(obj) + col_collection.append(obj) + + @classmethod + def _column_naming_convention( + cls, label_style: SelectLabelStyle, legacy: bool + ) -> _LabelConventionCallable: + if legacy: + + def name(col, col_name=None): + if col_name: + return col_name + else: + return getattr(col, "key") + + return name + else: + return SelectState._column_naming_convention(label_style) + + @classmethod + def get_column_descriptions(cls, statement): + return _column_descriptions(statement) + + @classmethod + def orm_pre_session_exec( + cls, + session, + statement, + params, + execution_options, + bind_arguments, + is_pre_event, + ): + # consume result-level load_options. These may have been set up + # in an ORMExecuteState hook + ( + load_options, + execution_options, + ) = QueryContext.default_load_options.from_execution_options( + "_sa_orm_load_options", + { + "populate_existing", + "autoflush", + "yield_per", + "identity_token", + "sa_top_level_orm_context", + }, + execution_options, + statement._execution_options, + ) + + # default execution options for ORM results: + # 1. _result_disable_adapt_to_context=True + # this will disable the ResultSetMetadata._adapt_to_context() + # step which we don't need, as we have result processors cached + # against the original SELECT statement before caching. + + if "sa_top_level_orm_context" in execution_options: + ctx = execution_options["sa_top_level_orm_context"] + execution_options = ctx.query._execution_options.merge_with( + ctx.execution_options, execution_options + ) + + if not execution_options: + execution_options = _orm_load_exec_options + else: + execution_options = execution_options.union(_orm_load_exec_options) + + # would have been placed here by legacy Query only + if load_options._yield_per: + execution_options = execution_options.union( + {"yield_per": load_options._yield_per} + ) + + if ( + getattr(statement._compile_options, "_current_path", None) + and len(statement._compile_options._current_path) > 10 + and execution_options.get("compiled_cache", True) is not None + ): + execution_options: util.immutabledict[str, Any] = ( + execution_options.union( + { + "compiled_cache": None, + "_cache_disable_reason": "excess depth for " + "ORM loader options", + } + ) + ) + + bind_arguments["clause"] = statement + + # new in 1.4 - the coercions system is leveraged to allow the + # "subject" mapper of a statement be propagated to the top + # as the statement is built. "subject" mapper is the generally + # standard object used as an identifier for multi-database schemes. + + # we are here based on the fact that _propagate_attrs contains + # "compile_state_plugin": "orm". The "plugin_subject" + # needs to be present as well. 
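+        # illustrative sketch: for a statement like select(SomeMappedClass),
+        # the coercions system populates _propagate_attrs with
+        # {"compile_state_plugin": "orm", "plugin_subject": <entity>}, and
+        # plugin_subject.mapper is passed along as bind_arguments["mapper"]
+        # for session bind resolution.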
+ + try: + plugin_subject = statement._propagate_attrs["plugin_subject"] + except KeyError: + assert False, "statement had 'orm' plugin but no plugin_subject" + else: + if plugin_subject: + bind_arguments["mapper"] = plugin_subject.mapper + + if not is_pre_event and load_options._autoflush: + session._autoflush() + + return statement, execution_options + + @classmethod + def orm_setup_cursor_result( + cls, + session, + statement, + params, + execution_options, + bind_arguments, + result, + ): + execution_context = result.context + compile_state = execution_context.compiled.compile_state + + # cover edge case where ORM entities used in legacy select + # were passed to session.execute: + # session.execute(legacy_select([User.id, User.name])) + # see test_query->test_legacy_tuple_old_select + + load_options = execution_options.get( + "_sa_orm_load_options", QueryContext.default_load_options + ) + + if compile_state.compile_options._is_star: + return result + + querycontext = QueryContext( + compile_state, + statement, + statement, + params, + session, + load_options, + execution_options, + bind_arguments, + ) + return loading.instances(result, querycontext) + + @property + def _lead_mapper_entities(self): + """return all _MapperEntity objects in the lead entities collection. + + Does **not** include entities that have been replaced by + with_entities(), with_only_columns() + + """ + return [ + ent for ent in self._entities if isinstance(ent, _MapperEntity) + ] + + def _create_with_polymorphic_adapter(self, ext_info, selectable): + """given MapperEntity or ORMColumnEntity, setup polymorphic loading + if called for by the Mapper. + + As of #8168 in 2.0.0rc1, polymorphic adapters, which greatly increase + the complexity of the query creation process, are not used at all + except in the quasi-legacy cases of with_polymorphic referring to an + alias and/or subquery. This would apply to concrete polymorphic + loading, and joined inheritance where a subquery is + passed to with_polymorphic (which is completely unnecessary in modern + use). + + """ + if ( + not ext_info.is_aliased_class + and ext_info.mapper.persist_selectable + not in self._polymorphic_adapters + ): + for mp in ext_info.mapper.iterate_to_root(): + self._mapper_loads_polymorphically_with( + mp, + ORMAdapter( + _TraceAdaptRole.WITH_POLYMORPHIC_ADAPTER, + mp, + equivalents=mp._equivalent_columns, + selectable=selectable, + ), + ) + + def _mapper_loads_polymorphically_with(self, mapper, adapter): + for m2 in mapper._with_polymorphic_mappers or [mapper]: + self._polymorphic_adapters[m2] = adapter + + for m in m2.iterate_to_root(): + self._polymorphic_adapters[m.local_table] = adapter + + @classmethod + def _create_entities_collection(cls, query, legacy): + raise NotImplementedError( + "this method only works for ORMSelectCompileState" + ) + + +class DMLReturningColFilter: + """an adapter used for the DML RETURNING case. + + Has a subset of the interface used by + :class:`.ORMAdapter` and is used for :class:`._QueryEntity` + instances to set up their columns as used in RETURNING for a + DML statement. + + """ + + __slots__ = ("mapper", "columns", "__weakref__") + + def __init__(self, target_mapper, immediate_dml_mapper): + if ( + immediate_dml_mapper is not None + and target_mapper.local_table + is not immediate_dml_mapper.local_table + ): + # joined inh, or in theory other kinds of multi-table mappings + self.mapper = immediate_dml_mapper + else: + # single inh, normal mappings, etc. 
+ self.mapper = target_mapper + self.columns = self.columns = util.WeakPopulateDict( + self.adapt_check_present # type: ignore + ) + + def __call__(self, col, as_filter): + for cc in sql_util._find_columns(col): + c2 = self.adapt_check_present(cc) + if c2 is not None: + return col + else: + return None + + def adapt_check_present(self, col): + mapper = self.mapper + prop = mapper._columntoproperty.get(col, None) + if prop is None: + return None + return mapper.local_table.c.corresponding_column(col) + + +@sql.base.CompileState.plugin_for("orm", "orm_from_statement") +class ORMFromStatementCompileState(ORMCompileState): + _from_obj_alias = None + _has_mapper_entities = False + + statement_container: FromStatement + requested_statement: Union[SelectBase, TextClause, UpdateBase] + dml_table: Optional[_DMLTableElement] = None + + _has_orm_entities = False + multi_row_eager_loaders = False + eager_adding_joins = False + compound_eager_adapter = None + + extra_criteria_entities = _EMPTY_DICT + eager_joins = _EMPTY_DICT + + @classmethod + def create_for_statement( + cls, + statement_container: Union[Select, FromStatement], + compiler: Optional[SQLCompiler], + **kw: Any, + ) -> ORMFromStatementCompileState: + assert isinstance(statement_container, FromStatement) + + if compiler is not None and compiler.stack: + raise sa_exc.CompileError( + "The ORM FromStatement construct only supports being " + "invoked as the topmost statement, as it is only intended to " + "define how result rows should be returned." + ) + + self = cls.__new__(cls) + self._primary_entity = None + + self.use_legacy_query_style = ( + statement_container._compile_options._use_legacy_query_style + ) + self.statement_container = self.select_statement = statement_container + self.requested_statement = statement = statement_container.element + + if statement.is_dml: + self.dml_table = statement.table + self.is_dml_returning = True + + self._entities = [] + self._polymorphic_adapters = {} + + self.compile_options = statement_container._compile_options + + if ( + self.use_legacy_query_style + and isinstance(statement, expression.SelectBase) + and not statement._is_textual + and not statement.is_dml + and statement._label_style is LABEL_STYLE_NONE + ): + self.statement = statement.set_label_style( + LABEL_STYLE_TABLENAME_PLUS_COL + ) + else: + self.statement = statement + + self._label_convention = self._column_naming_convention( + ( + statement._label_style + if not statement._is_textual and not statement.is_dml + else LABEL_STYLE_NONE + ), + self.use_legacy_query_style, + ) + + _QueryEntity.to_compile_state( + self, + statement_container._raw_columns, + self._entities, + is_current_entities=True, + ) + + self.current_path = statement_container._compile_options._current_path + + self._init_global_attributes( + statement_container, + compiler, + process_criteria_for_toplevel=False, + toplevel=True, + ) + + if statement_container._with_options: + for opt in statement_container._with_options: + if opt._is_compile_state: + opt.process_compile_state(self) + + if statement_container._with_context_options: + for fn, key in statement_container._with_context_options: + fn(self) + + self.primary_columns = [] + self.secondary_columns = [] + self.dedupe_columns = set() + self.create_eager_joins = [] + self._fallback_from_clauses = [] + + self.order_by = None + + if isinstance(self.statement, expression.TextClause): + # TextClause has no "column" objects at all. 
for this case, + # we generate columns from our _QueryEntity objects, then + # flip on all the "please match no matter what" parameters. + self.extra_criteria_entities = {} + + for entity in self._entities: + entity.setup_compile_state(self) + + compiler._ordered_columns = compiler._textual_ordered_columns = ( + False + ) + + # enable looser result column matching. this is shown to be + # needed by test_query.py::TextTest + compiler._loose_column_name_matching = True + + for c in self.primary_columns: + compiler.process( + c, + within_columns_clause=True, + add_to_result_map=compiler._add_to_result_map, + ) + else: + # for everyone else, Select, Insert, Update, TextualSelect, they + # have column objects already. After much + # experimentation here, the best approach seems to be, use + # those columns completely, don't interfere with the compiler + # at all; just in ORM land, use an adapter to convert from + # our ORM columns to whatever columns are in the statement, + # before we look in the result row. Adapt on names + # to accept cases such as issue #9217, however also allow + # this to be overridden for cases such as #9273. + self._from_obj_alias = ORMStatementAdapter( + _TraceAdaptRole.ADAPT_FROM_STATEMENT, + self.statement, + adapt_on_names=statement_container._adapt_on_names, + ) + + return self + + def _adapt_col_list(self, cols, current_adapter): + return cols + + def _get_current_adapter(self): + return None + + def setup_dml_returning_compile_state(self, dml_mapper): + """used by BulkORMInsert (and Update / Delete?) to set up a handler + for RETURNING to return ORM objects and expressions + + """ + target_mapper = self.statement._propagate_attrs.get( + "plugin_subject", None + ) + adapter = DMLReturningColFilter(target_mapper, dml_mapper) + + if self.compile_options._is_star and (len(self._entities) != 1): + raise sa_exc.CompileError( + "Can't generate ORM query that includes multiple expressions " + "at the same time as '*'; query for '*' alone if present" + ) + + for entity in self._entities: + entity.setup_dml_returning_compile_state(self, adapter) + + +class FromStatement(GroupedElement, Generative, TypedReturnsRows[_TP]): + """Core construct that represents a load of ORM objects from various + :class:`.ReturnsRows` and other classes including: + + :class:`.Select`, :class:`.TextClause`, :class:`.TextualSelect`, + :class:`.CompoundSelect`, :class`.Insert`, :class:`.Update`, + and in theory, :class:`.Delete`. 
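+
+    For illustration only (assuming a hypothetical mapped ``User`` class
+    and table name), this is roughly the construct produced by the
+    ORM-enabled ``Select.from_statement()`` method::
+
+        from sqlalchemy import select, text
+
+        stmt = select(User).from_statement(
+            text("SELECT * FROM user_account")
+        )
+        # executing stmt with a Session loads User objects from the
+        # textual statement's rows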
+ + """ + + __visit_name__ = "orm_from_statement" + + _compile_options = ORMFromStatementCompileState.default_compile_options + + _compile_state_factory = ORMFromStatementCompileState.create_for_statement + + _for_update_arg = None + + element: Union[ExecutableReturnsRows, TextClause] + + _adapt_on_names: bool + + _traverse_internals = [ + ("_raw_columns", InternalTraversal.dp_clauseelement_list), + ("element", InternalTraversal.dp_clauseelement), + ] + Executable._executable_traverse_internals + + _cache_key_traversal = _traverse_internals + [ + ("_compile_options", InternalTraversal.dp_has_cache_key) + ] + + is_from_statement = True + + def __init__( + self, + entities: Iterable[_ColumnsClauseArgument[Any]], + element: Union[ExecutableReturnsRows, TextClause], + _adapt_on_names: bool = True, + ): + self._raw_columns = [ + coercions.expect( + roles.ColumnsClauseRole, + ent, + apply_propagate_attrs=self, + post_inspect=True, + ) + for ent in util.to_list(entities) + ] + self.element = element + self.is_dml = element.is_dml + self.is_select = element.is_select + self.is_delete = element.is_delete + self.is_insert = element.is_insert + self.is_update = element.is_update + self._label_style = ( + element._label_style if is_select_base(element) else None + ) + self._adapt_on_names = _adapt_on_names + + def _compiler_dispatch(self, compiler, **kw): + """provide a fixed _compiler_dispatch method. + + This is roughly similar to using the sqlalchemy.ext.compiler + ``@compiles`` extension. + + """ + + compile_state = self._compile_state_factory(self, compiler, **kw) + + toplevel = not compiler.stack + + if toplevel: + compiler.compile_state = compile_state + + return compiler.process(compile_state.statement, **kw) + + @property + def column_descriptions(self): + """Return a :term:`plugin-enabled` 'column descriptions' structure + referring to the columns which are SELECTed by this statement. + + See the section :ref:`queryguide_inspection` for an overview + of this feature. + + .. 
seealso:: + + :ref:`queryguide_inspection` - ORM background + + """ + meth = cast( + ORMSelectCompileState, SelectState.get_plugin_class(self) + ).get_column_descriptions + return meth(self) + + def _ensure_disambiguated_names(self): + return self + + def get_children(self, **kw): + yield from itertools.chain.from_iterable( + element._from_objects for element in self._raw_columns + ) + yield from super().get_children(**kw) + + @property + def _all_selected_columns(self): + return self.element._all_selected_columns + + @property + def _return_defaults(self): + return self.element._return_defaults if is_dml(self.element) else None + + @property + def _returning(self): + return self.element._returning if is_dml(self.element) else None + + @property + def _inline(self): + return self.element._inline if is_insert_update(self.element) else None + + +@sql.base.CompileState.plugin_for("orm", "compound_select") +class CompoundSelectCompileState( + AutoflushOnlyORMCompileState, CompoundSelectState +): + pass + + +@sql.base.CompileState.plugin_for("orm", "select") +class ORMSelectCompileState(ORMCompileState, SelectState): + _already_joined_edges = () + + _memoized_entities = _EMPTY_DICT + + _from_obj_alias = None + _has_mapper_entities = False + + _has_orm_entities = False + multi_row_eager_loaders = False + eager_adding_joins = False + compound_eager_adapter = None + + correlate = None + correlate_except = None + _where_criteria = () + _having_criteria = () + + @classmethod + def create_for_statement( + cls, + statement: Union[Select, FromStatement], + compiler: Optional[SQLCompiler], + **kw: Any, + ) -> ORMSelectCompileState: + """compiler hook, we arrive here from compiler.visit_select() only.""" + + self = cls.__new__(cls) + + if compiler is not None: + toplevel = not compiler.stack + else: + toplevel = True + + select_statement = statement + + # if we are a select() that was never a legacy Query, we won't + # have ORM level compile options. + statement._compile_options = cls.default_compile_options.safe_merge( + statement._compile_options + ) + + if select_statement._execution_options: + # execution options should not impact the compilation of a + # query, and at the moment subqueryloader is putting some things + # in here that we explicitly don't want stuck in a cache. + self.select_statement = select_statement._clone() + self.select_statement._execution_options = util.immutabledict() + else: + self.select_statement = select_statement + + # indicates this select() came from Query.statement + self.for_statement = select_statement._compile_options._for_statement + + # generally if we are from Query or directly from a select() + self.use_legacy_query_style = ( + select_statement._compile_options._use_legacy_query_style + ) + + self._entities = [] + self._primary_entity = None + self._polymorphic_adapters = {} + + self.compile_options = select_statement._compile_options + + if not toplevel: + # for subqueries, turn off eagerloads and set + # "render_for_subquery". + self.compile_options += { + "_enable_eagerloads": False, + "_render_for_subquery": True, + } + + # determine label style. we can make different decisions here. + # at the moment, trying to see if we can always use DISAMBIGUATE_ONLY + # rather than LABEL_STYLE_NONE, and if we can use disambiguate style + # for new style ORM selects too. 
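+        # e.g. (illustrative sketch; assumes a mapped class User on a
+        # table named "user" with columns id / name):
+        #
+        #   LABEL_STYLE_TABLENAME_PLUS_COL renders roughly
+        #       SELECT user.id AS user_id, user.name AS user_name ...
+        #   LABEL_STYLE_DISAMBIGUATE_ONLY renders roughly
+        #       SELECT user.id, user.name ...
+        #   adding labels only where two columns would share a name.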
+ if ( + self.use_legacy_query_style + and self.select_statement._label_style is LABEL_STYLE_LEGACY_ORM + ): + if not self.for_statement: + self.label_style = LABEL_STYLE_TABLENAME_PLUS_COL + else: + self.label_style = LABEL_STYLE_DISAMBIGUATE_ONLY + else: + self.label_style = self.select_statement._label_style + + if select_statement._memoized_select_entities: + self._memoized_entities = { + memoized_entities: _QueryEntity.to_compile_state( + self, + memoized_entities._raw_columns, + [], + is_current_entities=False, + ) + for memoized_entities in ( + select_statement._memoized_select_entities + ) + } + + # label_convention is stateful and will yield deduping keys if it + # sees the same key twice. therefore it's important that it is not + # invoked for the above "memoized" entities that aren't actually + # in the columns clause + self._label_convention = self._column_naming_convention( + statement._label_style, self.use_legacy_query_style + ) + + _QueryEntity.to_compile_state( + self, + select_statement._raw_columns, + self._entities, + is_current_entities=True, + ) + + self.current_path = select_statement._compile_options._current_path + + self.eager_order_by = () + + self._init_global_attributes( + select_statement, + compiler, + toplevel=toplevel, + process_criteria_for_toplevel=False, + ) + + if toplevel and ( + select_statement._with_options + or select_statement._memoized_select_entities + ): + for ( + memoized_entities + ) in select_statement._memoized_select_entities: + for opt in memoized_entities._with_options: + if opt._is_compile_state: + opt.process_compile_state_replaced_entities( + self, + [ + ent + for ent in self._memoized_entities[ + memoized_entities + ] + if isinstance(ent, _MapperEntity) + ], + ) + + for opt in self.select_statement._with_options: + if opt._is_compile_state: + opt.process_compile_state(self) + + # uncomment to print out the context.attributes structure + # after it's been set up above + # self._dump_option_struct() + + if select_statement._with_context_options: + for fn, key in select_statement._with_context_options: + fn(self) + + self.primary_columns = [] + self.secondary_columns = [] + self.dedupe_columns = set() + self.eager_joins = {} + self.extra_criteria_entities = {} + self.create_eager_joins = [] + self._fallback_from_clauses = [] + + # normalize the FROM clauses early by themselves, as this makes + # it an easier job when we need to assemble a JOIN onto these, + # for select.join() as well as joinedload(). As of 1.4 there are now + # potentially more complex sets of FROM objects here as the use + # of lambda statements for lazyload, load_on_pk etc. uses more + # cloning of the select() construct. 
See #6495 + self.from_clauses = self._normalize_froms( + info.selectable for info in select_statement._from_obj + ) + + # this is a fairly arbitrary break into a second method, + # so it might be nicer to break up create_for_statement() + # and _setup_for_generate into three or four logical sections + self._setup_for_generate() + + SelectState.__init__(self, self.statement, compiler, **kw) + return self + + def _dump_option_struct(self): + print("\n---------------------------------------------------\n") + print(f"current path: {self.current_path}") + for key in self.attributes: + if isinstance(key, tuple) and key[0] == "loader": + print(f"\nLoader: {PathRegistry.coerce(key[1])}") + print(f" {self.attributes[key]}") + print(f" {self.attributes[key].__dict__}") + elif isinstance(key, tuple) and key[0] == "path_with_polymorphic": + print(f"\nWith Polymorphic: {PathRegistry.coerce(key[1])}") + print(f" {self.attributes[key]}") + + def _setup_for_generate(self): + query = self.select_statement + + self.statement = None + self._join_entities = () + + if self.compile_options._set_base_alias: + # legacy Query only + self._set_select_from_alias() + + for memoized_entities in query._memoized_select_entities: + if memoized_entities._setup_joins: + self._join( + memoized_entities._setup_joins, + self._memoized_entities[memoized_entities], + ) + + if query._setup_joins: + self._join(query._setup_joins, self._entities) + + current_adapter = self._get_current_adapter() + + if query._where_criteria: + self._where_criteria = query._where_criteria + + if current_adapter: + self._where_criteria = tuple( + current_adapter(crit, True) + for crit in self._where_criteria + ) + + # TODO: some complexity with order_by here was due to mapper.order_by. + # now that this is removed we can hopefully make order_by / + # group_by act identically to how they are in Core select. + self.order_by = ( + self._adapt_col_list(query._order_by_clauses, current_adapter) + if current_adapter and query._order_by_clauses not in (None, False) + else query._order_by_clauses + ) + + if query._having_criteria: + self._having_criteria = tuple( + current_adapter(crit, True) if current_adapter else crit + for crit in query._having_criteria + ) + + self.group_by = ( + self._adapt_col_list( + util.flatten_iterator(query._group_by_clauses), current_adapter + ) + if current_adapter and query._group_by_clauses not in (None, False) + else query._group_by_clauses or None + ) + + if self.eager_order_by: + adapter = self.from_clauses[0]._target_adapter + self.eager_order_by = adapter.copy_and_process(self.eager_order_by) + + if query._distinct_on: + self.distinct_on = self._adapt_col_list( + query._distinct_on, current_adapter + ) + else: + self.distinct_on = () + + self.distinct = query._distinct + + if query._correlate: + # ORM mapped entities that are mapped to joins can be passed + # to .correlate, so here they are broken into their component + # tables. 
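+            # e.g. (illustrative; assumes joined inheritance): if Manager
+            # is mapped to a join of the "employee" and "manager" tables,
+            # a call such as select(...).correlate(Manager) surfaces both
+            # component tables here so each is correlated individually.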
+ self.correlate = tuple( + util.flatten_iterator( + sql_util.surface_selectables(s) if s is not None else None + for s in query._correlate + ) + ) + elif query._correlate_except is not None: + self.correlate_except = tuple( + util.flatten_iterator( + sql_util.surface_selectables(s) if s is not None else None + for s in query._correlate_except + ) + ) + elif not query._auto_correlate: + self.correlate = (None,) + + # PART II + + self._for_update_arg = query._for_update_arg + + if self.compile_options._is_star and (len(self._entities) != 1): + raise sa_exc.CompileError( + "Can't generate ORM query that includes multiple expressions " + "at the same time as '*'; query for '*' alone if present" + ) + for entity in self._entities: + entity.setup_compile_state(self) + + for rec in self.create_eager_joins: + strategy = rec[0] + strategy(self, *rec[1:]) + + # else "load from discrete FROMs" mode, + # i.e. when each _MappedEntity has its own FROM + + if self.compile_options._enable_single_crit: + self._adjust_for_extra_criteria() + + if not self.primary_columns: + if self.compile_options._only_load_props: + assert False, "no columns were included in _only_load_props" + + raise sa_exc.InvalidRequestError( + "Query contains no columns with which to SELECT from." + ) + + if not self.from_clauses: + self.from_clauses = list(self._fallback_from_clauses) + + if self.order_by is False: + self.order_by = None + + if ( + self.multi_row_eager_loaders + and self.eager_adding_joins + and self._should_nest_selectable + ): + self.statement = self._compound_eager_statement() + else: + self.statement = self._simple_statement() + + if self.for_statement: + ezero = self._mapper_zero() + if ezero is not None: + # TODO: this goes away once we get rid of the deep entity + # thing + self.statement = self.statement._annotate( + {"deepentity": ezero} + ) + + @classmethod + def _create_entities_collection(cls, query, legacy): + """Creates a partial ORMSelectCompileState that includes + the full collection of _MapperEntity and other _QueryEntity objects. + + Supports a few remaining use cases that are pre-compilation + but still need to gather some of the column / adaption information. 
+ + """ + self = cls.__new__(cls) + + self._entities = [] + self._primary_entity = None + self._polymorphic_adapters = {} + + self._label_convention = self._column_naming_convention( + query._label_style, legacy + ) + + # entities will also set up polymorphic adapters for mappers + # that have with_polymorphic configured + _QueryEntity.to_compile_state( + self, query._raw_columns, self._entities, is_current_entities=True + ) + return self + + @classmethod + def determine_last_joined_entity(cls, statement): + setup_joins = statement._setup_joins + + return _determine_last_joined_entity(setup_joins, None) + + @classmethod + def all_selected_columns(cls, statement): + for element in statement._raw_columns: + if ( + element.is_selectable + and "entity_namespace" in element._annotations + ): + ens = element._annotations["entity_namespace"] + if not ens.is_mapper and not ens.is_aliased_class: + yield from _select_iterables([element]) + else: + yield from _select_iterables(ens._all_column_expressions) + else: + yield from _select_iterables([element]) + + @classmethod + def get_columns_clause_froms(cls, statement): + return cls._normalize_froms( + itertools.chain.from_iterable( + ( + element._from_objects + if "parententity" not in element._annotations + else [ + element._annotations[ + "parententity" + ].__clause_element__() + ] + ) + for element in statement._raw_columns + ) + ) + + @classmethod + def from_statement(cls, statement, from_statement): + from_statement = coercions.expect( + roles.ReturnsRowsRole, + from_statement, + apply_propagate_attrs=statement, + ) + + stmt = FromStatement(statement._raw_columns, from_statement) + + stmt.__dict__.update( + _with_options=statement._with_options, + _with_context_options=statement._with_context_options, + _execution_options=statement._execution_options, + _propagate_attrs=statement._propagate_attrs, + ) + return stmt + + def _set_select_from_alias(self): + """used only for legacy Query cases""" + + query = self.select_statement # query + + assert self.compile_options._set_base_alias + assert len(query._from_obj) == 1 + + adapter = self._get_select_from_alias_from_obj(query._from_obj[0]) + if adapter: + self.compile_options += {"_enable_single_crit": False} + self._from_obj_alias = adapter + + def _get_select_from_alias_from_obj(self, from_obj): + """used only for legacy Query cases""" + + info = from_obj + + if "parententity" in info._annotations: + info = info._annotations["parententity"] + + if hasattr(info, "mapper"): + if not info.is_aliased_class: + raise sa_exc.ArgumentError( + "A selectable (FromClause) instance is " + "expected when the base alias is being set." 
+ ) + else: + return info._adapter + + elif isinstance(info.selectable, sql.selectable.AliasedReturnsRows): + equivs = self._all_equivs() + assert info is info.selectable + return ORMStatementAdapter( + _TraceAdaptRole.LEGACY_SELECT_FROM_ALIAS, + info.selectable, + equivalents=equivs, + ) + else: + return None + + def _mapper_zero(self): + """return the Mapper associated with the first QueryEntity.""" + return self._entities[0].mapper + + def _entity_zero(self): + """Return the 'entity' (mapper or AliasedClass) associated + with the first QueryEntity, or alternatively the 'select from' + entity if specified.""" + + for ent in self.from_clauses: + if "parententity" in ent._annotations: + return ent._annotations["parententity"] + for qent in self._entities: + if qent.entity_zero: + return qent.entity_zero + + return None + + def _only_full_mapper_zero(self, methname): + if self._entities != [self._primary_entity]: + raise sa_exc.InvalidRequestError( + "%s() can only be used against " + "a single mapped class." % methname + ) + return self._primary_entity.entity_zero + + def _only_entity_zero(self, rationale=None): + if len(self._entities) > 1: + raise sa_exc.InvalidRequestError( + rationale + or "This operation requires a Query " + "against a single mapper." + ) + return self._entity_zero() + + def _all_equivs(self): + equivs = {} + + for memoized_entities in self._memoized_entities.values(): + for ent in [ + ent + for ent in memoized_entities + if isinstance(ent, _MapperEntity) + ]: + equivs.update(ent.mapper._equivalent_columns) + + for ent in [ + ent for ent in self._entities if isinstance(ent, _MapperEntity) + ]: + equivs.update(ent.mapper._equivalent_columns) + return equivs + + def _compound_eager_statement(self): + # for eager joins present and LIMIT/OFFSET/DISTINCT, + # wrap the query inside a select, + # then append eager joins onto that + + if self.order_by: + # the default coercion for ORDER BY is now the OrderByRole, + # which adds an additional post coercion to ByOfRole in that + # elements are converted into label references. For the + # eager load / subquery wrapping case, we need to un-coerce + # the original expressions outside of the label references + # in order to have them render. + unwrapped_order_by = [ + ( + elem.element + if isinstance(elem, sql.elements._label_reference) + else elem + ) + for elem in self.order_by + ] + + order_by_col_expr = sql_util.expand_column_list_from_order_by( + self.primary_columns, unwrapped_order_by + ) + else: + order_by_col_expr = [] + unwrapped_order_by = None + + # put FOR UPDATE on the inner query, where MySQL will honor it, + # as well as if it has an OF so PostgreSQL can use it. 
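+        # e.g. (illustrative sketch; table / alias names hypothetical):
+        # a LIMITed query with a joined eager load plus
+        # with_for_update(of=User) would render roughly as
+        #
+        #   SELECT anon_1.*, address_1.* FROM
+        #       (SELECT users.id AS id, ... FROM users
+        #        LIMIT :n FOR UPDATE OF users) AS anon_1
+        #   LEFT OUTER JOIN addresses AS address_1 ON ...
+        #
+        # i.e. the lock travels with the row-limited inner subquery.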
+ inner = self._select_statement( + self.primary_columns + + [c for c in order_by_col_expr if c not in self.dedupe_columns], + self.from_clauses, + self._where_criteria, + self._having_criteria, + self.label_style, + self.order_by, + for_update=self._for_update_arg, + hints=self.select_statement._hints, + statement_hints=self.select_statement._statement_hints, + correlate=self.correlate, + correlate_except=self.correlate_except, + **self._select_args, + ) + + inner = inner.alias() + + equivs = self._all_equivs() + + self.compound_eager_adapter = ORMStatementAdapter( + _TraceAdaptRole.COMPOUND_EAGER_STATEMENT, inner, equivalents=equivs + ) + + statement = future.select( + *([inner] + self.secondary_columns) # use_labels=self.labels + ) + statement._label_style = self.label_style + + # Oracle however does not allow FOR UPDATE on the subquery, + # and the Oracle dialect ignores it, plus for PostgreSQL, MySQL + # we expect that all elements of the row are locked, so also put it + # on the outside (except in the case of PG when OF is used) + if ( + self._for_update_arg is not None + and self._for_update_arg.of is None + ): + statement._for_update_arg = self._for_update_arg + + from_clause = inner + for eager_join in self.eager_joins.values(): + # EagerLoader places a 'stop_on' attribute on the join, + # giving us a marker as to where the "splice point" of + # the join should be + from_clause = sql_util.splice_joins( + from_clause, eager_join, eager_join.stop_on + ) + + statement.select_from.non_generative(statement, from_clause) + + if unwrapped_order_by: + statement.order_by.non_generative( + statement, + *self.compound_eager_adapter.copy_and_process( + unwrapped_order_by + ), + ) + + statement.order_by.non_generative(statement, *self.eager_order_by) + return statement + + def _simple_statement(self): + statement = self._select_statement( + self.primary_columns + self.secondary_columns, + tuple(self.from_clauses) + tuple(self.eager_joins.values()), + self._where_criteria, + self._having_criteria, + self.label_style, + self.order_by, + for_update=self._for_update_arg, + hints=self.select_statement._hints, + statement_hints=self.select_statement._statement_hints, + correlate=self.correlate, + correlate_except=self.correlate_except, + **self._select_args, + ) + + if self.eager_order_by: + statement.order_by.non_generative(statement, *self.eager_order_by) + return statement + + def _select_statement( + self, + raw_columns, + from_obj, + where_criteria, + having_criteria, + label_style, + order_by, + for_update, + hints, + statement_hints, + correlate, + correlate_except, + limit_clause, + offset_clause, + fetch_clause, + fetch_clause_options, + distinct, + distinct_on, + prefixes, + suffixes, + group_by, + independent_ctes, + independent_ctes_opts, + ): + statement = Select._create_raw_select( + _raw_columns=raw_columns, + _from_obj=from_obj, + _label_style=label_style, + ) + + if where_criteria: + statement._where_criteria = where_criteria + if having_criteria: + statement._having_criteria = having_criteria + + if order_by: + statement._order_by_clauses += tuple(order_by) + + if distinct_on: + statement.distinct.non_generative(statement, *distinct_on) + elif distinct: + statement.distinct.non_generative(statement) + + if group_by: + statement._group_by_clauses += tuple(group_by) + + statement._limit_clause = limit_clause + statement._offset_clause = offset_clause + statement._fetch_clause = fetch_clause + statement._fetch_clause_options = fetch_clause_options + statement._independent_ctes = 
independent_ctes + statement._independent_ctes_opts = independent_ctes_opts + + if prefixes: + statement._prefixes = prefixes + + if suffixes: + statement._suffixes = suffixes + + statement._for_update_arg = for_update + + if hints: + statement._hints = hints + if statement_hints: + statement._statement_hints = statement_hints + + if correlate: + statement.correlate.non_generative(statement, *correlate) + + if correlate_except is not None: + statement.correlate_except.non_generative( + statement, *correlate_except + ) + + return statement + + def _adapt_polymorphic_element(self, element): + if "parententity" in element._annotations: + search = element._annotations["parententity"] + alias = self._polymorphic_adapters.get(search, None) + if alias: + return alias.adapt_clause(element) + + if isinstance(element, expression.FromClause): + search = element + elif hasattr(element, "table"): + search = element.table + else: + return None + + alias = self._polymorphic_adapters.get(search, None) + if alias: + return alias.adapt_clause(element) + + def _adapt_col_list(self, cols, current_adapter): + if current_adapter: + return [current_adapter(o, True) for o in cols] + else: + return cols + + def _get_current_adapter(self): + adapters = [] + + if self._from_obj_alias: + # used for legacy going forward for query set_ops, e.g. + # union(), union_all(), etc. + # 1.4 and previously, also used for from_self(), + # select_entity_from() + # + # for the "from obj" alias, apply extra rule to the + # 'ORM only' check, if this query were generated from a + # subquery of itself, i.e. _from_selectable(), apply adaption + # to all SQL constructs. + adapters.append( + ( + True, + self._from_obj_alias.replace, + ) + ) + + # this was *hopefully* the only adapter we were going to need + # going forward...however, we unfortunately need _from_obj_alias + # for query.union(), which we can't drop + if self._polymorphic_adapters: + adapters.append((False, self._adapt_polymorphic_element)) + + if not adapters: + return None + + def _adapt_clause(clause, as_filter): + # do we adapt all expression elements or only those + # tagged as 'ORM' constructs ? + + def replace(elem): + is_orm_adapt = ( + "_orm_adapt" in elem._annotations + or "parententity" in elem._annotations + ) + for always_adapt, adapter in adapters: + if is_orm_adapt or always_adapt: + e = adapter(elem) + if e is not None: + return e + + return visitors.replacement_traverse(clause, {}, replace) + + return _adapt_clause + + def _join(self, args, entities_collection): + for right, onclause, from_, flags in args: + isouter = flags["isouter"] + full = flags["full"] + + right = inspect(right) + if onclause is not None: + onclause = inspect(onclause) + + if isinstance(right, interfaces.PropComparator): + if onclause is not None: + raise sa_exc.InvalidRequestError( + "No 'on clause' argument may be passed when joining " + "to a relationship path as a target" + ) + + onclause = right + right = None + elif "parententity" in right._annotations: + right = right._annotations["parententity"] + + if onclause is None: + if not right.is_selectable and not hasattr(right, "mapper"): + raise sa_exc.ArgumentError( + "Expected mapped entity or " + "selectable/table as join target" + ) + + of_type = None + + if isinstance(onclause, interfaces.PropComparator): + # descriptor/property given (or determined); this tells us + # explicitly what the expected "left" side of the join is. 
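+            # e.g. (illustrative; User / Address are hypothetical mapped
+            # classes):
+            #
+            #   stmt.join(User.addresses)
+            #       -> onclause is the User.addresses attribute; "left"
+            #          is User, "right" defaults to Address
+            #
+            #   stmt.join(User.addresses.of_type(some_alias))
+            #       -> same, except _of_type is set and "right" becomes
+            #          the some_alias entity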
+ + of_type = getattr(onclause, "_of_type", None) + + if right is None: + if of_type: + right = of_type + else: + right = onclause.property + + try: + right = right.entity + except AttributeError as err: + raise sa_exc.ArgumentError( + "Join target %s does not refer to a " + "mapped entity" % right + ) from err + + left = onclause._parententity + + prop = onclause.property + if not isinstance(onclause, attributes.QueryableAttribute): + onclause = prop + + # check for this path already present. don't render in that + # case. + if (left, right, prop.key) in self._already_joined_edges: + continue + + if from_ is not None: + if ( + from_ is not left + and from_._annotations.get("parententity", None) + is not left + ): + raise sa_exc.InvalidRequestError( + "explicit from clause %s does not match left side " + "of relationship attribute %s" + % ( + from_._annotations.get("parententity", from_), + onclause, + ) + ) + elif from_ is not None: + prop = None + left = from_ + else: + # no descriptor/property given; we will need to figure out + # what the effective "left" side is + prop = left = None + + # figure out the final "left" and "right" sides and create an + # ORMJoin to add to our _from_obj tuple + self._join_left_to_right( + entities_collection, + left, + right, + onclause, + prop, + isouter, + full, + ) + + def _join_left_to_right( + self, + entities_collection, + left, + right, + onclause, + prop, + outerjoin, + full, + ): + """given raw "left", "right", "onclause" parameters consumed from + a particular key within _join(), add a real ORMJoin object to + our _from_obj list (or augment an existing one) + + """ + + if left is None: + # left not given (e.g. no relationship object/name specified) + # figure out the best "left" side based on our existing froms / + # entities + assert prop is None + ( + left, + replace_from_obj_index, + use_entity_index, + ) = self._join_determine_implicit_left_side( + entities_collection, left, right, onclause + ) + else: + # left is given via a relationship/name, or as explicit left side. + # Determine where in our + # "froms" list it should be spliced/appended as well as what + # existing entity it corresponds to. + ( + replace_from_obj_index, + use_entity_index, + ) = self._join_place_explicit_left_side(entities_collection, left) + + if left is right: + raise sa_exc.InvalidRequestError( + "Can't construct a join from %s to %s, they " + "are the same entity" % (left, right) + ) + + # the right side as given often needs to be adapted. additionally + # a lot of things can be wrong with it. 
handle all that and + # get back the new effective "right" side + r_info, right, onclause = self._join_check_and_adapt_right_side( + left, right, onclause, prop + ) + + if not r_info.is_selectable: + extra_criteria = self._get_extra_criteria(r_info) + else: + extra_criteria = () + + if replace_from_obj_index is not None: + # splice into an existing element in the + # self._from_obj list + left_clause = self.from_clauses[replace_from_obj_index] + + self.from_clauses = ( + self.from_clauses[:replace_from_obj_index] + + [ + _ORMJoin( + left_clause, + right, + onclause, + isouter=outerjoin, + full=full, + _extra_criteria=extra_criteria, + ) + ] + + self.from_clauses[replace_from_obj_index + 1 :] + ) + else: + # add a new element to the self._from_obj list + if use_entity_index is not None: + # make use of _MapperEntity selectable, which is usually + # entity_zero.selectable, but if with_polymorphic() were used + # might be distinct + assert isinstance( + entities_collection[use_entity_index], _MapperEntity + ) + left_clause = entities_collection[use_entity_index].selectable + else: + left_clause = left + + self.from_clauses = self.from_clauses + [ + _ORMJoin( + left_clause, + r_info, + onclause, + isouter=outerjoin, + full=full, + _extra_criteria=extra_criteria, + ) + ] + + def _join_determine_implicit_left_side( + self, entities_collection, left, right, onclause + ): + """When join conditions don't express the left side explicitly, + determine if an existing FROM or entity in this query + can serve as the left hand side. + + """ + + # when we are here, it means join() was called without an ORM- + # specific way of telling us what the "left" side is, e.g.: + # + # join(RightEntity) + # + # or + # + # join(RightEntity, RightEntity.foo == LeftEntity.bar) + # + + r_info = inspect(right) + + replace_from_obj_index = use_entity_index = None + + if self.from_clauses: + # we have a list of FROMs already. So by definition this + # join has to connect to one of those FROMs. + + indexes = sql_util.find_left_clause_to_join_from( + self.from_clauses, r_info.selectable, onclause + ) + + if len(indexes) == 1: + replace_from_obj_index = indexes[0] + left = self.from_clauses[replace_from_obj_index] + elif len(indexes) > 1: + raise sa_exc.InvalidRequestError( + "Can't determine which FROM clause to join " + "from, there are multiple FROMS which can " + "join to this entity. Please use the .select_from() " + "method to establish an explicit left side, as well as " + "providing an explicit ON clause if not present already " + "to help resolve the ambiguity." + ) + else: + raise sa_exc.InvalidRequestError( + "Don't know how to join to %r. " + "Please use the .select_from() " + "method to establish an explicit left side, as well as " + "providing an explicit ON clause if not present already " + "to help resolve the ambiguity." % (right,) + ) + + elif entities_collection: + # we have no explicit FROMs, so the implicit left has to + # come from our list of entities. 
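+            # e.g. (illustrative): for select(User, Order).join(Address),
+            # both User and Order are candidate left sides; the one whose
+            # selectable has a foreign key path to Address is chosen below,
+            # while multiple viable candidates raise the ambiguity error.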
+ + potential = {} + for entity_index, ent in enumerate(entities_collection): + entity = ent.entity_zero_or_selectable + if entity is None: + continue + ent_info = inspect(entity) + if ent_info is r_info: # left and right are the same, skip + continue + + # by using a dictionary with the selectables as keys this + # de-duplicates those selectables as occurs when the query is + # against a series of columns from the same selectable + if isinstance(ent, _MapperEntity): + potential[ent.selectable] = (entity_index, entity) + else: + potential[ent_info.selectable] = (None, entity) + + all_clauses = list(potential.keys()) + indexes = sql_util.find_left_clause_to_join_from( + all_clauses, r_info.selectable, onclause + ) + + if len(indexes) == 1: + use_entity_index, left = potential[all_clauses[indexes[0]]] + elif len(indexes) > 1: + raise sa_exc.InvalidRequestError( + "Can't determine which FROM clause to join " + "from, there are multiple FROMS which can " + "join to this entity. Please use the .select_from() " + "method to establish an explicit left side, as well as " + "providing an explicit ON clause if not present already " + "to help resolve the ambiguity." + ) + else: + raise sa_exc.InvalidRequestError( + "Don't know how to join to %r. " + "Please use the .select_from() " + "method to establish an explicit left side, as well as " + "providing an explicit ON clause if not present already " + "to help resolve the ambiguity." % (right,) + ) + else: + raise sa_exc.InvalidRequestError( + "No entities to join from; please use " + "select_from() to establish the left " + "entity/selectable of this join" + ) + + return left, replace_from_obj_index, use_entity_index + + def _join_place_explicit_left_side(self, entities_collection, left): + """When join conditions express a left side explicitly, determine + where in our existing list of FROM clauses we should join towards, + or if we need to make a new join, and if so is it from one of our + existing entities. + + """ + + # when we are here, it means join() was called with an indicator + # as to an exact left side, which means a path to a + # Relationship was given, e.g.: + # + # join(RightEntity, LeftEntity.right) + # + # or + # + # join(LeftEntity.right) + # + # as well as string forms: + # + # join(RightEntity, "right") + # + # etc. + # + + replace_from_obj_index = use_entity_index = None + + l_info = inspect(left) + if self.from_clauses: + indexes = sql_util.find_left_clause_that_matches_given( + self.from_clauses, l_info.selectable + ) + + if len(indexes) > 1: + raise sa_exc.InvalidRequestError( + "Can't identify which entity in which to assign the " + "left side of this join. Please use a more specific " + "ON clause." + ) + + # have an index, means the left side is already present in + # an existing FROM in the self._from_obj tuple + if indexes: + replace_from_obj_index = indexes[0] + + # no index, means we need to add a new element to the + # self._from_obj tuple + + # no from element present, so we will have to add to the + # self._from_obj tuple. Determine if this left side matches up + # with existing mapper entities, in which case we want to apply the + # aliasing / adaptation rules present on that entity if any + if ( + replace_from_obj_index is None + and entities_collection + and hasattr(l_info, "mapper") + ): + for idx, ent in enumerate(entities_collection): + # TODO: should we be checking for multiple mapper entities + # matching? 
+ if isinstance(ent, _MapperEntity) and ent.corresponds_to(left): + use_entity_index = idx + break + + return replace_from_obj_index, use_entity_index + + def _join_check_and_adapt_right_side(self, left, right, onclause, prop): + """transform the "right" side of the join as well as the onclause + according to polymorphic mapping translations, aliasing on the query + or on the join, special cases where the right and left side have + overlapping tables. + + """ + + l_info = inspect(left) + r_info = inspect(right) + + overlap = False + + right_mapper = getattr(r_info, "mapper", None) + # if the target is a joined inheritance mapping, + # be more liberal about auto-aliasing. + if right_mapper and ( + right_mapper.with_polymorphic + or isinstance(right_mapper.persist_selectable, expression.Join) + ): + for from_obj in self.from_clauses or [l_info.selectable]: + if sql_util.selectables_overlap( + l_info.selectable, from_obj + ) and sql_util.selectables_overlap( + from_obj, r_info.selectable + ): + overlap = True + break + + if overlap and l_info.selectable is r_info.selectable: + raise sa_exc.InvalidRequestError( + "Can't join table/selectable '%s' to itself" + % l_info.selectable + ) + + right_mapper, right_selectable, right_is_aliased = ( + getattr(r_info, "mapper", None), + r_info.selectable, + getattr(r_info, "is_aliased_class", False), + ) + + if ( + right_mapper + and prop + and not right_mapper.common_parent(prop.mapper) + ): + raise sa_exc.InvalidRequestError( + "Join target %s does not correspond to " + "the right side of join condition %s" % (right, onclause) + ) + + # _join_entities is used as a hint for single-table inheritance + # purposes at the moment + if hasattr(r_info, "mapper"): + self._join_entities += (r_info,) + + need_adapter = False + + # test for joining to an unmapped selectable as the target + if r_info.is_clause_element: + if prop: + right_mapper = prop.mapper + + if right_selectable._is_lateral: + # orm_only is disabled to suit the case where we have to + # adapt an explicit correlate(Entity) - the select() loses + # the ORM-ness in this case right now, ideally it would not + current_adapter = self._get_current_adapter() + if current_adapter is not None: + # TODO: we had orm_only=False here before, removing + # it didn't break things. if we identify the rationale, + # may need to apply "_orm_only" annotation here. + right = current_adapter(right, True) + + elif prop: + # joining to selectable with a mapper property given + # as the ON clause + + if not right_selectable.is_derived_from( + right_mapper.persist_selectable + ): + raise sa_exc.InvalidRequestError( + "Selectable '%s' is not derived from '%s'" + % ( + right_selectable.description, + right_mapper.persist_selectable.description, + ) + ) + + # if the destination selectable is a plain select(), + # turn it into an alias(). + if isinstance(right_selectable, expression.SelectBase): + right_selectable = coercions.expect( + roles.FromClauseRole, right_selectable + ) + need_adapter = True + + # make the right hand side target into an ORM entity + right = AliasedClass(right_mapper, right_selectable) + + util.warn_deprecated( + "An alias is being generated automatically against " + "joined entity %s for raw clauseelement, which is " + "deprecated and will be removed in a later release. " + "Use the aliased() " + "construct explicitly, see the linked example." 
+                % right_mapper,
+                "1.4",
+                code="xaj1",
+            )
+
+        # test for overlap:
+        # orm/inheritance/relationships.py
+        # SelfReferentialM2MTest
+        aliased_entity = right_mapper and not right_is_aliased and overlap
+
+        if not need_adapter and aliased_entity:
+            # there are a few places in the ORM where automatic aliasing
+            # is still desirable, and can't be automatic with a Core
+            # only approach. For illustrations of "overlaps" see
+            # test/orm/inheritance/test_relationships.py. There are also
+            # general overlap cases with many-to-many tables where automatic
+            # aliasing is desirable.
+            right = AliasedClass(right, flat=True)
+            need_adapter = True
+
+            util.warn(
+                "An alias is being generated automatically against "
+                "joined entity %s due to overlapping tables. This is a "
+                "legacy pattern which may be "
+                "deprecated in a later release. Use the "
+                "aliased(<entity>, flat=True) "
+                "construct explicitly, see the linked example." % right_mapper,
+                code="xaj2",
+            )
+
+        if need_adapter:
+            # if need_adapter is True, we are in a deprecated case and
+            # a warning has been emitted.
+            assert right_mapper
+
+            adapter = ORMAdapter(
+                _TraceAdaptRole.DEPRECATED_JOIN_ADAPT_RIGHT_SIDE,
+                inspect(right),
+                equivalents=right_mapper._equivalent_columns,
+            )
+
+            # if an alias() on the right side was generated,
+            # which is intended to wrap the right side in a subquery,
+            # ensure that columns retrieved from this target in the result
+            # set are also adapted.
+            self._mapper_loads_polymorphically_with(right_mapper, adapter)
+        elif (
+            not r_info.is_clause_element
+            and not right_is_aliased
+            and right_mapper._has_aliased_polymorphic_fromclause
+        ):
+            # for the case where the target mapper has a with_polymorphic
+            # set up, ensure an adapter is set up for criteria that works
+            # against this mapper. Previously, this logic used to
+            # use the "create_aliases or aliased_entity" case to generate
+            # an aliased() object, but this creates an alias that isn't
+            # strictly necessary.
+ # see test/orm/test_core_compilation.py + # ::RelNaturalAliasedJoinsTest::test_straight + # and similar + self._mapper_loads_polymorphically_with( + right_mapper, + ORMAdapter( + _TraceAdaptRole.WITH_POLYMORPHIC_ADAPTER_RIGHT_JOIN, + right_mapper, + selectable=right_mapper.selectable, + equivalents=right_mapper._equivalent_columns, + ), + ) + # if the onclause is a ClauseElement, adapt it with any + # adapters that are in place right now + if isinstance(onclause, expression.ClauseElement): + current_adapter = self._get_current_adapter() + if current_adapter: + onclause = current_adapter(onclause, True) + + # if joining on a MapperProperty path, + # track the path to prevent redundant joins + if prop: + self._already_joined_edges += ((left, right, prop.key),) + + return inspect(right), right, onclause + + @property + def _select_args(self): + return { + "limit_clause": self.select_statement._limit_clause, + "offset_clause": self.select_statement._offset_clause, + "distinct": self.distinct, + "distinct_on": self.distinct_on, + "prefixes": self.select_statement._prefixes, + "suffixes": self.select_statement._suffixes, + "group_by": self.group_by or None, + "fetch_clause": self.select_statement._fetch_clause, + "fetch_clause_options": ( + self.select_statement._fetch_clause_options + ), + "independent_ctes": self.select_statement._independent_ctes, + "independent_ctes_opts": ( + self.select_statement._independent_ctes_opts + ), + } + + @property + def _should_nest_selectable(self): + kwargs = self._select_args + return ( + kwargs.get("limit_clause") is not None + or kwargs.get("offset_clause") is not None + or kwargs.get("distinct", False) + or kwargs.get("distinct_on", ()) + or kwargs.get("group_by", False) + ) + + def _get_extra_criteria(self, ext_info): + if ( + "additional_entity_criteria", + ext_info.mapper, + ) in self.global_attributes: + return tuple( + ae._resolve_where_criteria(ext_info) + for ae in self.global_attributes[ + ("additional_entity_criteria", ext_info.mapper) + ] + if (ae.include_aliases or ae.entity is ext_info) + and ae._should_include(self) + ) + else: + return () + + def _adjust_for_extra_criteria(self): + """Apply extra criteria filtering. + + For all distinct single-table-inheritance mappers represented in + the columns clause of this query, as well as the "select from entity", + add criterion to the WHERE + clause of the given QueryContext such that only the appropriate + subtypes are selected from the total results. + + Additionally, add WHERE criteria originating from LoaderCriteriaOptions + associated with the global context. 
+ + """ + + for fromclause in self.from_clauses: + ext_info = fromclause._annotations.get("parententity", None) + + if ( + ext_info + and ( + ext_info.mapper._single_table_criterion is not None + or ("additional_entity_criteria", ext_info.mapper) + in self.global_attributes + ) + and ext_info not in self.extra_criteria_entities + ): + self.extra_criteria_entities[ext_info] = ( + ext_info, + ext_info._adapter if ext_info.is_aliased_class else None, + ) + + search = set(self.extra_criteria_entities.values()) + + for ext_info, adapter in search: + if ext_info in self._join_entities: + continue + + single_crit = ext_info.mapper._single_table_criterion + + if self.compile_options._for_refresh_state: + additional_entity_criteria = [] + else: + additional_entity_criteria = self._get_extra_criteria(ext_info) + + if single_crit is not None: + additional_entity_criteria += (single_crit,) + + current_adapter = self._get_current_adapter() + for crit in additional_entity_criteria: + if adapter: + crit = adapter.traverse(crit) + + if current_adapter: + crit = sql_util._deep_annotate(crit, {"_orm_adapt": True}) + crit = current_adapter(crit, False) + self._where_criteria += (crit,) + + +def _column_descriptions( + query_or_select_stmt: Union[Query, Select, FromStatement], + compile_state: Optional[ORMSelectCompileState] = None, + legacy: bool = False, +) -> List[ORMColumnDescription]: + if compile_state is None: + compile_state = ORMSelectCompileState._create_entities_collection( + query_or_select_stmt, legacy=legacy + ) + ctx = compile_state + d = [ + { + "name": ent._label_name, + "type": ent.type, + "aliased": getattr(insp_ent, "is_aliased_class", False), + "expr": ent.expr, + "entity": ( + getattr(insp_ent, "entity", None) + if ent.entity_zero is not None + and not insp_ent.is_clause_element + else None + ), + } + for ent, insp_ent in [ + (_ent, _ent.entity_zero) for _ent in ctx._entities + ] + ] + return d + + +def _legacy_filter_by_entity_zero( + query_or_augmented_select: Union[Query[Any], Select[Any]] +) -> Optional[_InternalEntityType[Any]]: + self = query_or_augmented_select + if self._setup_joins: + _last_joined_entity = self._last_joined_entity + if _last_joined_entity is not None: + return _last_joined_entity + + if self._from_obj and "parententity" in self._from_obj[0]._annotations: + return self._from_obj[0]._annotations["parententity"] + + return _entity_from_pre_ent_zero(self) + + +def _entity_from_pre_ent_zero( + query_or_augmented_select: Union[Query[Any], Select[Any]] +) -> Optional[_InternalEntityType[Any]]: + self = query_or_augmented_select + if not self._raw_columns: + return None + + ent = self._raw_columns[0] + + if "parententity" in ent._annotations: + return ent._annotations["parententity"] + elif isinstance(ent, ORMColumnsClauseRole): + return ent.entity + elif "bundle" in ent._annotations: + return ent._annotations["bundle"] + else: + return ent + + +def _determine_last_joined_entity( + setup_joins: Tuple[_SetupJoinsElement, ...], + entity_zero: Optional[_InternalEntityType[Any]] = None, +) -> Optional[Union[_InternalEntityType[Any], _JoinTargetElement]]: + if not setup_joins: + return None + + (target, onclause, from_, flags) = setup_joins[-1] + + if isinstance( + target, + attributes.QueryableAttribute, + ): + return target.entity + else: + return target + + +class _QueryEntity: + """represent an entity column returned within a Query result.""" + + __slots__ = () + + supports_single_entity: bool + + _non_hashable_value = False + _null_column_type = False + use_id_for_hash 
= False
+
+    _label_name: Optional[str]
+    type: Union[Type[Any], TypeEngine[Any]]
+    expr: Union[_InternalEntityType, ColumnElement[Any]]
+    entity_zero: Optional[_InternalEntityType]
+
+    def setup_compile_state(self, compile_state: ORMCompileState) -> None:
+        raise NotImplementedError()
+
+    def setup_dml_returning_compile_state(
+        self,
+        compile_state: ORMCompileState,
+        adapter: DMLReturningColFilter,
+    ) -> None:
+        raise NotImplementedError()
+
+    def row_processor(self, context, result):
+        raise NotImplementedError()
+
+    @classmethod
+    def to_compile_state(
+        cls, compile_state, entities, entities_collection, is_current_entities
+    ):
+        for idx, entity in enumerate(entities):
+            if entity._is_lambda_element:
+                if entity._is_sequence:
+                    cls.to_compile_state(
+                        compile_state,
+                        entity._resolved,
+                        entities_collection,
+                        is_current_entities,
+                    )
+                    continue
+                else:
+                    entity = entity._resolved
+
+            if entity.is_clause_element:
+                if entity.is_selectable:
+                    if "parententity" in entity._annotations:
+                        _MapperEntity(
+                            compile_state,
+                            entity,
+                            entities_collection,
+                            is_current_entities,
+                        )
+                    else:
+                        _ColumnEntity._for_columns(
+                            compile_state,
+                            entity._select_iterable,
+                            entities_collection,
+                            idx,
+                            is_current_entities,
+                        )
+                else:
+                    if entity._annotations.get("bundle", False):
+                        _BundleEntity(
+                            compile_state,
+                            entity,
+                            entities_collection,
+                            is_current_entities,
+                        )
+                    elif entity._is_clause_list:
+                        # this is legacy only - test_composites.py
+                        # test_query_cols_legacy
+                        _ColumnEntity._for_columns(
+                            compile_state,
+                            entity._select_iterable,
+                            entities_collection,
+                            idx,
+                            is_current_entities,
+                        )
+                    else:
+                        _ColumnEntity._for_columns(
+                            compile_state,
+                            [entity],
+                            entities_collection,
+                            idx,
+                            is_current_entities,
+                        )
+            elif entity.is_bundle:
+                _BundleEntity(
+                    compile_state,
+                    entity,
+                    entities_collection,
+                    is_current_entities,
+                )
+
+        return entities_collection
+
+
+class _MapperEntity(_QueryEntity):
+    """mapper/class/AliasedClass entity"""
+
+    __slots__ = (
+        "expr",
+        "mapper",
+        "entity_zero",
+        "is_aliased_class",
+        "path",
+        "_extra_entities",
+        "_label_name",
+        "_with_polymorphic_mappers",
+        "selectable",
+        "_polymorphic_discriminator",
+    )
+
+    expr: _InternalEntityType
+    mapper: Mapper[Any]
+    entity_zero: _InternalEntityType
+    is_aliased_class: bool
+    path: PathRegistry
+    _label_name: str
+
+    def __init__(
+        self, compile_state, entity, entities_collection, is_current_entities
+    ):
+        entities_collection.append(self)
+        if is_current_entities:
+            if compile_state._primary_entity is None:
+                compile_state._primary_entity = self
+            compile_state._has_mapper_entities = True
+            compile_state._has_orm_entities = True
+
+        entity = entity._annotations["parententity"]
+        entity._post_inspect
+        ext_info = self.entity_zero = entity
+        entity = ext_info.entity
+
+        self.expr = entity
+        self.mapper = mapper = ext_info.mapper
+
+        self._extra_entities = (self.expr,)
+
+        if ext_info.is_aliased_class:
+            self._label_name = ext_info.name
+        else:
+            self._label_name = mapper.class_.__name__
+
+        self.is_aliased_class = ext_info.is_aliased_class
+        self.path = ext_info._path_registry
+
+        self.selectable = ext_info.selectable
+        self._with_polymorphic_mappers = ext_info.with_polymorphic_mappers
+        self._polymorphic_discriminator = ext_info.polymorphic_on
+
+        if mapper._should_select_with_poly_adapter:
+            compile_state._create_with_polymorphic_adapter(
+                ext_info, self.selectable
+            )
+
+    supports_single_entity = True
+
+    _non_hashable_value = True
+    use_id_for_hash = True
+
+    @property
+    def type(self):
+        return self.mapper.class_
+
+    @property
+    def
entity_zero_or_selectable(self): + return self.entity_zero + + def corresponds_to(self, entity): + return _entity_corresponds_to(self.entity_zero, entity) + + def _get_entity_clauses(self, compile_state): + adapter = None + + if not self.is_aliased_class: + if compile_state._polymorphic_adapters: + adapter = compile_state._polymorphic_adapters.get( + self.mapper, None + ) + else: + adapter = self.entity_zero._adapter + + if adapter: + if compile_state._from_obj_alias: + ret = adapter.wrap(compile_state._from_obj_alias) + else: + ret = adapter + else: + ret = compile_state._from_obj_alias + + return ret + + def row_processor(self, context, result): + compile_state = context.compile_state + adapter = self._get_entity_clauses(compile_state) + + if compile_state.compound_eager_adapter and adapter: + adapter = adapter.wrap(compile_state.compound_eager_adapter) + elif not adapter: + adapter = compile_state.compound_eager_adapter + + if compile_state._primary_entity is self: + only_load_props = compile_state.compile_options._only_load_props + refresh_state = context.refresh_state + else: + only_load_props = refresh_state = None + + _instance = loading._instance_processor( + self, + self.mapper, + context, + result, + self.path, + adapter, + only_load_props=only_load_props, + refresh_state=refresh_state, + polymorphic_discriminator=self._polymorphic_discriminator, + ) + + return _instance, self._label_name, self._extra_entities + + def setup_dml_returning_compile_state( + self, + compile_state: ORMCompileState, + adapter: DMLReturningColFilter, + ) -> None: + loading._setup_entity_query( + compile_state, + self.mapper, + self, + self.path, + adapter, + compile_state.primary_columns, + with_polymorphic=self._with_polymorphic_mappers, + only_load_props=compile_state.compile_options._only_load_props, + polymorphic_discriminator=self._polymorphic_discriminator, + ) + + def setup_compile_state(self, compile_state): + adapter = self._get_entity_clauses(compile_state) + + single_table_crit = self.mapper._single_table_criterion + if ( + single_table_crit is not None + or ("additional_entity_criteria", self.mapper) + in compile_state.global_attributes + ): + ext_info = self.entity_zero + compile_state.extra_criteria_entities[ext_info] = ( + ext_info, + ext_info._adapter if ext_info.is_aliased_class else None, + ) + + loading._setup_entity_query( + compile_state, + self.mapper, + self, + self.path, + adapter, + compile_state.primary_columns, + with_polymorphic=self._with_polymorphic_mappers, + only_load_props=compile_state.compile_options._only_load_props, + polymorphic_discriminator=self._polymorphic_discriminator, + ) + compile_state._fallback_from_clauses.append(self.selectable) + + +class _BundleEntity(_QueryEntity): + _extra_entities = () + + __slots__ = ( + "bundle", + "expr", + "type", + "_label_name", + "_entities", + "supports_single_entity", + ) + + _entities: List[_QueryEntity] + bundle: Bundle + type: Type[Any] + _label_name: str + supports_single_entity: bool + expr: Bundle + + def __init__( + self, + compile_state, + expr, + entities_collection, + is_current_entities, + setup_entities=True, + parent_bundle=None, + ): + compile_state._has_orm_entities = True + + expr = expr._annotations["bundle"] + if parent_bundle: + parent_bundle._entities.append(self) + else: + entities_collection.append(self) + + if isinstance( + expr, (attributes.QueryableAttribute, interfaces.PropComparator) + ): + bundle = expr.__clause_element__() + else: + bundle = expr + + self.bundle = self.expr = bundle + self.type 
= type(bundle) + self._label_name = bundle.name + self._entities = [] + + if setup_entities: + for expr in bundle.exprs: + if "bundle" in expr._annotations: + _BundleEntity( + compile_state, + expr, + entities_collection, + is_current_entities, + parent_bundle=self, + ) + elif isinstance(expr, Bundle): + _BundleEntity( + compile_state, + expr, + entities_collection, + is_current_entities, + parent_bundle=self, + ) + else: + _ORMColumnEntity._for_columns( + compile_state, + [expr], + entities_collection, + None, + is_current_entities, + parent_bundle=self, + ) + + self.supports_single_entity = self.bundle.single_entity + + @property + def mapper(self): + ezero = self.entity_zero + if ezero is not None: + return ezero.mapper + else: + return None + + @property + def entity_zero(self): + for ent in self._entities: + ezero = ent.entity_zero + if ezero is not None: + return ezero + else: + return None + + def corresponds_to(self, entity): + # TODO: we might be able to implement this but for now + # we are working around it + return False + + @property + def entity_zero_or_selectable(self): + for ent in self._entities: + ezero = ent.entity_zero_or_selectable + if ezero is not None: + return ezero + else: + return None + + def setup_compile_state(self, compile_state): + for ent in self._entities: + ent.setup_compile_state(compile_state) + + def setup_dml_returning_compile_state( + self, + compile_state: ORMCompileState, + adapter: DMLReturningColFilter, + ) -> None: + return self.setup_compile_state(compile_state) + + def row_processor(self, context, result): + procs, labels, extra = zip( + *[ent.row_processor(context, result) for ent in self._entities] + ) + + proc = self.bundle.create_row_processor(context.query, procs, labels) + + return proc, self._label_name, self._extra_entities + + +class _ColumnEntity(_QueryEntity): + __slots__ = ( + "_fetch_column", + "_row_processor", + "raw_column_index", + "translate_raw_column", + ) + + @classmethod + def _for_columns( + cls, + compile_state, + columns, + entities_collection, + raw_column_index, + is_current_entities, + parent_bundle=None, + ): + for column in columns: + annotations = column._annotations + if "parententity" in annotations: + _entity = annotations["parententity"] + else: + _entity = sql_util.extract_first_column_annotation( + column, "parententity" + ) + + if _entity: + if "identity_token" in column._annotations: + _IdentityTokenEntity( + compile_state, + column, + entities_collection, + _entity, + raw_column_index, + is_current_entities, + parent_bundle=parent_bundle, + ) + else: + _ORMColumnEntity( + compile_state, + column, + entities_collection, + _entity, + raw_column_index, + is_current_entities, + parent_bundle=parent_bundle, + ) + else: + _RawColumnEntity( + compile_state, + column, + entities_collection, + raw_column_index, + is_current_entities, + parent_bundle=parent_bundle, + ) + + @property + def type(self): + return self.column.type + + @property + def _non_hashable_value(self): + return not self.column.type.hashable + + @property + def _null_column_type(self): + return self.column.type._isnull + + def row_processor(self, context, result): + compile_state = context.compile_state + + # the resulting callable is entirely cacheable so just return + # it if we already made one + if self._row_processor is not None: + getter, label_name, extra_entities = self._row_processor + if self.translate_raw_column: + extra_entities += ( + context.query._raw_columns[self.raw_column_index], + ) + + return getter, label_name, 
extra_entities + + # retrieve the column that would have been set up in + # setup_compile_state, to avoid doing redundant work + if self._fetch_column is not None: + column = self._fetch_column + else: + # fetch_column will be None when we are doing a from_statement + # and setup_compile_state may not have been called. + column = self.column + + # previously, the RawColumnEntity didn't look for from_obj_alias + # however I can't think of a case where we would be here and + # we'd want to ignore it if this is the from_statement use case. + # it's not really a use case to have raw columns + from_statement + if compile_state._from_obj_alias: + column = compile_state._from_obj_alias.columns[column] + + if column._annotations: + # annotated columns perform more slowly in compiler and + # result due to the __eq__() method, so use deannotated + column = column._deannotate() + + if compile_state.compound_eager_adapter: + column = compile_state.compound_eager_adapter.columns[column] + + getter = result._getter(column) + ret = getter, self._label_name, self._extra_entities + self._row_processor = ret + + if self.translate_raw_column: + extra_entities = self._extra_entities + ( + context.query._raw_columns[self.raw_column_index], + ) + return getter, self._label_name, extra_entities + else: + return ret + + +class _RawColumnEntity(_ColumnEntity): + entity_zero = None + mapper = None + supports_single_entity = False + + __slots__ = ( + "expr", + "column", + "_label_name", + "entity_zero_or_selectable", + "_extra_entities", + ) + + def __init__( + self, + compile_state, + column, + entities_collection, + raw_column_index, + is_current_entities, + parent_bundle=None, + ): + self.expr = column + self.raw_column_index = raw_column_index + self.translate_raw_column = raw_column_index is not None + + if column._is_star: + compile_state.compile_options += {"_is_star": True} + + if not is_current_entities or column._is_text_clause: + self._label_name = None + else: + if parent_bundle: + self._label_name = column._proxy_key + else: + self._label_name = compile_state._label_convention(column) + + if parent_bundle: + parent_bundle._entities.append(self) + else: + entities_collection.append(self) + + self.column = column + self.entity_zero_or_selectable = ( + self.column._from_objects[0] if self.column._from_objects else None + ) + self._extra_entities = (self.expr, self.column) + self._fetch_column = self._row_processor = None + + def corresponds_to(self, entity): + return False + + def setup_dml_returning_compile_state( + self, + compile_state: ORMCompileState, + adapter: DMLReturningColFilter, + ) -> None: + return self.setup_compile_state(compile_state) + + def setup_compile_state(self, compile_state): + current_adapter = compile_state._get_current_adapter() + if current_adapter: + column = current_adapter(self.column, False) + if column is None: + return + else: + column = self.column + + if column._annotations: + # annotated columns perform more slowly in compiler and + # result due to the __eq__() method, so use deannotated + column = column._deannotate() + + compile_state.dedupe_columns.add(column) + compile_state.primary_columns.append(column) + self._fetch_column = column + + +class _ORMColumnEntity(_ColumnEntity): + """Column/expression based entity.""" + + supports_single_entity = False + + __slots__ = ( + "expr", + "mapper", + "column", + "_label_name", + "entity_zero_or_selectable", + "entity_zero", + "_extra_entities", + ) + + def __init__( + self, + compile_state, + column, + entities_collection, 
+ parententity, + raw_column_index, + is_current_entities, + parent_bundle=None, + ): + annotations = column._annotations + + _entity = parententity + + # an AliasedClass won't have proxy_key in the annotations for + # a column if it was acquired using the class' adapter directly, + # such as using AliasedInsp._adapt_element(). this occurs + # within internal loaders. + + orm_key = annotations.get("proxy_key", None) + proxy_owner = annotations.get("proxy_owner", _entity) + if orm_key: + self.expr = getattr(proxy_owner.entity, orm_key) + self.translate_raw_column = False + else: + # if orm_key is not present, that means this is an ad-hoc + # SQL ColumnElement, like a CASE() or other expression. + # include this column position from the invoked statement + # in the ORM-level ResultSetMetaData on each execute, so that + # it can be targeted by identity after caching + self.expr = column + self.translate_raw_column = raw_column_index is not None + + self.raw_column_index = raw_column_index + + if is_current_entities: + if parent_bundle: + self._label_name = orm_key if orm_key else column._proxy_key + else: + self._label_name = compile_state._label_convention( + column, col_name=orm_key + ) + else: + self._label_name = None + + _entity._post_inspect + self.entity_zero = self.entity_zero_or_selectable = ezero = _entity + self.mapper = mapper = _entity.mapper + + if parent_bundle: + parent_bundle._entities.append(self) + else: + entities_collection.append(self) + + compile_state._has_orm_entities = True + + self.column = column + + self._fetch_column = self._row_processor = None + + self._extra_entities = (self.expr, self.column) + + if mapper._should_select_with_poly_adapter: + compile_state._create_with_polymorphic_adapter( + ezero, ezero.selectable + ) + + def corresponds_to(self, entity): + if _is_aliased_class(entity): + # TODO: polymorphic subclasses ? + return entity is self.entity_zero + else: + return not _is_aliased_class( + self.entity_zero + ) and entity.common_parent(self.entity_zero) + + def setup_dml_returning_compile_state( + self, + compile_state: ORMCompileState, + adapter: DMLReturningColFilter, + ) -> None: + self._fetch_column = self.column + column = adapter(self.column, False) + if column is not None: + compile_state.dedupe_columns.add(column) + compile_state.primary_columns.append(column) + + def setup_compile_state(self, compile_state): + current_adapter = compile_state._get_current_adapter() + if current_adapter: + column = current_adapter(self.column, False) + if column is None: + assert compile_state.is_dml_returning + self._fetch_column = self.column + return + else: + column = self.column + + ezero = self.entity_zero + + single_table_crit = self.mapper._single_table_criterion + if ( + single_table_crit is not None + or ("additional_entity_criteria", self.mapper) + in compile_state.global_attributes + ): + compile_state.extra_criteria_entities[ezero] = ( + ezero, + ezero._adapter if ezero.is_aliased_class else None, + ) + + if column._annotations and not column._expression_label: + # annotated columns perform more slowly in compiler and + # result due to the __eq__() method, so use deannotated + column = column._deannotate() + + # use entity_zero as the from if we have it. this is necessary + # for polymorphic scenarios where our FROM is based on ORM entity, + # not the FROM of the column. but also, don't use it if our column + # doesn't actually have any FROMs that line up, such as when its + # a scalar subquery. 
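+        # hypothetical illustration (not from the original source): for
+        # select(User.name), the column's FROMs include the mapped user
+        # table, which intersects the User entity's selectable, so that
+        # selectable can serve as a fallback FROM; for a column_property()
+        # built from a scalar_subquery() over another table there is no
+        # such intersection, and no fallback FROM is recorded.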
+ if set(self.column._from_objects).intersection( + ezero.selectable._from_objects + ): + compile_state._fallback_from_clauses.append(ezero.selectable) + + compile_state.dedupe_columns.add(column) + compile_state.primary_columns.append(column) + self._fetch_column = column + + +class _IdentityTokenEntity(_ORMColumnEntity): + translate_raw_column = False + + def setup_compile_state(self, compile_state): + pass + + def row_processor(self, context, result): + def getter(row): + return context.load_options._identity_token + + return getter, self._label_name, self._extra_entities diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_api.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_api.py new file mode 100644 index 00000000..91f9539b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_api.py @@ -0,0 +1,1883 @@ +# orm/decl_api.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Public API functions and helpers for declarative.""" + +from __future__ import annotations + +import itertools +import re +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import ClassVar +from typing import Dict +from typing import FrozenSet +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import Mapping +from typing import Optional +from typing import overload +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import weakref + +from . import attributes +from . import clsregistry +from . import instrumentation +from . import interfaces +from . import mapperlib +from ._orm_constructors import composite +from ._orm_constructors import deferred +from ._orm_constructors import mapped_column +from ._orm_constructors import relationship +from ._orm_constructors import synonym +from .attributes import InstrumentedAttribute +from .base import _inspect_mapped_class +from .base import _is_mapped_class +from .base import Mapped +from .base import ORMDescriptor +from .decl_base import _add_attribute +from .decl_base import _as_declarative +from .decl_base import _ClassScanMapperConfig +from .decl_base import _declarative_constructor +from .decl_base import _DeferredMapperConfig +from .decl_base import _del_attribute +from .decl_base import _mapper +from .descriptor_props import Composite +from .descriptor_props import Synonym +from .descriptor_props import Synonym as _orm_synonym +from .mapper import Mapper +from .properties import MappedColumn +from .relationships import RelationshipProperty +from .state import InstanceState +from .. import exc +from .. import inspection +from .. 
import util +from ..sql import sqltypes +from ..sql.base import _NoArg +from ..sql.elements import SQLCoreOperations +from ..sql.schema import MetaData +from ..sql.selectable import FromClause +from ..util import hybridmethod +from ..util import hybridproperty +from ..util import typing as compat_typing +from ..util.typing import CallableReference +from ..util.typing import flatten_newtype +from ..util.typing import is_generic +from ..util.typing import is_literal +from ..util.typing import is_newtype +from ..util.typing import is_pep695 +from ..util.typing import Literal +from ..util.typing import Self + +if TYPE_CHECKING: + from ._typing import _O + from ._typing import _RegistryType + from .decl_base import _DataclassArguments + from .instrumentation import ClassManager + from .interfaces import MapperProperty + from .state import InstanceState # noqa + from ..sql._typing import _TypeEngineArgument + from ..sql.type_api import _MatchedOnType + +_T = TypeVar("_T", bound=Any) + +_TT = TypeVar("_TT", bound=Any) + +# it's not clear how to have Annotated, Union objects etc. as keys here +# from a typing perspective so just leave it open ended for now +_TypeAnnotationMapType = Mapping[Any, "_TypeEngineArgument[Any]"] +_MutableTypeAnnotationMapType = Dict[Any, "_TypeEngineArgument[Any]"] + +_DeclaredAttrDecorated = Callable[ + ..., Union[Mapped[_T], ORMDescriptor[_T], SQLCoreOperations[_T]] +] + + +def has_inherited_table(cls: Type[_O]) -> bool: + """Given a class, return True if any of the classes it inherits from has a + mapped table, otherwise return False. + + This is used in declarative mixins to build attributes that behave + differently for the base class vs. a subclass in an inheritance + hierarchy. + + .. seealso:: + + :ref:`decl_mixin_inheritance` + + """ + for class_ in cls.__mro__[1:]: + if getattr(class_, "__table__", None) is not None: + return True + return False + + +class _DynamicAttributesType(type): + def __setattr__(cls, key: str, value: Any) -> None: + if "__mapper__" in cls.__dict__: + _add_attribute(cls, key, value) + else: + type.__setattr__(cls, key, value) + + def __delattr__(cls, key: str) -> None: + if "__mapper__" in cls.__dict__: + _del_attribute(cls, key) + else: + type.__delattr__(cls, key) + + +class DeclarativeAttributeIntercept( + _DynamicAttributesType, + # Inspectable is used only by the mypy plugin + inspection.Inspectable[Mapper[Any]], +): + """Metaclass that may be used in conjunction with the + :class:`_orm.DeclarativeBase` class to support addition of class + attributes dynamically. 
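+
+    For example, once a class is mapped, a plain attribute assignment at
+    the class level is intercepted and routed through the declarative
+    machinery (a minimal sketch; assumes ``User`` is a mapped class with
+    ``first`` and ``last`` string columns, and that ``column_property``
+    is imported from ``sqlalchemy.orm``)::
+
+        User.fullname = column_property(User.first + " " + User.last)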
+
+    """
+
+
+@compat_typing.dataclass_transform(
+    field_specifiers=(
+        MappedColumn,
+        RelationshipProperty,
+        Composite,
+        Synonym,
+        mapped_column,
+        relationship,
+        composite,
+        synonym,
+        deferred,
+    ),
+)
+class DCTransformDeclarative(DeclarativeAttributeIntercept):
+    """metaclass that includes @dataclass_transforms"""
+
+
+class DeclarativeMeta(DeclarativeAttributeIntercept):
+    metadata: MetaData
+    registry: RegistryType
+
+    def __init__(
+        cls, classname: Any, bases: Any, dict_: Any, **kw: Any
+    ) -> None:
+        # use cls.__dict__, which can be modified by an
+        # __init_subclass__() method (#7900)
+        dict_ = cls.__dict__
+
+        # early-consume registry from the initial declarative base,
+        # assign privately to not conflict with subclass attributes named
+        # "registry"
+        reg = getattr(cls, "_sa_registry", None)
+        if reg is None:
+            reg = dict_.get("registry", None)
+            if not isinstance(reg, registry):
+                raise exc.InvalidRequestError(
+                    "Declarative base class has no 'registry' attribute, "
+                    "or registry is not a sqlalchemy.orm.registry() object"
+                )
+            else:
+                cls._sa_registry = reg
+
+        if not cls.__dict__.get("__abstract__", False):
+            _as_declarative(reg, cls, dict_)
+        type.__init__(cls, classname, bases, dict_)
+
+
+def synonym_for(
+    name: str, map_column: bool = False
+) -> Callable[[Callable[..., Any]], Synonym[Any]]:
+    """Decorator that produces an :func:`_orm.synonym`
+    attribute in conjunction with a Python descriptor.
+
+    The function being decorated is passed to :func:`_orm.synonym` as the
+    :paramref:`.orm.synonym.descriptor` parameter::
+
+        class MyClass(Base):
+            __tablename__ = 'my_table'
+
+            id = Column(Integer, primary_key=True)
+            _job_status = Column("job_status", String(50))
+
+            @synonym_for("job_status")
+            @property
+            def job_status(self):
+                return "Status: %s" % self._job_status
+
+    The :ref:`hybrid properties <mapper_hybrids>` feature of SQLAlchemy
+    is typically preferred instead of synonyms, which is a more legacy
+    feature.
+
+    .. seealso::
+
+        :ref:`synonyms` - Overview of synonyms
+
+        :func:`_orm.synonym` - the mapper-level function
+
+        :ref:`mapper_hybrids` - The Hybrid Attribute extension provides an
+        updated approach to augmenting attribute behavior more flexibly than
+        can be achieved with synonyms.
+
+    """
+
+    def decorate(fn: Callable[..., Any]) -> Synonym[Any]:
+        return _orm_synonym(name, map_column=map_column, descriptor=fn)
+
+    return decorate
+
+
+class _declared_attr_common:
+    def __init__(
+        self,
+        fn: Callable[..., Any],
+        cascading: bool = False,
+        quiet: bool = False,
+    ):
+        # support
+        # @declared_attr
+        # @classmethod
+        # def foo(cls) -> Mapped[thing]:
+        #     ...
+        # which seems to help typing tools interpret the fn as a classmethod
+        # for situations where needed
+        if isinstance(fn, classmethod):
+            fn = fn.__func__
+
+        self.fget = fn
+        self._cascading = cascading
+        self._quiet = quiet
+        self.__doc__ = fn.__doc__
+
+    def _collect_return_annotation(self) -> Optional[Type[Any]]:
+        return util.get_annotations(self.fget).get("return")
+
+    def __get__(self, instance: Optional[object], owner: Any) -> Any:
+        # the declared_attr needs to make use of a cache that exists
+        # for the span of the declarative scan_attributes() phase.
+        # to achieve this we look at the class manager that's configured. 
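+        # illustrative sketch: given a hypothetical mixin
+        #
+        #     class HasUpdatedAt:
+        #         @declared_attr
+        #         def updated_at(cls) -> Mapped[datetime.datetime]:
+        #             return mapped_column(DateTime)
+        #
+        # the cache ensures the decorated function runs at most once per
+        # class being scanned, even if several steps of the scan read the
+        # attribute.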
+ + # note this method should not be called outside of the declarative + # setup phase + + cls = owner + manager = attributes.opt_manager_of_class(cls) + if manager is None: + if not re.match(r"^__.+__$", self.fget.__name__): + # if there is no manager at all, then this class hasn't been + # run through declarative or mapper() at all, emit a warning. + util.warn( + "Unmanaged access of declarative attribute %s from " + "non-mapped class %s" % (self.fget.__name__, cls.__name__) + ) + return self.fget(cls) + elif manager.is_mapped: + # the class is mapped, which means we're outside of the declarative + # scan setup, just run the function. + return self.fget(cls) + + # here, we are inside of the declarative scan. use the registry + # that is tracking the values of these attributes. + declarative_scan = manager.declarative_scan() + + # assert that we are in fact in the declarative scan + assert declarative_scan is not None + + reg = declarative_scan.declared_attr_reg + + if self in reg: + return reg[self] + else: + reg[self] = obj = self.fget(cls) + return obj + + +class _declared_directive(_declared_attr_common, Generic[_T]): + # see mapping_api.rst for docstring + + if typing.TYPE_CHECKING: + + def __init__( + self, + fn: Callable[..., _T], + cascading: bool = False, + ): ... + + def __get__(self, instance: Optional[object], owner: Any) -> _T: ... + + def __set__(self, instance: Any, value: Any) -> None: ... + + def __delete__(self, instance: Any) -> None: ... + + def __call__(self, fn: Callable[..., _TT]) -> _declared_directive[_TT]: + # extensive fooling of mypy underway... + ... + + +class declared_attr(interfaces._MappedAttribute[_T], _declared_attr_common): + """Mark a class-level method as representing the definition of + a mapped property or Declarative directive. + + :class:`_orm.declared_attr` is typically applied as a decorator to a class + level method, turning the attribute into a scalar-like property that can be + invoked from the uninstantiated class. The Declarative mapping process + looks for these :class:`_orm.declared_attr` callables as it scans classes, + and assumes any attribute marked with :class:`_orm.declared_attr` will be a + callable that will produce an object specific to the Declarative mapping or + table configuration. + + :class:`_orm.declared_attr` is usually applicable to + :ref:`mixins `, to define relationships that are to be + applied to different implementors of the class. It may also be used to + define dynamically generated column expressions and other Declarative + attributes. + + Example:: + + class ProvidesUserMixin: + "A mixin that adds a 'user' relationship to classes." + + user_id: Mapped[int] = mapped_column(ForeignKey("user_table.id")) + + @declared_attr + def user(cls) -> Mapped["User"]: + return relationship("User") + + When used with Declarative directives such as ``__tablename__``, the + :meth:`_orm.declared_attr.directive` modifier may be used which indicates + to :pep:`484` typing tools that the given method is not dealing with + :class:`_orm.Mapped` attributes:: + + class CreateTableName: + @declared_attr.directive + def __tablename__(cls) -> str: + return cls.__name__.lower() + + :class:`_orm.declared_attr` can also be applied directly to mapped + classes, to allow for attributes that dynamically configure themselves + on subclasses when using mapped inheritance schemes. 
Below + illustrates :class:`_orm.declared_attr` to create a dynamic scheme + for generating the :paramref:`_orm.Mapper.polymorphic_identity` parameter + for subclasses:: + + class Employee(Base): + __tablename__ = 'employee' + + id: Mapped[int] = mapped_column(primary_key=True) + type: Mapped[str] = mapped_column(String(50)) + + @declared_attr.directive + def __mapper_args__(cls) -> Dict[str, Any]: + if cls.__name__ == 'Employee': + return { + "polymorphic_on":cls.type, + "polymorphic_identity":"Employee" + } + else: + return {"polymorphic_identity":cls.__name__} + + class Engineer(Employee): + pass + + :class:`_orm.declared_attr` supports decorating functions that are + explicitly decorated with ``@classmethod``. This is never necessary from a + runtime perspective, however may be needed in order to support :pep:`484` + typing tools that don't otherwise recognize the decorated function as + having class-level behaviors for the ``cls`` parameter:: + + class SomethingMixin: + x: Mapped[int] + y: Mapped[int] + + @declared_attr + @classmethod + def x_plus_y(cls) -> Mapped[int]: + return column_property(cls.x + cls.y) + + .. versionadded:: 2.0 - :class:`_orm.declared_attr` can accommodate a + function decorated with ``@classmethod`` to help with :pep:`484` + integration where needed. + + + .. seealso:: + + :ref:`orm_mixins_toplevel` - Declarative Mixin documentation with + background on use patterns for :class:`_orm.declared_attr`. + + """ # noqa: E501 + + if typing.TYPE_CHECKING: + + def __init__( + self, + fn: _DeclaredAttrDecorated[_T], + cascading: bool = False, + ): ... + + def __set__(self, instance: Any, value: Any) -> None: ... + + def __delete__(self, instance: Any) -> None: ... + + # this is the Mapped[] API where at class descriptor get time we want + # the type checker to see InstrumentedAttribute[_T]. However the + # callable function prior to mapping in fact calls the given + # declarative function that does not return InstrumentedAttribute + @overload + def __get__( + self, instance: None, owner: Any + ) -> InstrumentedAttribute[_T]: ... + + @overload + def __get__(self, instance: object, owner: Any) -> _T: ... + + def __get__( + self, instance: Optional[object], owner: Any + ) -> Union[InstrumentedAttribute[_T], _T]: ... + + @hybridmethod + def _stateful(cls, **kw: Any) -> _stateful_declared_attr[_T]: + return _stateful_declared_attr(**kw) + + @hybridproperty + def directive(cls) -> _declared_directive[Any]: + # see mapping_api.rst for docstring + return _declared_directive # type: ignore + + @hybridproperty + def cascading(cls) -> _stateful_declared_attr[_T]: + # see mapping_api.rst for docstring + return cls._stateful(cascading=True) + + +class _stateful_declared_attr(declared_attr[_T]): + kw: Dict[str, Any] + + def __init__(self, **kw: Any): + self.kw = kw + + @hybridmethod + def _stateful(self, **kw: Any) -> _stateful_declared_attr[_T]: + new_kw = self.kw.copy() + new_kw.update(kw) + return _stateful_declared_attr(**new_kw) + + def __call__(self, fn: _DeclaredAttrDecorated[_T]) -> declared_attr[_T]: + return declared_attr(fn, **self.kw) + + +def declarative_mixin(cls: Type[_T]) -> Type[_T]: + """Mark a class as providing the feature of "declarative mixin". 
+
+    E.g.::
+
+        from sqlalchemy.orm import declared_attr
+        from sqlalchemy.orm import declarative_mixin
+
+        @declarative_mixin
+        class MyMixin:
+
+            @declared_attr
+            def __tablename__(cls):
+                return cls.__name__.lower()
+
+            __table_args__ = {'mysql_engine': 'InnoDB'}
+            __mapper_args__ = {'always_refresh': True}
+
+            id = Column(Integer, primary_key=True)
+
+        class MyModel(MyMixin, Base):
+            name = Column(String(1000))
+
+    The :func:`_orm.declarative_mixin` decorator currently does not modify
+    the given class in any way; its current purpose is strictly to assist
+    the :ref:`Mypy plugin <mypy_toplevel>` in being able to identify
+    SQLAlchemy declarative mixin classes when no other context is present.
+
+    .. versionadded:: 1.4.6
+
+    .. seealso::
+
+        :ref:`orm_mixins_toplevel`
+
+        :ref:`mypy_declarative_mixins` - in the
+        :ref:`Mypy plugin documentation <mypy_toplevel>`
+
+    """  # noqa: E501
+
+    return cls
+
+
+def _setup_declarative_base(cls: Type[Any]) -> None:
+    if "metadata" in cls.__dict__:
+        metadata = cls.__dict__["metadata"]
+    else:
+        metadata = None
+
+    if "type_annotation_map" in cls.__dict__:
+        type_annotation_map = cls.__dict__["type_annotation_map"]
+    else:
+        type_annotation_map = None
+
+    reg = cls.__dict__.get("registry", None)
+    if reg is not None:
+        if not isinstance(reg, registry):
+            raise exc.InvalidRequestError(
+                "Declarative base class has a 'registry' attribute that is "
+                "not an instance of sqlalchemy.orm.registry()"
+            )
+        elif type_annotation_map is not None:
+            raise exc.InvalidRequestError(
+                "Declarative base class has both a 'registry' attribute and a "
+                "type_annotation_map entry. Per-base type_annotation_maps "
+                "are not supported. Please apply the type_annotation_map "
+                "to this registry directly."
+            )
+
+    else:
+        reg = registry(
+            metadata=metadata, type_annotation_map=type_annotation_map
+        )
+        cls.registry = reg
+
+    cls._sa_registry = reg
+
+    if "metadata" not in cls.__dict__:
+        cls.metadata = cls.registry.metadata
+
+    if getattr(cls, "__init__", object.__init__) is object.__init__:
+        cls.__init__ = cls.registry.constructor
+
+
+class MappedAsDataclass(metaclass=DCTransformDeclarative):
+    """Mixin class indicating that the class, when mapped, should also be
+    converted into a dataclass.
+
+    .. seealso::
+
+        :ref:`orm_declarative_native_dataclasses` - complete background
+        on SQLAlchemy native dataclass mapping
+
+    .. 
versionadded:: 2.0
+
+    """
+
+    def __init_subclass__(
+        cls,
+        init: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+        eq: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        order: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        unsafe_hash: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        match_args: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        dataclass_callable: Union[
+            _NoArg, Callable[..., Type[Any]]
+        ] = _NoArg.NO_ARG,
+        **kw: Any,
+    ) -> None:
+        apply_dc_transforms: _DataclassArguments = {
+            "init": init,
+            "repr": repr,
+            "eq": eq,
+            "order": order,
+            "unsafe_hash": unsafe_hash,
+            "match_args": match_args,
+            "kw_only": kw_only,
+            "dataclass_callable": dataclass_callable,
+        }
+
+        current_transforms: _DataclassArguments
+
+        if hasattr(cls, "_sa_apply_dc_transforms"):
+            current = cls._sa_apply_dc_transforms
+
+            _ClassScanMapperConfig._assert_dc_arguments(current)
+
+            cls._sa_apply_dc_transforms = current_transforms = {  # type: ignore  # noqa: E501
+                k: current.get(k, _NoArg.NO_ARG) if v is _NoArg.NO_ARG else v
+                for k, v in apply_dc_transforms.items()
+            }
+        else:
+            cls._sa_apply_dc_transforms = current_transforms = (
+                apply_dc_transforms
+            )
+
+        super().__init_subclass__(**kw)
+
+        if not _is_mapped_class(cls):
+            new_anno = (
+                _ClassScanMapperConfig._update_annotations_for_non_mapped_class
+            )(cls)
+            _ClassScanMapperConfig._apply_dataclasses_to_any_class(
+                current_transforms, cls, new_anno
+            )
+
+
+class DeclarativeBase(
+    # Inspectable is used only by the mypy plugin
+    inspection.Inspectable[InstanceState[Any]],
+    metaclass=DeclarativeAttributeIntercept,
+):
+    """Base class used for declarative class definitions.
+
+    The :class:`_orm.DeclarativeBase` allows for the creation of new
+    declarative bases in such a way that is compatible with type checkers::
+
+
+        from sqlalchemy.orm import DeclarativeBase
+
+        class Base(DeclarativeBase):
+            pass
+
+
+    The above ``Base`` class is now usable as the base for new declarative
+    mappings.  The superclass makes use of the ``__init_subclass__()``
+    method to set up new classes, rather than requiring an explicit
+    metaclass on the part of the user.
+
+    When first used, the :class:`_orm.DeclarativeBase` class instantiates a new
+    :class:`_orm.registry` to be used with the base, assuming one was not
+    provided explicitly. The :class:`_orm.DeclarativeBase` class supports
+    class-level attributes which act as parameters for the construction of this
+    registry, such as to indicate a specific :class:`_schema.MetaData`
+    collection as well as a specific value for
+    :paramref:`_orm.registry.type_annotation_map`::
+
+        from typing_extensions import Annotated
+
+        from sqlalchemy import BigInteger
+        from sqlalchemy import MetaData
+        from sqlalchemy import String
+        from sqlalchemy.orm import DeclarativeBase
+
+        bigint = Annotated[int, "bigint"]
+        my_metadata = MetaData()
+
+        class Base(DeclarativeBase):
+            metadata = my_metadata
+            type_annotation_map = {
+                str: String().with_variant(String(255), "mysql", "mariadb"),
+                bigint: BigInteger()
+            }
+
+    Class-level attributes which may be specified include:
+
+    :param metadata: optional :class:`_schema.MetaData` collection.
+      If a :class:`_orm.registry` is constructed automatically, this
+      :class:`_schema.MetaData` collection will be used to construct it.
+      Otherwise, the local :class:`_schema.MetaData` collection will
+      supersede that used by an existing :class:`_orm.registry` passed
+      using the :paramref:`_orm.DeclarativeBase.registry` parameter. 
+ :param type_annotation_map: optional type annotation map that will be + passed to the :class:`_orm.registry` as + :paramref:`_orm.registry.type_annotation_map`. + :param registry: supply a pre-existing :class:`_orm.registry` directly. + + .. versionadded:: 2.0 Added :class:`.DeclarativeBase`, so that declarative + base classes may be constructed in such a way that is also recognized + by :pep:`484` type checkers. As a result, :class:`.DeclarativeBase` + and other subclassing-oriented APIs should be seen as + superseding previous "class returned by a function" APIs, namely + :func:`_orm.declarative_base` and :meth:`_orm.registry.generate_base`, + where the base class returned cannot be recognized by type checkers + without using plugins. + + **__init__ behavior** + + In a plain Python class, the base-most ``__init__()`` method in the class + hierarchy is ``object.__init__()``, which accepts no arguments. However, + when the :class:`_orm.DeclarativeBase` subclass is first declared, the + class is given an ``__init__()`` method that links to the + :paramref:`_orm.registry.constructor` constructor function, if no + ``__init__()`` method is already present; this is the usual declarative + constructor that will assign keyword arguments as attributes on the + instance, assuming those attributes are established at the class level + (i.e. are mapped, or are linked to a descriptor). This constructor is + **never accessed by a mapped class without being called explicitly via + super()**, as mapped classes are themselves given an ``__init__()`` method + directly which calls :paramref:`_orm.registry.constructor`, so in the + default case works independently of what the base-most ``__init__()`` + method does. + + .. versionchanged:: 2.0.1 :class:`_orm.DeclarativeBase` has a default + constructor that links to :paramref:`_orm.registry.constructor` by + default, so that calls to ``super().__init__()`` can access this + constructor. Previously, due to an implementation mistake, this default + constructor was missing, and calling ``super().__init__()`` would invoke + ``object.__init__()``. + + The :class:`_orm.DeclarativeBase` subclass may also declare an explicit + ``__init__()`` method which will replace the use of the + :paramref:`_orm.registry.constructor` function at this level:: + + class Base(DeclarativeBase): + def __init__(self, id=None): + self.id = id + + Mapped classes still will not invoke this constructor implicitly; it + remains only accessible by calling ``super().__init__()``:: + + class MyClass(Base): + def __init__(self, id=None, name=None): + self.name = name + super().__init__(id=id) + + Note that this is a different behavior from what functions like the legacy + :func:`_orm.declarative_base` would do; the base created by those functions + would always install :paramref:`_orm.registry.constructor` for + ``__init__()``. + + + """ + + if typing.TYPE_CHECKING: + + def _sa_inspect_type(self) -> Mapper[Self]: ... + + def _sa_inspect_instance(self) -> InstanceState[Self]: ... + + _sa_registry: ClassVar[_RegistryType] + + registry: ClassVar[_RegistryType] + """Refers to the :class:`_orm.registry` in use where new + :class:`_orm.Mapper` objects will be associated.""" + + metadata: ClassVar[MetaData] + """Refers to the :class:`_schema.MetaData` collection that will be used + for new :class:`_schema.Table` objects. + + .. 
seealso:: + + :ref:`orm_declarative_metadata` + + """ + + __name__: ClassVar[str] + + # this ideally should be Mapper[Self], but mypy as of 1.4.1 does not + # like it, and breaks the declared_attr_one test. Pyright/pylance is + # ok with it. + __mapper__: ClassVar[Mapper[Any]] + """The :class:`_orm.Mapper` object to which a particular class is + mapped. + + May also be acquired using :func:`_sa.inspect`, e.g. + ``inspect(klass)``. + + """ + + __table__: ClassVar[FromClause] + """The :class:`_sql.FromClause` to which a particular subclass is + mapped. + + This is usually an instance of :class:`_schema.Table` but may also + refer to other kinds of :class:`_sql.FromClause` such as + :class:`_sql.Subquery`, depending on how the class is mapped. + + .. seealso:: + + :ref:`orm_declarative_metadata` + + """ + + # pyright/pylance do not consider a classmethod a ClassVar so use Any + # https://github.com/microsoft/pylance-release/issues/3484 + __tablename__: Any + """String name to assign to the generated + :class:`_schema.Table` object, if not specified directly via + :attr:`_orm.DeclarativeBase.__table__`. + + .. seealso:: + + :ref:`orm_declarative_table` + + """ + + __mapper_args__: Any + """Dictionary of arguments which will be passed to the + :class:`_orm.Mapper` constructor. + + .. seealso:: + + :ref:`orm_declarative_mapper_options` + + """ + + __table_args__: Any + """A dictionary or tuple of arguments that will be passed to the + :class:`_schema.Table` constructor. See + :ref:`orm_declarative_table_configuration` + for background on the specific structure of this collection. + + .. seealso:: + + :ref:`orm_declarative_table_configuration` + + """ + + def __init__(self, **kw: Any): ... + + def __init_subclass__(cls, **kw: Any) -> None: + if DeclarativeBase in cls.__bases__: + _check_not_declarative(cls, DeclarativeBase) + _setup_declarative_base(cls) + else: + _as_declarative(cls._sa_registry, cls, cls.__dict__) + super().__init_subclass__(**kw) + + +def _check_not_declarative(cls: Type[Any], base: Type[Any]) -> None: + cls_dict = cls.__dict__ + if ( + "__table__" in cls_dict + and not ( + callable(cls_dict["__table__"]) + or hasattr(cls_dict["__table__"], "__get__") + ) + ) or isinstance(cls_dict.get("__tablename__", None), str): + raise exc.InvalidRequestError( + f"Cannot use {base.__name__!r} directly as a declarative base " + "class. Create a Base by creating a subclass of it." + ) + + +class DeclarativeBaseNoMeta( + # Inspectable is used only by the mypy plugin + inspection.Inspectable[InstanceState[Any]] +): + """Same as :class:`_orm.DeclarativeBase`, but does not use a metaclass + to intercept new attributes. + + The :class:`_orm.DeclarativeBaseNoMeta` base may be used when use of + custom metaclasses is desirable. + + .. versionadded:: 2.0 + + + """ + + _sa_registry: ClassVar[_RegistryType] + + registry: ClassVar[_RegistryType] + """Refers to the :class:`_orm.registry` in use where new + :class:`_orm.Mapper` objects will be associated.""" + + metadata: ClassVar[MetaData] + """Refers to the :class:`_schema.MetaData` collection that will be used + for new :class:`_schema.Table` objects. + + .. seealso:: + + :ref:`orm_declarative_metadata` + + """ + + # this ideally should be Mapper[Self], but mypy as of 1.4.1 does not + # like it, and breaks the declared_attr_one test. Pyright/pylance is + # ok with it. + __mapper__: ClassVar[Mapper[Any]] + """The :class:`_orm.Mapper` object to which a particular class is + mapped. + + May also be acquired using :func:`_sa.inspect`, e.g. 
+ ``inspect(klass)``. + + """ + + __table__: Optional[FromClause] + """The :class:`_sql.FromClause` to which a particular subclass is + mapped. + + This is usually an instance of :class:`_schema.Table` but may also + refer to other kinds of :class:`_sql.FromClause` such as + :class:`_sql.Subquery`, depending on how the class is mapped. + + .. seealso:: + + :ref:`orm_declarative_metadata` + + """ + + if typing.TYPE_CHECKING: + + def _sa_inspect_type(self) -> Mapper[Self]: ... + + def _sa_inspect_instance(self) -> InstanceState[Self]: ... + + __tablename__: Any + """String name to assign to the generated + :class:`_schema.Table` object, if not specified directly via + :attr:`_orm.DeclarativeBase.__table__`. + + .. seealso:: + + :ref:`orm_declarative_table` + + """ + + __mapper_args__: Any + """Dictionary of arguments which will be passed to the + :class:`_orm.Mapper` constructor. + + .. seealso:: + + :ref:`orm_declarative_mapper_options` + + """ + + __table_args__: Any + """A dictionary or tuple of arguments that will be passed to the + :class:`_schema.Table` constructor. See + :ref:`orm_declarative_table_configuration` + for background on the specific structure of this collection. + + .. seealso:: + + :ref:`orm_declarative_table_configuration` + + """ + + def __init__(self, **kw: Any): ... + + def __init_subclass__(cls, **kw: Any) -> None: + if DeclarativeBaseNoMeta in cls.__bases__: + _check_not_declarative(cls, DeclarativeBaseNoMeta) + _setup_declarative_base(cls) + else: + _as_declarative(cls._sa_registry, cls, cls.__dict__) + super().__init_subclass__(**kw) + + +def add_mapped_attribute( + target: Type[_O], key: str, attr: MapperProperty[Any] +) -> None: + """Add a new mapped attribute to an ORM mapped class. + + E.g.:: + + add_mapped_attribute(User, "addresses", relationship(Address)) + + This may be used for ORM mappings that aren't using a declarative + metaclass that intercepts attribute set operations. + + .. versionadded:: 2.0 + + + """ + _add_attribute(target, key, attr) + + +def declarative_base( + *, + metadata: Optional[MetaData] = None, + mapper: Optional[Callable[..., Mapper[Any]]] = None, + cls: Type[Any] = object, + name: str = "Base", + class_registry: Optional[clsregistry._ClsRegistryType] = None, + type_annotation_map: Optional[_TypeAnnotationMapType] = None, + constructor: Callable[..., None] = _declarative_constructor, + metaclass: Type[Any] = DeclarativeMeta, +) -> Any: + r"""Construct a base class for declarative class definitions. + + The new base class will be given a metaclass that produces + appropriate :class:`~sqlalchemy.schema.Table` objects and makes + the appropriate :class:`_orm.Mapper` calls based on the + information provided declaratively in the class and any subclasses + of the class. + + .. versionchanged:: 2.0 Note that the :func:`_orm.declarative_base` + function is superseded by the new :class:`_orm.DeclarativeBase` class, + which generates a new "base" class using subclassing, rather than + return value of a function. This allows an approach that is compatible + with :pep:`484` typing tools. + + The :func:`_orm.declarative_base` function is a shorthand version + of using the :meth:`_orm.registry.generate_base` + method. 
That is, the following:: + + from sqlalchemy.orm import declarative_base + + Base = declarative_base() + + Is equivalent to:: + + from sqlalchemy.orm import registry + + mapper_registry = registry() + Base = mapper_registry.generate_base() + + See the docstring for :class:`_orm.registry` + and :meth:`_orm.registry.generate_base` + for more details. + + .. versionchanged:: 1.4 The :func:`_orm.declarative_base` + function is now a specialization of the more generic + :class:`_orm.registry` class. The function also moves to the + ``sqlalchemy.orm`` package from the ``declarative.ext`` package. + + + :param metadata: + An optional :class:`~sqlalchemy.schema.MetaData` instance. All + :class:`~sqlalchemy.schema.Table` objects implicitly declared by + subclasses of the base will share this MetaData. A MetaData instance + will be created if none is provided. The + :class:`~sqlalchemy.schema.MetaData` instance will be available via the + ``metadata`` attribute of the generated declarative base class. + + :param mapper: + An optional callable, defaults to :class:`_orm.Mapper`. Will + be used to map subclasses to their Tables. + + :param cls: + Defaults to :class:`object`. A type to use as the base for the generated + declarative base class. May be a class or tuple of classes. + + :param name: + Defaults to ``Base``. The display name for the generated + class. Customizing this is not required, but can improve clarity in + tracebacks and debugging. + + :param constructor: + Specify the implementation for the ``__init__`` function on a mapped + class that has no ``__init__`` of its own. Defaults to an + implementation that assigns \**kwargs for declared + fields and relationships to an instance. If ``None`` is supplied, + no __init__ will be provided and construction will fall back to + cls.__init__ by way of the normal Python semantics. + + :param class_registry: optional dictionary that will serve as the + registry of class names-> mapped classes when string names + are used to identify classes inside of :func:`_orm.relationship` + and others. Allows two or more declarative base classes + to share the same registry of class names for simplified + inter-base relationships. + + :param type_annotation_map: optional dictionary of Python types to + SQLAlchemy :class:`_types.TypeEngine` classes or instances. This + is used exclusively by the :class:`_orm.MappedColumn` construct + to produce column types based on annotations within the + :class:`_orm.Mapped` type. + + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`orm_declarative_mapped_column_type_map` + + :param metaclass: + Defaults to :class:`.DeclarativeMeta`. A metaclass or __metaclass__ + compatible callable to use as the meta type of the generated + declarative base class. + + .. seealso:: + + :class:`_orm.registry` + + """ + + return registry( + metadata=metadata, + class_registry=class_registry, + constructor=constructor, + type_annotation_map=type_annotation_map, + ).generate_base( + mapper=mapper, + cls=cls, + name=name, + metaclass=metaclass, + ) + + +class registry: + """Generalized registry for mapping classes. + + The :class:`_orm.registry` serves as the basis for maintaining a collection + of mappings, and provides configurational hooks used to map classes. + + The three general kinds of mappings supported are Declarative Base, + Declarative Decorator, and Imperative Mapping. 
All of these mapping + styles may be used interchangeably: + + * :meth:`_orm.registry.generate_base` returns a new declarative base + class, and is the underlying implementation of the + :func:`_orm.declarative_base` function. + + * :meth:`_orm.registry.mapped` provides a class decorator that will + apply declarative mapping to a class without the use of a declarative + base class. + + * :meth:`_orm.registry.map_imperatively` will produce a + :class:`_orm.Mapper` for a class without scanning the class for + declarative class attributes. This method suits the use case historically + provided by the ``sqlalchemy.orm.mapper()`` classical mapping function, + which is removed as of SQLAlchemy 2.0. + + .. versionadded:: 1.4 + + .. seealso:: + + :ref:`orm_mapping_classes_toplevel` - overview of class mapping + styles. + + """ + + _class_registry: clsregistry._ClsRegistryType + _managers: weakref.WeakKeyDictionary[ClassManager[Any], Literal[True]] + _non_primary_mappers: weakref.WeakKeyDictionary[Mapper[Any], Literal[True]] + metadata: MetaData + constructor: CallableReference[Callable[..., None]] + type_annotation_map: _MutableTypeAnnotationMapType + _dependents: Set[_RegistryType] + _dependencies: Set[_RegistryType] + _new_mappers: bool + + def __init__( + self, + *, + metadata: Optional[MetaData] = None, + class_registry: Optional[clsregistry._ClsRegistryType] = None, + type_annotation_map: Optional[_TypeAnnotationMapType] = None, + constructor: Callable[..., None] = _declarative_constructor, + ): + r"""Construct a new :class:`_orm.registry` + + :param metadata: + An optional :class:`_schema.MetaData` instance. All + :class:`_schema.Table` objects generated using declarative + table mapping will make use of this :class:`_schema.MetaData` + collection. If this argument is left at its default of ``None``, + a blank :class:`_schema.MetaData` collection is created. + + :param constructor: + Specify the implementation for the ``__init__`` function on a mapped + class that has no ``__init__`` of its own. Defaults to an + implementation that assigns \**kwargs for declared + fields and relationships to an instance. If ``None`` is supplied, + no __init__ will be provided and construction will fall back to + cls.__init__ by way of the normal Python semantics. + + :param class_registry: optional dictionary that will serve as the + registry of class names-> mapped classes when string names + are used to identify classes inside of :func:`_orm.relationship` + and others. Allows two or more declarative base classes + to share the same registry of class names for simplified + inter-base relationships. + + :param type_annotation_map: optional dictionary of Python types to + SQLAlchemy :class:`_types.TypeEngine` classes or instances. + The provided dict will update the default type mapping. This + is used exclusively by the :class:`_orm.MappedColumn` construct + to produce column types based on annotations within the + :class:`_orm.Mapped` type. + + .. versionadded:: 2.0 + + .. 
seealso:: + + :ref:`orm_declarative_mapped_column_type_map` + + + """ + lcl_metadata = metadata or MetaData() + + if class_registry is None: + class_registry = weakref.WeakValueDictionary() + + self._class_registry = class_registry + self._managers = weakref.WeakKeyDictionary() + self._non_primary_mappers = weakref.WeakKeyDictionary() + self.metadata = lcl_metadata + self.constructor = constructor + self.type_annotation_map = {} + if type_annotation_map is not None: + self.update_type_annotation_map(type_annotation_map) + self._dependents = set() + self._dependencies = set() + + self._new_mappers = False + + with mapperlib._CONFIGURE_MUTEX: + mapperlib._mapper_registries[self] = True + + def update_type_annotation_map( + self, + type_annotation_map: _TypeAnnotationMapType, + ) -> None: + """update the :paramref:`_orm.registry.type_annotation_map` with new + values.""" + + self.type_annotation_map.update( + { + sub_type: sqltype + for typ, sqltype in type_annotation_map.items() + for sub_type in compat_typing.expand_unions( + typ, include_union=True, discard_none=True + ) + } + ) + + def _resolve_type( + self, python_type: _MatchedOnType + ) -> Optional[sqltypes.TypeEngine[Any]]: + + python_type_to_check = python_type + while is_pep695(python_type_to_check): + python_type_to_check = python_type_to_check.__value__ + + check_is_pt = python_type is python_type_to_check + + python_type_type: Type[Any] + search: Iterable[Tuple[_MatchedOnType, Type[Any]]] + + if is_generic(python_type_to_check): + if is_literal(python_type_to_check): + python_type_type = cast("Type[Any]", python_type_to_check) + + search = ( # type: ignore[assignment] + (python_type, python_type_type), + (Literal, python_type_type), + ) + else: + python_type_type = python_type_to_check.__origin__ + search = ((python_type, python_type_type),) + elif is_newtype(python_type_to_check): + python_type_type = flatten_newtype(python_type_to_check) + search = ((python_type, python_type_type),) + elif isinstance(python_type_to_check, type): + python_type_type = python_type_to_check + search = ( + (pt if check_is_pt else python_type, pt) + for pt in python_type_type.__mro__ + ) + else: + python_type_type = python_type_to_check # type: ignore[assignment] + search = ((python_type, python_type_type),) + + for pt, flattened in search: + # we search through full __mro__ for types. however... + sql_type = self.type_annotation_map.get(pt) + if sql_type is None: + sql_type = sqltypes._type_map_get(pt) # type: ignore # noqa: E501 + + if sql_type is not None: + sql_type_inst = sqltypes.to_instance(sql_type) + + # ... this additional step will reject most + # type -> supertype matches, such as if we had + # a MyInt(int) subclass. 
note also we pass NewType() + # here directly; these always have to be in the + # type_annotation_map to be useful + resolved_sql_type = sql_type_inst._resolve_for_python_type( + python_type_type, + pt, + flattened, + ) + if resolved_sql_type is not None: + return resolved_sql_type + + return None + + @property + def mappers(self) -> FrozenSet[Mapper[Any]]: + """read only collection of all :class:`_orm.Mapper` objects.""" + + return frozenset(manager.mapper for manager in self._managers).union( + self._non_primary_mappers + ) + + def _set_depends_on(self, registry: RegistryType) -> None: + if registry is self: + return + registry._dependents.add(self) + self._dependencies.add(registry) + + def _flag_new_mapper(self, mapper: Mapper[Any]) -> None: + mapper._ready_for_configure = True + if self._new_mappers: + return + + for reg in self._recurse_with_dependents({self}): + reg._new_mappers = True + + @classmethod + def _recurse_with_dependents( + cls, registries: Set[RegistryType] + ) -> Iterator[RegistryType]: + todo = registries + done = set() + while todo: + reg = todo.pop() + done.add(reg) + + # if yielding would remove dependents, make sure we have + # them before + todo.update(reg._dependents.difference(done)) + yield reg + + # if yielding would add dependents, make sure we have them + # after + todo.update(reg._dependents.difference(done)) + + @classmethod + def _recurse_with_dependencies( + cls, registries: Set[RegistryType] + ) -> Iterator[RegistryType]: + todo = registries + done = set() + while todo: + reg = todo.pop() + done.add(reg) + + # if yielding would remove dependencies, make sure we have + # them before + todo.update(reg._dependencies.difference(done)) + + yield reg + + # if yielding would remove dependencies, make sure we have + # them before + todo.update(reg._dependencies.difference(done)) + + def _mappers_to_configure(self) -> Iterator[Mapper[Any]]: + return itertools.chain( + ( + manager.mapper + for manager in list(self._managers) + if manager.is_mapped + and not manager.mapper.configured + and manager.mapper._ready_for_configure + ), + ( + npm + for npm in list(self._non_primary_mappers) + if not npm.configured and npm._ready_for_configure + ), + ) + + def _add_non_primary_mapper(self, np_mapper: Mapper[Any]) -> None: + self._non_primary_mappers[np_mapper] = True + + def _dispose_cls(self, cls: Type[_O]) -> None: + clsregistry.remove_class(cls.__name__, cls, self._class_registry) + + def _add_manager(self, manager: ClassManager[Any]) -> None: + self._managers[manager] = True + if manager.is_mapped: + raise exc.ArgumentError( + "Class '%s' already has a primary mapper defined. " + % manager.class_ + ) + assert manager.registry is None + manager.registry = self + + def configure(self, cascade: bool = False) -> None: + """Configure all as-yet unconfigured mappers in this + :class:`_orm.registry`. + + The configure step is used to reconcile and initialize the + :func:`_orm.relationship` linkages between mapped classes, as well as + to invoke configuration events such as the + :meth:`_orm.MapperEvents.before_configured` and + :meth:`_orm.MapperEvents.after_configured`, which may be used by ORM + extensions or user-defined extension hooks. + + If one or more mappers in this registry contain + :func:`_orm.relationship` constructs that refer to mapped classes in + other registries, this registry is said to be *dependent* on those + registries. 
In order to configure those dependent registries + automatically, the :paramref:`_orm.registry.configure.cascade` flag + should be set to ``True``. Otherwise, if they are not configured, an + exception will be raised. The rationale behind this behavior is to + allow an application to programmatically invoke configuration of + registries while controlling whether or not the process implicitly + reaches other registries. + + As an alternative to invoking :meth:`_orm.registry.configure`, the ORM + function :func:`_orm.configure_mappers` function may be used to ensure + configuration is complete for all :class:`_orm.registry` objects in + memory. This is generally simpler to use and also predates the usage of + :class:`_orm.registry` objects overall. However, this function will + impact all mappings throughout the running Python process and may be + more memory/time consuming for an application that has many registries + in use for different purposes that may not be needed immediately. + + .. seealso:: + + :func:`_orm.configure_mappers` + + + .. versionadded:: 1.4.0b2 + + """ + mapperlib._configure_registries({self}, cascade=cascade) + + def dispose(self, cascade: bool = False) -> None: + """Dispose of all mappers in this :class:`_orm.registry`. + + After invocation, all the classes that were mapped within this registry + will no longer have class instrumentation associated with them. This + method is the per-:class:`_orm.registry` analogue to the + application-wide :func:`_orm.clear_mappers` function. + + If this registry contains mappers that are dependencies of other + registries, typically via :func:`_orm.relationship` links, then those + registries must be disposed as well. When such registries exist in + relation to this one, their :meth:`_orm.registry.dispose` method will + also be called, if the :paramref:`_orm.registry.dispose.cascade` flag + is set to ``True``; otherwise, an error is raised if those registries + were not already disposed. + + .. versionadded:: 1.4.0b2 + + .. seealso:: + + :func:`_orm.clear_mappers` + + """ + + mapperlib._dispose_registries({self}, cascade=cascade) + + def _dispose_manager_and_mapper(self, manager: ClassManager[Any]) -> None: + if "mapper" in manager.__dict__: + mapper = manager.mapper + + mapper._set_dispose_flags() + + class_ = manager.class_ + self._dispose_cls(class_) + instrumentation._instrumentation_factory.unregister(class_) + + def generate_base( + self, + mapper: Optional[Callable[..., Mapper[Any]]] = None, + cls: Type[Any] = object, + name: str = "Base", + metaclass: Type[Any] = DeclarativeMeta, + ) -> Any: + """Generate a declarative base class. + + Classes that inherit from the returned class object will be + automatically mapped using declarative mapping. + + E.g.:: + + from sqlalchemy.orm import registry + + mapper_registry = registry() + + Base = mapper_registry.generate_base() + + class MyClass(Base): + __tablename__ = "my_table" + id = Column(Integer, primary_key=True) + + The above dynamically generated class is equivalent to the + non-dynamic example below:: + + from sqlalchemy.orm import registry + from sqlalchemy.orm.decl_api import DeclarativeMeta + + mapper_registry = registry() + + class Base(metaclass=DeclarativeMeta): + __abstract__ = True + registry = mapper_registry + metadata = mapper_registry.metadata + + __init__ = mapper_registry.constructor + + .. 
versionchanged:: 2.0 Note that the + :meth:`_orm.registry.generate_base` method is superseded by the new + :class:`_orm.DeclarativeBase` class, which generates a new "base" + class using subclassing, rather than return value of a function. + This allows an approach that is compatible with :pep:`484` typing + tools. + + The :meth:`_orm.registry.generate_base` method provides the + implementation for the :func:`_orm.declarative_base` function, which + creates the :class:`_orm.registry` and base class all at once. + + See the section :ref:`orm_declarative_mapping` for background and + examples. + + :param mapper: + An optional callable, defaults to :class:`_orm.Mapper`. + This function is used to generate new :class:`_orm.Mapper` objects. + + :param cls: + Defaults to :class:`object`. A type to use as the base for the + generated declarative base class. May be a class or tuple of classes. + + :param name: + Defaults to ``Base``. The display name for the generated + class. Customizing this is not required, but can improve clarity in + tracebacks and debugging. + + :param metaclass: + Defaults to :class:`.DeclarativeMeta`. A metaclass or __metaclass__ + compatible callable to use as the meta type of the generated + declarative base class. + + .. seealso:: + + :ref:`orm_declarative_mapping` + + :func:`_orm.declarative_base` + + """ + metadata = self.metadata + + bases = not isinstance(cls, tuple) and (cls,) or cls + + class_dict: Dict[str, Any] = dict(registry=self, metadata=metadata) + if isinstance(cls, type): + class_dict["__doc__"] = cls.__doc__ + + if self.constructor is not None: + class_dict["__init__"] = self.constructor + + class_dict["__abstract__"] = True + if mapper: + class_dict["__mapper_cls__"] = mapper + + if hasattr(cls, "__class_getitem__"): + + def __class_getitem__(cls: Type[_T], key: Any) -> Type[_T]: + # allow generic classes in py3.9+ + return cls + + class_dict["__class_getitem__"] = __class_getitem__ + + return metaclass(name, bases, class_dict) + + @compat_typing.dataclass_transform( + field_specifiers=( + MappedColumn, + RelationshipProperty, + Composite, + Synonym, + mapped_column, + relationship, + composite, + synonym, + deferred, + ), + ) + @overload + def mapped_as_dataclass(self, __cls: Type[_O]) -> Type[_O]: ... + + @overload + def mapped_as_dataclass( + self, + __cls: Literal[None] = ..., + *, + init: Union[_NoArg, bool] = ..., + repr: Union[_NoArg, bool] = ..., # noqa: A002 + eq: Union[_NoArg, bool] = ..., + order: Union[_NoArg, bool] = ..., + unsafe_hash: Union[_NoArg, bool] = ..., + match_args: Union[_NoArg, bool] = ..., + kw_only: Union[_NoArg, bool] = ..., + dataclass_callable: Union[_NoArg, Callable[..., Type[Any]]] = ..., + ) -> Callable[[Type[_O]], Type[_O]]: ... + + def mapped_as_dataclass( + self, + __cls: Optional[Type[_O]] = None, + *, + init: Union[_NoArg, bool] = _NoArg.NO_ARG, + repr: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + eq: Union[_NoArg, bool] = _NoArg.NO_ARG, + order: Union[_NoArg, bool] = _NoArg.NO_ARG, + unsafe_hash: Union[_NoArg, bool] = _NoArg.NO_ARG, + match_args: Union[_NoArg, bool] = _NoArg.NO_ARG, + kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + dataclass_callable: Union[ + _NoArg, Callable[..., Type[Any]] + ] = _NoArg.NO_ARG, + ) -> Union[Type[_O], Callable[[Type[_O]], Type[_O]]]: + """Class decorator that will apply the Declarative mapping process + to a given class, and additionally convert the class to be a + Python dataclass. + + .. 
seealso:: + + :ref:`orm_declarative_native_dataclasses` - complete background + on SQLAlchemy native dataclass mapping + + + .. versionadded:: 2.0 + + + """ + + def decorate(cls: Type[_O]) -> Type[_O]: + setattr( + cls, + "_sa_apply_dc_transforms", + { + "init": init, + "repr": repr, + "eq": eq, + "order": order, + "unsafe_hash": unsafe_hash, + "match_args": match_args, + "kw_only": kw_only, + "dataclass_callable": dataclass_callable, + }, + ) + _as_declarative(self, cls, cls.__dict__) + return cls + + if __cls: + return decorate(__cls) + else: + return decorate + + def mapped(self, cls: Type[_O]) -> Type[_O]: + """Class decorator that will apply the Declarative mapping process + to a given class. + + E.g.:: + + from sqlalchemy.orm import registry + + mapper_registry = registry() + + @mapper_registry.mapped + class Foo: + __tablename__ = 'some_table' + + id = Column(Integer, primary_key=True) + name = Column(String) + + See the section :ref:`orm_declarative_mapping` for complete + details and examples. + + :param cls: class to be mapped. + + :return: the class that was passed. + + .. seealso:: + + :ref:`orm_declarative_mapping` + + :meth:`_orm.registry.generate_base` - generates a base class + that will apply Declarative mapping to subclasses automatically + using a Python metaclass. + + .. seealso:: + + :meth:`_orm.registry.mapped_as_dataclass` + + """ + _as_declarative(self, cls, cls.__dict__) + return cls + + def as_declarative_base(self, **kw: Any) -> Callable[[Type[_T]], Type[_T]]: + """ + Class decorator which will invoke + :meth:`_orm.registry.generate_base` + for a given base class. + + E.g.:: + + from sqlalchemy.orm import registry + + mapper_registry = registry() + + @mapper_registry.as_declarative_base() + class Base: + @declared_attr + def __tablename__(cls): + return cls.__name__.lower() + id = Column(Integer, primary_key=True) + + class MyMappedClass(Base): + # ... + + All keyword arguments passed to + :meth:`_orm.registry.as_declarative_base` are passed + along to :meth:`_orm.registry.generate_base`. + + """ + + def decorate(cls: Type[_T]) -> Type[_T]: + kw["cls"] = cls + kw["name"] = cls.__name__ + return self.generate_base(**kw) # type: ignore + + return decorate + + def map_declaratively(self, cls: Type[_O]) -> Mapper[_O]: + """Map a class declaratively. + + In this form of mapping, the class is scanned for mapping information, + including for columns to be associated with a table, and/or an + actual table object. + + Returns the :class:`_orm.Mapper` object. + + E.g.:: + + from sqlalchemy.orm import registry + + mapper_registry = registry() + + class Foo: + __tablename__ = 'some_table' + + id = Column(Integer, primary_key=True) + name = Column(String) + + mapper = mapper_registry.map_declaratively(Foo) + + This function is more conveniently invoked indirectly via either the + :meth:`_orm.registry.mapped` class decorator or by subclassing a + declarative metaclass generated from + :meth:`_orm.registry.generate_base`. + + See the section :ref:`orm_declarative_mapping` for complete + details and examples. + + :param cls: class to be mapped. + + :return: a :class:`_orm.Mapper` object. + + .. seealso:: + + :ref:`orm_declarative_mapping` + + :meth:`_orm.registry.mapped` - more common decorator interface + to this function. 
+ + :meth:`_orm.registry.map_imperatively` + + """ + _as_declarative(self, cls, cls.__dict__) + return cls.__mapper__ # type: ignore + + def map_imperatively( + self, + class_: Type[_O], + local_table: Optional[FromClause] = None, + **kw: Any, + ) -> Mapper[_O]: + r"""Map a class imperatively. + + In this form of mapping, the class is not scanned for any mapping + information. Instead, all mapping constructs are passed as + arguments. + + This method is intended to be fully equivalent to the now-removed + SQLAlchemy ``mapper()`` function, except that it's in terms of + a particular registry. + + E.g.:: + + from sqlalchemy.orm import registry + + mapper_registry = registry() + + my_table = Table( + "my_table", + mapper_registry.metadata, + Column('id', Integer, primary_key=True) + ) + + class MyClass: + pass + + mapper_registry.map_imperatively(MyClass, my_table) + + See the section :ref:`orm_imperative_mapping` for complete background + and usage examples. + + :param class\_: The class to be mapped. Corresponds to the + :paramref:`_orm.Mapper.class_` parameter. + + :param local_table: the :class:`_schema.Table` or other + :class:`_sql.FromClause` object that is the subject of the mapping. + Corresponds to the + :paramref:`_orm.Mapper.local_table` parameter. + + :param \**kw: all other keyword arguments are passed to the + :class:`_orm.Mapper` constructor directly. + + .. seealso:: + + :ref:`orm_imperative_mapping` + + :ref:`orm_declarative_mapping` + + """ + return _mapper(self, class_, local_table, kw) + + +RegistryType = registry + +if not TYPE_CHECKING: + # allow for runtime type resolution of ``ClassVar[_RegistryType]`` + _RegistryType = registry # noqa + + +def as_declarative(**kw: Any) -> Callable[[Type[_T]], Type[_T]]: + """ + Class decorator which will adapt a given class into a + :func:`_orm.declarative_base`. + + This function makes use of the :meth:`_orm.registry.as_declarative_base` + method, by first creating a :class:`_orm.registry` automatically + and then invoking the decorator. + + E.g.:: + + from sqlalchemy.orm import as_declarative + + @as_declarative() + class Base: + @declared_attr + def __tablename__(cls): + return cls.__name__.lower() + id = Column(Integer, primary_key=True) + + class MyMappedClass(Base): + # ... + + .. 
seealso:: + + :meth:`_orm.registry.as_declarative_base` + + """ + metadata, class_registry = ( + kw.pop("metadata", None), + kw.pop("class_registry", None), + ) + + return registry( + metadata=metadata, class_registry=class_registry + ).as_declarative_base(**kw) + + +@inspection._inspects( + DeclarativeMeta, DeclarativeBase, DeclarativeAttributeIntercept +) +def _inspect_decl_meta(cls: Type[Any]) -> Optional[Mapper[Any]]: + mp: Optional[Mapper[Any]] = _inspect_mapped_class(cls) + if mp is None: + if _DeferredMapperConfig.has_cls(cls): + _DeferredMapperConfig.raise_unmapped_for_cls(cls) + return mp diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_base.py new file mode 100644 index 00000000..d43fbffc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_base.py @@ -0,0 +1,2180 @@ +# orm/decl_base.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Internal implementation for declarative.""" + +from __future__ import annotations + +import collections +import dataclasses +import re +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import Iterable +from typing import List +from typing import Mapping +from typing import NamedTuple +from typing import NoReturn +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import weakref + +from . import attributes +from . import clsregistry +from . import exc as orm_exc +from . import instrumentation +from . import mapperlib +from ._typing import _O +from ._typing import attr_is_internal_proxy +from .attributes import InstrumentedAttribute +from .attributes import QueryableAttribute +from .base import _is_mapped_class +from .base import InspectionAttr +from .descriptor_props import CompositeProperty +from .descriptor_props import SynonymProperty +from .interfaces import _AttributeOptions +from .interfaces import _DCAttributeOptions +from .interfaces import _IntrospectsAnnotations +from .interfaces import _MappedAttribute +from .interfaces import _MapsColumns +from .interfaces import MapperProperty +from .mapper import Mapper +from .properties import ColumnProperty +from .properties import MappedColumn +from .util import _extract_mapped_subtype +from .util import _is_mapped_annotation +from .util import class_mapper +from .util import de_stringify_annotation +from .. import event +from .. import exc +from .. 
import util +from ..sql import expression +from ..sql.base import _NoArg +from ..sql.schema import Column +from ..sql.schema import Table +from ..util import topological +from ..util.typing import _AnnotationScanType +from ..util.typing import is_fwd_ref +from ..util.typing import is_literal +from ..util.typing import Protocol +from ..util.typing import TypedDict +from ..util.typing import typing_get_args + +if TYPE_CHECKING: + from ._typing import _ClassDict + from ._typing import _RegistryType + from .base import Mapped + from .decl_api import declared_attr + from .instrumentation import ClassManager + from ..sql.elements import NamedColumn + from ..sql.schema import MetaData + from ..sql.selectable import FromClause + +_T = TypeVar("_T", bound=Any) + +_MapperKwArgs = Mapping[str, Any] +_TableArgsType = Union[Tuple[Any, ...], Dict[str, Any]] + + +class MappedClassProtocol(Protocol[_O]): + """A protocol representing a SQLAlchemy mapped class. + + The protocol is generic on the type of class, use + ``MappedClassProtocol[Any]`` to allow any mapped class. + """ + + __name__: str + __mapper__: Mapper[_O] + __table__: FromClause + + def __call__(self, **kw: Any) -> _O: ... + + +class _DeclMappedClassProtocol(MappedClassProtocol[_O], Protocol): + "Internal more detailed version of ``MappedClassProtocol``." + metadata: MetaData + __tablename__: str + __mapper_args__: _MapperKwArgs + __table_args__: Optional[_TableArgsType] + + _sa_apply_dc_transforms: Optional[_DataclassArguments] + + def __declare_first__(self) -> None: ... + + def __declare_last__(self) -> None: ... + + +class _DataclassArguments(TypedDict): + init: Union[_NoArg, bool] + repr: Union[_NoArg, bool] + eq: Union[_NoArg, bool] + order: Union[_NoArg, bool] + unsafe_hash: Union[_NoArg, bool] + match_args: Union[_NoArg, bool] + kw_only: Union[_NoArg, bool] + dataclass_callable: Union[_NoArg, Callable[..., Type[Any]]] + + +def _declared_mapping_info( + cls: Type[Any], +) -> Optional[Union[_DeferredMapperConfig, Mapper[Any]]]: + # deferred mapping + if _DeferredMapperConfig.has_cls(cls): + return _DeferredMapperConfig.config_for_cls(cls) + # regular mapping + elif _is_mapped_class(cls): + return class_mapper(cls, configure=False) + else: + return None + + +def _is_supercls_for_inherits(cls: Type[Any]) -> bool: + """return True if this class will be used as a superclass to set in + 'inherits'. + + This includes deferred mapper configs that aren't mapped yet, however does + not include classes with _sa_decl_prepare_nocascade (e.g. 
+ ``AbstractConcreteBase``); these concrete-only classes are not set up as + "inherits" until after mappers are configured using + mapper._set_concrete_base() + + """ + if _DeferredMapperConfig.has_cls(cls): + return not _get_immediate_cls_attr( + cls, "_sa_decl_prepare_nocascade", strict=True + ) + # regular mapping + elif _is_mapped_class(cls): + return True + else: + return False + + +def _resolve_for_abstract_or_classical(cls: Type[Any]) -> Optional[Type[Any]]: + if cls is object: + return None + + sup: Optional[Type[Any]] + + if cls.__dict__.get("__abstract__", False): + for base_ in cls.__bases__: + sup = _resolve_for_abstract_or_classical(base_) + if sup is not None: + return sup + else: + return None + else: + clsmanager = _dive_for_cls_manager(cls) + + if clsmanager: + return clsmanager.class_ + else: + return cls + + +def _get_immediate_cls_attr( + cls: Type[Any], attrname: str, strict: bool = False +) -> Optional[Any]: + """return an attribute of the class that is either present directly + on the class, e.g. not on a superclass, or is from a superclass but + this superclass is a non-mapped mixin, that is, not a descendant of + the declarative base and is also not classically mapped. + + This is used to detect attributes that indicate something about + a mapped class independently from any mapped classes that it may + inherit from. + + """ + + # the rules are different for this name than others, + # make sure we've moved it out. transitional + assert attrname != "__abstract__" + + if not issubclass(cls, object): + return None + + if attrname in cls.__dict__: + return getattr(cls, attrname) + + for base in cls.__mro__[1:]: + _is_classical_inherits = _dive_for_cls_manager(base) is not None + + if attrname in base.__dict__ and ( + base is cls + or ( + (base in cls.__bases__ if strict else True) + and not _is_classical_inherits + ) + ): + return getattr(base, attrname) + else: + return None + + +def _dive_for_cls_manager(cls: Type[_O]) -> Optional[ClassManager[_O]]: + # because the class manager registration is pluggable, + # we need to do the search for every class in the hierarchy, + # rather than just a simple "cls._sa_class_manager" + + for base in cls.__mro__: + manager: Optional[ClassManager[_O]] = attributes.opt_manager_of_class( + base + ) + if manager: + return manager + return None + + +def _as_declarative( + registry: _RegistryType, cls: Type[Any], dict_: _ClassDict +) -> Optional[_MapperConfig]: + # declarative scans the class for attributes. no table or mapper + # args passed separately. + return _MapperConfig.setup_mapping(registry, cls, dict_, None, {}) + + +def _mapper( + registry: _RegistryType, + cls: Type[_O], + table: Optional[FromClause], + mapper_kw: _MapperKwArgs, +) -> Mapper[_O]: + _ImperativeMapperConfig(registry, cls, table, mapper_kw) + return cast("MappedClassProtocol[_O]", cls).__mapper__ + + +@util.preload_module("sqlalchemy.orm.decl_api") +def _is_declarative_props(obj: Any) -> bool: + _declared_attr_common = util.preloaded.orm_decl_api._declared_attr_common + + return isinstance(obj, (_declared_attr_common, util.classproperty)) + + +def _check_declared_props_nocascade( + obj: Any, name: str, cls: Type[_O] +) -> bool: + if _is_declarative_props(obj): + if getattr(obj, "_cascading", False): + util.warn( + "@declared_attr.cascading is not supported on the %s " + "attribute on class %s. This attribute invokes for " + "subclasses in any case." 
+                % (name, cls)
+            )
+        return True
+    else:
+        return False
+
+
+class _MapperConfig:
+    __slots__ = (
+        "cls",
+        "classname",
+        "properties",
+        "declared_attr_reg",
+        "__weakref__",
+    )
+
+    cls: Type[Any]
+    classname: str
+    properties: util.OrderedDict[
+        str,
+        Union[
+            Sequence[NamedColumn[Any]], NamedColumn[Any], MapperProperty[Any]
+        ],
+    ]
+    declared_attr_reg: Dict[declared_attr[Any], Any]
+
+    @classmethod
+    def setup_mapping(
+        cls,
+        registry: _RegistryType,
+        cls_: Type[_O],
+        dict_: _ClassDict,
+        table: Optional[FromClause],
+        mapper_kw: _MapperKwArgs,
+    ) -> Optional[_MapperConfig]:
+        manager = attributes.opt_manager_of_class(cls_)
+        if manager and manager.class_ is cls_:
+            raise exc.InvalidRequestError(
+                f"Class {cls_!r} already has been instrumented declaratively"
+            )
+
+        if cls_.__dict__.get("__abstract__", False):
+            return None
+
+        defer_map = _get_immediate_cls_attr(
+            cls_, "_sa_decl_prepare_nocascade", strict=True
+        ) or hasattr(cls_, "_sa_decl_prepare")
+
+        if defer_map:
+            return _DeferredMapperConfig(
+                registry, cls_, dict_, table, mapper_kw
+            )
+        else:
+            return _ClassScanMapperConfig(
+                registry, cls_, dict_, table, mapper_kw
+            )
+
+    def __init__(
+        self,
+        registry: _RegistryType,
+        cls_: Type[Any],
+        mapper_kw: _MapperKwArgs,
+    ):
+        self.cls = util.assert_arg_type(cls_, type, "cls_")
+        self.classname = cls_.__name__
+        self.properties = util.OrderedDict()
+        self.declared_attr_reg = {}
+
+        if not mapper_kw.get("non_primary", False):
+            instrumentation.register_class(
+                self.cls,
+                finalize=False,
+                registry=registry,
+                declarative_scan=self,
+                init_method=registry.constructor,
+            )
+        else:
+            manager = attributes.opt_manager_of_class(self.cls)
+            if not manager or not manager.is_mapped:
+                raise exc.InvalidRequestError(
+                    "Class %s has no primary mapper configured. Configure "
+                    "a primary mapper first before setting up a non primary "
+                    "Mapper." % self.cls
+                )
+
+    def set_cls_attribute(self, attrname: str, value: _T) -> _T:
+        manager = instrumentation.manager_of_class(self.cls)
+        manager.install_member(attrname, value)
+        return value
+
+    def map(self, mapper_kw: _MapperKwArgs = ...) -> Mapper[Any]:
+        raise NotImplementedError()
+
+    def _early_mapping(self, mapper_kw: _MapperKwArgs) -> None:
+        self.map(mapper_kw)
+
+
+class _ImperativeMapperConfig(_MapperConfig):
+    __slots__ = ("local_table", "inherits")
+
+    def __init__(
+        self,
+        registry: _RegistryType,
+        cls_: Type[_O],
+        table: Optional[FromClause],
+        mapper_kw: _MapperKwArgs,
+    ):
+        super().__init__(registry, cls_, mapper_kw)
+
+        self.local_table = self.set_cls_attribute("__table__", table)
+
+        with mapperlib._CONFIGURE_MUTEX:
+            if not mapper_kw.get("non_primary", False):
+                clsregistry.add_class(
+                    self.classname, self.cls, registry._class_registry
+                )
+
+            self._setup_inheritance(mapper_kw)
+
+            self._early_mapping(mapper_kw)
+
+    def map(self, mapper_kw: _MapperKwArgs = util.EMPTY_DICT) -> Mapper[Any]:
+        mapper_cls = Mapper
+
+        return self.set_cls_attribute(
+            "__mapper__",
+            mapper_cls(self.cls, self.local_table, **mapper_kw),
+        )
+
+    def _setup_inheritance(self, mapper_kw: _MapperKwArgs) -> None:
+        cls = self.cls
+
+        inherits = mapper_kw.get("inherits", None)
+
+        if inherits is None:
+            # since we search for classical mappings now, search for
+            # multiple mapped bases as well and raise an error.
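+            # e.g. ``class Child(MappedA, MappedB)``, where both bases are
+            # mapped, would collect two candidates here; only one
+            # "inherits" superclass can be chosen, so that case raises below.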
+ inherits_search = [] + for base_ in cls.__bases__: + c = _resolve_for_abstract_or_classical(base_) + if c is None: + continue + + if _is_supercls_for_inherits(c) and c not in inherits_search: + inherits_search.append(c) + + if inherits_search: + if len(inherits_search) > 1: + raise exc.InvalidRequestError( + "Class %s has multiple mapped bases: %r" + % (cls, inherits_search) + ) + inherits = inherits_search[0] + elif isinstance(inherits, Mapper): + inherits = inherits.class_ + + self.inherits = inherits + + +class _CollectedAnnotation(NamedTuple): + raw_annotation: _AnnotationScanType + mapped_container: Optional[Type[Mapped[Any]]] + extracted_mapped_annotation: Union[_AnnotationScanType, str] + is_dataclass: bool + attr_value: Any + originating_module: str + originating_class: Type[Any] + + +class _ClassScanMapperConfig(_MapperConfig): + __slots__ = ( + "registry", + "clsdict_view", + "collected_attributes", + "collected_annotations", + "local_table", + "persist_selectable", + "declared_columns", + "column_ordering", + "column_copies", + "table_args", + "tablename", + "mapper_args", + "mapper_args_fn", + "table_fn", + "inherits", + "single", + "allow_dataclass_fields", + "dataclass_setup_arguments", + "is_dataclass_prior_to_mapping", + "allow_unmapped_annotations", + ) + + is_deferred = False + registry: _RegistryType + clsdict_view: _ClassDict + collected_annotations: Dict[str, _CollectedAnnotation] + collected_attributes: Dict[str, Any] + local_table: Optional[FromClause] + persist_selectable: Optional[FromClause] + declared_columns: util.OrderedSet[Column[Any]] + column_ordering: Dict[Column[Any], int] + column_copies: Dict[ + Union[MappedColumn[Any], Column[Any]], + Union[MappedColumn[Any], Column[Any]], + ] + tablename: Optional[str] + mapper_args: Mapping[str, Any] + table_args: Optional[_TableArgsType] + mapper_args_fn: Optional[Callable[[], Dict[str, Any]]] + inherits: Optional[Type[Any]] + single: bool + + is_dataclass_prior_to_mapping: bool + allow_unmapped_annotations: bool + + dataclass_setup_arguments: Optional[_DataclassArguments] + """if the class has SQLAlchemy native dataclass parameters, where + we will turn the class into a dataclass within the declarative mapping + process. + + """ + + allow_dataclass_fields: bool + """if true, look for dataclass-processed Field objects on the target + class as well as superclasses and extract ORM mapping directives from + the "metadata" attribute of each Field. + + if False, dataclass fields can still be used, however they won't be + mapped. + + """ + + def __init__( + self, + registry: _RegistryType, + cls_: Type[_O], + dict_: _ClassDict, + table: Optional[FromClause], + mapper_kw: _MapperKwArgs, + ): + # grab class dict before the instrumentation manager has been added. 
+ # reduces cycles + self.clsdict_view = ( + util.immutabledict(dict_) if dict_ else util.EMPTY_DICT + ) + super().__init__(registry, cls_, mapper_kw) + self.registry = registry + self.persist_selectable = None + + self.collected_attributes = {} + self.collected_annotations = {} + self.declared_columns = util.OrderedSet() + self.column_ordering = {} + self.column_copies = {} + self.single = False + self.dataclass_setup_arguments = dca = getattr( + self.cls, "_sa_apply_dc_transforms", None + ) + + self.allow_unmapped_annotations = getattr( + self.cls, "__allow_unmapped__", False + ) or bool(self.dataclass_setup_arguments) + + self.is_dataclass_prior_to_mapping = cld = dataclasses.is_dataclass( + cls_ + ) + + sdk = _get_immediate_cls_attr(cls_, "__sa_dataclass_metadata_key__") + + # we don't want to consume Field objects from a not-already-dataclass. + # the Field objects won't have their "name" or "type" populated, + # and while it seems like we could just set these on Field as we + # read them, Field is documented as "user read only" and we need to + # stay far away from any off-label use of dataclasses APIs. + if (not cld or dca) and sdk: + raise exc.InvalidRequestError( + "SQLAlchemy mapped dataclasses can't consume mapping " + "information from dataclass.Field() objects if the immediate " + "class is not already a dataclass." + ) + + # if already a dataclass, and __sa_dataclass_metadata_key__ present, + # then also look inside of dataclass.Field() objects yielded by + # dataclasses.get_fields(cls) when scanning for attributes + self.allow_dataclass_fields = bool(sdk and cld) + + self._setup_declared_events() + + self._scan_attributes() + + self._setup_dataclasses_transforms() + + with mapperlib._CONFIGURE_MUTEX: + clsregistry.add_class( + self.classname, self.cls, registry._class_registry + ) + + self._setup_inheriting_mapper(mapper_kw) + + self._extract_mappable_attributes() + + self._extract_declared_columns() + + self._setup_table(table) + + self._setup_inheriting_columns(mapper_kw) + + self._early_mapping(mapper_kw) + + def _setup_declared_events(self) -> None: + if _get_immediate_cls_attr(self.cls, "__declare_last__"): + + @event.listens_for(Mapper, "after_configured") + def after_configured() -> None: + cast( + "_DeclMappedClassProtocol[Any]", self.cls + ).__declare_last__() + + if _get_immediate_cls_attr(self.cls, "__declare_first__"): + + @event.listens_for(Mapper, "before_configured") + def before_configured() -> None: + cast( + "_DeclMappedClassProtocol[Any]", self.cls + ).__declare_first__() + + def _cls_attr_override_checker( + self, cls: Type[_O] + ) -> Callable[[str, Any], bool]: + """Produce a function that checks if a class has overridden an + attribute, taking SQLAlchemy-enabled dataclass fields into account. 
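+
+        (for a plain class this is essentially a ``getattr(cls, key, obj)``
+        identity check; when ``__sa_dataclass_metadata_key__`` is in use,
+        dataclass ``Field`` metadata collected from the hierarchy is
+        consulted as well)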
+ + """ + + if self.allow_dataclass_fields: + sa_dataclass_metadata_key = _get_immediate_cls_attr( + cls, "__sa_dataclass_metadata_key__" + ) + else: + sa_dataclass_metadata_key = None + + if not sa_dataclass_metadata_key: + + def attribute_is_overridden(key: str, obj: Any) -> bool: + return getattr(cls, key, obj) is not obj + + else: + all_datacls_fields = { + f.name: f.metadata[sa_dataclass_metadata_key] + for f in util.dataclass_fields(cls) + if sa_dataclass_metadata_key in f.metadata + } + local_datacls_fields = { + f.name: f.metadata[sa_dataclass_metadata_key] + for f in util.local_dataclass_fields(cls) + if sa_dataclass_metadata_key in f.metadata + } + + absent = object() + + def attribute_is_overridden(key: str, obj: Any) -> bool: + if _is_declarative_props(obj): + obj = obj.fget + + # this function likely has some failure modes still if + # someone is doing a deep mixing of the same attribute + # name as plain Python attribute vs. dataclass field. + + ret = local_datacls_fields.get(key, absent) + if _is_declarative_props(ret): + ret = ret.fget + + if ret is obj: + return False + elif ret is not absent: + return True + + all_field = all_datacls_fields.get(key, absent) + + ret = getattr(cls, key, obj) + + if ret is obj: + return False + + # for dataclasses, this could be the + # 'default' of the field. so filter more specifically + # for an already-mapped InstrumentedAttribute + if ret is not absent and isinstance( + ret, InstrumentedAttribute + ): + return True + + if all_field is obj: + return False + elif all_field is not absent: + return True + + # can't find another attribute + return False + + return attribute_is_overridden + + _include_dunders = { + "__table__", + "__mapper_args__", + "__tablename__", + "__table_args__", + } + + _match_exclude_dunders = re.compile(r"^(?:_sa_|__)") + + def _cls_attr_resolver( + self, cls: Type[Any] + ) -> Callable[[], Iterable[Tuple[str, Any, Any, bool]]]: + """produce a function to iterate the "attributes" of a class + which we want to consider for mapping, adjusting for SQLAlchemy fields + embedded in dataclass fields. 
+ + """ + cls_annotations = util.get_annotations(cls) + + cls_vars = vars(cls) + + _include_dunders = self._include_dunders + _match_exclude_dunders = self._match_exclude_dunders + + names = [ + n + for n in util.merge_lists_w_ordering( + list(cls_vars), list(cls_annotations) + ) + if not _match_exclude_dunders.match(n) or n in _include_dunders + ] + + if self.allow_dataclass_fields: + sa_dataclass_metadata_key: Optional[str] = _get_immediate_cls_attr( + cls, "__sa_dataclass_metadata_key__" + ) + else: + sa_dataclass_metadata_key = None + + if not sa_dataclass_metadata_key: + + def local_attributes_for_class() -> ( + Iterable[Tuple[str, Any, Any, bool]] + ): + return ( + ( + name, + cls_vars.get(name), + cls_annotations.get(name), + False, + ) + for name in names + ) + + else: + dataclass_fields = { + field.name: field for field in util.local_dataclass_fields(cls) + } + + fixed_sa_dataclass_metadata_key = sa_dataclass_metadata_key + + def local_attributes_for_class() -> ( + Iterable[Tuple[str, Any, Any, bool]] + ): + for name in names: + field = dataclass_fields.get(name, None) + if field and sa_dataclass_metadata_key in field.metadata: + yield field.name, _as_dc_declaredattr( + field.metadata, fixed_sa_dataclass_metadata_key + ), cls_annotations.get(field.name), True + else: + yield name, cls_vars.get(name), cls_annotations.get( + name + ), False + + return local_attributes_for_class + + def _scan_attributes(self) -> None: + cls = self.cls + + cls_as_Decl = cast("_DeclMappedClassProtocol[Any]", cls) + + clsdict_view = self.clsdict_view + collected_attributes = self.collected_attributes + column_copies = self.column_copies + _include_dunders = self._include_dunders + mapper_args_fn = None + table_args = inherited_table_args = None + table_fn = None + tablename = None + fixed_table = "__table__" in clsdict_view + + attribute_is_overridden = self._cls_attr_override_checker(self.cls) + + bases = [] + + for base in cls.__mro__: + # collect bases and make sure standalone columns are copied + # to be the column they will ultimately be on the class, + # so that declared_attr functions use the right columns. + # need to do this all the way up the hierarchy first + # (see #8190) + + class_mapped = base is not cls and _is_supercls_for_inherits(base) + + local_attributes_for_class = self._cls_attr_resolver(base) + + if not class_mapped and base is not cls: + locally_collected_columns = self._produce_column_copies( + local_attributes_for_class, + attribute_is_overridden, + fixed_table, + base, + ) + else: + locally_collected_columns = {} + + bases.append( + ( + base, + class_mapped, + local_attributes_for_class, + locally_collected_columns, + ) + ) + + for ( + base, + class_mapped, + local_attributes_for_class, + locally_collected_columns, + ) in bases: + # this transfer can also take place as we scan each name + # for finer-grained control of how collected_attributes is + # populated, as this is what impacts column ordering. + # however it's simpler to get it out of the way here. + collected_attributes.update(locally_collected_columns) + + for ( + name, + obj, + annotation, + is_dataclass_field, + ) in local_attributes_for_class(): + if name in _include_dunders: + if name == "__mapper_args__": + check_decl = _check_declared_props_nocascade( + obj, name, cls + ) + if not mapper_args_fn and ( + not class_mapped or check_decl + ): + # don't even invoke __mapper_args__ until + # after we've determined everything about the + # mapped table. 
+ # make a copy of it so a class-level dictionary + # is not overwritten when we update column-based + # arguments. + def _mapper_args_fn() -> Dict[str, Any]: + return dict(cls_as_Decl.__mapper_args__) + + mapper_args_fn = _mapper_args_fn + + elif name == "__tablename__": + check_decl = _check_declared_props_nocascade( + obj, name, cls + ) + if not tablename and (not class_mapped or check_decl): + tablename = cls_as_Decl.__tablename__ + elif name == "__table__": + check_decl = _check_declared_props_nocascade( + obj, name, cls + ) + # if a @declared_attr using "__table__" is detected, + # wrap up a callable to look for "__table__" from + # the final concrete class when we set up a table. + # this was fixed by + # #11509, regression in 2.0 from version 1.4. + if check_decl and not table_fn: + # don't even invoke __table__ until we're ready + def _table_fn() -> FromClause: + return cls_as_Decl.__table__ + + table_fn = _table_fn + + elif name == "__table_args__": + check_decl = _check_declared_props_nocascade( + obj, name, cls + ) + if not table_args and (not class_mapped or check_decl): + table_args = cls_as_Decl.__table_args__ + if not isinstance( + table_args, (tuple, dict, type(None)) + ): + raise exc.ArgumentError( + "__table_args__ value must be a tuple, " + "dict, or None" + ) + if base is not cls: + inherited_table_args = True + else: + # any other dunder names; should not be here + # as we have tested for all four names in + # _include_dunders + assert False + elif class_mapped: + if _is_declarative_props(obj) and not obj._quiet: + util.warn( + "Regular (i.e. not __special__) " + "attribute '%s.%s' uses @declared_attr, " + "but owning class %s is mapped - " + "not applying to subclass %s." + % (base.__name__, name, base, cls) + ) + + continue + elif base is not cls: + # we're a mixin, abstract base, or something that is + # acting like that for now. + + if isinstance(obj, (Column, MappedColumn)): + # already copied columns to the mapped class. + continue + elif isinstance(obj, MapperProperty): + raise exc.InvalidRequestError( + "Mapper properties (i.e. deferred," + "column_property(), relationship(), etc.) must " + "be declared as @declared_attr callables " + "on declarative mixin classes. For dataclass " + "field() objects, use a lambda:" + ) + elif _is_declarative_props(obj): + # tried to get overloads to tell this to + # pylance, no luck + assert obj is not None + + if obj._cascading: + if name in clsdict_view: + # unfortunately, while we can use the user- + # defined attribute here to allow a clean + # override, if there's another + # subclass below then it still tries to use + # this. not sure if there is enough + # information here to add this as a feature + # later on. + util.warn( + "Attribute '%s' on class %s cannot be " + "processed due to " + "@declared_attr.cascading; " + "skipping" % (name, cls) + ) + collected_attributes[name] = column_copies[obj] = ( + ret + ) = obj.__get__(obj, cls) + setattr(cls, name, ret) + else: + if is_dataclass_field: + # access attribute using normal class access + # first, to see if it's been mapped on a + # superclass. note if the dataclasses.field() + # has "default", this value can be anything. + ret = getattr(cls, name, None) + + # so, if it's anything that's not ORM + # mapped, assume we should invoke the + # declared_attr + if not isinstance(ret, InspectionAttr): + ret = obj.fget() + else: + # access attribute using normal class access. 
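+                            # (i.e. via getattr(), so that a descriptor
+                            # installed by an already-mapped superclass is
+                            # invoked rather than bypassed)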
+ # if the declared attr already took place + # on a superclass that is mapped, then + # this is no longer a declared_attr, it will + # be the InstrumentedAttribute + ret = getattr(cls, name) + + # correct for proxies created from hybrid_property + # or similar. note there is no known case that + # produces nested proxies, so we are only + # looking one level deep right now. + + if ( + isinstance(ret, InspectionAttr) + and attr_is_internal_proxy(ret) + and not isinstance( + ret.original_property, MapperProperty + ) + ): + ret = ret.descriptor + + collected_attributes[name] = column_copies[obj] = ( + ret + ) + + if ( + isinstance(ret, (Column, MapperProperty)) + and ret.doc is None + ): + ret.doc = obj.__doc__ + + self._collect_annotation( + name, + obj._collect_return_annotation(), + base, + True, + obj, + ) + elif _is_mapped_annotation(annotation, cls, base): + # Mapped annotation without any object. + # product_column_copies should have handled this. + # if future support for other MapperProperty, + # then test if this name is already handled and + # otherwise proceed to generate. + if not fixed_table: + assert ( + name in collected_attributes + or attribute_is_overridden(name, None) + ) + continue + else: + # here, the attribute is some other kind of + # property that we assume is not part of the + # declarative mapping. however, check for some + # more common mistakes + self._warn_for_decl_attributes(base, name, obj) + elif is_dataclass_field and ( + name not in clsdict_view or clsdict_view[name] is not obj + ): + # here, we are definitely looking at the target class + # and not a superclass. this is currently a + # dataclass-only path. if the name is only + # a dataclass field and isn't in local cls.__dict__, + # put the object there. + # assert that the dataclass-enabled resolver agrees + # with what we are seeing + + assert not attribute_is_overridden(name, obj) + + if _is_declarative_props(obj): + obj = obj.fget() + + collected_attributes[name] = obj + self._collect_annotation( + name, annotation, base, False, obj + ) + else: + collected_annotation = self._collect_annotation( + name, annotation, base, None, obj + ) + is_mapped = ( + collected_annotation is not None + and collected_annotation.mapped_container is not None + ) + generated_obj = ( + collected_annotation.attr_value + if collected_annotation is not None + else obj + ) + if obj is None and not fixed_table and is_mapped: + collected_attributes[name] = ( + generated_obj + if generated_obj is not None + else MappedColumn() + ) + elif name in clsdict_view: + collected_attributes[name] = obj + # else if the name is not in the cls.__dict__, + # don't collect it as an attribute. + # we will see the annotation only, which is meaningful + # both for mapping and dataclasses setup + + if inherited_table_args and not tablename: + table_args = None + + self.table_args = table_args + self.tablename = tablename + self.mapper_args_fn = mapper_args_fn + self.table_fn = table_fn + + def _setup_dataclasses_transforms(self) -> None: + dataclass_setup_arguments = self.dataclass_setup_arguments + if not dataclass_setup_arguments: + return + + # can't use is_dataclass since it uses hasattr + if "__dataclass_fields__" in self.cls.__dict__: + raise exc.InvalidRequestError( + f"Class {self.cls} is already a dataclass; ensure that " + "base classes / decorator styles of establishing dataclasses " + "are not being mixed. 
" + "This can happen if a class that inherits from " + "'MappedAsDataclass', even indirectly, is been mapped with " + "'@registry.mapped_as_dataclass'" + ) + + warn_for_non_dc_attrs = collections.defaultdict(list) + + def _allow_dataclass_field( + key: str, originating_class: Type[Any] + ) -> bool: + if ( + originating_class is not self.cls + and "__dataclass_fields__" not in originating_class.__dict__ + ): + warn_for_non_dc_attrs[originating_class].append(key) + + return True + + manager = instrumentation.manager_of_class(self.cls) + assert manager is not None + + field_list = [ + _AttributeOptions._get_arguments_for_make_dataclass( + key, + anno, + mapped_container, + self.collected_attributes.get(key, _NoArg.NO_ARG), + ) + for key, anno, mapped_container in ( + ( + key, + mapped_anno if mapped_anno else raw_anno, + mapped_container, + ) + for key, ( + raw_anno, + mapped_container, + mapped_anno, + is_dc, + attr_value, + originating_module, + originating_class, + ) in self.collected_annotations.items() + if _allow_dataclass_field(key, originating_class) + and ( + key not in self.collected_attributes + # issue #9226; check for attributes that we've collected + # which are already instrumented, which we would assume + # mean we are in an ORM inheritance mapping and this + # attribute is already mapped on the superclass. Under + # no circumstance should any QueryableAttribute be sent to + # the dataclass() function; anything that's mapped should + # be Field and that's it + or not isinstance( + self.collected_attributes[key], QueryableAttribute + ) + ) + ) + ] + + if warn_for_non_dc_attrs: + for ( + originating_class, + non_dc_attrs, + ) in warn_for_non_dc_attrs.items(): + util.warn_deprecated( + f"When transforming {self.cls} to a dataclass, " + f"attribute(s) " + f"{', '.join(repr(key) for key in non_dc_attrs)} " + f"originates from superclass " + f"{originating_class}, which is not a dataclass. This " + f"usage is deprecated and will raise an error in " + f"SQLAlchemy 2.1. 
When declaring SQLAlchemy Declarative " + f"Dataclasses, ensure that all mixin classes and other " + f"superclasses which include attributes are also a " + f"subclass of MappedAsDataclass.", + "2.0", + code="dcmx", + ) + + annotations = {} + defaults = {} + for item in field_list: + if len(item) == 2: + name, tp = item + elif len(item) == 3: + name, tp, spec = item + defaults[name] = spec + else: + assert False + annotations[name] = tp + + for k, v in defaults.items(): + setattr(self.cls, k, v) + + self._apply_dataclasses_to_any_class( + dataclass_setup_arguments, self.cls, annotations + ) + + @classmethod + def _update_annotations_for_non_mapped_class( + cls, klass: Type[_O] + ) -> Mapping[str, _AnnotationScanType]: + cls_annotations = util.get_annotations(klass) + + new_anno = {} + for name, annotation in cls_annotations.items(): + if _is_mapped_annotation(annotation, klass, klass): + extracted = _extract_mapped_subtype( + annotation, + klass, + klass.__module__, + name, + type(None), + required=False, + is_dataclass_field=False, + expect_mapped=False, + ) + if extracted: + inner, _ = extracted + new_anno[name] = inner + else: + new_anno[name] = annotation + return new_anno + + @classmethod + def _apply_dataclasses_to_any_class( + cls, + dataclass_setup_arguments: _DataclassArguments, + klass: Type[_O], + use_annotations: Mapping[str, _AnnotationScanType], + ) -> None: + cls._assert_dc_arguments(dataclass_setup_arguments) + + dataclass_callable = dataclass_setup_arguments["dataclass_callable"] + if dataclass_callable is _NoArg.NO_ARG: + dataclass_callable = dataclasses.dataclass + + restored: Optional[Any] + + if use_annotations: + # apply constructed annotations that should look "normal" to a + # dataclasses callable, based on the fields present. This + # means remove the Mapped[] container and ensure all Field + # entries have an annotation + restored = getattr(klass, "__annotations__", None) + klass.__annotations__ = cast("Dict[str, Any]", use_annotations) + else: + restored = None + + try: + dataclass_callable( + klass, + **{ + k: v + for k, v in dataclass_setup_arguments.items() + if v is not _NoArg.NO_ARG and k != "dataclass_callable" + }, + ) + except (TypeError, ValueError) as ex: + raise exc.InvalidRequestError( + f"Python dataclasses error encountered when creating " + f"dataclass for {klass.__name__!r}: " + f"{ex!r}. 
Please refer to Python dataclasses " + "documentation for additional information.", + code="dcte", + ) from ex + finally: + # restore original annotations outside of the dataclasses + # process; for mixins and __abstract__ superclasses, SQLAlchemy + # Declarative will need to see the Mapped[] container inside the + # annotations in order to map subclasses + if use_annotations: + if restored is None: + del klass.__annotations__ + else: + klass.__annotations__ = restored + + @classmethod + def _assert_dc_arguments(cls, arguments: _DataclassArguments) -> None: + allowed = { + "init", + "repr", + "order", + "eq", + "unsafe_hash", + "kw_only", + "match_args", + "dataclass_callable", + } + disallowed_args = set(arguments).difference(allowed) + if disallowed_args: + msg = ", ".join(f"{arg!r}" for arg in sorted(disallowed_args)) + raise exc.ArgumentError( + f"Dataclass argument(s) {msg} are not accepted" + ) + + def _collect_annotation( + self, + name: str, + raw_annotation: _AnnotationScanType, + originating_class: Type[Any], + expect_mapped: Optional[bool], + attr_value: Any, + ) -> Optional[_CollectedAnnotation]: + if name in self.collected_annotations: + return self.collected_annotations[name] + + if raw_annotation is None: + return None + + is_dataclass = self.is_dataclass_prior_to_mapping + allow_unmapped = self.allow_unmapped_annotations + + if expect_mapped is None: + is_dataclass_field = isinstance(attr_value, dataclasses.Field) + expect_mapped = ( + not is_dataclass_field + and not allow_unmapped + and ( + attr_value is None + or isinstance(attr_value, _MappedAttribute) + ) + ) + else: + is_dataclass_field = False + + is_dataclass_field = False + extracted = _extract_mapped_subtype( + raw_annotation, + self.cls, + originating_class.__module__, + name, + type(attr_value), + required=False, + is_dataclass_field=is_dataclass_field, + expect_mapped=expect_mapped + and not is_dataclass, # self.allow_dataclass_fields, + ) + + if extracted is None: + # ClassVar can come out here + return None + + extracted_mapped_annotation, mapped_container = extracted + + if attr_value is None and not is_literal(extracted_mapped_annotation): + for elem in typing_get_args(extracted_mapped_annotation): + if isinstance(elem, str) or is_fwd_ref( + elem, check_generic=True + ): + elem = de_stringify_annotation( + self.cls, + elem, + originating_class.__module__, + include_generic=True, + ) + # look in Annotated[...] for an ORM construct, + # such as Annotated[int, mapped_column(primary_key=True)] + if isinstance(elem, _IntrospectsAnnotations): + attr_value = elem.found_in_pep593_annotated() + + self.collected_annotations[name] = ca = _CollectedAnnotation( + raw_annotation, + mapped_container, + extracted_mapped_annotation, + is_dataclass, + attr_value, + originating_class.__module__, + originating_class, + ) + return ca + + def _warn_for_decl_attributes( + self, cls: Type[Any], key: str, c: Any + ) -> None: + if isinstance(c, expression.ColumnElement): + util.warn( + f"Attribute '{key}' on class {cls} appears to " + "be a non-schema SQLAlchemy expression " + "object; this won't be part of the declarative mapping. " + "To map arbitrary expressions, use ``column_property()`` " + "or a similar function such as ``deferred()``, " + "``query_expression()`` etc. 
" + ) + + def _produce_column_copies( + self, + attributes_for_class: Callable[ + [], Iterable[Tuple[str, Any, Any, bool]] + ], + attribute_is_overridden: Callable[[str, Any], bool], + fixed_table: bool, + originating_class: Type[Any], + ) -> Dict[str, Union[Column[Any], MappedColumn[Any]]]: + cls = self.cls + dict_ = self.clsdict_view + locally_collected_attributes = {} + column_copies = self.column_copies + # copy mixin columns to the mapped class + + for name, obj, annotation, is_dataclass in attributes_for_class(): + if ( + not fixed_table + and obj is None + and _is_mapped_annotation(annotation, cls, originating_class) + ): + # obj is None means this is the annotation only path + + if attribute_is_overridden(name, obj): + # perform same "overridden" check as we do for + # Column/MappedColumn, this is how a mixin col is not + # applied to an inherited subclass that does not have + # the mixin. the anno-only path added here for + # #9564 + continue + + collected_annotation = self._collect_annotation( + name, annotation, originating_class, True, obj + ) + obj = ( + collected_annotation.attr_value + if collected_annotation is not None + else obj + ) + if obj is None: + obj = MappedColumn() + + locally_collected_attributes[name] = obj + setattr(cls, name, obj) + + elif isinstance(obj, (Column, MappedColumn)): + if attribute_is_overridden(name, obj): + # if column has been overridden + # (like by the InstrumentedAttribute of the + # superclass), skip. don't collect the annotation + # either (issue #8718) + continue + + collected_annotation = self._collect_annotation( + name, annotation, originating_class, True, obj + ) + obj = ( + collected_annotation.attr_value + if collected_annotation is not None + else obj + ) + + if name not in dict_ and not ( + "__table__" in dict_ + and (getattr(obj, "name", None) or name) + in dict_["__table__"].c + ): + if obj.foreign_keys: + for fk in obj.foreign_keys: + if ( + fk._table_column is not None + and fk._table_column.table is None + ): + raise exc.InvalidRequestError( + "Columns with foreign keys to " + "non-table-bound " + "columns must be declared as " + "@declared_attr callables " + "on declarative mixin classes. " + "For dataclass " + "field() objects, use a lambda:." + ) + + column_copies[obj] = copy_ = obj._copy() + + locally_collected_attributes[name] = copy_ + setattr(cls, name, copy_) + + return locally_collected_attributes + + def _extract_mappable_attributes(self) -> None: + cls = self.cls + collected_attributes = self.collected_attributes + + our_stuff = self.properties + + _include_dunders = self._include_dunders + + late_mapped = _get_immediate_cls_attr( + cls, "_sa_decl_prepare_nocascade", strict=True + ) + + allow_unmapped_annotations = self.allow_unmapped_annotations + expect_annotations_wo_mapped = ( + allow_unmapped_annotations or self.is_dataclass_prior_to_mapping + ) + + look_for_dataclass_things = bool(self.dataclass_setup_arguments) + + for k in list(collected_attributes): + if k in _include_dunders: + continue + + value = collected_attributes[k] + + if _is_declarative_props(value): + # @declared_attr in collected_attributes only occurs here for a + # @declared_attr that's directly on the mapped class; + # for a mixin, these have already been evaluated + if value._cascading: + util.warn( + "Use of @declared_attr.cascading only applies to " + "Declarative 'mixin' and 'abstract' classes. 
" + "Currently, this flag is ignored on mapped class " + "%s" % self.cls + ) + + value = getattr(cls, k) + + elif ( + isinstance(value, QueryableAttribute) + and value.class_ is not cls + and value.key != k + ): + # detect a QueryableAttribute that's already mapped being + # assigned elsewhere in userland, turn into a synonym() + value = SynonymProperty(value.key) + setattr(cls, k, value) + + if ( + isinstance(value, tuple) + and len(value) == 1 + and isinstance(value[0], (Column, _MappedAttribute)) + ): + util.warn( + "Ignoring declarative-like tuple value of attribute " + "'%s': possibly a copy-and-paste error with a comma " + "accidentally placed at the end of the line?" % k + ) + continue + elif look_for_dataclass_things and isinstance( + value, dataclasses.Field + ): + # we collected a dataclass Field; dataclasses would have + # set up the correct state on the class + continue + elif not isinstance(value, (Column, _DCAttributeOptions)): + # using @declared_attr for some object that + # isn't Column/MapperProperty/_DCAttributeOptions; remove + # from the clsdict_view + # and place the evaluated value onto the class. + collected_attributes.pop(k) + self._warn_for_decl_attributes(cls, k, value) + if not late_mapped: + setattr(cls, k, value) + continue + # we expect to see the name 'metadata' in some valid cases; + # however at this point we see it's assigned to something trying + # to be mapped, so raise for that. + # TODO: should "registry" here be also? might be too late + # to change that now (2.0 betas) + elif k in ("metadata",): + raise exc.InvalidRequestError( + f"Attribute name '{k}' is reserved when using the " + "Declarative API." + ) + elif isinstance(value, Column): + _undefer_column_name( + k, self.column_copies.get(value, value) # type: ignore + ) + else: + if isinstance(value, _IntrospectsAnnotations): + ( + annotation, + mapped_container, + extracted_mapped_annotation, + is_dataclass, + attr_value, + originating_module, + originating_class, + ) = self.collected_annotations.get( + k, (None, None, None, False, None, None, None) + ) + + # issue #8692 - don't do any annotation interpretation if + # an annotation were present and a container such as + # Mapped[] etc. were not used. If annotation is None, + # do declarative_scan so that the property can raise + # for required + if ( + mapped_container is not None + or annotation is None + # issue #10516: need to do declarative_scan even with + # a non-Mapped annotation if we are doing + # __allow_unmapped__, for things like col.name + # assignment + or allow_unmapped_annotations + ): + try: + value.declarative_scan( + self, + self.registry, + cls, + originating_module, + k, + mapped_container, + annotation, + extracted_mapped_annotation, + is_dataclass, + ) + except NameError as ne: + raise exc.ArgumentError( + f"Could not resolve all types within mapped " + f'annotation: "{annotation}". Ensure all ' + f"types are written correctly and are " + f"imported within the module in use." + ) from ne + else: + # assert that we were expecting annotations + # without Mapped[] were going to be passed. + # otherwise an error should have been raised + # by util._extract_mapped_subtype before we got here. 
+ assert expect_annotations_wo_mapped + + if isinstance(value, _DCAttributeOptions): + if ( + value._has_dataclass_arguments + and not look_for_dataclass_things + ): + if isinstance(value, MapperProperty): + argnames = [ + "init", + "default_factory", + "repr", + "default", + ] + else: + argnames = ["init", "default_factory", "repr"] + + args = { + a + for a in argnames + if getattr( + value._attribute_options, f"dataclasses_{a}" + ) + is not _NoArg.NO_ARG + } + + raise exc.ArgumentError( + f"Attribute '{k}' on class {cls} includes " + f"dataclasses argument(s): " + f"{', '.join(sorted(repr(a) for a in args))} but " + f"class does not specify " + "SQLAlchemy native dataclass configuration." + ) + + if not isinstance(value, (MapperProperty, _MapsColumns)): + # filter for _DCAttributeOptions objects that aren't + # MapperProperty / mapped_column(). Currently this + # includes AssociationProxy. pop it from the things + # we're going to map and set it up as a descriptor + # on the class. + collected_attributes.pop(k) + + # Assoc Prox (or other descriptor object that may + # use _DCAttributeOptions) is usually here, except if + # 1. we're a + # dataclass, dataclasses would have removed the + # attr here or 2. assoc proxy is coming from a + # superclass, we want it to be direct here so it + # tracks state or 3. assoc prox comes from + # declared_attr, uncommon case + setattr(cls, k, value) + continue + + our_stuff[k] = value + + def _extract_declared_columns(self) -> None: + our_stuff = self.properties + + # extract columns from the class dict + declared_columns = self.declared_columns + column_ordering = self.column_ordering + name_to_prop_key = collections.defaultdict(set) + + for key, c in list(our_stuff.items()): + if isinstance(c, _MapsColumns): + mp_to_assign = c.mapper_property_to_assign + if mp_to_assign: + our_stuff[key] = mp_to_assign + else: + # if no mapper property to assign, this currently means + # this is a MappedColumn that will produce a Column for us + del our_stuff[key] + + for col, sort_order in c.columns_to_assign: + if not isinstance(c, CompositeProperty): + name_to_prop_key[col.name].add(key) + declared_columns.add(col) + + # we would assert this, however we want the below + # warning to take effect instead. See #9630 + # assert col not in column_ordering + + column_ordering[col] = sort_order + + # if this is a MappedColumn and the attribute key we + # have is not what the column has for its key, map the + # Column explicitly under the attribute key name. + # otherwise, Mapper will map it under the column key. + if mp_to_assign is None and key != col.key: + our_stuff[key] = col + elif isinstance(c, Column): + # undefer previously occurred here, and now occurs earlier. + # ensure every column we get here has been named + assert c.name is not None + name_to_prop_key[c.name].add(key) + declared_columns.add(c) + # if the column is the same name as the key, + # remove it from the explicit properties dict. + # the normal rules for assigning column-based properties + # will take over, including precedence of columns + # in multi-column ColumnProperties. + if key == c.key: + del our_stuff[key] + + for name, keys in name_to_prop_key.items(): + if len(keys) > 1: + util.warn( + "On class %r, Column object %r named " + "directly multiple times, " + "only one will be used: %s. 
" + "Consider using orm.synonym instead" + % (self.classname, name, (", ".join(sorted(keys)))) + ) + + def _setup_table(self, table: Optional[FromClause] = None) -> None: + cls = self.cls + cls_as_Decl = cast("MappedClassProtocol[Any]", cls) + + tablename = self.tablename + table_args = self.table_args + clsdict_view = self.clsdict_view + declared_columns = self.declared_columns + column_ordering = self.column_ordering + + manager = attributes.manager_of_class(cls) + + if ( + self.table_fn is None + and "__table__" not in clsdict_view + and table is None + ): + if hasattr(cls, "__table_cls__"): + table_cls = cast( + Type[Table], + util.unbound_method_to_callable(cls.__table_cls__), # type: ignore # noqa: E501 + ) + else: + table_cls = Table + + if tablename is not None: + args: Tuple[Any, ...] = () + table_kw: Dict[str, Any] = {} + + if table_args: + if isinstance(table_args, dict): + table_kw = table_args + elif isinstance(table_args, tuple): + if isinstance(table_args[-1], dict): + args, table_kw = table_args[0:-1], table_args[-1] + else: + args = table_args + + autoload_with = clsdict_view.get("__autoload_with__") + if autoload_with: + table_kw["autoload_with"] = autoload_with + + autoload = clsdict_view.get("__autoload__") + if autoload: + table_kw["autoload"] = True + + sorted_columns = sorted( + declared_columns, + key=lambda c: column_ordering.get(c, 0), + ) + table = self.set_cls_attribute( + "__table__", + table_cls( + tablename, + self._metadata_for_cls(manager), + *sorted_columns, + *args, + **table_kw, + ), + ) + else: + if table is None: + if self.table_fn: + table = self.set_cls_attribute( + "__table__", self.table_fn() + ) + else: + table = cls_as_Decl.__table__ + if declared_columns: + for c in declared_columns: + if not table.c.contains_column(c): + raise exc.ArgumentError( + "Can't add additional column %r when " + "specifying __table__" % c.key + ) + + self.local_table = table + + def _metadata_for_cls(self, manager: ClassManager[Any]) -> MetaData: + meta: Optional[MetaData] = getattr(self.cls, "metadata", None) + if meta is not None: + return meta + else: + return manager.registry.metadata + + def _setup_inheriting_mapper(self, mapper_kw: _MapperKwArgs) -> None: + cls = self.cls + + inherits = mapper_kw.get("inherits", None) + + if inherits is None: + # since we search for classical mappings now, search for + # multiple mapped bases as well and raise an error. + inherits_search = [] + for base_ in cls.__bases__: + c = _resolve_for_abstract_or_classical(base_) + if c is None: + continue + + if _is_supercls_for_inherits(c) and c not in inherits_search: + inherits_search.append(c) + + if inherits_search: + if len(inherits_search) > 1: + raise exc.InvalidRequestError( + "Class %s has multiple mapped bases: %r" + % (cls, inherits_search) + ) + inherits = inherits_search[0] + elif isinstance(inherits, Mapper): + inherits = inherits.class_ + + self.inherits = inherits + + clsdict_view = self.clsdict_view + if "__table__" not in clsdict_view and self.tablename is None: + self.single = True + + def _setup_inheriting_columns(self, mapper_kw: _MapperKwArgs) -> None: + table = self.local_table + cls = self.cls + table_args = self.table_args + declared_columns = self.declared_columns + + if ( + table is None + and self.inherits is None + and not _get_immediate_cls_attr(cls, "__no_table__") + ): + raise exc.InvalidRequestError( + "Class %r does not have a __table__ or __tablename__ " + "specified and does not inherit from an existing " + "table-mapped class." 
% cls + ) + elif self.inherits: + inherited_mapper_or_config = _declared_mapping_info(self.inherits) + assert inherited_mapper_or_config is not None + inherited_table = inherited_mapper_or_config.local_table + inherited_persist_selectable = ( + inherited_mapper_or_config.persist_selectable + ) + + if table is None: + # single table inheritance. + # ensure no table args + if table_args: + raise exc.ArgumentError( + "Can't place __table_args__ on an inherited class " + "with no table." + ) + + # add any columns declared here to the inherited table. + if declared_columns and not isinstance(inherited_table, Table): + raise exc.ArgumentError( + f"Can't declare columns on single-table-inherited " + f"subclass {self.cls}; superclass {self.inherits} " + "is not mapped to a Table" + ) + + for col in declared_columns: + assert inherited_table is not None + if col.name in inherited_table.c: + if inherited_table.c[col.name] is col: + continue + raise exc.ArgumentError( + f"Column '{col}' on class {cls.__name__} " + f"conflicts with existing column " + f"'{inherited_table.c[col.name]}'. If using " + f"Declarative, consider using the " + "use_existing_column parameter of mapped_column() " + "to resolve conflicts." + ) + if col.primary_key: + raise exc.ArgumentError( + "Can't place primary key columns on an inherited " + "class with no table." + ) + + if TYPE_CHECKING: + assert isinstance(inherited_table, Table) + + inherited_table.append_column(col) + if ( + inherited_persist_selectable is not None + and inherited_persist_selectable is not inherited_table + ): + inherited_persist_selectable._refresh_for_new_column( + col + ) + + def _prepare_mapper_arguments(self, mapper_kw: _MapperKwArgs) -> None: + properties = self.properties + + if self.mapper_args_fn: + mapper_args = self.mapper_args_fn() + else: + mapper_args = {} + + if mapper_kw: + mapper_args.update(mapper_kw) + + if "properties" in mapper_args: + properties = dict(properties) + properties.update(mapper_args["properties"]) + + # make sure that column copies are used rather + # than the original columns from any mixins + for k in ("version_id_col", "polymorphic_on"): + if k in mapper_args: + v = mapper_args[k] + mapper_args[k] = self.column_copies.get(v, v) + + if "primary_key" in mapper_args: + mapper_args["primary_key"] = [ + self.column_copies.get(v, v) + for v in util.to_list(mapper_args["primary_key"]) + ] + + if "inherits" in mapper_args: + inherits_arg = mapper_args["inherits"] + if isinstance(inherits_arg, Mapper): + inherits_arg = inherits_arg.class_ + + if inherits_arg is not self.inherits: + raise exc.InvalidRequestError( + "mapper inherits argument given for non-inheriting " + "class %s" % (mapper_args["inherits"]) + ) + + if self.inherits: + mapper_args["inherits"] = self.inherits + + if self.inherits and not mapper_args.get("concrete", False): + # note the superclass is expected to have a Mapper assigned and + # not be a deferred config, as this is called within map() + inherited_mapper = class_mapper(self.inherits, False) + inherited_table = inherited_mapper.local_table + + # single or joined inheritance + # exclude any cols on the inherited table which are + # not mapped on the parent class, to avoid + # mapping columns specific to sibling/nephew classes + if "exclude_properties" not in mapper_args: + mapper_args["exclude_properties"] = exclude_properties = { + c.key + for c in inherited_table.c + if c not in inherited_mapper._columntoproperty + }.union(inherited_mapper.exclude_properties or ()) + 
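# i.e. columns declared locally on this subclass should stay
+                # mapped even though their names also appear on the
+                # inherited table
+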
exclude_properties.difference_update( + [c.key for c in self.declared_columns] + ) + + # look through columns in the current mapper that + # are keyed to a propname different than the colname + # (if names were the same, we'd have popped it out above, + # in which case the mapper makes this combination). + # See if the superclass has a similar column property. + # If so, join them together. + for k, col in list(properties.items()): + if not isinstance(col, expression.ColumnElement): + continue + if k in inherited_mapper._props: + p = inherited_mapper._props[k] + if isinstance(p, ColumnProperty): + # note here we place the subclass column + # first. See [ticket:1892] for background. + properties[k] = [col] + p.columns + result_mapper_args = mapper_args.copy() + result_mapper_args["properties"] = properties + self.mapper_args = result_mapper_args + + def map(self, mapper_kw: _MapperKwArgs = util.EMPTY_DICT) -> Mapper[Any]: + self._prepare_mapper_arguments(mapper_kw) + if hasattr(self.cls, "__mapper_cls__"): + mapper_cls = cast( + "Type[Mapper[Any]]", + util.unbound_method_to_callable( + self.cls.__mapper_cls__ # type: ignore + ), + ) + else: + mapper_cls = Mapper + + return self.set_cls_attribute( + "__mapper__", + mapper_cls(self.cls, self.local_table, **self.mapper_args), + ) + + +@util.preload_module("sqlalchemy.orm.decl_api") +def _as_dc_declaredattr( + field_metadata: Mapping[str, Any], sa_dataclass_metadata_key: str +) -> Any: + # wrap lambdas inside dataclass fields inside an ad-hoc declared_attr. + # we can't write it because field.metadata is immutable :( so we have + # to go through extra trouble to compare these + decl_api = util.preloaded.orm_decl_api + obj = field_metadata[sa_dataclass_metadata_key] + if callable(obj) and not isinstance(obj, decl_api.declared_attr): + return decl_api.declared_attr(obj) + else: + return obj + + +class _DeferredMapperConfig(_ClassScanMapperConfig): + _cls: weakref.ref[Type[Any]] + + is_deferred = True + + _configs: util.OrderedDict[ + weakref.ref[Type[Any]], _DeferredMapperConfig + ] = util.OrderedDict() + + def _early_mapping(self, mapper_kw: _MapperKwArgs) -> None: + pass + + # mypy disallows plain property override of variable + @property # type: ignore + def cls(self) -> Type[Any]: + return self._cls() # type: ignore + + @cls.setter + def cls(self, class_: Type[Any]) -> None: + self._cls = weakref.ref(class_, self._remove_config_cls) + self._configs[self._cls] = self + + @classmethod + def _remove_config_cls(cls, ref: weakref.ref[Type[Any]]) -> None: + cls._configs.pop(ref, None) + + @classmethod + def has_cls(cls, class_: Type[Any]) -> bool: + # 2.6 fails on weakref if class_ is an old style class + return isinstance(class_, type) and weakref.ref(class_) in cls._configs + + @classmethod + def raise_unmapped_for_cls(cls, class_: Type[Any]) -> NoReturn: + if hasattr(class_, "_sa_raise_deferred_config"): + class_._sa_raise_deferred_config() + + raise orm_exc.UnmappedClassError( + class_, + msg=( + f"Class {orm_exc._safe_cls_name(class_)} has a deferred " + "mapping on it. It is not yet usable as a mapped class." 
+ ), + ) + + @classmethod + def config_for_cls(cls, class_: Type[Any]) -> _DeferredMapperConfig: + return cls._configs[weakref.ref(class_)] + + @classmethod + def classes_for_base( + cls, base_cls: Type[Any], sort: bool = True + ) -> List[_DeferredMapperConfig]: + classes_for_base = [ + m + for m, cls_ in [(m, m.cls) for m in cls._configs.values()] + if cls_ is not None and issubclass(cls_, base_cls) + ] + + if not sort: + return classes_for_base + + all_m_by_cls = {m.cls: m for m in classes_for_base} + + tuples: List[Tuple[_DeferredMapperConfig, _DeferredMapperConfig]] = [] + for m_cls in all_m_by_cls: + tuples.extend( + (all_m_by_cls[base_cls], all_m_by_cls[m_cls]) + for base_cls in m_cls.__bases__ + if base_cls in all_m_by_cls + ) + return list(topological.sort(tuples, classes_for_base)) + + def map(self, mapper_kw: _MapperKwArgs = util.EMPTY_DICT) -> Mapper[Any]: + self._configs.pop(self._cls, None) + return super().map(mapper_kw) + + +def _add_attribute( + cls: Type[Any], key: str, value: MapperProperty[Any] +) -> None: + """add an attribute to an existing declarative class. + + This runs through the logic to determine MapperProperty, + adds it to the Mapper, adds a column to the mapped Table, etc. + + """ + + if "__mapper__" in cls.__dict__: + mapped_cls = cast("MappedClassProtocol[Any]", cls) + + def _table_or_raise(mc: MappedClassProtocol[Any]) -> Table: + if isinstance(mc.__table__, Table): + return mc.__table__ + raise exc.InvalidRequestError( + f"Cannot add a new attribute to mapped class {mc.__name__!r} " + "because it's not mapped against a table." + ) + + if isinstance(value, Column): + _undefer_column_name(key, value) + _table_or_raise(mapped_cls).append_column( + value, replace_existing=True + ) + mapped_cls.__mapper__.add_property(key, value) + elif isinstance(value, _MapsColumns): + mp = value.mapper_property_to_assign + for col, _ in value.columns_to_assign: + _undefer_column_name(key, col) + _table_or_raise(mapped_cls).append_column( + col, replace_existing=True + ) + if not mp: + mapped_cls.__mapper__.add_property(key, col) + if mp: + mapped_cls.__mapper__.add_property(key, mp) + elif isinstance(value, MapperProperty): + mapped_cls.__mapper__.add_property(key, value) + elif isinstance(value, QueryableAttribute) and value.key != key: + # detect a QueryableAttribute that's already mapped being + # assigned elsewhere in userland, turn into a synonym() + value = SynonymProperty(value.key) + mapped_cls.__mapper__.add_property(key, value) + else: + type.__setattr__(cls, key, value) + mapped_cls.__mapper__._expire_memoizations() + else: + type.__setattr__(cls, key, value) + + +def _del_attribute(cls: Type[Any], key: str) -> None: + if ( + "__mapper__" in cls.__dict__ + and key in cls.__dict__ + and not cast( + "MappedClassProtocol[Any]", cls + ).__mapper__._dispose_called + ): + value = cls.__dict__[key] + if isinstance( + value, (Column, _MapsColumns, MapperProperty, QueryableAttribute) + ): + raise NotImplementedError( + "Can't un-map individual mapped attributes on a mapped class." + ) + else: + type.__delattr__(cls, key) + cast( + "MappedClassProtocol[Any]", cls + ).__mapper__._expire_memoizations() + else: + type.__delattr__(cls, key) + + +def _declarative_constructor(self: Any, **kwargs: Any) -> None: + """A simple constructor that allows initialization from kwargs. + + Sets attributes on the constructed instance using the names and + values in ``kwargs``. + + Only keys that are present as + attributes of the instance's class are allowed. 
These could be, + for example, any mapped columns or relationships. + """ + cls_ = type(self) + for k in kwargs: + if not hasattr(cls_, k): + raise TypeError( + "%r is an invalid keyword argument for %s" % (k, cls_.__name__) + ) + setattr(self, k, kwargs[k]) + + +_declarative_constructor.__name__ = "__init__" + + +def _undefer_column_name(key: str, column: Column[Any]) -> None: + if column.key is None: + column.key = key + if column.name is None: + column.name = key diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/dependency.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/dependency.py new file mode 100644 index 00000000..71c06fbe --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/dependency.py @@ -0,0 +1,1304 @@ +# orm/dependency.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +"""Relationship dependencies. + +""" + +from __future__ import annotations + +from . import attributes +from . import exc +from . import sync +from . import unitofwork +from . import util as mapperutil +from .interfaces import MANYTOMANY +from .interfaces import MANYTOONE +from .interfaces import ONETOMANY +from .. import exc as sa_exc +from .. import sql +from .. import util + + +class DependencyProcessor: + def __init__(self, prop): + self.prop = prop + self.cascade = prop.cascade + self.mapper = prop.mapper + self.parent = prop.parent + self.secondary = prop.secondary + self.direction = prop.direction + self.post_update = prop.post_update + self.passive_deletes = prop.passive_deletes + self.passive_updates = prop.passive_updates + self.enable_typechecks = prop.enable_typechecks + if self.passive_deletes: + self._passive_delete_flag = attributes.PASSIVE_NO_INITIALIZE + else: + self._passive_delete_flag = attributes.PASSIVE_OFF + if self.passive_updates: + self._passive_update_flag = attributes.PASSIVE_NO_INITIALIZE + else: + self._passive_update_flag = attributes.PASSIVE_OFF + + self.sort_key = "%s_%s" % (self.parent._sort_key, prop.key) + self.key = prop.key + if not self.prop.synchronize_pairs: + raise sa_exc.ArgumentError( + "Can't build a DependencyProcessor for relationship %s. " + "No target attributes to populate between parent and " + "child are present" % self.prop + ) + + @classmethod + def from_relationship(cls, prop): + return _direction_to_processor[prop.direction](prop) + + def hasparent(self, state): + """return True if the given object instance has a parent, + according to the ``InstrumentedAttribute`` handled by this + ``DependencyProcessor``. + + """ + return self.parent.class_manager.get_impl(self.key).hasparent(state) + + def per_property_preprocessors(self, uow): + """establish actions and dependencies related to a flush. + + These actions will operate on all relevant states in + the aggregate. 
+ + """ + uow.register_preprocessor(self, True) + + def per_property_flush_actions(self, uow): + after_save = unitofwork.ProcessAll(uow, self, False, True) + before_delete = unitofwork.ProcessAll(uow, self, True, True) + + parent_saves = unitofwork.SaveUpdateAll( + uow, self.parent.primary_base_mapper + ) + child_saves = unitofwork.SaveUpdateAll( + uow, self.mapper.primary_base_mapper + ) + + parent_deletes = unitofwork.DeleteAll( + uow, self.parent.primary_base_mapper + ) + child_deletes = unitofwork.DeleteAll( + uow, self.mapper.primary_base_mapper + ) + + self.per_property_dependencies( + uow, + parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, + before_delete, + ) + + def per_state_flush_actions(self, uow, states, isdelete): + """establish actions and dependencies related to a flush. + + These actions will operate on all relevant states + individually. This occurs only if there are cycles + in the 'aggregated' version of events. + + """ + + child_base_mapper = self.mapper.primary_base_mapper + child_saves = unitofwork.SaveUpdateAll(uow, child_base_mapper) + child_deletes = unitofwork.DeleteAll(uow, child_base_mapper) + + # locate and disable the aggregate processors + # for this dependency + + if isdelete: + before_delete = unitofwork.ProcessAll(uow, self, True, True) + before_delete.disabled = True + else: + after_save = unitofwork.ProcessAll(uow, self, False, True) + after_save.disabled = True + + # check if the "child" side is part of the cycle + + if child_saves not in uow.cycles: + # based on the current dependencies we use, the saves/ + # deletes should always be in the 'cycles' collection + # together. if this changes, we will have to break up + # this method a bit more. + assert child_deletes not in uow.cycles + + # child side is not part of the cycle, so we will link per-state + # actions to the aggregate "saves", "deletes" actions + child_actions = [(child_saves, False), (child_deletes, True)] + child_in_cycles = False + else: + child_in_cycles = True + + # check if the "parent" side is part of the cycle + if not isdelete: + parent_saves = unitofwork.SaveUpdateAll( + uow, self.parent.base_mapper + ) + parent_deletes = before_delete = None + if parent_saves in uow.cycles: + parent_in_cycles = True + else: + parent_deletes = unitofwork.DeleteAll(uow, self.parent.base_mapper) + parent_saves = after_save = None + if parent_deletes in uow.cycles: + parent_in_cycles = True + + # now create actions /dependencies for each state. + + for state in states: + # detect if there's anything changed or loaded + # by a preprocessor on this state/attribute. In the + # case of deletes we may try to load missing items here as well. 
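+            # Illustrative sketch, assuming a hypothetical self-referential
+            # model (not part of this module): this per-state path runs only
+            # when the aggregated dependency graph contains a cycle, the
+            # classic trigger being a self-referential relationship:
+            #
+            #     from typing import List, Optional
+            #     from sqlalchemy import ForeignKey
+            #     from sqlalchemy.orm import Mapped, mapped_column, relationship
+            #
+            #     class Node(Base):
+            #         __tablename__ = "node"
+            #         id: Mapped[int] = mapped_column(primary_key=True)
+            #         parent_id: Mapped[Optional[int]] = mapped_column(
+            #             ForeignKey("node.id")
+            #         )
+            #         children: Mapped[List["Node"]] = relationship()
+            #
+            # A graph of interlinked Node rows has no single mapper-wide
+            # ordering, so each InstanceState gets its own save/delete
+            # action here.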
+ sum_ = state.manager[self.key].impl.get_all_pending( + state, + state.dict, + ( + self._passive_delete_flag + if isdelete + else attributes.PASSIVE_NO_INITIALIZE + ), + ) + + if not sum_: + continue + + if isdelete: + before_delete = unitofwork.ProcessState(uow, self, True, state) + if parent_in_cycles: + parent_deletes = unitofwork.DeleteState(uow, state) + else: + after_save = unitofwork.ProcessState(uow, self, False, state) + if parent_in_cycles: + parent_saves = unitofwork.SaveUpdateState(uow, state) + + if child_in_cycles: + child_actions = [] + for child_state, child in sum_: + if child_state not in uow.states: + child_action = (None, None) + else: + (deleted, listonly) = uow.states[child_state] + if deleted: + child_action = ( + unitofwork.DeleteState(uow, child_state), + True, + ) + else: + child_action = ( + unitofwork.SaveUpdateState(uow, child_state), + False, + ) + child_actions.append(child_action) + + # establish dependencies between our possibly per-state + # parent action and our possibly per-state child action. + for child_action, childisdelete in child_actions: + self.per_state_dependencies( + uow, + parent_saves, + parent_deletes, + child_action, + after_save, + before_delete, + isdelete, + childisdelete, + ) + + def presort_deletes(self, uowcommit, states): + return False + + def presort_saves(self, uowcommit, states): + return False + + def process_deletes(self, uowcommit, states): + pass + + def process_saves(self, uowcommit, states): + pass + + def prop_has_changes(self, uowcommit, states, isdelete): + if not isdelete or self.passive_deletes: + passive = ( + attributes.PASSIVE_NO_INITIALIZE + | attributes.INCLUDE_PENDING_MUTATIONS + ) + elif self.direction is MANYTOONE: + # here, we were hoping to optimize having to fetch many-to-one + # for history and ignore it, if there's no further cascades + # to take place. however there are too many less common conditions + # that still take place and tests in test_relationships / + # test_cascade etc. will still fail. + passive = attributes.PASSIVE_NO_FETCH_RELATED + else: + passive = ( + attributes.PASSIVE_OFF | attributes.INCLUDE_PENDING_MUTATIONS + ) + + for s in states: + # TODO: add a high speed method + # to InstanceState which returns: attribute + # has a non-None value, or had one + history = uowcommit.get_attribute_history(s, self.key, passive) + if history and not history.empty(): + return True + else: + return ( + states + and not self.prop._is_self_referential + and self.mapper in uowcommit.mappers + ) + + def _verify_canload(self, state): + if self.prop.uselist and state is None: + raise exc.FlushError( + "Can't flush None value found in " + "collection %s" % (self.prop,) + ) + elif state is not None and not self.mapper._canload( + state, allow_subtypes=not self.enable_typechecks + ): + if self.mapper._canload(state, allow_subtypes=True): + raise exc.FlushError( + "Attempting to flush an item of type " + "%(x)s as a member of collection " + '"%(y)s". Expected an object of type ' + "%(z)s or a polymorphic subclass of " + "this type. If %(x)s is a subclass of " + '%(z)s, configure mapper "%(zm)s" to ' + "load this subtype polymorphically, or " + "set enable_typechecks=False to allow " + "any subtype to be accepted for flush. " + % { + "x": state.class_, + "y": self.prop, + "z": self.mapper.class_, + "zm": self.mapper, + } + ) + else: + raise exc.FlushError( + "Attempting to flush an item of type " + "%(x)s as a member of collection " + '"%(y)s". 
Expected an object of type ' + "%(z)s or a polymorphic subclass of " + "this type." + % { + "x": state.class_, + "y": self.prop, + "z": self.mapper.class_, + } + ) + + def _synchronize(self, state, child, associationrow, clearkeys, uowcommit): + raise NotImplementedError() + + def _get_reversed_processed_set(self, uow): + if not self.prop._reverse_property: + return None + + process_key = tuple( + sorted([self.key] + [p.key for p in self.prop._reverse_property]) + ) + return uow.memo(("reverse_key", process_key), set) + + def _post_update(self, state, uowcommit, related, is_m2o_delete=False): + for x in related: + if not is_m2o_delete or x is not None: + uowcommit.register_post_update( + state, [r for l, r in self.prop.synchronize_pairs] + ) + break + + def _pks_changed(self, uowcommit, state): + raise NotImplementedError() + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, self.prop) + + +class OneToManyDP(DependencyProcessor): + def per_property_dependencies( + self, + uow, + parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, + before_delete, + ): + if self.post_update: + child_post_updates = unitofwork.PostUpdateAll( + uow, self.mapper.primary_base_mapper, False + ) + child_pre_updates = unitofwork.PostUpdateAll( + uow, self.mapper.primary_base_mapper, True + ) + + uow.dependencies.update( + [ + (child_saves, after_save), + (parent_saves, after_save), + (after_save, child_post_updates), + (before_delete, child_pre_updates), + (child_pre_updates, parent_deletes), + (child_pre_updates, child_deletes), + ] + ) + else: + uow.dependencies.update( + [ + (parent_saves, after_save), + (after_save, child_saves), + (after_save, child_deletes), + (child_saves, parent_deletes), + (child_deletes, parent_deletes), + (before_delete, child_saves), + (before_delete, child_deletes), + ] + ) + + def per_state_dependencies( + self, + uow, + save_parent, + delete_parent, + child_action, + after_save, + before_delete, + isdelete, + childisdelete, + ): + if self.post_update: + child_post_updates = unitofwork.PostUpdateAll( + uow, self.mapper.primary_base_mapper, False + ) + child_pre_updates = unitofwork.PostUpdateAll( + uow, self.mapper.primary_base_mapper, True + ) + + # TODO: this whole block is not covered + # by any tests + if not isdelete: + if childisdelete: + uow.dependencies.update( + [ + (child_action, after_save), + (after_save, child_post_updates), + ] + ) + else: + uow.dependencies.update( + [ + (save_parent, after_save), + (child_action, after_save), + (after_save, child_post_updates), + ] + ) + else: + if childisdelete: + uow.dependencies.update( + [ + (before_delete, child_pre_updates), + (child_pre_updates, delete_parent), + ] + ) + else: + uow.dependencies.update( + [ + (before_delete, child_pre_updates), + (child_pre_updates, delete_parent), + ] + ) + elif not isdelete: + uow.dependencies.update( + [ + (save_parent, after_save), + (after_save, child_action), + (save_parent, child_action), + ] + ) + else: + uow.dependencies.update( + [(before_delete, child_action), (child_action, delete_parent)] + ) + + def presort_deletes(self, uowcommit, states): + # head object is being deleted, and we manage its list of + # child objects the child objects have to have their + # foreign key to the parent set to NULL + should_null_fks = ( + not self.cascade.delete and not self.passive_deletes == "all" + ) + + for state in states: + history = uowcommit.get_attribute_history( + state, self.key, self._passive_delete_flag + ) + if history: + for child in 
history.deleted: + if child is not None and self.hasparent(child) is False: + if self.cascade.delete_orphan: + uowcommit.register_object(child, isdelete=True) + else: + uowcommit.register_object(child) + + if should_null_fks: + for child in history.unchanged: + if child is not None: + uowcommit.register_object( + child, operation="delete", prop=self.prop + ) + + def presort_saves(self, uowcommit, states): + children_added = uowcommit.memo(("children_added", self), set) + + should_null_fks = ( + not self.cascade.delete_orphan + and not self.passive_deletes == "all" + ) + + for state in states: + pks_changed = self._pks_changed(uowcommit, state) + + if not pks_changed or self.passive_updates: + passive = ( + attributes.PASSIVE_NO_INITIALIZE + | attributes.INCLUDE_PENDING_MUTATIONS + ) + else: + passive = ( + attributes.PASSIVE_OFF + | attributes.INCLUDE_PENDING_MUTATIONS + ) + + history = uowcommit.get_attribute_history(state, self.key, passive) + if history: + for child in history.added: + if child is not None: + uowcommit.register_object( + child, + cancel_delete=True, + operation="add", + prop=self.prop, + ) + + children_added.update(history.added) + + for child in history.deleted: + if not self.cascade.delete_orphan: + if should_null_fks: + uowcommit.register_object( + child, + isdelete=False, + operation="delete", + prop=self.prop, + ) + elif self.hasparent(child) is False: + uowcommit.register_object( + child, + isdelete=True, + operation="delete", + prop=self.prop, + ) + for c, m, st_, dct_ in self.mapper.cascade_iterator( + "delete", child + ): + uowcommit.register_object(st_, isdelete=True) + + if pks_changed: + if history: + for child in history.unchanged: + if child is not None: + uowcommit.register_object( + child, + False, + self.passive_updates, + operation="pk change", + prop=self.prop, + ) + + def process_deletes(self, uowcommit, states): + # head object is being deleted, and we manage its list of + # child objects the child objects have to have their foreign + # key to the parent set to NULL this phase can be called + # safely for any cascade but is unnecessary if delete cascade + # is on. + + if self.post_update or not self.passive_deletes == "all": + children_added = uowcommit.memo(("children_added", self), set) + + for state in states: + history = uowcommit.get_attribute_history( + state, self.key, self._passive_delete_flag + ) + if history: + for child in history.deleted: + if ( + child is not None + and self.hasparent(child) is False + ): + self._synchronize( + state, child, None, True, uowcommit, False + ) + if self.post_update and child: + self._post_update(child, uowcommit, [state]) + + if self.post_update or not self.cascade.delete: + for child in set(history.unchanged).difference( + children_added + ): + if child is not None: + self._synchronize( + state, child, None, True, uowcommit, False + ) + if self.post_update and child: + self._post_update( + child, uowcommit, [state] + ) + + # technically, we can even remove each child from the + # collection here too. but this would be a somewhat + # inconsistent behavior since it wouldn't happen + # if the old parent wasn't deleted but child was moved. 
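+    # Illustrative sketch, assuming hypothetical Parent/Child models (not
+    # part of this module): with a plain one-to-many and no delete cascade,
+    # deleting a parent de-associates its children by nulling the foreign
+    # key rather than deleting their rows:
+    #
+    #     from typing import List, Optional
+    #     from sqlalchemy import ForeignKey
+    #     from sqlalchemy.orm import Mapped, mapped_column, relationship
+    #
+    #     class Parent(Base):
+    #         __tablename__ = "parent"
+    #         id: Mapped[int] = mapped_column(primary_key=True)
+    #         children: Mapped[List["Child"]] = relationship()
+    #
+    #     class Child(Base):
+    #         __tablename__ = "child"
+    #         id: Mapped[int] = mapped_column(primary_key=True)
+    #         parent_id: Mapped[Optional[int]] = mapped_column(
+    #             ForeignKey("parent.id")
+    #         )
+    #
+    #     session.delete(some_parent)
+    #     session.flush()
+    #     # emits UPDATE child SET parent_id=NULL ... for the children,
+    #     # then DELETE FROM parent ...
+    #
+    # With cascade="all, delete-orphan" on the relationship, the children
+    # would instead be registered for deletion, as in presort_deletes()
+    # above.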
+ + def process_saves(self, uowcommit, states): + should_null_fks = ( + not self.cascade.delete_orphan + and not self.passive_deletes == "all" + ) + + for state in states: + history = uowcommit.get_attribute_history( + state, self.key, attributes.PASSIVE_NO_INITIALIZE + ) + if history: + for child in history.added: + self._synchronize( + state, child, None, False, uowcommit, False + ) + if child is not None and self.post_update: + self._post_update(child, uowcommit, [state]) + + for child in history.deleted: + if ( + should_null_fks + and not self.cascade.delete_orphan + and not self.hasparent(child) + ): + self._synchronize( + state, child, None, True, uowcommit, False + ) + + if self._pks_changed(uowcommit, state): + for child in history.unchanged: + self._synchronize( + state, child, None, False, uowcommit, True + ) + + def _synchronize( + self, state, child, associationrow, clearkeys, uowcommit, pks_changed + ): + source = state + dest = child + self._verify_canload(child) + if dest is None or ( + not self.post_update and uowcommit.is_deleted(dest) + ): + return + if clearkeys: + sync.clear(dest, self.mapper, self.prop.synchronize_pairs) + else: + sync.populate( + source, + self.parent, + dest, + self.mapper, + self.prop.synchronize_pairs, + uowcommit, + self.passive_updates and pks_changed, + ) + + def _pks_changed(self, uowcommit, state): + return sync.source_modified( + uowcommit, state, self.parent, self.prop.synchronize_pairs + ) + + +class ManyToOneDP(DependencyProcessor): + def __init__(self, prop): + DependencyProcessor.__init__(self, prop) + for mapper in self.mapper.self_and_descendants: + mapper._dependency_processors.append(DetectKeySwitch(prop)) + + def per_property_dependencies( + self, + uow, + parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, + before_delete, + ): + if self.post_update: + parent_post_updates = unitofwork.PostUpdateAll( + uow, self.parent.primary_base_mapper, False + ) + parent_pre_updates = unitofwork.PostUpdateAll( + uow, self.parent.primary_base_mapper, True + ) + + uow.dependencies.update( + [ + (child_saves, after_save), + (parent_saves, after_save), + (after_save, parent_post_updates), + (after_save, parent_pre_updates), + (before_delete, parent_pre_updates), + (parent_pre_updates, child_deletes), + (parent_pre_updates, parent_deletes), + ] + ) + else: + uow.dependencies.update( + [ + (child_saves, after_save), + (after_save, parent_saves), + (parent_saves, child_deletes), + (parent_deletes, child_deletes), + ] + ) + + def per_state_dependencies( + self, + uow, + save_parent, + delete_parent, + child_action, + after_save, + before_delete, + isdelete, + childisdelete, + ): + if self.post_update: + if not isdelete: + parent_post_updates = unitofwork.PostUpdateAll( + uow, self.parent.primary_base_mapper, False + ) + if childisdelete: + uow.dependencies.update( + [ + (after_save, parent_post_updates), + (parent_post_updates, child_action), + ] + ) + else: + uow.dependencies.update( + [ + (save_parent, after_save), + (child_action, after_save), + (after_save, parent_post_updates), + ] + ) + else: + parent_pre_updates = unitofwork.PostUpdateAll( + uow, self.parent.primary_base_mapper, True + ) + + uow.dependencies.update( + [ + (before_delete, parent_pre_updates), + (parent_pre_updates, delete_parent), + (parent_pre_updates, child_action), + ] + ) + + elif not isdelete: + if not childisdelete: + uow.dependencies.update( + [(child_action, after_save), (after_save, save_parent)] + ) + else: + 
uow.dependencies.update([(after_save, save_parent)]) + + else: + if childisdelete: + uow.dependencies.update([(delete_parent, child_action)]) + + def presort_deletes(self, uowcommit, states): + if self.cascade.delete or self.cascade.delete_orphan: + for state in states: + history = uowcommit.get_attribute_history( + state, self.key, self._passive_delete_flag + ) + if history: + if self.cascade.delete_orphan: + todelete = history.sum() + else: + todelete = history.non_deleted() + for child in todelete: + if child is None: + continue + uowcommit.register_object( + child, + isdelete=True, + operation="delete", + prop=self.prop, + ) + t = self.mapper.cascade_iterator("delete", child) + for c, m, st_, dct_ in t: + uowcommit.register_object(st_, isdelete=True) + + def presort_saves(self, uowcommit, states): + for state in states: + uowcommit.register_object(state, operation="add", prop=self.prop) + if self.cascade.delete_orphan: + history = uowcommit.get_attribute_history( + state, self.key, self._passive_delete_flag + ) + if history: + for child in history.deleted: + if self.hasparent(child) is False: + uowcommit.register_object( + child, + isdelete=True, + operation="delete", + prop=self.prop, + ) + + t = self.mapper.cascade_iterator("delete", child) + for c, m, st_, dct_ in t: + uowcommit.register_object(st_, isdelete=True) + + def process_deletes(self, uowcommit, states): + if ( + self.post_update + and not self.cascade.delete_orphan + and not self.passive_deletes == "all" + ): + # post_update means we have to update our + # row to not reference the child object + # before we can DELETE the row + for state in states: + self._synchronize(state, None, None, True, uowcommit) + if state and self.post_update: + history = uowcommit.get_attribute_history( + state, self.key, self._passive_delete_flag + ) + if history: + self._post_update( + state, uowcommit, history.sum(), is_m2o_delete=True + ) + + def process_saves(self, uowcommit, states): + for state in states: + history = uowcommit.get_attribute_history( + state, self.key, attributes.PASSIVE_NO_INITIALIZE + ) + if history: + if history.added: + for child in history.added: + self._synchronize( + state, child, None, False, uowcommit, "add" + ) + elif history.deleted: + self._synchronize( + state, None, None, True, uowcommit, "delete" + ) + if self.post_update: + self._post_update(state, uowcommit, history.sum()) + + def _synchronize( + self, + state, + child, + associationrow, + clearkeys, + uowcommit, + operation=None, + ): + if state is None or ( + not self.post_update and uowcommit.is_deleted(state) + ): + return + + if ( + operation is not None + and child is not None + and not uowcommit.session._contains_state(child) + ): + util.warn( + "Object of type %s not in session, %s " + "operation along '%s' won't proceed" + % (mapperutil.state_class_str(child), operation, self.prop) + ) + return + + if clearkeys or child is None: + sync.clear(state, self.parent, self.prop.synchronize_pairs) + else: + self._verify_canload(child) + sync.populate( + child, + self.mapper, + state, + self.parent, + self.prop.synchronize_pairs, + uowcommit, + False, + ) + + +class DetectKeySwitch(DependencyProcessor): + """For many-to-one relationships with no one-to-many backref, + searches for parents through the unit of work when a primary + key has changed and updates them. 
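+
+    For example, given a hypothetical mapping (illustrative only)::
+
+        class Child(Base):
+            __tablename__ = "child"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            parent_id: Mapped[Optional[int]] = mapped_column(
+                ForeignKey("parent.id")
+            )
+            parent: Mapped[Optional["Parent"]] = relationship()
+
+    a flush in which an in-session Parent's primary key was mutated will
+    locate Child objects referencing that Parent and synchronize their
+    ``parent_id`` to the new value.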
+ + Theoretically, this approach could be expanded to support transparent + deletion of objects referenced via many-to-one as well, although + the current attribute system doesn't do enough bookkeeping for this + to be efficient. + + """ + + def per_property_preprocessors(self, uow): + if self.prop._reverse_property: + if self.passive_updates: + return + else: + if False in ( + prop.passive_updates + for prop in self.prop._reverse_property + ): + return + + uow.register_preprocessor(self, False) + + def per_property_flush_actions(self, uow): + parent_saves = unitofwork.SaveUpdateAll(uow, self.parent.base_mapper) + after_save = unitofwork.ProcessAll(uow, self, False, False) + uow.dependencies.update([(parent_saves, after_save)]) + + def per_state_flush_actions(self, uow, states, isdelete): + pass + + def presort_deletes(self, uowcommit, states): + pass + + def presort_saves(self, uow, states): + if not self.passive_updates: + # for non-passive updates, register in the preprocess stage + # so that mapper save_obj() gets a hold of changes + self._process_key_switches(states, uow) + + def prop_has_changes(self, uow, states, isdelete): + if not isdelete and self.passive_updates: + d = self._key_switchers(uow, states) + return bool(d) + + return False + + def process_deletes(self, uowcommit, states): + assert False + + def process_saves(self, uowcommit, states): + # for passive updates, register objects in the process stage + # so that we avoid ManyToOneDP's registering the object without + # the listonly flag in its own preprocess stage (results in UPDATE + # statements being emitted) + assert self.passive_updates + self._process_key_switches(states, uowcommit) + + def _key_switchers(self, uow, states): + switched, notswitched = uow.memo( + ("pk_switchers", self), lambda: (set(), set()) + ) + + allstates = switched.union(notswitched) + for s in states: + if s not in allstates: + if self._pks_changed(uow, s): + switched.add(s) + else: + notswitched.add(s) + return switched + + def _process_key_switches(self, deplist, uowcommit): + switchers = self._key_switchers(uowcommit, deplist) + if switchers: + # if primary key values have actually changed somewhere, perform + # a linear search through the UOW in search of a parent. 
+ for state in uowcommit.session.identity_map.all_states(): + if not issubclass(state.class_, self.parent.class_): + continue + dict_ = state.dict + related = state.get_impl(self.key).get( + state, dict_, passive=self._passive_update_flag + ) + if ( + related is not attributes.PASSIVE_NO_RESULT + and related is not None + ): + if self.prop.uselist: + if not related: + continue + related_obj = related[0] + else: + related_obj = related + related_state = attributes.instance_state(related_obj) + if related_state in switchers: + uowcommit.register_object( + state, False, self.passive_updates + ) + sync.populate( + related_state, + self.mapper, + state, + self.parent, + self.prop.synchronize_pairs, + uowcommit, + self.passive_updates, + ) + + def _pks_changed(self, uowcommit, state): + return bool(state.key) and sync.source_modified( + uowcommit, state, self.mapper, self.prop.synchronize_pairs + ) + + +class ManyToManyDP(DependencyProcessor): + def per_property_dependencies( + self, + uow, + parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, + before_delete, + ): + uow.dependencies.update( + [ + (parent_saves, after_save), + (child_saves, after_save), + (after_save, child_deletes), + # a rowswitch on the parent from deleted to saved + # can make this one occur, as the "save" may remove + # an element from the + # "deleted" list before we have a chance to + # process its child rows + (before_delete, parent_saves), + (before_delete, parent_deletes), + (before_delete, child_deletes), + (before_delete, child_saves), + ] + ) + + def per_state_dependencies( + self, + uow, + save_parent, + delete_parent, + child_action, + after_save, + before_delete, + isdelete, + childisdelete, + ): + if not isdelete: + if childisdelete: + uow.dependencies.update( + [(save_parent, after_save), (after_save, child_action)] + ) + else: + uow.dependencies.update( + [(save_parent, after_save), (child_action, after_save)] + ) + else: + uow.dependencies.update( + [(before_delete, child_action), (before_delete, delete_parent)] + ) + + def presort_deletes(self, uowcommit, states): + # TODO: no tests fail if this whole + # thing is removed !!!! + if not self.passive_deletes: + # if no passive deletes, load history on + # the collection, so that prop_has_changes() + # returns True + for state in states: + uowcommit.get_attribute_history( + state, self.key, self._passive_delete_flag + ) + + def presort_saves(self, uowcommit, states): + if not self.passive_updates: + # if no passive updates, load history on + # each collection where parent has changed PK, + # so that prop_has_changes() returns True + for state in states: + if self._pks_changed(uowcommit, state): + history = uowcommit.get_attribute_history( + state, self.key, attributes.PASSIVE_OFF + ) + + if not self.cascade.delete_orphan: + return + + # check for child items removed from the collection + # if delete_orphan check is turned on. 
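+        # Illustrative sketch, assuming hypothetical models (not part of
+        # this module): delete-orphan on a many-to-many requires
+        # single_parent=True, and removing a child from its sole parent
+        # collection flags the child row itself for deletion:
+        #
+        #     assoc = Table(
+        #         "assoc",
+        #         Base.metadata,
+        #         Column("parent_id", ForeignKey("parent.id"), primary_key=True),
+        #         Column("child_id", ForeignKey("child.id"), primary_key=True),
+        #     )
+        #
+        #     class Parent(Base):
+        #         __tablename__ = "parent"
+        #         id: Mapped[int] = mapped_column(primary_key=True)
+        #         children: Mapped[List["Child"]] = relationship(
+        #             secondary=assoc,
+        #             cascade="all, delete-orphan",
+        #             single_parent=True,
+        #         )
+        #
+        #     # (Child defined as an ordinary mapped class)
+        #     some_parent.children.remove(some_child)
+        #     session.flush()  # deletes the assoc row, then the orphaned
+        #                      # child row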
+ for state in states: + history = uowcommit.get_attribute_history( + state, self.key, attributes.PASSIVE_NO_INITIALIZE + ) + if history: + for child in history.deleted: + if self.hasparent(child) is False: + uowcommit.register_object( + child, + isdelete=True, + operation="delete", + prop=self.prop, + ) + for c, m, st_, dct_ in self.mapper.cascade_iterator( + "delete", child + ): + uowcommit.register_object(st_, isdelete=True) + + def process_deletes(self, uowcommit, states): + secondary_delete = [] + secondary_insert = [] + secondary_update = [] + + processed = self._get_reversed_processed_set(uowcommit) + tmp = set() + for state in states: + # this history should be cached already, as + # we loaded it in preprocess_deletes + history = uowcommit.get_attribute_history( + state, self.key, self._passive_delete_flag + ) + if history: + for child in history.non_added(): + if child is None or ( + processed is not None and (state, child) in processed + ): + continue + associationrow = {} + if not self._synchronize( + state, + child, + associationrow, + False, + uowcommit, + "delete", + ): + continue + secondary_delete.append(associationrow) + + tmp.update((c, state) for c in history.non_added()) + + if processed is not None: + processed.update(tmp) + + self._run_crud( + uowcommit, secondary_insert, secondary_update, secondary_delete + ) + + def process_saves(self, uowcommit, states): + secondary_delete = [] + secondary_insert = [] + secondary_update = [] + + processed = self._get_reversed_processed_set(uowcommit) + tmp = set() + + for state in states: + need_cascade_pks = not self.passive_updates and self._pks_changed( + uowcommit, state + ) + if need_cascade_pks: + passive = ( + attributes.PASSIVE_OFF + | attributes.INCLUDE_PENDING_MUTATIONS + ) + else: + passive = ( + attributes.PASSIVE_NO_INITIALIZE + | attributes.INCLUDE_PENDING_MUTATIONS + ) + history = uowcommit.get_attribute_history(state, self.key, passive) + if history: + for child in history.added: + if processed is not None and (state, child) in processed: + continue + associationrow = {} + if not self._synchronize( + state, child, associationrow, False, uowcommit, "add" + ): + continue + secondary_insert.append(associationrow) + for child in history.deleted: + if processed is not None and (state, child) in processed: + continue + associationrow = {} + if not self._synchronize( + state, + child, + associationrow, + False, + uowcommit, + "delete", + ): + continue + secondary_delete.append(associationrow) + + tmp.update((c, state) for c in history.added + history.deleted) + + if need_cascade_pks: + for child in history.unchanged: + associationrow = {} + sync.update( + state, + self.parent, + associationrow, + "old_", + self.prop.synchronize_pairs, + ) + sync.update( + child, + self.mapper, + associationrow, + "old_", + self.prop.secondary_synchronize_pairs, + ) + + secondary_update.append(associationrow) + + if processed is not None: + processed.update(tmp) + + self._run_crud( + uowcommit, secondary_insert, secondary_update, secondary_delete + ) + + def _run_crud( + self, uowcommit, secondary_insert, secondary_update, secondary_delete + ): + connection = uowcommit.transaction.connection(self.mapper) + + if secondary_delete: + associationrow = secondary_delete[0] + statement = self.secondary.delete().where( + sql.and_( + *[ + c == sql.bindparam(c.key, type_=c.type) + for c in self.secondary.c + if c.key in associationrow + ] + ) + ) + result = connection.execute(statement, secondary_delete) + + if ( + 
result.supports_sane_multi_rowcount() + ) and result.rowcount != len(secondary_delete): + raise exc.StaleDataError( + "DELETE statement on table '%s' expected to delete " + "%d row(s); Only %d were matched." + % ( + self.secondary.description, + len(secondary_delete), + result.rowcount, + ) + ) + + if secondary_update: + associationrow = secondary_update[0] + statement = self.secondary.update().where( + sql.and_( + *[ + c == sql.bindparam("old_" + c.key, type_=c.type) + for c in self.secondary.c + if c.key in associationrow + ] + ) + ) + result = connection.execute(statement, secondary_update) + + if ( + result.supports_sane_multi_rowcount() + ) and result.rowcount != len(secondary_update): + raise exc.StaleDataError( + "UPDATE statement on table '%s' expected to update " + "%d row(s); Only %d were matched." + % ( + self.secondary.description, + len(secondary_update), + result.rowcount, + ) + ) + + if secondary_insert: + statement = self.secondary.insert() + connection.execute(statement, secondary_insert) + + def _synchronize( + self, state, child, associationrow, clearkeys, uowcommit, operation + ): + # this checks for None if uselist=True + self._verify_canload(child) + + # but if uselist=False we get here. If child is None, + # no association row can be generated, so return. + if child is None: + return False + + if child is not None and not uowcommit.session._contains_state(child): + if not child.deleted: + util.warn( + "Object of type %s not in session, %s " + "operation along '%s' won't proceed" + % (mapperutil.state_class_str(child), operation, self.prop) + ) + return False + + sync.populate_dict( + state, self.parent, associationrow, self.prop.synchronize_pairs + ) + sync.populate_dict( + child, + self.mapper, + associationrow, + self.prop.secondary_synchronize_pairs, + ) + + return True + + def _pks_changed(self, uowcommit, state): + return sync.source_modified( + uowcommit, state, self.parent, self.prop.synchronize_pairs + ) + + +_direction_to_processor = { + ONETOMANY: OneToManyDP, + MANYTOONE: ManyToOneDP, + MANYTOMANY: ManyToManyDP, +} diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/descriptor_props.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/descriptor_props.py new file mode 100644 index 00000000..faf287cc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/descriptor_props.py @@ -0,0 +1,1076 @@ +# orm/descriptor_props.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Descriptor properties are more "auxiliary" properties +that exist as configurational elements, but don't participate +as actively in the load/persist ORM loop. + +""" +from __future__ import annotations + +from dataclasses import is_dataclass +import inspect +import itertools +import operator +import typing +from typing import Any +from typing import Callable +from typing import Dict +from typing import List +from typing import NoReturn +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import weakref + +from . import attributes +from . 
import util as orm_util +from .base import _DeclarativeMapped +from .base import LoaderCallableStatus +from .base import Mapped +from .base import PassiveFlag +from .base import SQLORMOperations +from .interfaces import _AttributeOptions +from .interfaces import _IntrospectsAnnotations +from .interfaces import _MapsColumns +from .interfaces import MapperProperty +from .interfaces import PropComparator +from .util import _none_set +from .util import de_stringify_annotation +from .. import event +from .. import exc as sa_exc +from .. import schema +from .. import sql +from .. import util +from ..sql import expression +from ..sql import operators +from ..sql.elements import BindParameter +from ..util.typing import is_fwd_ref +from ..util.typing import is_pep593 +from ..util.typing import typing_get_args + +if typing.TYPE_CHECKING: + from ._typing import _InstanceDict + from ._typing import _RegistryType + from .attributes import History + from .attributes import InstrumentedAttribute + from .attributes import QueryableAttribute + from .context import ORMCompileState + from .decl_base import _ClassScanMapperConfig + from .mapper import Mapper + from .properties import ColumnProperty + from .properties import MappedColumn + from .state import InstanceState + from ..engine.base import Connection + from ..engine.row import Row + from ..sql._typing import _DMLColumnArgument + from ..sql._typing import _InfoType + from ..sql.elements import ClauseList + from ..sql.elements import ColumnElement + from ..sql.operators import OperatorType + from ..sql.schema import Column + from ..sql.selectable import Select + from ..util.typing import _AnnotationScanType + from ..util.typing import CallableReference + from ..util.typing import DescriptorReference + from ..util.typing import RODescriptorReference + +_T = TypeVar("_T", bound=Any) +_PT = TypeVar("_PT", bound=Any) + + +class DescriptorProperty(MapperProperty[_T]): + """:class:`.MapperProperty` which proxies access to a + user-defined descriptor.""" + + doc: Optional[str] = None + + uses_objects = False + _links_to_entity = False + + descriptor: DescriptorReference[Any] + + def get_history( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, + ) -> History: + raise NotImplementedError() + + def instrument_class(self, mapper: Mapper[Any]) -> None: + prop = self + + class _ProxyImpl(attributes.AttributeImpl): + accepts_scalar_loader = False + load_on_unexpire = True + collection = False + + @property + def uses_objects(self) -> bool: # type: ignore + return prop.uses_objects + + def __init__(self, key: str): + self.key = key + + def get_history( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, + ) -> History: + return prop.get_history(state, dict_, passive) + + if self.descriptor is None: + desc = getattr(mapper.class_, self.key, None) + if mapper._is_userland_descriptor(self.key, desc): + self.descriptor = desc + + if self.descriptor is None: + + def fset(obj: Any, value: Any) -> None: + setattr(obj, self.name, value) + + def fdel(obj: Any) -> None: + delattr(obj, self.name) + + def fget(obj: Any) -> Any: + return getattr(obj, self.name) + + self.descriptor = property(fget=fget, fset=fset, fdel=fdel) + + proxy_attr = attributes.create_proxied_attribute(self.descriptor)( + self.parent.class_, + self.key, + self.descriptor, + lambda: self._comparator_factory(mapper), + doc=self.doc, + original_property=self, + ) + proxy_attr.impl = 
_ProxyImpl(self.key) + mapper.class_manager.instrument_attribute(self.key, proxy_attr) + + +_CompositeAttrType = Union[ + str, + "Column[_T]", + "MappedColumn[_T]", + "InstrumentedAttribute[_T]", + "Mapped[_T]", +] + + +_CC = TypeVar("_CC", bound=Any) + + +_composite_getters: weakref.WeakKeyDictionary[ + Type[Any], Callable[[Any], Tuple[Any, ...]] +] = weakref.WeakKeyDictionary() + + +class CompositeProperty( + _MapsColumns[_CC], _IntrospectsAnnotations, DescriptorProperty[_CC] +): + """Defines a "composite" mapped attribute, representing a collection + of columns as one attribute. + + :class:`.CompositeProperty` is constructed using the :func:`.composite` + function. + + .. seealso:: + + :ref:`mapper_composite` + + """ + + composite_class: Union[Type[_CC], Callable[..., _CC]] + attrs: Tuple[_CompositeAttrType[Any], ...] + + _generated_composite_accessor: CallableReference[ + Optional[Callable[[_CC], Tuple[Any, ...]]] + ] + + comparator_factory: Type[Comparator[_CC]] + + def __init__( + self, + _class_or_attr: Union[ + None, Type[_CC], Callable[..., _CC], _CompositeAttrType[Any] + ] = None, + *attrs: _CompositeAttrType[Any], + attribute_options: Optional[_AttributeOptions] = None, + active_history: bool = False, + deferred: bool = False, + group: Optional[str] = None, + comparator_factory: Optional[Type[Comparator[_CC]]] = None, + info: Optional[_InfoType] = None, + **kwargs: Any, + ): + super().__init__(attribute_options=attribute_options) + + if isinstance(_class_or_attr, (Mapped, str, sql.ColumnElement)): + self.attrs = (_class_or_attr,) + attrs + # will initialize within declarative_scan + self.composite_class = None # type: ignore + else: + self.composite_class = _class_or_attr # type: ignore + self.attrs = attrs + + self.active_history = active_history + self.deferred = deferred + self.group = group + self.comparator_factory = ( + comparator_factory + if comparator_factory is not None + else self.__class__.Comparator + ) + self._generated_composite_accessor = None + if info is not None: + self.info.update(info) + + util.set_creation_order(self) + self._create_descriptor() + self._init_accessor() + + def instrument_class(self, mapper: Mapper[Any]) -> None: + super().instrument_class(mapper) + self._setup_event_handlers() + + def _composite_values_from_instance(self, value: _CC) -> Tuple[Any, ...]: + if self._generated_composite_accessor: + return self._generated_composite_accessor(value) + else: + try: + accessor = value.__composite_values__ + except AttributeError as ae: + raise sa_exc.InvalidRequestError( + f"Composite class {self.composite_class.__name__} is not " + f"a dataclass and does not define a __composite_values__()" + " method; can't get state" + ) from ae + else: + return accessor() # type: ignore + + def do_init(self) -> None: + """Initialization which occurs after the :class:`.Composite` + has been associated with its parent mapper. + + """ + self._setup_arguments_on_columns() + + _COMPOSITE_FGET = object() + + def _create_descriptor(self) -> None: + """Create the Python descriptor that will serve as + the access point on instances of the mapped class. + + """ + + def fget(instance: Any) -> Any: + dict_ = attributes.instance_dict(instance) + state = attributes.instance_state(instance) + + if self.key not in dict_: + # key not present. Iterate through related + # attributes, retrieve their values. This + # ensures they all load. 
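+                # Illustrative sketch, assuming the canonical (hypothetical)
+                # Point composite from the documentation:
+                #
+                #     import dataclasses
+                #     from sqlalchemy.orm import Mapped, composite, mapped_column
+                #
+                #     @dataclasses.dataclass
+                #     class Point:
+                #         x: int
+                #         y: int
+                #
+                #     class Vertex(Base):
+                #         __tablename__ = "vertex"
+                #         id: Mapped[int] = mapped_column(primary_key=True)
+                #         start: Mapped[Point] = composite(
+                #             mapped_column("x1"), mapped_column("y1")
+                #         )
+                #
+                # Accessing vertex.start here loads x1/y1 from the row and
+                # builds Point(x1, y1) on the fly, caching it under self.key
+                # in the instance dict.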
+ values = [ + getattr(instance, key) for key in self._attribute_keys + ] + + # current expected behavior here is that the composite is + # created on access if the object is persistent or if + # col attributes have non-None. This would be better + # if the composite were created unconditionally, + # but that would be a behavioral change. + if self.key not in dict_ and ( + state.key is not None or not _none_set.issuperset(values) + ): + dict_[self.key] = self.composite_class(*values) + state.manager.dispatch.refresh( + state, self._COMPOSITE_FGET, [self.key] + ) + + return dict_.get(self.key, None) + + def fset(instance: Any, value: Any) -> None: + dict_ = attributes.instance_dict(instance) + state = attributes.instance_state(instance) + attr = state.manager[self.key] + + if attr.dispatch._active_history: + previous = fget(instance) + else: + previous = dict_.get(self.key, LoaderCallableStatus.NO_VALUE) + + for fn in attr.dispatch.set: + value = fn(state, value, previous, attr.impl) + dict_[self.key] = value + if value is None: + for key in self._attribute_keys: + setattr(instance, key, None) + else: + for key, value in zip( + self._attribute_keys, + self._composite_values_from_instance(value), + ): + setattr(instance, key, value) + + def fdel(instance: Any) -> None: + state = attributes.instance_state(instance) + dict_ = attributes.instance_dict(instance) + attr = state.manager[self.key] + + if attr.dispatch._active_history: + previous = fget(instance) + dict_.pop(self.key, None) + else: + previous = dict_.pop(self.key, LoaderCallableStatus.NO_VALUE) + + attr = state.manager[self.key] + attr.dispatch.remove(state, previous, attr.impl) + for key in self._attribute_keys: + setattr(instance, key, None) + + self.descriptor = property(fget, fset, fdel) + + @util.preload_module("sqlalchemy.orm.properties") + def declarative_scan( + self, + decl_scan: _ClassScanMapperConfig, + registry: _RegistryType, + cls: Type[Any], + originating_module: Optional[str], + key: str, + mapped_container: Optional[Type[Mapped[Any]]], + annotation: Optional[_AnnotationScanType], + extracted_mapped_annotation: Optional[_AnnotationScanType], + is_dataclass_field: bool, + ) -> None: + MappedColumn = util.preloaded.orm_properties.MappedColumn + if ( + self.composite_class is None + and extracted_mapped_annotation is None + ): + self._raise_for_required(key, cls) + argument = extracted_mapped_annotation + + if is_pep593(argument): + argument = typing_get_args(argument)[0] + + if argument and self.composite_class is None: + if isinstance(argument, str) or is_fwd_ref( + argument, check_generic=True + ): + if originating_module is None: + str_arg = ( + argument.__forward_arg__ + if hasattr(argument, "__forward_arg__") + else str(argument) + ) + raise sa_exc.ArgumentError( + f"Can't use forward ref {argument} for composite " + f"class argument; set up the type as Mapped[{str_arg}]" + ) + argument = de_stringify_annotation( + cls, argument, originating_module, include_generic=True + ) + + self.composite_class = argument + + if is_dataclass(self.composite_class): + self._setup_for_dataclass(registry, cls, originating_module, key) + else: + for attr in self.attrs: + if ( + isinstance(attr, (MappedColumn, schema.Column)) + and attr.name is None + ): + raise sa_exc.ArgumentError( + "Composite class column arguments must be named " + "unless a dataclass is used" + ) + self._init_accessor() + + def _init_accessor(self) -> None: + if is_dataclass(self.composite_class) and not hasattr( + self.composite_class, "__composite_values__" 
+ ): + insp = inspect.signature(self.composite_class) + getter = operator.attrgetter( + *[p.name for p in insp.parameters.values()] + ) + if len(insp.parameters) == 1: + self._generated_composite_accessor = lambda obj: (getter(obj),) + else: + self._generated_composite_accessor = getter + + if ( + self.composite_class is not None + and isinstance(self.composite_class, type) + and self.composite_class not in _composite_getters + ): + if self._generated_composite_accessor is not None: + _composite_getters[self.composite_class] = ( + self._generated_composite_accessor + ) + elif hasattr(self.composite_class, "__composite_values__"): + _composite_getters[self.composite_class] = ( + lambda obj: obj.__composite_values__() + ) + + @util.preload_module("sqlalchemy.orm.properties") + @util.preload_module("sqlalchemy.orm.decl_base") + def _setup_for_dataclass( + self, + registry: _RegistryType, + cls: Type[Any], + originating_module: Optional[str], + key: str, + ) -> None: + MappedColumn = util.preloaded.orm_properties.MappedColumn + + decl_base = util.preloaded.orm_decl_base + + insp = inspect.signature(self.composite_class) + for param, attr in itertools.zip_longest( + insp.parameters.values(), self.attrs + ): + if param is None: + raise sa_exc.ArgumentError( + f"number of composite attributes " + f"{len(self.attrs)} exceeds " + f"that of the number of attributes in class " + f"{self.composite_class.__name__} {len(insp.parameters)}" + ) + if attr is None: + # fill in missing attr spots with empty MappedColumn + attr = MappedColumn() + self.attrs += (attr,) + + if isinstance(attr, MappedColumn): + attr.declarative_scan_for_composite( + registry, + cls, + originating_module, + key, + param.name, + param.annotation, + ) + elif isinstance(attr, schema.Column): + decl_base._undefer_column_name(param.name, attr) + + @util.memoized_property + def _comparable_elements(self) -> Sequence[QueryableAttribute[Any]]: + return [getattr(self.parent.class_, prop.key) for prop in self.props] + + @util.memoized_property + @util.preload_module("orm.properties") + def props(self) -> Sequence[MapperProperty[Any]]: + props = [] + MappedColumn = util.preloaded.orm_properties.MappedColumn + + for attr in self.attrs: + if isinstance(attr, str): + prop = self.parent.get_property(attr, _configure_mappers=False) + elif isinstance(attr, schema.Column): + prop = self.parent._columntoproperty[attr] + elif isinstance(attr, MappedColumn): + prop = self.parent._columntoproperty[attr.column] + elif isinstance(attr, attributes.InstrumentedAttribute): + prop = attr.property + else: + prop = None + + if not isinstance(prop, MapperProperty): + raise sa_exc.ArgumentError( + "Composite expects Column objects or mapped " + f"attributes/attribute names as arguments, got: {attr!r}" + ) + + props.append(prop) + return props + + @util.non_memoized_property + @util.preload_module("orm.properties") + def columns(self) -> Sequence[Column[Any]]: + MappedColumn = util.preloaded.orm_properties.MappedColumn + return [ + a.column if isinstance(a, MappedColumn) else a + for a in self.attrs + if isinstance(a, (schema.Column, MappedColumn)) + ] + + @property + def mapper_property_to_assign(self) -> Optional[MapperProperty[_CC]]: + return self + + @property + def columns_to_assign(self) -> List[Tuple[schema.Column[Any], int]]: + return [(c, 0) for c in self.columns if c.table is None] + + @util.preload_module("orm.properties") + def _setup_arguments_on_columns(self) -> None: + """Propagate configuration arguments made on this composite + to the target 
columns, for those that apply. + + """ + ColumnProperty = util.preloaded.orm_properties.ColumnProperty + + for prop in self.props: + if not isinstance(prop, ColumnProperty): + continue + else: + cprop = prop + + cprop.active_history = self.active_history + if self.deferred: + cprop.deferred = self.deferred + cprop.strategy_key = (("deferred", True), ("instrument", True)) + cprop.group = self.group + + def _setup_event_handlers(self) -> None: + """Establish events that populate/expire the composite attribute.""" + + def load_handler( + state: InstanceState[Any], context: ORMCompileState + ) -> None: + _load_refresh_handler(state, context, None, is_refresh=False) + + def refresh_handler( + state: InstanceState[Any], + context: ORMCompileState, + to_load: Optional[Sequence[str]], + ) -> None: + # note this corresponds to sqlalchemy.ext.mutable load_attrs() + + if not to_load or ( + {self.key}.union(self._attribute_keys) + ).intersection(to_load): + _load_refresh_handler(state, context, to_load, is_refresh=True) + + def _load_refresh_handler( + state: InstanceState[Any], + context: ORMCompileState, + to_load: Optional[Sequence[str]], + is_refresh: bool, + ) -> None: + dict_ = state.dict + + # if context indicates we are coming from the + # fget() handler, this already set the value; skip the + # handler here. (other handlers like mutablecomposite will still + # want to catch it) + # there's an insufficiency here in that the fget() handler + # really should not be using the refresh event and there should + # be some other event that mutablecomposite can subscribe + # towards for this. + + if ( + not is_refresh or context is self._COMPOSITE_FGET + ) and self.key in dict_: + return + + # if column elements aren't loaded, skip. + # __get__() will initiate a load for those + # columns + for k in self._attribute_keys: + if k not in dict_: + return + + dict_[self.key] = self.composite_class( + *[state.dict[key] for key in self._attribute_keys] + ) + + def expire_handler( + state: InstanceState[Any], keys: Optional[Sequence[str]] + ) -> None: + if keys is None or set(self._attribute_keys).intersection(keys): + state.dict.pop(self.key, None) + + def insert_update_handler( + mapper: Mapper[Any], + connection: Connection, + state: InstanceState[Any], + ) -> None: + """After an insert or update, some columns may be expired due + to server side defaults, or re-populated due to client side + defaults. Pop out the composite value here so that it + recreates. 
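+
+            For example (illustrative): if one of the composite's columns
+            has a server_default, its post-INSERT value is only known after
+            the statement executes; popping the composite from the dict lets
+            the next access rebuild it from the refreshed column values.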
+ + """ + + state.dict.pop(self.key, None) + + event.listen( + self.parent, "after_insert", insert_update_handler, raw=True + ) + event.listen( + self.parent, "after_update", insert_update_handler, raw=True + ) + event.listen( + self.parent, "load", load_handler, raw=True, propagate=True + ) + event.listen( + self.parent, "refresh", refresh_handler, raw=True, propagate=True + ) + event.listen( + self.parent, "expire", expire_handler, raw=True, propagate=True + ) + + proxy_attr = self.parent.class_manager[self.key] + proxy_attr.impl.dispatch = proxy_attr.dispatch # type: ignore + proxy_attr.impl.dispatch._active_history = self.active_history + + # TODO: need a deserialize hook here + + @util.memoized_property + def _attribute_keys(self) -> Sequence[str]: + return [prop.key for prop in self.props] + + def _populate_composite_bulk_save_mappings_fn( + self, + ) -> Callable[[Dict[str, Any]], None]: + if self._generated_composite_accessor: + get_values = self._generated_composite_accessor + else: + + def get_values(val: Any) -> Tuple[Any]: + return val.__composite_values__() # type: ignore + + attrs = [prop.key for prop in self.props] + + def populate(dest_dict: Dict[str, Any]) -> None: + dest_dict.update( + { + key: val + for key, val in zip( + attrs, get_values(dest_dict.pop(self.key)) + ) + } + ) + + return populate + + def get_history( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, + ) -> History: + """Provided for userland code that uses attributes.get_history().""" + + added: List[Any] = [] + deleted: List[Any] = [] + + has_history = False + for prop in self.props: + key = prop.key + hist = state.manager[key].impl.get_history(state, dict_) + if hist.has_changes(): + has_history = True + + non_deleted = hist.non_deleted() + if non_deleted: + added.extend(non_deleted) + else: + added.append(None) + if hist.deleted: + deleted.extend(hist.deleted) + else: + deleted.append(None) + + if has_history: + return attributes.History( + [self.composite_class(*added)], + (), + [self.composite_class(*deleted)], + ) + else: + return attributes.History((), [self.composite_class(*added)], ()) + + def _comparator_factory( + self, mapper: Mapper[Any] + ) -> Composite.Comparator[_CC]: + return self.comparator_factory(self, mapper) + + class CompositeBundle(orm_util.Bundle[_T]): + def __init__( + self, + property_: Composite[_T], + expr: ClauseList, + ): + self.property = property_ + super().__init__(property_.key, *expr) + + def create_row_processor( + self, + query: Select[Any], + procs: Sequence[Callable[[Row[Any]], Any]], + labels: Sequence[str], + ) -> Callable[[Row[Any]], Any]: + def proc(row: Row[Any]) -> Any: + return self.property.composite_class( + *[proc(row) for proc in procs] + ) + + return proc + + class Comparator(PropComparator[_PT]): + """Produce boolean, comparison, and other operators for + :class:`.Composite` attributes. + + See the example in :ref:`composite_operations` for an overview + of usage , as well as the documentation for :class:`.PropComparator`. + + .. 
seealso:: + + :class:`.PropComparator` + + :class:`.ColumnOperators` + + :ref:`types_operators` + + :attr:`.TypeEngine.comparator_factory` + + """ + + # https://github.com/python/mypy/issues/4266 + __hash__ = None # type: ignore + + prop: RODescriptorReference[Composite[_PT]] + + @util.memoized_property + def clauses(self) -> ClauseList: + return expression.ClauseList( + group=False, *self._comparable_elements + ) + + def __clause_element__(self) -> CompositeProperty.CompositeBundle[_PT]: + return self.expression + + @util.memoized_property + def expression(self) -> CompositeProperty.CompositeBundle[_PT]: + clauses = self.clauses._annotate( + { + "parententity": self._parententity, + "parentmapper": self._parententity, + "proxy_key": self.prop.key, + } + ) + return CompositeProperty.CompositeBundle(self.prop, clauses) + + def _bulk_update_tuples( + self, value: Any + ) -> Sequence[Tuple[_DMLColumnArgument, Any]]: + if isinstance(value, BindParameter): + value = value.value + + values: Sequence[Any] + + if value is None: + values = [None for key in self.prop._attribute_keys] + elif isinstance(self.prop.composite_class, type) and isinstance( + value, self.prop.composite_class + ): + values = self.prop._composite_values_from_instance( + value # type: ignore[arg-type] + ) + else: + raise sa_exc.ArgumentError( + "Can't UPDATE composite attribute %s to %r" + % (self.prop, value) + ) + + return list(zip(self._comparable_elements, values)) + + @util.memoized_property + def _comparable_elements(self) -> Sequence[QueryableAttribute[Any]]: + if self._adapt_to_entity: + return [ + getattr(self._adapt_to_entity.entity, prop.key) + for prop in self.prop._comparable_elements + ] + else: + return self.prop._comparable_elements + + def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 + return self._compare(operators.eq, other) + + def __ne__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 + return self._compare(operators.ne, other) + + def __lt__(self, other: Any) -> ColumnElement[bool]: + return self._compare(operators.lt, other) + + def __gt__(self, other: Any) -> ColumnElement[bool]: + return self._compare(operators.gt, other) + + def __le__(self, other: Any) -> ColumnElement[bool]: + return self._compare(operators.le, other) + + def __ge__(self, other: Any) -> ColumnElement[bool]: + return self._compare(operators.ge, other) + + # what might be interesting would be if we create + # an instance of the composite class itself with + # the columns as data members, then use "hybrid style" comparison + # to create these comparisons. then your Point.__eq__() method could + # be where comparison behavior is defined for SQL also. Likely + # not a good choice for default behavior though, not clear how it would + # work w/ dataclasses, etc. also no demand for any of this anyway. + def _compare( + self, operator: OperatorType, other: Any + ) -> ColumnElement[bool]: + values: Sequence[Any] + if other is None: + values = [None] * len(self.prop._comparable_elements) + else: + values = self.prop._composite_values_from_instance(other) + comparisons = [ + operator(a, b) + for a, b in zip(self.prop._comparable_elements, values) + ] + if self._adapt_to_entity: + assert self.adapter is not None + comparisons = [self.adapter(x) for x in comparisons] + return sql.and_(*comparisons) + + def __str__(self) -> str: + return str(self.parent.class_.__name__) + "." 
+ self.key + + +class Composite(CompositeProperty[_T], _DeclarativeMapped[_T]): + """Declarative-compatible front-end for the :class:`.CompositeProperty` + class. + + Public constructor is the :func:`_orm.composite` function. + + .. versionchanged:: 2.0 Added :class:`_orm.Composite` as a Declarative + compatible subclass of :class:`_orm.CompositeProperty`. + + .. seealso:: + + :ref:`mapper_composite` + + """ + + inherit_cache = True + """:meta private:""" + + +class ConcreteInheritedProperty(DescriptorProperty[_T]): + """A 'do nothing' :class:`.MapperProperty` that disables + an attribute on a concrete subclass that is only present + on the inherited mapper, not the concrete classes' mapper. + + Cases where this occurs include: + + * When the superclass mapper is mapped against a + "polymorphic union", which includes all attributes from + all subclasses. + * When a relationship() is configured on an inherited mapper, + but not on the subclass mapper. Concrete mappers require + that relationship() is configured explicitly on each + subclass. + + """ + + def _comparator_factory( + self, mapper: Mapper[Any] + ) -> Type[PropComparator[_T]]: + comparator_callable = None + + for m in self.parent.iterate_to_root(): + p = m._props[self.key] + if getattr(p, "comparator_factory", None) is not None: + comparator_callable = p.comparator_factory + break + assert comparator_callable is not None + return comparator_callable(p, mapper) # type: ignore + + def __init__(self) -> None: + super().__init__() + + def warn() -> NoReturn: + raise AttributeError( + "Concrete %s does not implement " + "attribute %r at the instance level. Add " + "this property explicitly to %s." + % (self.parent, self.key, self.parent) + ) + + class NoninheritedConcreteProp: + def __set__(s: Any, obj: Any, value: Any) -> NoReturn: + warn() + + def __delete__(s: Any, obj: Any) -> NoReturn: + warn() + + def __get__(s: Any, obj: Any, owner: Any) -> Any: + if obj is None: + return self.descriptor + warn() + + self.descriptor = NoninheritedConcreteProp() + + +class SynonymProperty(DescriptorProperty[_T]): + """Denote an attribute name as a synonym to a mapped property, + in that the attribute will mirror the value and expression behavior + of another attribute. + + :class:`.Synonym` is constructed using the :func:`_orm.synonym` + function. + + .. 
seealso::
+
+        :ref:`synonyms` - Overview of synonyms
+
+    """
+
+    comparator_factory: Optional[Type[PropComparator[_T]]]
+
+    def __init__(
+        self,
+        name: str,
+        map_column: Optional[bool] = None,
+        descriptor: Optional[Any] = None,
+        comparator_factory: Optional[Type[PropComparator[_T]]] = None,
+        attribute_options: Optional[_AttributeOptions] = None,
+        info: Optional[_InfoType] = None,
+        doc: Optional[str] = None,
+    ):
+        super().__init__(attribute_options=attribute_options)
+
+        self.name = name
+        self.map_column = map_column
+        self.descriptor = descriptor
+        self.comparator_factory = comparator_factory
+        if doc:
+            self.doc = doc
+        elif descriptor and descriptor.__doc__:
+            self.doc = descriptor.__doc__
+        else:
+            self.doc = None
+        if info:
+            self.info.update(info)
+
+        util.set_creation_order(self)
+
+    if not TYPE_CHECKING:
+
+        @property
+        def uses_objects(self) -> bool:
+            return getattr(self.parent.class_, self.name).impl.uses_objects
+
+    # TODO: when initialized, check _proxied_object,
+    # emit a warning if it's not a column-based property
+
+    @util.memoized_property
+    def _proxied_object(
+        self,
+    ) -> Union[MapperProperty[_T], SQLORMOperations[_T]]:
+        attr = getattr(self.parent.class_, self.name)
+        if not hasattr(attr, "property") or not isinstance(
+            attr.property, MapperProperty
+        ):
+            # attribute is a non-MapperProperty proxy such as
+            # hybrid or association proxy
+            if isinstance(attr, attributes.QueryableAttribute):
+                return attr.comparator
+            elif isinstance(attr, SQLORMOperations):
+                # association proxy comes here
+                return attr
+
+            raise sa_exc.InvalidRequestError(
+                """synonym() attribute "%s.%s" only supports """
+                """ORM mapped attributes, got %r"""
+                % (self.parent.class_.__name__, self.name, attr)
+            )
+        return attr.property
+
+    def _comparator_factory(self, mapper: Mapper[Any]) -> SQLORMOperations[_T]:
+        prop = self._proxied_object
+
+        if isinstance(prop, MapperProperty):
+            if self.comparator_factory:
+                comp = self.comparator_factory(prop, mapper)
+            else:
+                comp = prop.comparator_factory(prop, mapper)
+            return comp
+        else:
+            return prop
+
+    def get_history(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+    ) -> History:
+        attr: QueryableAttribute[Any] = getattr(self.parent.class_, self.name)
+        return attr.impl.get_history(state, dict_, passive=passive)
+
+    @util.preload_module("sqlalchemy.orm.properties")
+    def set_parent(self, parent: Mapper[Any], init: bool) -> None:
+        properties = util.preloaded.orm_properties
+
+        if self.map_column:
+            # implement the 'map_column' option.
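+            # e.g. (sketch, hypothetical names) given a mapping such as
+            #     status = synonym("_status", map_column=True)
+            # the Column keyed "status" on the mapped table is mapped as a
+            # ColumnProperty under the alternate name "_status", while
+            # "status" itself remains the synonym at the class level.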
+ if self.key not in parent.persist_selectable.c: + raise sa_exc.ArgumentError( + "Can't compile synonym '%s': no column on table " + "'%s' named '%s'" + % ( + self.name, + parent.persist_selectable.description, + self.key, + ) + ) + elif ( + parent.persist_selectable.c[self.key] + in parent._columntoproperty + and parent._columntoproperty[ + parent.persist_selectable.c[self.key] + ].key + == self.name + ): + raise sa_exc.ArgumentError( + "Can't call map_column=True for synonym %r=%r, " + "a ColumnProperty already exists keyed to the name " + "%r for column %r" + % (self.key, self.name, self.name, self.key) + ) + p: ColumnProperty[Any] = properties.ColumnProperty( + parent.persist_selectable.c[self.key] + ) + parent._configure_property(self.name, p, init=init, setparent=True) + p._mapped_by_synonym = self.key + + self.parent = parent + + +class Synonym(SynonymProperty[_T], _DeclarativeMapped[_T]): + """Declarative front-end for the :class:`.SynonymProperty` class. + + Public constructor is the :func:`_orm.synonym` function. + + .. versionchanged:: 2.0 Added :class:`_orm.Synonym` as a Declarative + compatible subclass for :class:`_orm.SynonymProperty` + + .. seealso:: + + :ref:`synonyms` - Overview of synonyms + + """ + + inherit_cache = True + """:meta private:""" diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/dynamic.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/dynamic.py new file mode 100644 index 00000000..ad1b239c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/dynamic.py @@ -0,0 +1,300 @@ +# orm/dynamic.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + + +"""Dynamic collection API. + +Dynamic collections act like Query() objects for read operations and support +basic add/delete mutation. + +.. legacy:: the "dynamic" loader is a legacy feature, superseded by the + "write_only" loader. + + +""" + +from __future__ import annotations + +from typing import Any +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Optional +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import attributes +from . import exc as orm_exc +from . import relationships +from . import util as orm_util +from .base import PassiveFlag +from .query import Query +from .session import object_session +from .writeonly import AbstractCollectionWriter +from .writeonly import WriteOnlyAttributeImpl +from .writeonly import WriteOnlyHistory +from .writeonly import WriteOnlyLoader +from .. import util +from ..engine import result + + +if TYPE_CHECKING: + from . 
import QueryableAttribute
+    from .mapper import Mapper
+    from .relationships import _RelationshipOrderByArg
+    from .session import Session
+    from .state import InstanceState
+    from .util import AliasedClass
+    from ..event import _Dispatch
+    from ..sql.elements import ColumnElement
+
+_T = TypeVar("_T", bound=Any)
+
+
+class DynamicCollectionHistory(WriteOnlyHistory[_T]):
+    def __init__(
+        self,
+        attr: DynamicAttributeImpl,
+        state: InstanceState[_T],
+        passive: PassiveFlag,
+        apply_to: Optional[DynamicCollectionHistory[_T]] = None,
+    ) -> None:
+        if apply_to:
+            coll = AppenderQuery(attr, state).autoflush(False)
+            self.unchanged_items = util.OrderedIdentitySet(coll)
+            self.added_items = apply_to.added_items
+            self.deleted_items = apply_to.deleted_items
+            self._reconcile_collection = True
+        else:
+            self.deleted_items = util.OrderedIdentitySet()
+            self.added_items = util.OrderedIdentitySet()
+            self.unchanged_items = util.OrderedIdentitySet()
+            self._reconcile_collection = False
+
+
+class DynamicAttributeImpl(WriteOnlyAttributeImpl):
+    _supports_dynamic_iteration = True
+    collection_history_cls = DynamicCollectionHistory[Any]
+    query_class: Type[AppenderMixin[Any]]  # type: ignore[assignment]
+
+    def __init__(
+        self,
+        class_: Union[Type[Any], AliasedClass[Any]],
+        key: str,
+        dispatch: _Dispatch[QueryableAttribute[Any]],
+        target_mapper: Mapper[_T],
+        order_by: _RelationshipOrderByArg,
+        query_class: Optional[Type[AppenderMixin[_T]]] = None,
+        **kw: Any,
+    ) -> None:
+        attributes.AttributeImpl.__init__(
+            self, class_, key, None, dispatch, **kw
+        )
+        self.target_mapper = target_mapper
+        if order_by:
+            self.order_by = tuple(order_by)
+        if not query_class:
+            self.query_class = AppenderQuery
+        elif AppenderMixin in query_class.mro():
+            self.query_class = query_class
+        else:
+            self.query_class = mixin_user_query(query_class)
+
+
+@relationships.RelationshipProperty.strategy_for(lazy="dynamic")
+class DynaLoader(WriteOnlyLoader):
+    impl_class = DynamicAttributeImpl
+
+
+class AppenderMixin(AbstractCollectionWriter[_T]):
+    """A mixin intended to be combined with a :class:`_orm.Query` subclass,
+    layering :class:`.AbstractCollectionWriter` behavior on top.
+
+
+    """
+
+    query_class: Optional[Type[Query[_T]]] = None
+    _order_by_clauses: Tuple[ColumnElement[Any], ...]
+
+    def __init__(
+        self, attr: DynamicAttributeImpl, state: InstanceState[_T]
+    ) -> None:
+        Query.__init__(
+            self,  # type: ignore[arg-type]
+            attr.target_mapper,
+            None,
+        )
+        super().__init__(attr, state)
+
+    @property
+    def session(self) -> Optional[Session]:
+        sess = object_session(self.instance)
+        if sess is not None and sess.autoflush and self.instance in sess:
+            sess.flush()
+        if not orm_util.has_identity(self.instance):
+            return None
+        else:
+            return sess
+
+    @session.setter
+    def session(self, session: Session) -> None:
+        self.sess = session
+
+    def _iter(self) -> Union[result.ScalarResult[_T], result.Result[_T]]:
+        sess = self.session
+        if sess is None:
+            state = attributes.instance_state(self.instance)
+            if state.detached:
+                util.warn(
+                    "Instance %s is detached, dynamic relationship cannot "
+                    "return a correct result. This warning will become "
+                    "a DetachedInstanceError in a future release."
+ % (orm_util.state_str(state)) + ) + + return result.IteratorResult( + result.SimpleResultMetaData([self.attr.class_.__name__]), + iter( + self.attr._get_collection_history( + attributes.instance_state(self.instance), + PassiveFlag.PASSIVE_NO_INITIALIZE, + ).added_items + ), + _source_supports_scalars=True, + ).scalars() + else: + return self._generate(sess)._iter() + + if TYPE_CHECKING: + + def __iter__(self) -> Iterator[_T]: ... + + def __getitem__(self, index: Any) -> Union[_T, List[_T]]: + sess = self.session + if sess is None: + return self.attr._get_collection_history( + attributes.instance_state(self.instance), + PassiveFlag.PASSIVE_NO_INITIALIZE, + ).indexed(index) + else: + return self._generate(sess).__getitem__(index) # type: ignore[no-any-return] # noqa: E501 + + def count(self) -> int: + sess = self.session + if sess is None: + return len( + self.attr._get_collection_history( + attributes.instance_state(self.instance), + PassiveFlag.PASSIVE_NO_INITIALIZE, + ).added_items + ) + else: + return self._generate(sess).count() + + def _generate( + self, + sess: Optional[Session] = None, + ) -> Query[_T]: + # note we're returning an entirely new Query class instance + # here without any assignment capabilities; the class of this + # query is determined by the session. + instance = self.instance + if sess is None: + sess = object_session(instance) + if sess is None: + raise orm_exc.DetachedInstanceError( + "Parent instance %s is not bound to a Session, and no " + "contextual session is established; lazy load operation " + "of attribute '%s' cannot proceed" + % (orm_util.instance_str(instance), self.attr.key) + ) + + if self.query_class: + query = self.query_class(self.attr.target_mapper, session=sess) + else: + query = sess.query(self.attr.target_mapper) + + query._where_criteria = self._where_criteria + query._from_obj = self._from_obj + query._order_by_clauses = self._order_by_clauses + + return query + + def add_all(self, iterator: Iterable[_T]) -> None: + """Add an iterable of items to this :class:`_orm.AppenderQuery`. + + The given items will be persisted to the database in terms of + the parent instance's collection on the next flush. + + This method is provided to assist in delivering forwards-compatibility + with the :class:`_orm.WriteOnlyCollection` collection class. + + .. versionadded:: 2.0 + + """ + self._add_all_impl(iterator) + + def add(self, item: _T) -> None: + """Add an item to this :class:`_orm.AppenderQuery`. + + The given item will be persisted to the database in terms of + the parent instance's collection on the next flush. + + This method is provided to assist in delivering forwards-compatibility + with the :class:`_orm.WriteOnlyCollection` collection class. + + .. versionadded:: 2.0 + + """ + self._add_all_impl([item]) + + def extend(self, iterator: Iterable[_T]) -> None: + """Add an iterable of items to this :class:`_orm.AppenderQuery`. + + The given items will be persisted to the database in terms of + the parent instance's collection on the next flush. + + """ + self._add_all_impl(iterator) + + def append(self, item: _T) -> None: + """Append an item to this :class:`_orm.AppenderQuery`. + + The given item will be persisted to the database in terms of + the parent instance's collection on the next flush. + + """ + self._add_all_impl([item]) + + def remove(self, item: _T) -> None: + """Remove an item from this :class:`_orm.AppenderQuery`. + + The given item will be removed from the parent instance's collection on + the next flush. 
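+
+        e.g., assuming a mapping with a dynamic relationship
+        ``SomeParent.children`` (hypothetical names)::
+
+            some_parent.children.remove(some_child)
+            session.flush()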
+ + """ + self._remove_impl(item) + + +class AppenderQuery(AppenderMixin[_T], Query[_T]): # type: ignore[misc] + """A dynamic query that supports basic collection storage operations. + + Methods on :class:`.AppenderQuery` include all methods of + :class:`_orm.Query`, plus additional methods used for collection + persistence. + + + """ + + +def mixin_user_query(cls: Any) -> type[AppenderMixin[Any]]: + """Return a new class with AppenderQuery functionality layered over.""" + name = "Appender" + cls.__name__ + return type(name, (AppenderMixin, cls), {"query_class": cls}) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/evaluator.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/evaluator.py new file mode 100644 index 00000000..2c10ec55 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/evaluator.py @@ -0,0 +1,379 @@ +# orm/evaluator.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + +"""Evaluation functions used **INTERNALLY** by ORM DML use cases. + + +This module is **private, for internal use by SQLAlchemy**. + +.. versionchanged:: 2.0.4 renamed ``EvaluatorCompiler`` to + ``_EvaluatorCompiler``. + +""" + + +from __future__ import annotations + +from typing import Type + +from . import exc as orm_exc +from .base import LoaderCallableStatus +from .base import PassiveFlag +from .. import exc +from .. import inspect +from ..sql import and_ +from ..sql import operators +from ..sql.sqltypes import Concatenable +from ..sql.sqltypes import Integer +from ..sql.sqltypes import Numeric +from ..util import warn_deprecated + + +class UnevaluatableError(exc.InvalidRequestError): + pass + + +class _NoObject(operators.ColumnOperators): + def operate(self, *arg, **kw): + return None + + def reverse_operate(self, *arg, **kw): + return None + + +class _ExpiredObject(operators.ColumnOperators): + def operate(self, *arg, **kw): + return self + + def reverse_operate(self, *arg, **kw): + return self + + +_NO_OBJECT = _NoObject() +_EXPIRED_OBJECT = _ExpiredObject() + + +class _EvaluatorCompiler: + def __init__(self, target_cls=None): + self.target_cls = target_cls + + def process(self, clause, *clauses): + if clauses: + clause = and_(clause, *clauses) + + meth = getattr(self, f"visit_{clause.__visit_name__}", None) + if not meth: + raise UnevaluatableError( + f"Cannot evaluate {type(clause).__name__}" + ) + return meth(clause) + + def visit_grouping(self, clause): + return self.process(clause.element) + + def visit_null(self, clause): + return lambda obj: None + + def visit_false(self, clause): + return lambda obj: False + + def visit_true(self, clause): + return lambda obj: True + + def visit_column(self, clause): + try: + parentmapper = clause._annotations["parentmapper"] + except KeyError as ke: + raise UnevaluatableError( + f"Cannot evaluate column: {clause}" + ) from ke + + if self.target_cls and not issubclass( + self.target_cls, parentmapper.class_ + ): + raise UnevaluatableError( + "Can't evaluate criteria against " + f"alternate class {parentmapper.class_}" + ) + + parentmapper._check_configure() + + # we'd like to use "proxy_key" annotation to get the "key", however + # in relationship primaryjoin cases proxy_key is sometimes deannotated + # and sometimes apparently not present in the first place (?). 
+ # While I can stop it from being deannotated (though need to see if + # this breaks other things), not sure right now about cases where it's + # not there in the first place. can fix at some later point. + # key = clause._annotations["proxy_key"] + + # for now, use the old way + try: + key = parentmapper._columntoproperty[clause].key + except orm_exc.UnmappedColumnError as err: + raise UnevaluatableError( + f"Cannot evaluate expression: {err}" + ) from err + + # note this used to fall back to a simple `getattr(obj, key)` evaluator + # if impl was None; as of #8656, we ensure mappers are configured + # so that impl is available + impl = parentmapper.class_manager[key].impl + + def get_corresponding_attr(obj): + if obj is None: + return _NO_OBJECT + state = inspect(obj) + dict_ = state.dict + + value = impl.get( + state, dict_, passive=PassiveFlag.PASSIVE_NO_FETCH + ) + if value is LoaderCallableStatus.PASSIVE_NO_RESULT: + return _EXPIRED_OBJECT + return value + + return get_corresponding_attr + + def visit_tuple(self, clause): + return self.visit_clauselist(clause) + + def visit_expression_clauselist(self, clause): + return self.visit_clauselist(clause) + + def visit_clauselist(self, clause): + evaluators = [self.process(clause) for clause in clause.clauses] + + dispatch = ( + f"visit_{clause.operator.__name__.rstrip('_')}_clauselist_op" + ) + meth = getattr(self, dispatch, None) + if meth: + return meth(clause.operator, evaluators, clause) + else: + raise UnevaluatableError( + f"Cannot evaluate clauselist with operator {clause.operator}" + ) + + def visit_binary(self, clause): + eval_left = self.process(clause.left) + eval_right = self.process(clause.right) + + dispatch = f"visit_{clause.operator.__name__.rstrip('_')}_binary_op" + meth = getattr(self, dispatch, None) + if meth: + return meth(clause.operator, eval_left, eval_right, clause) + else: + raise UnevaluatableError( + f"Cannot evaluate {type(clause).__name__} with " + f"operator {clause.operator}" + ) + + def visit_or_clauselist_op(self, operator, evaluators, clause): + def evaluate(obj): + has_null = False + for sub_evaluate in evaluators: + value = sub_evaluate(obj) + if value is _EXPIRED_OBJECT: + return _EXPIRED_OBJECT + elif value: + return True + has_null = has_null or value is None + if has_null: + return None + return False + + return evaluate + + def visit_and_clauselist_op(self, operator, evaluators, clause): + def evaluate(obj): + for sub_evaluate in evaluators: + value = sub_evaluate(obj) + if value is _EXPIRED_OBJECT: + return _EXPIRED_OBJECT + + if not value: + if value is None or value is _NO_OBJECT: + return None + return False + return True + + return evaluate + + def visit_comma_op_clauselist_op(self, operator, evaluators, clause): + def evaluate(obj): + values = [] + for sub_evaluate in evaluators: + value = sub_evaluate(obj) + if value is _EXPIRED_OBJECT: + return _EXPIRED_OBJECT + elif value is None or value is _NO_OBJECT: + return None + values.append(value) + return tuple(values) + + return evaluate + + def visit_custom_op_binary_op( + self, operator, eval_left, eval_right, clause + ): + if operator.python_impl: + return self._straight_evaluate( + operator, eval_left, eval_right, clause + ) + else: + raise UnevaluatableError( + f"Custom operator {operator.opstring!r} can't be evaluated " + "in Python unless it specifies a callable using " + "`.python_impl`." 
+ ) + + def visit_is_binary_op(self, operator, eval_left, eval_right, clause): + def evaluate(obj): + left_val = eval_left(obj) + right_val = eval_right(obj) + if left_val is _EXPIRED_OBJECT or right_val is _EXPIRED_OBJECT: + return _EXPIRED_OBJECT + return left_val == right_val + + return evaluate + + def visit_is_not_binary_op(self, operator, eval_left, eval_right, clause): + def evaluate(obj): + left_val = eval_left(obj) + right_val = eval_right(obj) + if left_val is _EXPIRED_OBJECT or right_val is _EXPIRED_OBJECT: + return _EXPIRED_OBJECT + return left_val != right_val + + return evaluate + + def _straight_evaluate(self, operator, eval_left, eval_right, clause): + def evaluate(obj): + left_val = eval_left(obj) + right_val = eval_right(obj) + if left_val is _EXPIRED_OBJECT or right_val is _EXPIRED_OBJECT: + return _EXPIRED_OBJECT + elif left_val is None or right_val is None: + return None + + return operator(eval_left(obj), eval_right(obj)) + + return evaluate + + def _straight_evaluate_numeric_only( + self, operator, eval_left, eval_right, clause + ): + if clause.left.type._type_affinity not in ( + Numeric, + Integer, + ) or clause.right.type._type_affinity not in (Numeric, Integer): + raise UnevaluatableError( + f'Cannot evaluate math operator "{operator.__name__}" for ' + f"datatypes {clause.left.type}, {clause.right.type}" + ) + + return self._straight_evaluate(operator, eval_left, eval_right, clause) + + visit_add_binary_op = _straight_evaluate_numeric_only + visit_mul_binary_op = _straight_evaluate_numeric_only + visit_sub_binary_op = _straight_evaluate_numeric_only + visit_mod_binary_op = _straight_evaluate_numeric_only + visit_truediv_binary_op = _straight_evaluate_numeric_only + visit_lt_binary_op = _straight_evaluate + visit_le_binary_op = _straight_evaluate + visit_ne_binary_op = _straight_evaluate + visit_gt_binary_op = _straight_evaluate + visit_ge_binary_op = _straight_evaluate + visit_eq_binary_op = _straight_evaluate + + def visit_in_op_binary_op(self, operator, eval_left, eval_right, clause): + return self._straight_evaluate( + lambda a, b: a in b if a is not _NO_OBJECT else None, + eval_left, + eval_right, + clause, + ) + + def visit_not_in_op_binary_op( + self, operator, eval_left, eval_right, clause + ): + return self._straight_evaluate( + lambda a, b: a not in b if a is not _NO_OBJECT else None, + eval_left, + eval_right, + clause, + ) + + def visit_concat_op_binary_op( + self, operator, eval_left, eval_right, clause + ): + + if not issubclass( + clause.left.type._type_affinity, Concatenable + ) or not issubclass(clause.right.type._type_affinity, Concatenable): + raise UnevaluatableError( + f"Cannot evaluate concatenate operator " + f'"{operator.__name__}" for ' + f"datatypes {clause.left.type}, {clause.right.type}" + ) + + return self._straight_evaluate( + lambda a, b: a + b, eval_left, eval_right, clause + ) + + def visit_startswith_op_binary_op( + self, operator, eval_left, eval_right, clause + ): + return self._straight_evaluate( + lambda a, b: a.startswith(b), eval_left, eval_right, clause + ) + + def visit_endswith_op_binary_op( + self, operator, eval_left, eval_right, clause + ): + return self._straight_evaluate( + lambda a, b: a.endswith(b), eval_left, eval_right, clause + ) + + def visit_unary(self, clause): + eval_inner = self.process(clause.element) + if clause.operator is operators.inv: + + def evaluate(obj): + value = eval_inner(obj) + if value is _EXPIRED_OBJECT: + return _EXPIRED_OBJECT + elif value is None: + return None + return not value + + return 
evaluate + raise UnevaluatableError( + f"Cannot evaluate {type(clause).__name__} " + f"with operator {clause.operator}" + ) + + def visit_bindparam(self, clause): + if clause.callable: + val = clause.callable() + else: + val = clause.value + return lambda obj: val + + +def __getattr__(name: str) -> Type[_EvaluatorCompiler]: + if name == "EvaluatorCompiler": + warn_deprecated( + "Direct use of 'EvaluatorCompiler' is not supported, and this " + "name will be removed in a future release. " + "'_EvaluatorCompiler' is for internal use only", + "2.0", + ) + return _EvaluatorCompiler + else: + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/events.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/events.py new file mode 100644 index 00000000..f2eae852 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/events.py @@ -0,0 +1,3261 @@ +# orm/events.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""ORM event interfaces. + +""" +from __future__ import annotations + +from typing import Any +from typing import Callable +from typing import Collection +from typing import Dict +from typing import Generic +from typing import Iterable +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import weakref + +from . import instrumentation +from . import interfaces +from . import mapperlib +from .attributes import QueryableAttribute +from .base import _mapper_or_none +from .base import NO_KEY +from .instrumentation import ClassManager +from .instrumentation import InstrumentationFactory +from .query import BulkDelete +from .query import BulkUpdate +from .query import Query +from .scoping import scoped_session +from .session import Session +from .session import sessionmaker +from .. import event +from .. import exc +from .. import util +from ..event import EventTarget +from ..event.registry import _ET +from ..util.compat import inspect_getfullargspec + +if TYPE_CHECKING: + from weakref import ReferenceType + + from ._typing import _InstanceDict + from ._typing import _InternalEntityType + from ._typing import _O + from ._typing import _T + from .attributes import Event + from .base import EventConstants + from .session import ORMExecuteState + from .session import SessionTransaction + from .unitofwork import UOWTransaction + from ..engine import Connection + from ..event.base import _Dispatch + from ..event.base import _HasEventsDispatch + from ..event.registry import _EventKey + from ..orm.collections import CollectionAdapter + from ..orm.context import QueryContext + from ..orm.decl_api import DeclarativeAttributeIntercept + from ..orm.decl_api import DeclarativeMeta + from ..orm.mapper import Mapper + from ..orm.state import InstanceState + +_KT = TypeVar("_KT", bound=Any) +_ET2 = TypeVar("_ET2", bound=EventTarget) + + +class InstrumentationEvents(event.Events[InstrumentationFactory]): + """Events related to class instrumentation events. + + The listeners here support being established against + any new style class, that is any object that is a subclass + of 'type'. Events will then be fired off for events + against that class. 
If the "propagate=True" flag is passed + to event.listen(), the event will fire off for subclasses + of that class as well. + + The Python ``type`` builtin is also accepted as a target, + which when used has the effect of events being emitted + for all classes. + + Note the "propagate" flag here is defaulted to ``True``, + unlike the other class level events where it defaults + to ``False``. This means that new subclasses will also + be the subject of these events, when a listener + is established on a superclass. + + """ + + _target_class_doc = "SomeBaseClass" + _dispatch_target = InstrumentationFactory + + @classmethod + def _accept_with( + cls, + target: Union[ + InstrumentationFactory, + Type[InstrumentationFactory], + ], + identifier: str, + ) -> Optional[ + Union[ + InstrumentationFactory, + Type[InstrumentationFactory], + ] + ]: + if isinstance(target, type): + return _InstrumentationEventsHold(target) # type: ignore [return-value] # noqa: E501 + else: + return None + + @classmethod + def _listen( + cls, event_key: _EventKey[_T], propagate: bool = True, **kw: Any + ) -> None: + target, identifier, fn = ( + event_key.dispatch_target, + event_key.identifier, + event_key._listen_fn, + ) + + def listen(target_cls: type, *arg: Any) -> Optional[Any]: + listen_cls = target() + + # if weakref were collected, however this is not something + # that normally happens. it was occurring during test teardown + # between mapper/registry/instrumentation_manager, however this + # interaction was changed to not rely upon the event system. + if listen_cls is None: + return None + + if propagate and issubclass(target_cls, listen_cls): + return fn(target_cls, *arg) + elif not propagate and target_cls is listen_cls: + return fn(target_cls, *arg) + else: + return None + + def remove(ref: ReferenceType[_T]) -> None: + key = event.registry._EventKey( # type: ignore [type-var] + None, + identifier, + listen, + instrumentation._instrumentation_factory, + ) + getattr( + instrumentation._instrumentation_factory.dispatch, identifier + ).remove(key) + + target = weakref.ref(target.class_, remove) + + event_key.with_dispatch_target( + instrumentation._instrumentation_factory + ).with_wrapper(listen).base_listen(**kw) + + @classmethod + def _clear(cls) -> None: + super()._clear() + instrumentation._instrumentation_factory.dispatch._clear() + + def class_instrument(self, cls: ClassManager[_O]) -> None: + """Called after the given class is instrumented. + + To get at the :class:`.ClassManager`, use + :func:`.manager_of_class`. + + """ + + def class_uninstrument(self, cls: ClassManager[_O]) -> None: + """Called before the given class is uninstrumented. + + To get at the :class:`.ClassManager`, use + :func:`.manager_of_class`. + + """ + + def attribute_instrument( + self, cls: ClassManager[_O], key: _KT, inst: _O + ) -> None: + """Called when an attribute is instrumented.""" + + +class _InstrumentationEventsHold: + """temporary marker object used to transfer from _accept_with() to + _listen() on the InstrumentationEvents class. + + """ + + def __init__(self, class_: type) -> None: + self.class_ = class_ + + dispatch = event.dispatcher(InstrumentationEvents) + + +class InstanceEvents(event.Events[ClassManager[Any]]): + """Define events specific to object lifecycle. 
+ + e.g.:: + + from sqlalchemy import event + + def my_load_listener(target, context): + print("on load!") + + event.listen(SomeClass, 'load', my_load_listener) + + Available targets include: + + * mapped classes + * unmapped superclasses of mapped or to-be-mapped classes + (using the ``propagate=True`` flag) + * :class:`_orm.Mapper` objects + * the :class:`_orm.Mapper` class itself indicates listening for all + mappers. + + Instance events are closely related to mapper events, but + are more specific to the instance and its instrumentation, + rather than its system of persistence. + + When using :class:`.InstanceEvents`, several modifiers are + available to the :func:`.event.listen` function. + + :param propagate=False: When True, the event listener should + be applied to all inheriting classes as well as the + class which is the target of this listener. + :param raw=False: When True, the "target" argument passed + to applicable event listener functions will be the + instance's :class:`.InstanceState` management + object, rather than the mapped instance itself. + :param restore_load_context=False: Applies to the + :meth:`.InstanceEvents.load` and :meth:`.InstanceEvents.refresh` + events. Restores the loader context of the object when the event + hook is complete, so that ongoing eager load operations continue + to target the object appropriately. A warning is emitted if the + object is moved to a new loader context from within one of these + events if this flag is not set. + + .. versionadded:: 1.3.14 + + + """ + + _target_class_doc = "SomeClass" + + _dispatch_target = ClassManager + + @classmethod + def _new_classmanager_instance( + cls, + class_: Union[DeclarativeAttributeIntercept, DeclarativeMeta, type], + classmanager: ClassManager[_O], + ) -> None: + _InstanceEventsHold.populate(class_, classmanager) + + @classmethod + @util.preload_module("sqlalchemy.orm") + def _accept_with( + cls, + target: Union[ + ClassManager[Any], + Type[ClassManager[Any]], + ], + identifier: str, + ) -> Optional[Union[ClassManager[Any], Type[ClassManager[Any]]]]: + orm = util.preloaded.orm + + if isinstance(target, ClassManager): + return target + elif isinstance(target, mapperlib.Mapper): + return target.class_manager + elif target is orm.mapper: # type: ignore [attr-defined] + util.warn_deprecated( + "The `sqlalchemy.orm.mapper()` symbol is deprecated and " + "will be removed in a future release. 
For the mapper-wide " + "event target, use the 'sqlalchemy.orm.Mapper' class.", + "2.0", + ) + return ClassManager + elif isinstance(target, type): + if issubclass(target, mapperlib.Mapper): + return ClassManager + else: + manager = instrumentation.opt_manager_of_class(target) + if manager: + return manager + else: + return _InstanceEventsHold(target) # type: ignore [return-value] # noqa: E501 + return None + + @classmethod + def _listen( + cls, + event_key: _EventKey[ClassManager[Any]], + raw: bool = False, + propagate: bool = False, + restore_load_context: bool = False, + **kw: Any, + ) -> None: + target, fn = (event_key.dispatch_target, event_key._listen_fn) + + if not raw or restore_load_context: + + def wrap( + state: InstanceState[_O], *arg: Any, **kw: Any + ) -> Optional[Any]: + if not raw: + target: Any = state.obj() + else: + target = state + if restore_load_context: + runid = state.runid + try: + return fn(target, *arg, **kw) + finally: + if restore_load_context: + state.runid = runid + + event_key = event_key.with_wrapper(wrap) + + event_key.base_listen(propagate=propagate, **kw) + + if propagate: + for mgr in target.subclass_managers(True): + event_key.with_dispatch_target(mgr).base_listen(propagate=True) + + @classmethod + def _clear(cls) -> None: + super()._clear() + _InstanceEventsHold._clear() + + def first_init(self, manager: ClassManager[_O], cls: Type[_O]) -> None: + """Called when the first instance of a particular mapping is called. + + This event is called when the ``__init__`` method of a class + is called the first time for that particular class. The event + invokes before ``__init__`` actually proceeds as well as before + the :meth:`.InstanceEvents.init` event is invoked. + + """ + + def init(self, target: _O, args: Any, kwargs: Any) -> None: + """Receive an instance when its constructor is called. + + This method is only called during a userland construction of + an object, in conjunction with the object's constructor, e.g. + its ``__init__`` method. It is not called when an object is + loaded from the database; see the :meth:`.InstanceEvents.load` + event in order to intercept a database load. + + The event is called before the actual ``__init__`` constructor + of the object is called. The ``kwargs`` dictionary may be + modified in-place in order to affect what is passed to + ``__init__``. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param args: positional arguments passed to the ``__init__`` method. + This is passed as a tuple and is currently immutable. + :param kwargs: keyword arguments passed to the ``__init__`` method. + This structure *can* be altered in place. + + .. seealso:: + + :meth:`.InstanceEvents.init_failure` + + :meth:`.InstanceEvents.load` + + """ + + def init_failure(self, target: _O, args: Any, kwargs: Any) -> None: + """Receive an instance when its constructor has been called, + and raised an exception. + + This method is only called during a userland construction of + an object, in conjunction with the object's constructor, e.g. + its ``__init__`` method. It is not called when an object is loaded + from the database. + + The event is invoked after an exception raised by the ``__init__`` + method is caught. After the event + is invoked, the original exception is re-raised outwards, so that + the construction of the object still raises an exception. 
The + actual exception and stack trace raised should be present in + ``sys.exc_info()``. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param args: positional arguments that were passed to the ``__init__`` + method. + :param kwargs: keyword arguments that were passed to the ``__init__`` + method. + + .. seealso:: + + :meth:`.InstanceEvents.init` + + :meth:`.InstanceEvents.load` + + """ + + def _sa_event_merge_wo_load( + self, target: _O, context: QueryContext + ) -> None: + """receive an object instance after it was the subject of a merge() + call, when load=False was passed. + + The target would be the already-loaded object in the Session which + would have had its attributes overwritten by the incoming object. This + overwrite operation does not use attribute events, instead just + populating dict directly. Therefore the purpose of this event is so + that extensions like sqlalchemy.ext.mutable know that object state has + changed and incoming state needs to be set up for "parents" etc. + + This functionality is acceptable to be made public in a later release. + + .. versionadded:: 1.4.41 + + """ + + def load(self, target: _O, context: QueryContext) -> None: + """Receive an object instance after it has been created via + ``__new__``, and after initial attribute population has + occurred. + + This typically occurs when the instance is created based on + incoming result rows, and is only called once for that + instance's lifetime. + + .. warning:: + + During a result-row load, this event is invoked when the + first row received for this instance is processed. When using + eager loading with collection-oriented attributes, the additional + rows that are to be loaded / processed in order to load subsequent + collection items have not occurred yet. This has the effect + both that collections will not be fully loaded, as well as that + if an operation occurs within this event handler that emits + another database load operation for the object, the "loading + context" for the object can change and interfere with the + existing eager loaders still in progress. + + Examples of what can cause the "loading context" to change within + the event handler include, but are not necessarily limited to: + + * accessing deferred attributes that weren't part of the row, + will trigger an "undefer" operation and refresh the object + + * accessing attributes on a joined-inheritance subclass that + weren't part of the row, will trigger a refresh operation. + + As of SQLAlchemy 1.3.14, a warning is emitted when this occurs. The + :paramref:`.InstanceEvents.restore_load_context` option may be + used on the event to prevent this warning; this will ensure that + the existing loading context is maintained for the object after the + event is called:: + + @event.listens_for( + SomeClass, "load", restore_load_context=True) + def on_load(instance, context): + instance.some_unloaded_attribute + + .. versionchanged:: 1.3.14 Added + :paramref:`.InstanceEvents.restore_load_context` + and :paramref:`.SessionEvents.restore_load_context` flags which + apply to "on load" events, which will ensure that the loading + context for an object is restored when the event hook is + complete; a warning is emitted if the load context of the object + changes without this flag being set. 
+ + + The :meth:`.InstanceEvents.load` event is also available in a + class-method decorator format called :func:`_orm.reconstructor`. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param context: the :class:`.QueryContext` corresponding to the + current :class:`_query.Query` in progress. This argument may be + ``None`` if the load does not correspond to a :class:`_query.Query`, + such as during :meth:`.Session.merge`. + + .. seealso:: + + :ref:`mapped_class_load_events` + + :meth:`.InstanceEvents.init` + + :meth:`.InstanceEvents.refresh` + + :meth:`.SessionEvents.loaded_as_persistent` + + """ + + def refresh( + self, target: _O, context: QueryContext, attrs: Optional[Iterable[str]] + ) -> None: + """Receive an object instance after one or more attributes have + been refreshed from a query. + + Contrast this to the :meth:`.InstanceEvents.load` method, which + is invoked when the object is first loaded from a query. + + .. note:: This event is invoked within the loader process before + eager loaders may have been completed, and the object's state may + not be complete. Additionally, invoking row-level refresh + operations on the object will place the object into a new loader + context, interfering with the existing load context. See the note + on :meth:`.InstanceEvents.load` for background on making use of the + :paramref:`.InstanceEvents.restore_load_context` parameter, in + order to resolve this scenario. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param context: the :class:`.QueryContext` corresponding to the + current :class:`_query.Query` in progress. + :param attrs: sequence of attribute names which + were populated, or None if all column-mapped, non-deferred + attributes were populated. + + .. seealso:: + + :ref:`mapped_class_load_events` + + :meth:`.InstanceEvents.load` + + """ + + def refresh_flush( + self, + target: _O, + flush_context: UOWTransaction, + attrs: Optional[Iterable[str]], + ) -> None: + """Receive an object instance after one or more attributes that + contain a column-level default or onupdate handler have been refreshed + during persistence of the object's state. + + This event is the same as :meth:`.InstanceEvents.refresh` except + it is invoked within the unit of work flush process, and includes + only non-primary-key columns that have column level default or + onupdate handlers, including Python callables as well as server side + defaults and triggers which may be fetched via the RETURNING clause. + + .. note:: + + While the :meth:`.InstanceEvents.refresh_flush` event is triggered + for an object that was INSERTed as well as for an object that was + UPDATEd, the event is geared primarily towards the UPDATE process; + it is mostly an internal artifact that INSERT actions can also + trigger this event, and note that **primary key columns for an + INSERTed row are explicitly omitted** from this event. In order to + intercept the newly INSERTed state of an object, the + :meth:`.SessionEvents.pending_to_persistent` and + :meth:`.MapperEvents.after_insert` are better choices. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. 
+ :param flush_context: Internal :class:`.UOWTransaction` object + which handles the details of the flush. + :param attrs: sequence of attribute names which + were populated. + + .. seealso:: + + :ref:`mapped_class_load_events` + + :ref:`orm_server_defaults` + + :ref:`metadata_defaults_toplevel` + + """ + + def expire(self, target: _O, attrs: Optional[Iterable[str]]) -> None: + """Receive an object instance after its attributes or some subset + have been expired. + + 'keys' is a list of attribute names. If None, the entire + state was expired. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param attrs: sequence of attribute + names which were expired, or None if all attributes were + expired. + + """ + + def pickle(self, target: _O, state_dict: _InstanceDict) -> None: + """Receive an object instance when its associated state is + being pickled. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param state_dict: the dictionary returned by + :class:`.InstanceState.__getstate__`, containing the state + to be pickled. + + """ + + def unpickle(self, target: _O, state_dict: _InstanceDict) -> None: + """Receive an object instance after its associated state has + been unpickled. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param state_dict: the dictionary sent to + :class:`.InstanceState.__setstate__`, containing the state + dictionary which was pickled. + + """ + + +class _EventsHold(event.RefCollection[_ET]): + """Hold onto listeners against unmapped, uninstrumented classes. + + Establish _listen() for that class' mapper/instrumentation when + those objects are created for that class. 
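+
+    e.g. a listener established against a class that is not yet mapped
+    (or against an unmapped base with ``propagate=True``) is stored here
+    and replayed once a mapper or class manager is created for that class.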
+ + """ + + all_holds: weakref.WeakKeyDictionary[Any, Any] + + def __init__( + self, + class_: Union[DeclarativeAttributeIntercept, DeclarativeMeta, type], + ) -> None: + self.class_ = class_ + + @classmethod + def _clear(cls) -> None: + cls.all_holds.clear() + + class HoldEvents(Generic[_ET2]): + _dispatch_target: Optional[Type[_ET2]] = None + + @classmethod + def _listen( + cls, + event_key: _EventKey[_ET2], + raw: bool = False, + propagate: bool = False, + retval: bool = False, + **kw: Any, + ) -> None: + target = event_key.dispatch_target + + if target.class_ in target.all_holds: + collection = target.all_holds[target.class_] + else: + collection = target.all_holds[target.class_] = {} + + event.registry._stored_in_collection(event_key, target) + collection[event_key._key] = ( + event_key, + raw, + propagate, + retval, + kw, + ) + + if propagate: + stack = list(target.class_.__subclasses__()) + while stack: + subclass = stack.pop(0) + stack.extend(subclass.__subclasses__()) + subject = target.resolve(subclass) + if subject is not None: + # we are already going through __subclasses__() + # so leave generic propagate flag False + event_key.with_dispatch_target(subject).listen( + raw=raw, propagate=False, retval=retval, **kw + ) + + def remove(self, event_key: _EventKey[_ET]) -> None: + target = event_key.dispatch_target + + if isinstance(target, _EventsHold): + collection = target.all_holds[target.class_] + del collection[event_key._key] + + @classmethod + def populate( + cls, + class_: Union[DeclarativeAttributeIntercept, DeclarativeMeta, type], + subject: Union[ClassManager[_O], Mapper[_O]], + ) -> None: + for subclass in class_.__mro__: + if subclass in cls.all_holds: + collection = cls.all_holds[subclass] + for ( + event_key, + raw, + propagate, + retval, + kw, + ) in collection.values(): + if propagate or subclass is class_: + # since we can't be sure in what order different + # classes in a hierarchy are triggered with + # populate(), we rely upon _EventsHold for all event + # assignment, instead of using the generic propagate + # flag. + event_key.with_dispatch_target(subject).listen( + raw=raw, propagate=False, retval=retval, **kw + ) + + +class _InstanceEventsHold(_EventsHold[_ET]): + all_holds: weakref.WeakKeyDictionary[Any, Any] = ( + weakref.WeakKeyDictionary() + ) + + def resolve(self, class_: Type[_O]) -> Optional[ClassManager[_O]]: + return instrumentation.opt_manager_of_class(class_) + + class HoldInstanceEvents(_EventsHold.HoldEvents[_ET], InstanceEvents): # type: ignore [misc] # noqa: E501 + pass + + dispatch = event.dispatcher(HoldInstanceEvents) + + +class MapperEvents(event.Events[mapperlib.Mapper[Any]]): + """Define events specific to mappings. + + e.g.:: + + from sqlalchemy import event + + def my_before_insert_listener(mapper, connection, target): + # execute a stored procedure upon INSERT, + # apply the value to the row to be inserted + target.calculated_value = connection.execute( + text("select my_special_function(%d)" % target.special_number) + ).scalar() + + # associate the listener function with SomeClass, + # to execute during the "before_insert" hook + event.listen( + SomeClass, 'before_insert', my_before_insert_listener) + + Available targets include: + + * mapped classes + * unmapped superclasses of mapped or to-be-mapped classes + (using the ``propagate=True`` flag) + * :class:`_orm.Mapper` objects + * the :class:`_orm.Mapper` class itself indicates listening for all + mappers. 
+ + Mapper events provide hooks into critical sections of the + mapper, including those related to object instrumentation, + object loading, and object persistence. In particular, the + persistence methods :meth:`~.MapperEvents.before_insert`, + and :meth:`~.MapperEvents.before_update` are popular + places to augment the state being persisted - however, these + methods operate with several significant restrictions. The + user is encouraged to evaluate the + :meth:`.SessionEvents.before_flush` and + :meth:`.SessionEvents.after_flush` methods as more + flexible and user-friendly hooks in which to apply + additional database state during a flush. + + When using :class:`.MapperEvents`, several modifiers are + available to the :func:`.event.listen` function. + + :param propagate=False: When True, the event listener should + be applied to all inheriting mappers and/or the mappers of + inheriting classes, as well as any + mapper which is the target of this listener. + :param raw=False: When True, the "target" argument passed + to applicable event listener functions will be the + instance's :class:`.InstanceState` management + object, rather than the mapped instance itself. + :param retval=False: when True, the user-defined event function + must have a return value, the purpose of which is either to + control subsequent event propagation, or to otherwise alter + the operation in progress by the mapper. Possible return + values are: + + * ``sqlalchemy.orm.interfaces.EXT_CONTINUE`` - continue event + processing normally. + * ``sqlalchemy.orm.interfaces.EXT_STOP`` - cancel all subsequent + event handlers in the chain. + * other values - the return value specified by specific listeners. + + """ + + _target_class_doc = "SomeClass" + _dispatch_target = mapperlib.Mapper + + @classmethod + def _new_mapper_instance( + cls, + class_: Union[DeclarativeAttributeIntercept, DeclarativeMeta, type], + mapper: Mapper[_O], + ) -> None: + _MapperEventsHold.populate(class_, mapper) + + @classmethod + @util.preload_module("sqlalchemy.orm") + def _accept_with( + cls, + target: Union[mapperlib.Mapper[Any], Type[mapperlib.Mapper[Any]]], + identifier: str, + ) -> Optional[Union[mapperlib.Mapper[Any], Type[mapperlib.Mapper[Any]]]]: + orm = util.preloaded.orm + + if target is orm.mapper: # type: ignore [attr-defined] + util.warn_deprecated( + "The `sqlalchemy.orm.mapper()` symbol is deprecated and " + "will be removed in a future release. For the mapper-wide " + "event target, use the 'sqlalchemy.orm.Mapper' class.", + "2.0", + ) + return mapperlib.Mapper + elif isinstance(target, type): + if issubclass(target, mapperlib.Mapper): + return target + else: + mapper = _mapper_or_none(target) + if mapper is not None: + return mapper + else: + return _MapperEventsHold(target) + else: + return target + + @classmethod + def _listen( + cls, + event_key: _EventKey[_ET], + raw: bool = False, + retval: bool = False, + propagate: bool = False, + **kw: Any, + ) -> None: + target, identifier, fn = ( + event_key.dispatch_target, + event_key.identifier, + event_key._listen_fn, + ) + + if ( + identifier in ("before_configured", "after_configured") + and target is not mapperlib.Mapper + ): + util.warn( + "'before_configured' and 'after_configured' ORM events " + "only invoke with the Mapper class " + "as the target." 
+ ) + + if not raw or not retval: + if not raw: + meth = getattr(cls, identifier) + try: + target_index = ( + inspect_getfullargspec(meth)[0].index("target") - 1 + ) + except ValueError: + target_index = None + + def wrap(*arg: Any, **kw: Any) -> Any: + if not raw and target_index is not None: + arg = list(arg) # type: ignore [assignment] + arg[target_index] = arg[target_index].obj() # type: ignore [index] # noqa: E501 + if not retval: + fn(*arg, **kw) + return interfaces.EXT_CONTINUE + else: + return fn(*arg, **kw) + + event_key = event_key.with_wrapper(wrap) + + if propagate: + for mapper in target.self_and_descendants: + event_key.with_dispatch_target(mapper).base_listen( + propagate=True, **kw + ) + else: + event_key.base_listen(**kw) + + @classmethod + def _clear(cls) -> None: + super()._clear() + _MapperEventsHold._clear() + + def instrument_class(self, mapper: Mapper[_O], class_: Type[_O]) -> None: + r"""Receive a class when the mapper is first constructed, + before instrumentation is applied to the mapped class. + + This event is the earliest phase of mapper construction. + Most attributes of the mapper are not yet initialized. To + receive an event within initial mapper construction where basic + state is available such as the :attr:`_orm.Mapper.attrs` collection, + the :meth:`_orm.MapperEvents.after_mapper_constructed` event may + be a better choice. + + This listener can either be applied to the :class:`_orm.Mapper` + class overall, or to any un-mapped class which serves as a base + for classes that will be mapped (using the ``propagate=True`` flag):: + + Base = declarative_base() + + @event.listens_for(Base, "instrument_class", propagate=True) + def on_new_class(mapper, cls_): + " ... " + + :param mapper: the :class:`_orm.Mapper` which is the target + of this event. + :param class\_: the mapped class. + + .. seealso:: + + :meth:`_orm.MapperEvents.after_mapper_constructed` + + """ + + def after_mapper_constructed( + self, mapper: Mapper[_O], class_: Type[_O] + ) -> None: + """Receive a class and mapper when the :class:`_orm.Mapper` has been + fully constructed. + + This event is called after the initial constructor for + :class:`_orm.Mapper` completes. This occurs after the + :meth:`_orm.MapperEvents.instrument_class` event and after the + :class:`_orm.Mapper` has done an initial pass of its arguments + to generate its collection of :class:`_orm.MapperProperty` objects, + which are accessible via the :meth:`_orm.Mapper.get_property` + method and the :attr:`_orm.Mapper.iterate_properties` attribute. + + This event differs from the + :meth:`_orm.MapperEvents.before_mapper_configured` event in that it + is invoked within the constructor for :class:`_orm.Mapper`, rather + than within the :meth:`_orm.registry.configure` process. Currently, + this event is the only one which is appropriate for handlers that + wish to create additional mapped classes in response to the + construction of this :class:`_orm.Mapper`, which will be part of the + same configure step when :meth:`_orm.registry.configure` next runs. + + .. versionadded:: 2.0.2 + + .. seealso:: + + :ref:`examples_versioning` - an example which illustrates the use + of the :meth:`_orm.MapperEvents.before_mapper_configured` + event to create new mappers to record change-audit histories on + objects. + + """ + + def before_mapper_configured( + self, mapper: Mapper[_O], class_: Type[_O] + ) -> None: + """Called right before a specific mapper is to be configured. 
+
+        This event is intended to allow a specific mapper to be skipped during
+        the configure step, by returning the :attr:`.orm.interfaces.EXT_SKIP`
+        symbol which indicates to the :func:`.configure_mappers` call that this
+        particular mapper (or hierarchy of mappers, if ``propagate=True`` is
+        used) should be skipped in the current configuration run. When one or
+        more mappers are skipped, the "new mappers" flag will remain set,
+        meaning the :func:`.configure_mappers` function will continue to be
+        called when mappers are used, to continue to try to configure all
+        available mappers.
+
+        In comparison to the other configure-level events,
+        :meth:`.MapperEvents.before_configured`,
+        :meth:`.MapperEvents.after_configured`, and
+        :meth:`.MapperEvents.mapper_configured`, the
+        :meth:`.MapperEvents.before_mapper_configured` event provides for a
+        meaningful return value when it is registered with the ``retval=True``
+        parameter.
+
+        .. versionadded:: 1.3
+
+        e.g.::
+
+            from sqlalchemy.orm import EXT_SKIP
+
+            Base = declarative_base()
+
+            DontConfigureBase = declarative_base()
+
+            @event.listens_for(
+                DontConfigureBase,
+                "before_mapper_configured", retval=True, propagate=True)
+            def dont_configure(mapper, cls):
+                return EXT_SKIP
+
+
+        .. seealso::
+
+            :meth:`.MapperEvents.before_configured`
+
+            :meth:`.MapperEvents.after_configured`
+
+            :meth:`.MapperEvents.mapper_configured`
+
+        """
+
+    def mapper_configured(self, mapper: Mapper[_O], class_: Type[_O]) -> None:
+        r"""Called when a specific mapper has completed its own configuration
+        within the scope of the :func:`.configure_mappers` call.
+
+        The :meth:`.MapperEvents.mapper_configured` event is invoked
+        for each mapper that is encountered when the
+        :func:`_orm.configure_mappers` function proceeds through the current
+        list of not-yet-configured mappers.
+        :func:`_orm.configure_mappers` is typically invoked
+        automatically as mappings are first used, as well as each time
+        new mappers have been made available and new mapper use is
+        detected.
+
+        When the event is called, the mapper should be in its final
+        state, but **not including backrefs** that may be invoked from
+        other mappers; they might still be pending within the
+        configuration operation. Bidirectional relationships that
+        are instead configured via the
+        :paramref:`.orm.relationship.back_populates` argument
+        *will* be fully available, since this style of relationship does not
+        rely upon other possibly-not-configured mappers to know that they
+        exist.
+
+        For an event that is guaranteed to have **all** mappers ready
+        to go including backrefs that are defined only on other
+        mappings, use the :meth:`.MapperEvents.after_configured`
+        event; this event invokes only after all known mappings have been
+        fully configured.
+
+        The :meth:`.MapperEvents.mapper_configured` event, unlike
+        :meth:`.MapperEvents.before_configured` or
+        :meth:`.MapperEvents.after_configured`,
+        is called for each mapper/class individually, and the mapper is
+        passed to the event itself. It also is called exactly once for
+        a particular mapper. The event is therefore useful for
+        configurational steps that benefit from being invoked just once
+        on a specific mapper basis, which don't require that "backref"
+        configurations are necessarily ready yet.
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param class\_: the mapped class.
+
+        .. seealso::
+
+            :meth:`.MapperEvents.before_configured`
+
+            :meth:`.MapperEvents.after_configured`
+
+            :meth:`.MapperEvents.before_mapper_configured`
+
+        """
+        # TODO: need coverage for this event
+
+    def before_configured(self) -> None:
+        """Called before a series of mappers have been configured.
+
+        The :meth:`.MapperEvents.before_configured` event is invoked
+        each time the :func:`_orm.configure_mappers` function is
+        invoked, before the function has done any of its work.
+        :func:`_orm.configure_mappers` is typically invoked
+        automatically as mappings are first used, as well as each time
+        new mappers have been made available and new mapper use is
+        detected.
+
+        This event can **only** be applied to the :class:`_orm.Mapper` class,
+        and not to individual mappings or mapped classes. It is only invoked
+        for all mappings as a whole::
+
+            from sqlalchemy.orm import Mapper
+
+            @event.listens_for(Mapper, "before_configured")
+            def go():
+                ...
+
+        Contrast this event to :meth:`.MapperEvents.after_configured`,
+        which is invoked after the series of mappers has been configured,
+        as well as :meth:`.MapperEvents.before_mapper_configured`
+        and :meth:`.MapperEvents.mapper_configured`, which are both invoked
+        on a per-mapper basis.
+
+        Theoretically this event is called once per
+        application, but is actually called any time new mappers
+        are to be affected by a :func:`_orm.configure_mappers`
+        call. If new mappings are constructed after existing ones have
+        already been used, this event will likely be called again. To ensure
+        that a particular event is only called once and no further, the
+        ``once=True`` argument (new in 0.9.4) can be applied::
+
+            from sqlalchemy.orm import Mapper
+
+            @event.listens_for(Mapper, "before_configured", once=True)
+            def go():
+                ...
+
+
+        .. seealso::
+
+            :meth:`.MapperEvents.before_mapper_configured`
+
+            :meth:`.MapperEvents.mapper_configured`
+
+            :meth:`.MapperEvents.after_configured`
+
+        """
+
+    def after_configured(self) -> None:
+        """Called after a series of mappers have been configured.
+
+        The :meth:`.MapperEvents.after_configured` event is invoked
+        each time the :func:`_orm.configure_mappers` function is
+        invoked, after the function has completed its work.
+        :func:`_orm.configure_mappers` is typically invoked
+        automatically as mappings are first used, as well as each time
+        new mappers have been made available and new mapper use is
+        detected.
+
+        Contrast this event to the :meth:`.MapperEvents.mapper_configured`
+        event, which is called on a per-mapper basis while the configuration
+        operation proceeds; unlike that event, when this event is invoked,
+        all cross-configurations (e.g. backrefs) will also have been made
+        available for any mappers that were pending.
+        Also contrast to :meth:`.MapperEvents.before_configured`,
+        which is invoked before the series of mappers has been configured.
+
+        This event can **only** be applied to the :class:`_orm.Mapper` class,
+        and not to individual mappings or
+        mapped classes. It is only invoked for all mappings as a whole::
+
+            from sqlalchemy.orm import Mapper
+
+            @event.listens_for(Mapper, "after_configured")
+            def go():
+                ...
+
+        Theoretically this event is called once per
+        application, but is actually called any time new mappers
+        have been affected by a :func:`_orm.configure_mappers`
+        call. If new mappings are constructed after existing ones have
+        already been used, this event will likely be called again. To ensure
+        that a particular event is only called once and no further, the
+        ``once=True`` argument (new in 0.9.4) can be applied::
+
+            from sqlalchemy.orm import Mapper
+
+            @event.listens_for(Mapper, "after_configured", once=True)
+            def go():
+                ...
+
+        .. seealso::
+
+            :meth:`.MapperEvents.before_mapper_configured`
+
+            :meth:`.MapperEvents.mapper_configured`
+
+            :meth:`.MapperEvents.before_configured`
+
+        """
+
+    def before_insert(
+        self, mapper: Mapper[_O], connection: Connection, target: _O
+    ) -> None:
+        """Receive an object instance before an INSERT statement
+        is emitted corresponding to that instance.
+
+        .. note:: this event **only** applies to the
+           :ref:`session flush operation <session_flushing>`
+           and does **not** apply to the ORM DML operations described at
+           :ref:`orm_expression_update_delete`. To intercept ORM
+           DML events, use :meth:`_orm.SessionEvents.do_orm_execute`.
+
+        This event is used to modify local, non-object related
+        attributes on the instance before an INSERT occurs, as well
+        as to emit additional SQL statements on the given
+        connection.
+
+        The event is often called for a batch of objects of the
+        same class before their INSERT statements are emitted at
+        once in a later step. In the extremely rare case that
+        this is not desirable, the :class:`_orm.Mapper` object can be
+        configured with ``batch=False``, which will cause
+        batches of instances to be broken up into individual
+        (and more poorly performing) event->persist->event
+        steps.
+
+        .. warning::
+
+            Mapper-level flush events only allow **very limited operations**,
+            on attributes local to the row being operated upon only,
+            as well as allowing any SQL to be emitted on the given
+            :class:`_engine.Connection`. **Please read fully** the notes
+            at :ref:`session_persistence_mapper` for guidelines on using
+            these methods; generally, the :meth:`.SessionEvents.before_flush`
+            method should be preferred for general on-flush changes.
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param connection: the :class:`_engine.Connection` being used to
+         emit INSERT statements for this instance. This
+         provides a handle into the current transaction on the
+         target database specific to this instance.
+        :param target: the mapped instance being persisted. If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :return: No return value is supported by this event.
+
+        .. seealso::
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def after_insert(
+        self, mapper: Mapper[_O], connection: Connection, target: _O
+    ) -> None:
+        """Receive an object instance after an INSERT statement
+        is emitted corresponding to that instance.
+
+        .. note:: this event **only** applies to the
+           :ref:`session flush operation <session_flushing>`
+           and does **not** apply to the ORM DML operations described at
+           :ref:`orm_expression_update_delete`. To intercept ORM
+           DML events, use :meth:`_orm.SessionEvents.do_orm_execute`.
+
+        This event is used to modify in-Python-only
+        state on the instance after an INSERT occurs, as well
+        as to emit additional SQL statements on the given
+        connection.
+
+        The event is often called for a batch of objects of the
+        same class after their INSERT statements have been
+        emitted at once in a previous step. In the extremely
+        rare case that this is not desirable, the
+        :class:`_orm.Mapper` object can be configured with ``batch=False``,
+        which will cause batches of instances to be broken up
+        into individual (and more poorly performing)
+        event->persist->event steps.
+
+        .. warning::
+
+            Mapper-level flush events only allow **very limited operations**,
+            on attributes local to the row being operated upon only,
+            as well as allowing any SQL to be emitted on the given
+            :class:`_engine.Connection`. **Please read fully** the notes
+            at :ref:`session_persistence_mapper` for guidelines on using
+            these methods; generally, the :meth:`.SessionEvents.before_flush`
+            method should be preferred for general on-flush changes.
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param connection: the :class:`_engine.Connection` being used to
+         emit INSERT statements for this instance. This
+         provides a handle into the current transaction on the
+         target database specific to this instance.
+        :param target: the mapped instance being persisted. If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :return: No return value is supported by this event.
+
+        .. seealso::
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def before_update(
+        self, mapper: Mapper[_O], connection: Connection, target: _O
+    ) -> None:
+        """Receive an object instance before an UPDATE statement
+        is emitted corresponding to that instance.
+
+        .. note:: this event **only** applies to the
+           :ref:`session flush operation <session_flushing>`
+           and does **not** apply to the ORM DML operations described at
+           :ref:`orm_expression_update_delete`. To intercept ORM
+           DML events, use :meth:`_orm.SessionEvents.do_orm_execute`.
+
+        This event is used to modify local, non-object related
+        attributes on the instance before an UPDATE occurs, as well
+        as to emit additional SQL statements on the given
+        connection.
+
+        This method is called for all instances that are
+        marked as "dirty", *even those which have no net changes
+        to their column-based attributes*. An object is marked
+        as dirty when any of its column-based attributes have a
+        "set attribute" operation called or when any of its
+        collections are modified. If, at update time, no
+        column-based attributes have any net changes, no UPDATE
+        statement will be issued. This means that an instance
+        being sent to :meth:`~.MapperEvents.before_update` is
+        *not* a guarantee that an UPDATE statement will be
+        issued, although you can affect the outcome here by
+        modifying attributes so that a net change in value does
+        exist.
+
+        To detect if the column-based attributes on the object have net
+        changes, and will therefore generate an UPDATE statement, use
+        ``object_session(instance).is_modified(instance,
+        include_collections=False)``.
+
+        The event is often called for a batch of objects of the
+        same class before their UPDATE statements are emitted at
+        once in a later step. In the extremely rare case that
+        this is not desirable, the :class:`_orm.Mapper` can be
+        configured with ``batch=False``, which will cause
+        batches of instances to be broken up into individual
+        (and more poorly performing) event->persist->event
+        steps.
+
+        .. warning::
+
+            Mapper-level flush events only allow **very limited operations**,
+            on attributes local to the row being operated upon only,
+            as well as allowing any SQL to be emitted on the given
+            :class:`_engine.Connection`. **Please read fully** the notes
+            at :ref:`session_persistence_mapper` for guidelines on using
+            these methods; generally, the :meth:`.SessionEvents.before_flush`
+            method should be preferred for general on-flush changes.
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param connection: the :class:`_engine.Connection` being used to
+         emit UPDATE statements for this instance. This
+         provides a handle into the current transaction on the
+         target database specific to this instance.
+        :param target: the mapped instance being persisted. If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :return: No return value is supported by this event.
+
+        .. seealso::
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def after_update(
+        self, mapper: Mapper[_O], connection: Connection, target: _O
+    ) -> None:
+        """Receive an object instance after an UPDATE statement
+        is emitted corresponding to that instance.
+
+        .. note:: this event **only** applies to the
+           :ref:`session flush operation <session_flushing>`
+           and does **not** apply to the ORM DML operations described at
+           :ref:`orm_expression_update_delete`. To intercept ORM
+           DML events, use :meth:`_orm.SessionEvents.do_orm_execute`.
+
+        This event is used to modify in-Python-only
+        state on the instance after an UPDATE occurs, as well
+        as to emit additional SQL statements on the given
+        connection.
+
+        This method is called for all instances that are
+        marked as "dirty", *even those which have no net changes
+        to their column-based attributes*, and for which
+        no UPDATE statement has proceeded. An object is marked
+        as dirty when any of its column-based attributes have a
+        "set attribute" operation called or when any of its
+        collections are modified. If, at update time, no
+        column-based attributes have any net changes, no UPDATE
+        statement will be issued. This means that an instance
+        being sent to :meth:`~.MapperEvents.after_update` is
+        *not* a guarantee that an UPDATE statement has been
+        issued.
+
+        To detect if the column-based attributes on the object have net
+        changes, and therefore resulted in an UPDATE statement, use
+        ``object_session(instance).is_modified(instance,
+        include_collections=False)``.
+
+        The event is often called for a batch of objects of the
+        same class after their UPDATE statements have been emitted at
+        once in a previous step. In the extremely rare case that
+        this is not desirable, the :class:`_orm.Mapper` can be
+        configured with ``batch=False``, which will cause
+        batches of instances to be broken up into individual
+        (and more poorly performing) event->persist->event
+        steps.
+
+        .. warning::
+
+            Mapper-level flush events only allow **very limited operations**,
+            on attributes local to the row being operated upon only,
+            as well as allowing any SQL to be emitted on the given
+            :class:`_engine.Connection`. **Please read fully** the notes
+            at :ref:`session_persistence_mapper` for guidelines on using
+            these methods; generally, the :meth:`.SessionEvents.before_flush`
+            method should be preferred for general on-flush changes.
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param connection: the :class:`_engine.Connection` being used to
+         emit UPDATE statements for this instance. This
+         provides a handle into the current transaction on the
+         target database specific to this instance.
+        :param target: the mapped instance being persisted. If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :return: No return value is supported by this event.
+
+        .. seealso::
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def before_delete(
+        self, mapper: Mapper[_O], connection: Connection, target: _O
+    ) -> None:
+        """Receive an object instance before a DELETE statement
+        is emitted corresponding to that instance.
+
+        .. note:: this event **only** applies to the
+           :ref:`session flush operation <session_flushing>`
+           and does **not** apply to the ORM DML operations described at
+           :ref:`orm_expression_update_delete`. To intercept ORM
+           DML events, use :meth:`_orm.SessionEvents.do_orm_execute`.
+
+        This event is used to emit additional SQL statements on
+        the given connection as well as to perform application
+        specific bookkeeping related to a deletion event.
+
+        The event is often called for a batch of objects of the
+        same class before their DELETE statements are emitted at
+        once in a later step.
+
+        .. warning::
+
+            Mapper-level flush events only allow **very limited operations**,
+            on attributes local to the row being operated upon only,
+            as well as allowing any SQL to be emitted on the given
+            :class:`_engine.Connection`. **Please read fully** the notes
+            at :ref:`session_persistence_mapper` for guidelines on using
+            these methods; generally, the :meth:`.SessionEvents.before_flush`
+            method should be preferred for general on-flush changes.
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param connection: the :class:`_engine.Connection` being used to
+         emit DELETE statements for this instance. This
+         provides a handle into the current transaction on the
+         target database specific to this instance.
+        :param target: the mapped instance being deleted. If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :return: No return value is supported by this event.
+
+        .. seealso::
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def after_delete(
+        self, mapper: Mapper[_O], connection: Connection, target: _O
+    ) -> None:
+        """Receive an object instance after a DELETE statement
+        has been emitted corresponding to that instance.
+
+        .. note:: this event **only** applies to the
+           :ref:`session flush operation <session_flushing>`
+           and does **not** apply to the ORM DML operations described at
+           :ref:`orm_expression_update_delete`. To intercept ORM
+           DML events, use :meth:`_orm.SessionEvents.do_orm_execute`.
+
+        This event is used to emit additional SQL statements on
+        the given connection as well as to perform application
+        specific bookkeeping related to a deletion event.
+
+        The event is often called for a batch of objects of the
+        same class after their DELETE statements have been emitted at
+        once in a previous step.
+
+        .. warning::
+
+            Mapper-level flush events only allow **very limited operations**,
+            on attributes local to the row being operated upon only,
+            as well as allowing any SQL to be emitted on the given
+            :class:`_engine.Connection`. **Please read fully** the notes
+            at :ref:`session_persistence_mapper` for guidelines on using
+            these methods; generally, the :meth:`.SessionEvents.before_flush`
+            method should be preferred for general on-flush changes.
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+ :param connection: the :class:`_engine.Connection` being used to + emit DELETE statements for this instance. This + provides a handle into the current transaction on the + target database specific to this instance. + :param target: the mapped instance being deleted. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :return: No return value is supported by this event. + + .. seealso:: + + :ref:`session_persistence_events` + + """ + + +class _MapperEventsHold(_EventsHold[_ET]): + all_holds = weakref.WeakKeyDictionary() + + def resolve( + self, class_: Union[Type[_T], _InternalEntityType[_T]] + ) -> Optional[Mapper[_T]]: + return _mapper_or_none(class_) + + class HoldMapperEvents(_EventsHold.HoldEvents[_ET], MapperEvents): # type: ignore [misc] # noqa: E501 + pass + + dispatch = event.dispatcher(HoldMapperEvents) + + +_sessionevents_lifecycle_event_names: Set[str] = set() + + +class SessionEvents(event.Events[Session]): + """Define events specific to :class:`.Session` lifecycle. + + e.g.:: + + from sqlalchemy import event + from sqlalchemy.orm import sessionmaker + + def my_before_commit(session): + print("before commit!") + + Session = sessionmaker() + + event.listen(Session, "before_commit", my_before_commit) + + The :func:`~.event.listen` function will accept + :class:`.Session` objects as well as the return result + of :class:`~.sessionmaker()` and :class:`~.scoped_session()`. + + Additionally, it accepts the :class:`.Session` class which + will apply listeners to all :class:`.Session` instances + globally. + + :param raw=False: When True, the "target" argument passed + to applicable event listener functions that work on individual + objects will be the instance's :class:`.InstanceState` management + object, rather than the mapped instance itself. + + .. versionadded:: 1.3.14 + + :param restore_load_context=False: Applies to the + :meth:`.SessionEvents.loaded_as_persistent` event. Restores the loader + context of the object when the event hook is complete, so that ongoing + eager load operations continue to target the object appropriately. A + warning is emitted if the object is moved to a new loader context from + within this event if this flag is not set. + + .. versionadded:: 1.3.14 + + """ + + _target_class_doc = "SomeSessionClassOrObject" + + _dispatch_target = Session + + def _lifecycle_event( # type: ignore [misc] + fn: Callable[[SessionEvents, Session, Any], None] + ) -> Callable[[SessionEvents, Session, Any], None]: + _sessionevents_lifecycle_event_names.add(fn.__name__) + return fn + + @classmethod + def _accept_with( # type: ignore [return] + cls, target: Any, identifier: str + ) -> Union[Session, type]: + if isinstance(target, scoped_session): + target = target.session_factory + if not isinstance(target, sessionmaker) and ( + not isinstance(target, type) or not issubclass(target, Session) + ): + raise exc.ArgumentError( + "Session event listen on a scoped_session " + "requires that its creation callable " + "is associated with the Session class." 
+ ) + + if isinstance(target, sessionmaker): + return target.class_ + elif isinstance(target, type): + if issubclass(target, scoped_session): + return Session + elif issubclass(target, Session): + return target + elif isinstance(target, Session): + return target + elif hasattr(target, "_no_async_engine_events"): + target._no_async_engine_events() + else: + # allows alternate SessionEvents-like-classes to be consulted + return event.Events._accept_with(target, identifier) # type: ignore [return-value] # noqa: E501 + + @classmethod + def _listen( + cls, + event_key: Any, + *, + raw: bool = False, + restore_load_context: bool = False, + **kw: Any, + ) -> None: + is_instance_event = ( + event_key.identifier in _sessionevents_lifecycle_event_names + ) + + if is_instance_event: + if not raw or restore_load_context: + fn = event_key._listen_fn + + def wrap( + session: Session, + state: InstanceState[_O], + *arg: Any, + **kw: Any, + ) -> Optional[Any]: + if not raw: + target = state.obj() + if target is None: + # existing behavior is that if the object is + # garbage collected, no event is emitted + return None + else: + target = state # type: ignore [assignment] + if restore_load_context: + runid = state.runid + try: + return fn(session, target, *arg, **kw) + finally: + if restore_load_context: + state.runid = runid + + event_key = event_key.with_wrapper(wrap) + + event_key.base_listen(**kw) + + def do_orm_execute(self, orm_execute_state: ORMExecuteState) -> None: + """Intercept statement executions that occur on behalf of an + ORM :class:`.Session` object. + + This event is invoked for all top-level SQL statements invoked from the + :meth:`_orm.Session.execute` method, as well as related methods such as + :meth:`_orm.Session.scalars` and :meth:`_orm.Session.scalar`. As of + SQLAlchemy 1.4, all ORM queries that run through the + :meth:`_orm.Session.execute` method as well as related methods + :meth:`_orm.Session.scalars`, :meth:`_orm.Session.scalar` etc. + will participate in this event. + This event hook does **not** apply to the queries that are + emitted internally within the ORM flush process, i.e. the + process described at :ref:`session_flushing`. + + .. note:: The :meth:`_orm.SessionEvents.do_orm_execute` event hook + is triggered **for ORM statement executions only**, meaning those + invoked via the :meth:`_orm.Session.execute` and similar methods on + the :class:`_orm.Session` object. It does **not** trigger for + statements that are invoked by SQLAlchemy Core only, i.e. statements + invoked directly using :meth:`_engine.Connection.execute` or + otherwise originating from an :class:`_engine.Engine` object without + any :class:`_orm.Session` involved. To intercept **all** SQL + executions regardless of whether the Core or ORM APIs are in use, + see the event hooks at :class:`.ConnectionEvents`, such as + :meth:`.ConnectionEvents.before_execute` and + :meth:`.ConnectionEvents.before_cursor_execute`. + + Also, this event hook does **not** apply to queries that are + emitted internally within the ORM flush process, + i.e. the process described at :ref:`session_flushing`; to + intercept steps within the flush process, see the event + hooks described at :ref:`session_persistence_events` as + well as :ref:`session_persistence_mapper`. + + This event is a ``do_`` event, meaning it has the capability to replace + the operation that the :meth:`_orm.Session.execute` method normally + performs. 
The intended use for this includes sharding and
+        result-caching schemes which may seek to invoke the same statement
+        across multiple database connections, returning a result that is
+        merged from each of them, or which don't invoke the statement at all,
+        instead returning data from a cache.
+
+        The hook intends to replace the use of the
+        ``Query._execute_and_instances`` method that could be subclassed prior
+        to SQLAlchemy 1.4.
+
+        :param orm_execute_state: an instance of :class:`.ORMExecuteState`
+         which contains all information about the current execution, as well
+         as helper functions used to derive other commonly required
+         information. See that object for details.
+
+        .. seealso::
+
+            :ref:`session_execute_events` - top level documentation on how
+            to use :meth:`_orm.SessionEvents.do_orm_execute`
+
+            :class:`.ORMExecuteState` - the object passed to the
+            :meth:`_orm.SessionEvents.do_orm_execute` event which contains
+            all information about the statement to be invoked. It also
+            provides an interface to extend the current statement, options,
+            and parameters as well as an option that allows programmatic
+            invocation of the statement at any point.
+
+            :ref:`examples_session_orm_events` - includes examples of using
+            :meth:`_orm.SessionEvents.do_orm_execute`
+
+            :ref:`examples_caching` - an example of how to integrate
+            Dogpile caching with the ORM :class:`_orm.Session` making use
+            of the :meth:`_orm.SessionEvents.do_orm_execute` event hook.
+
+            :ref:`examples_sharding` - the Horizontal Sharding example /
+            extension relies upon the
+            :meth:`_orm.SessionEvents.do_orm_execute` event hook to invoke a
+            SQL statement on multiple backends and return a merged result.
+
+        .. versionadded:: 1.4
+
+        """
+
+    def after_transaction_create(
+        self, session: Session, transaction: SessionTransaction
+    ) -> None:
+        """Execute when a new :class:`.SessionTransaction` is created.
+
+        This event differs from :meth:`~.SessionEvents.after_begin`
+        in that it occurs for each :class:`.SessionTransaction`
+        overall, as opposed to when transactions are begun
+        on individual database connections. It is also invoked
+        for nested transactions and subtransactions, and is always
+        matched by a corresponding
+        :meth:`~.SessionEvents.after_transaction_end` event
+        (assuming normal operation of the :class:`.Session`).
+
+        :param session: the target :class:`.Session`.
+        :param transaction: the target :class:`.SessionTransaction`.
+
+        To detect if this is the outermost
+        :class:`.SessionTransaction`, as opposed to a "subtransaction" or a
+        SAVEPOINT, test that the :attr:`.SessionTransaction.parent` attribute
+        is ``None``::
+
+            @event.listens_for(session, "after_transaction_create")
+            def after_transaction_create(session, transaction):
+                if transaction.parent is None:
+                    # work with top-level transaction
+                    ...
+
+        To detect if the :class:`.SessionTransaction` is a SAVEPOINT, use the
+        :attr:`.SessionTransaction.nested` attribute::
+
+            @event.listens_for(session, "after_transaction_create")
+            def after_transaction_create(session, transaction):
+                if transaction.nested:
+                    # work with SAVEPOINT transaction
+                    ...
+
+
+        .. seealso::
+
+            :class:`.SessionTransaction`
+
+            :meth:`~.SessionEvents.after_transaction_end`
+
+        """
+
+    def after_transaction_end(
+        self, session: Session, transaction: SessionTransaction
+    ) -> None:
+        """Execute when the span of a :class:`.SessionTransaction` ends.
+
+        This event differs from :meth:`~.SessionEvents.after_commit`
+        in that it corresponds to all :class:`.SessionTransaction`
+        objects in use, including those for nested transactions
+        and subtransactions, and is always matched by a corresponding
+        :meth:`~.SessionEvents.after_transaction_create` event.
+
+        :param session: the target :class:`.Session`.
+        :param transaction: the target :class:`.SessionTransaction`.
+
+        To detect if this is the outermost
+        :class:`.SessionTransaction`, as opposed to a "subtransaction" or a
+        SAVEPOINT, test that the :attr:`.SessionTransaction.parent` attribute
+        is ``None``::
+
+            @event.listens_for(session, "after_transaction_end")
+            def after_transaction_end(session, transaction):
+                if transaction.parent is None:
+                    # work with top-level transaction
+                    ...
+
+        To detect if the :class:`.SessionTransaction` is a SAVEPOINT, use the
+        :attr:`.SessionTransaction.nested` attribute::
+
+            @event.listens_for(session, "after_transaction_end")
+            def after_transaction_end(session, transaction):
+                if transaction.nested:
+                    # work with SAVEPOINT transaction
+                    ...
+
+
+        .. seealso::
+
+            :class:`.SessionTransaction`
+
+            :meth:`~.SessionEvents.after_transaction_create`
+
+        """
+
+    def before_commit(self, session: Session) -> None:
+        """Execute before commit is called.
+
+        .. note::
+
+            The :meth:`~.SessionEvents.before_commit` hook is *not* per-flush,
+            that is, the :class:`.Session` can emit SQL to the database
+            many times within the scope of a transaction.
+            For interception of these events, use the
+            :meth:`~.SessionEvents.before_flush`,
+            :meth:`~.SessionEvents.after_flush`, or
+            :meth:`~.SessionEvents.after_flush_postexec`
+            events.
+
+        :param session: The target :class:`.Session`.
+
+        .. seealso::
+
+            :meth:`~.SessionEvents.after_commit`
+
+            :meth:`~.SessionEvents.after_begin`
+
+            :meth:`~.SessionEvents.after_transaction_create`
+
+            :meth:`~.SessionEvents.after_transaction_end`
+
+        """
+
+    def after_commit(self, session: Session) -> None:
+        """Execute after a commit has occurred.
+
+        .. note::
+
+            The :meth:`~.SessionEvents.after_commit` hook is *not* per-flush,
+            that is, the :class:`.Session` can emit SQL to the database
+            many times within the scope of a transaction.
+            For interception of these events, use the
+            :meth:`~.SessionEvents.before_flush`,
+            :meth:`~.SessionEvents.after_flush`, or
+            :meth:`~.SessionEvents.after_flush_postexec`
+            events.
+
+        .. note::
+
+            The :class:`.Session` is not in an active transaction
+            when the :meth:`~.SessionEvents.after_commit` event is invoked,
+            and therefore cannot emit SQL. To emit SQL corresponding to
+            every transaction, use the :meth:`~.SessionEvents.before_commit`
+            event.
+
+        :param session: The target :class:`.Session`.
+
+        .. seealso::
+
+            :meth:`~.SessionEvents.before_commit`
+
+            :meth:`~.SessionEvents.after_begin`
+
+            :meth:`~.SessionEvents.after_transaction_create`
+
+            :meth:`~.SessionEvents.after_transaction_end`
+
+        """
+
+    def after_rollback(self, session: Session) -> None:
+        """Execute after a real DBAPI rollback has occurred.
+
+        Note that this event only fires when the *actual* rollback against
+        the database occurs - it does *not* fire each time the
+        :meth:`.Session.rollback` method is called, if the underlying
+        DBAPI transaction has already been rolled back. In many
+        cases, the :class:`.Session` will not be in
+        an "active" state during this event, as the current
+        transaction is not valid.
To acquire a :class:`.Session` + which is active after the outermost rollback has proceeded, + use the :meth:`.SessionEvents.after_soft_rollback` event, checking the + :attr:`.Session.is_active` flag. + + :param session: The target :class:`.Session`. + + """ + + def after_soft_rollback( + self, session: Session, previous_transaction: SessionTransaction + ) -> None: + """Execute after any rollback has occurred, including "soft" + rollbacks that don't actually emit at the DBAPI level. + + This corresponds to both nested and outer rollbacks, i.e. + the innermost rollback that calls the DBAPI's + rollback() method, as well as the enclosing rollback + calls that only pop themselves from the transaction stack. + + The given :class:`.Session` can be used to invoke SQL and + :meth:`.Session.query` operations after an outermost rollback + by first checking the :attr:`.Session.is_active` flag:: + + @event.listens_for(Session, "after_soft_rollback") + def do_something(session, previous_transaction): + if session.is_active: + session.execute(text("select * from some_table")) + + :param session: The target :class:`.Session`. + :param previous_transaction: The :class:`.SessionTransaction` + transactional marker object which was just closed. The current + :class:`.SessionTransaction` for the given :class:`.Session` is + available via the :attr:`.Session.transaction` attribute. + + """ + + def before_flush( + self, + session: Session, + flush_context: UOWTransaction, + instances: Optional[Sequence[_O]], + ) -> None: + """Execute before flush process has started. + + :param session: The target :class:`.Session`. + :param flush_context: Internal :class:`.UOWTransaction` object + which handles the details of the flush. + :param instances: Usually ``None``, this is the collection of + objects which can be passed to the :meth:`.Session.flush` method + (note this usage is deprecated). + + .. seealso:: + + :meth:`~.SessionEvents.after_flush` + + :meth:`~.SessionEvents.after_flush_postexec` + + :ref:`session_persistence_events` + + """ + + def after_flush( + self, session: Session, flush_context: UOWTransaction + ) -> None: + """Execute after flush has completed, but before commit has been + called. + + Note that the session's state is still in pre-flush, i.e. 'new', + 'dirty', and 'deleted' lists still show pre-flush state as well + as the history settings on instance attributes. + + .. warning:: This event runs after the :class:`.Session` has emitted + SQL to modify the database, but **before** it has altered its + internal state to reflect those changes, including that newly + inserted objects are placed into the identity map. ORM operations + emitted within this event such as loads of related items + may produce new identity map entries that will immediately + be replaced, sometimes causing confusing results. SQLAlchemy will + emit a warning for this condition as of version 1.3.9. + + :param session: The target :class:`.Session`. + :param flush_context: Internal :class:`.UOWTransaction` object + which handles the details of the flush. + + .. seealso:: + + :meth:`~.SessionEvents.before_flush` + + :meth:`~.SessionEvents.after_flush_postexec` + + :ref:`session_persistence_events` + + """ + + def after_flush_postexec( + self, session: Session, flush_context: UOWTransaction + ) -> None: + """Execute after flush has completed, and after the post-exec + state occurs. + + This will be when the 'new', 'dirty', and 'deleted' lists are in + their final state. 
An actual commit() may or may not have
+        occurred, depending on whether or not the flush started its own
+        transaction or participated in a larger transaction.
+
+        :param session: The target :class:`.Session`.
+        :param flush_context: Internal :class:`.UOWTransaction` object
+         which handles the details of the flush.
+
+
+        .. seealso::
+
+            :meth:`~.SessionEvents.before_flush`
+
+            :meth:`~.SessionEvents.after_flush`
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def after_begin(
+        self,
+        session: Session,
+        transaction: SessionTransaction,
+        connection: Connection,
+    ) -> None:
+        """Execute after a transaction is begun on a connection.
+
+        .. note:: This event is called within the process of the
+           :class:`_orm.Session` modifying its own internal state.
+           To invoke SQL operations within this hook, use the
+           :class:`_engine.Connection` provided to the event;
+           do not run SQL operations using the :class:`_orm.Session`
+           directly.
+
+        :param session: The target :class:`.Session`.
+        :param transaction: The :class:`.SessionTransaction`.
+        :param connection: The :class:`_engine.Connection` object
+         which will be used for SQL statements.
+
+        .. seealso::
+
+            :meth:`~.SessionEvents.before_commit`
+
+            :meth:`~.SessionEvents.after_commit`
+
+            :meth:`~.SessionEvents.after_transaction_create`
+
+            :meth:`~.SessionEvents.after_transaction_end`
+
+        """
+
+    @_lifecycle_event
+    def before_attach(self, session: Session, instance: _O) -> None:
+        """Execute before an instance is attached to a session.
+
+        This is called before an add, delete or merge causes
+        the object to be part of the session.
+
+        .. seealso::
+
+            :meth:`~.SessionEvents.after_attach`
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def after_attach(self, session: Session, instance: _O) -> None:
+        """Execute after an instance is attached to a session.
+
+        This is called after an add, delete or merge.
+
+        .. note::
+
+           As of 0.8, this event fires off *after* the item
+           has been fully associated with the session, which is
+           different than previous releases. For event
+           handlers that require the object not yet
+           be part of session state (such as handlers which
+           may autoflush while the target object is not
+           yet complete) consider the
+           new :meth:`.before_attach` event.
+
+        .. seealso::
+
+            :meth:`~.SessionEvents.before_attach`
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @event._legacy_signature(
+        "0.9",
+        ["session", "query", "query_context", "result"],
+        lambda update_context: (
+            update_context.session,
+            update_context.query,
+            None,
+            update_context.result,
+        ),
+    )
+    def after_bulk_update(self, update_context: _O) -> None:
+        """Event for after the legacy :meth:`_orm.Query.update` method
+        has been called.
+
+        .. legacy:: The :meth:`_orm.SessionEvents.after_bulk_update` method
+           is a legacy event hook as of SQLAlchemy 2.0. The event
+           **does not participate** in :term:`2.0 style` invocations
+           using :func:`_dml.update` documented at
+           :ref:`orm_queryguide_update_delete_where`. For 2.0 style use,
+           the :meth:`_orm.SessionEvents.do_orm_execute` hook will intercept
+           these calls.
+
+        :param update_context: an "update context" object which contains
+         details about the update, including these attributes:
+
+         * ``session`` - the :class:`.Session` involved
+         * ``query`` - the :class:`_query.Query`
+           object that this update operation
+           was called upon.
+         * ``values`` - the "values" dictionary that was passed to
+           :meth:`_query.Query.update`.
+         * ``result`` - the :class:`_engine.CursorResult`
+           returned as a result of the
+           bulk UPDATE operation.
+
+        .. versionchanged:: 1.4 the update_context no longer has a
+           ``QueryContext`` object associated with it.
+
+        .. seealso::
+
+            :meth:`.QueryEvents.before_compile_update`
+
+            :meth:`.SessionEvents.after_bulk_delete`
+
+        """
+
+    @event._legacy_signature(
+        "0.9",
+        ["session", "query", "query_context", "result"],
+        lambda delete_context: (
+            delete_context.session,
+            delete_context.query,
+            None,
+            delete_context.result,
+        ),
+    )
+    def after_bulk_delete(self, delete_context: _O) -> None:
+        """Event for after the legacy :meth:`_orm.Query.delete` method
+        has been called.
+
+        .. legacy:: The :meth:`_orm.SessionEvents.after_bulk_delete` method
+           is a legacy event hook as of SQLAlchemy 2.0. The event
+           **does not participate** in :term:`2.0 style` invocations
+           using :func:`_dml.delete` documented at
+           :ref:`orm_queryguide_update_delete_where`. For 2.0 style use,
+           the :meth:`_orm.SessionEvents.do_orm_execute` hook will intercept
+           these calls.
+
+        :param delete_context: a "delete context" object which contains
+         details about the delete, including these attributes:
+
+         * ``session`` - the :class:`.Session` involved
+         * ``query`` - the :class:`_query.Query`
+           object that this delete operation
+           was called upon.
+         * ``result`` - the :class:`_engine.CursorResult`
+           returned as a result of the
+           bulk DELETE operation.
+
+        .. versionchanged:: 1.4 the delete_context no longer has a
+           ``QueryContext`` object associated with it.
+
+        .. seealso::
+
+            :meth:`.QueryEvents.before_compile_delete`
+
+            :meth:`.SessionEvents.after_bulk_update`
+
+        """
+
+    @_lifecycle_event
+    def transient_to_pending(self, session: Session, instance: _O) -> None:
+        """Intercept the "transient to pending" transition for a specific
+        object.
+
+        This event is a specialization of the
+        :meth:`.SessionEvents.after_attach` event which is only invoked
+        for this specific transition. It is invoked typically during the
+        :meth:`.Session.add` call.
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def pending_to_transient(self, session: Session, instance: _O) -> None:
+        """Intercept the "pending to transient" transition for a specific
+        object.
+
+        This less common transition occurs when a pending object that has
+        not been flushed is evicted from the session; this can occur
+        when the :meth:`.Session.rollback` method rolls back the transaction,
+        or when the :meth:`.Session.expunge` method is used.
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def persistent_to_transient(self, session: Session, instance: _O) -> None:
+        """Intercept the "persistent to transient" transition for a specific
+        object.
+
+        This less common transition occurs when a pending object that has
+        been flushed is evicted from the session; this can occur
+        when the :meth:`.Session.rollback` method rolls back the transaction.
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def pending_to_persistent(self, session: Session, instance: _O) -> None:
+        """Intercept the "pending to persistent" transition for a specific
+        object.
+
+        This event is invoked within the flush process, and is
+        similar to scanning the :attr:`.Session.new` collection within
+        the :meth:`.SessionEvents.after_flush` event. However, in this
+        case the object has already been moved to the persistent state
+        when the event is called.
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def detached_to_persistent(self, session: Session, instance: _O) -> None:
+        """Intercept the "detached to persistent" transition for a specific
+        object.
+
+        This event is a specialization of the
+        :meth:`.SessionEvents.after_attach` event which is only invoked
+        for this specific transition. It is invoked typically during the
+        :meth:`.Session.add` call, as well as during the
+        :meth:`.Session.delete` call if the object was not previously
+        associated with the
+        :class:`.Session` (note that an object marked as "deleted" remains
+        in the "persistent" state until the flush proceeds).
+
+        .. note::
+
+            If the object becomes persistent as part of a call to
+            :meth:`.Session.delete`, the object is **not** yet marked as
+            deleted when this event is called. To detect deleted objects,
+            check the ``deleted`` flag sent to the
+            :meth:`.SessionEvents.persistent_to_detached` event after the
+            flush proceeds, or check the :attr:`.Session.deleted` collection
+            within the :meth:`.SessionEvents.before_flush` event if deleted
+            objects need to be intercepted before the flush.
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def loaded_as_persistent(self, session: Session, instance: _O) -> None:
+        """Intercept the "loaded as persistent" transition for a specific
+        object.
+
+        This event is invoked within the ORM loading process, and is invoked
+        very similarly to the :meth:`.InstanceEvents.load` event. However,
+        the event here is linkable to a :class:`.Session` class or instance,
+        rather than to a mapper or class hierarchy, and integrates
+        with the other session lifecycle events smoothly. The object
+        is guaranteed to be present in the session's identity map when
+        this event is called.
+
+        .. note:: This event is invoked within the loader process before
+            eager loaders may have been completed, and the object's state may
+            not be complete. Additionally, invoking row-level refresh
+            operations on the object will place the object into a new loader
+            context, interfering with the existing load context. See the note
+            on :meth:`.InstanceEvents.load` for background on making use of the
+            :paramref:`.SessionEvents.restore_load_context` parameter, which
+            works in the same manner as that of
+            :paramref:`.InstanceEvents.restore_load_context`, in order to
+            resolve this scenario.
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def persistent_to_deleted(self, session: Session, instance: _O) -> None:
+        """Intercept the "persistent to deleted" transition for a specific
+        object.
+
+        This event is invoked when a persistent object's identity
+        is deleted from the database within a flush, however the object
+        still remains associated with the :class:`.Session` until the
+        transaction completes.
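+
+        A brief sketch of hooking this transition (``Session`` here is the
+        :class:`.Session` class, so the listener applies to all sessions)::
+
+            from sqlalchemy import event
+            from sqlalchemy.orm import Session
+
+            @event.listens_for(Session, "persistent_to_deleted")
+            def on_persistent_to_deleted(session, instance):
+                # the DELETE has been emitted, but the transaction is
+                # still open and the object remains in the session
+                print("deleted in flush: %r" % instance)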
+
+        If the transaction is rolled back, the object moves again
+        to the persistent state, and the
+        :meth:`.SessionEvents.deleted_to_persistent` event is called.
+        If the transaction is committed, the object becomes detached,
+        which will emit the :meth:`.SessionEvents.deleted_to_detached`
+        event.
+
+        Note that while the :meth:`.Session.delete` method is the primary
+        public interface to mark an object as deleted, many objects
+        get deleted due to cascade rules, which are not always determined
+        until flush time. Therefore, there's no way to catch
+        every object that will be deleted until the flush has proceeded;
+        the :meth:`.SessionEvents.persistent_to_deleted` event is thus
+        invoked at the end of a flush.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def deleted_to_persistent(self, session: Session, instance: _O) -> None:
+        """Intercept the "deleted to persistent" transition for a specific
+        object.
+
+        This transition occurs only when an object that's been deleted
+        successfully in a flush is restored due to a call to
+        :meth:`.Session.rollback`. The event is not called under
+        any other circumstances.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def deleted_to_detached(self, session: Session, instance: _O) -> None:
+        """Intercept the "deleted to detached" transition for a specific
+        object.
+
+        This event is invoked when a deleted object is evicted
+        from the session. The typical case when this occurs is when
+        the transaction for a :class:`.Session` in which the object
+        was deleted is committed; the object moves from the deleted
+        state to the detached state.
+
+        It is also invoked for objects that were deleted in a flush
+        when the :meth:`.Session.expunge_all` or :meth:`.Session.close`
+        events are called, as well as if the object is individually
+        expunged from its deleted state via :meth:`.Session.expunge`.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def persistent_to_detached(self, session: Session, instance: _O) -> None:
+        """Intercept the "persistent to detached" transition for a specific
+        object.
+
+        This event is invoked when a persistent object is evicted
+        from the session. There are many conditions that cause this
+        to happen, including:
+
+        * using a method such as :meth:`.Session.expunge`
+          or :meth:`.Session.close`
+
+        * calling the :meth:`.Session.rollback` method, when the object
+          was part of an INSERT statement for that session's transaction
+
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
+        :param deleted: boolean. If True, indicates this object moved
+         to the detached state because it was marked as deleted and flushed.
+
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+
+class AttributeEvents(event.Events[QueryableAttribute[Any]]):
+    r"""Define events for object attributes.
+
+    These are typically defined on the class-bound descriptor for the
+    target class.
+ + For example, to register a listener that will receive the + :meth:`_orm.AttributeEvents.append` event:: + + from sqlalchemy import event + + @event.listens_for(MyClass.collection, 'append', propagate=True) + def my_append_listener(target, value, initiator): + print("received append event for target: %s" % target) + + + Listeners have the option to return a possibly modified version of the + value, when the :paramref:`.AttributeEvents.retval` flag is passed to + :func:`.event.listen` or :func:`.event.listens_for`, such as below, + illustrated using the :meth:`_orm.AttributeEvents.set` event:: + + def validate_phone(target, value, oldvalue, initiator): + "Strip non-numeric characters from a phone number" + + return re.sub(r'\D', '', value) + + # setup listener on UserContact.phone attribute, instructing + # it to use the return value + listen(UserContact.phone, 'set', validate_phone, retval=True) + + A validation function like the above can also raise an exception + such as :exc:`ValueError` to halt the operation. + + The :paramref:`.AttributeEvents.propagate` flag is also important when + applying listeners to mapped classes that also have mapped subclasses, + as when using mapper inheritance patterns:: + + + @event.listens_for(MySuperClass.attr, 'set', propagate=True) + def receive_set(target, value, initiator): + print("value set: %s" % target) + + The full list of modifiers available to the :func:`.event.listen` + and :func:`.event.listens_for` functions are below. + + :param active_history=False: When True, indicates that the + "set" event would like to receive the "old" value being + replaced unconditionally, even if this requires firing off + database loads. Note that ``active_history`` can also be + set directly via :func:`.column_property` and + :func:`_orm.relationship`. + + :param propagate=False: When True, the listener function will + be established not just for the class attribute given, but + for attributes of the same name on all current subclasses + of that class, as well as all future subclasses of that + class, using an additional listener that listens for + instrumentation events. + :param raw=False: When True, the "target" argument to the + event will be the :class:`.InstanceState` management + object, rather than the mapped instance itself. + :param retval=False: when True, the user-defined event + listening must return the "value" argument from the + function. This gives the listening function the opportunity + to change the value that is ultimately used for a "set" + or "append" event. 
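+
+    For example, a minimal sketch of requesting the "old" value
+    unconditionally on a "set" event, where ``MyClass.data`` is assumed to
+    be a mapped column attribute::
+
+        @event.listens_for(MyClass.data, "set", active_history=True)
+        def receive_set(target, value, oldvalue, initiator):
+            print("replacing %r with %r" % (oldvalue, value))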
+ + """ + + _target_class_doc = "SomeClass.some_attribute" + _dispatch_target = QueryableAttribute + + @staticmethod + def _set_dispatch( + cls: Type[_HasEventsDispatch[Any]], dispatch_cls: Type[_Dispatch[Any]] + ) -> _Dispatch[Any]: + dispatch = event.Events._set_dispatch(cls, dispatch_cls) + dispatch_cls._active_history = False + return dispatch + + @classmethod + def _accept_with( + cls, + target: Union[QueryableAttribute[Any], Type[QueryableAttribute[Any]]], + identifier: str, + ) -> Union[QueryableAttribute[Any], Type[QueryableAttribute[Any]]]: + # TODO: coverage + if isinstance(target, interfaces.MapperProperty): + return getattr(target.parent.class_, target.key) + else: + return target + + @classmethod + def _listen( # type: ignore [override] + cls, + event_key: _EventKey[QueryableAttribute[Any]], + active_history: bool = False, + raw: bool = False, + retval: bool = False, + propagate: bool = False, + include_key: bool = False, + ) -> None: + target, fn = event_key.dispatch_target, event_key._listen_fn + + if active_history: + target.dispatch._active_history = True + + if not raw or not retval or not include_key: + + def wrap(target: InstanceState[_O], *arg: Any, **kw: Any) -> Any: + if not raw: + target = target.obj() # type: ignore [assignment] + if not retval: + if arg: + value = arg[0] + else: + value = None + if include_key: + fn(target, *arg, **kw) + else: + fn(target, *arg) + return value + else: + if include_key: + return fn(target, *arg, **kw) + else: + return fn(target, *arg) + + event_key = event_key.with_wrapper(wrap) + + event_key.base_listen(propagate=propagate) + + if propagate: + manager = instrumentation.manager_of_class(target.class_) + + for mgr in manager.subclass_managers(True): # type: ignore [no-untyped-call] # noqa: E501 + event_key.with_dispatch_target(mgr[target.key]).base_listen( + propagate=True + ) + if active_history: + mgr[target.key].dispatch._active_history = True + + def append( + self, + target: _O, + value: _T, + initiator: Event, + *, + key: EventConstants = NO_KEY, + ) -> Optional[_T]: + """Receive a collection append event. + + The append event is invoked for each element as it is appended + to the collection. This occurs for single-item appends as well + as for a "bulk replace" operation. + + :param target: the object instance receiving the event. + If the listener is registered with ``raw=True``, this will + be the :class:`.InstanceState` object. + :param value: the value being appended. If this listener + is registered with ``retval=True``, the listener + function must return this value, or a new value which + replaces it. + :param initiator: An instance of :class:`.attributes.Event` + representing the initiation of the event. May be modified + from its original value by backref handlers in order to control + chained event propagation, as well as be inspected for information + about the source of the event. + :param key: When the event is established using the + :paramref:`.AttributeEvents.include_key` parameter set to + True, this will be the key used in the operation, such as + ``collection[some_key_or_index] = value``. + The parameter is not passed + to the event at all if the the + :paramref:`.AttributeEvents.include_key` + was not used to set up the event; this is to allow backwards + compatibility with existing event handlers that don't include the + ``key`` parameter. + + .. versionadded:: 2.0 + + :return: if the event was registered with ``retval=True``, + the given value, or a new effective value, should be returned. + + .. 
+
+        .. seealso::
+
+            :class:`.AttributeEvents` - background on listener options such
+            as propagation to subclasses.
+
+            :meth:`.AttributeEvents.bulk_replace`
+
+        """
+
+    def append_wo_mutation(
+        self,
+        target: _O,
+        value: _T,
+        initiator: Event,
+        *,
+        key: EventConstants = NO_KEY,
+    ) -> None:
+        """Receive a collection append event where the collection was not
+        actually mutated.
+
+        This event differs from :meth:`_orm.AttributeEvents.append` in that
+        it is fired off for de-duplicating collections such as sets and
+        dictionaries, when the object already exists in the target collection.
+        The event does not have a return value and the identity of the
+        given object cannot be changed.
+
+        The event is used for cascading objects into a :class:`_orm.Session`
+        when the collection has already been mutated via a backref event.
+
+        :param target: the object instance receiving the event.
+         If the listener is registered with ``raw=True``, this will
+         be the :class:`.InstanceState` object.
+        :param value: the value that would be appended if the object did not
+         already exist in the collection.
+        :param initiator: An instance of :class:`.attributes.Event`
+         representing the initiation of the event.  May be modified
+         from its original value by backref handlers in order to control
+         chained event propagation, as well as be inspected for information
+         about the source of the event.
+        :param key: When the event is established using the
+         :paramref:`.AttributeEvents.include_key` parameter set to
+         True, this will be the key used in the operation, such as
+         ``collection[some_key_or_index] = value``.
+         The parameter is not passed to the event at all if the
+         :paramref:`.AttributeEvents.include_key` parameter
+         was not used to set up the event; this is to allow backwards
+         compatibility with existing event handlers that don't include the
+         ``key`` parameter.
+
+         .. versionadded:: 2.0
+
+        :return: No return value is defined for this event.
+
+        .. versionadded:: 1.4.15
+
+        """
+
+    def bulk_replace(
+        self,
+        target: _O,
+        values: Iterable[_T],
+        initiator: Event,
+        *,
+        keys: Optional[Iterable[EventConstants]] = None,
+    ) -> None:
+        """Receive a collection 'bulk replace' event.
+
+        This event is invoked for a sequence of values as they are incoming
+        to a bulk collection set operation, which can be
+        modified in place before the values are treated as ORM objects.
+        This is an "early hook" that runs before the bulk replace routine
+        attempts to reconcile which objects are already present in the
+        collection and which are being removed by the net replace operation.
+
+        It is typical that this method be combined with use of the
+        :meth:`.AttributeEvents.append` event.  When using both of these
+        events, note that a bulk replace operation will invoke
+        the :meth:`.AttributeEvents.append` event for all new items,
+        even after :meth:`.AttributeEvents.bulk_replace` has been invoked
+        for the collection as a whole.
+        In order to determine if an
+        :meth:`.AttributeEvents.append` event is part of a bulk replace,
+        use the symbol :attr:`~.attributes.OP_BULK_REPLACE` to test the
+        incoming initiator::
+
+            from sqlalchemy.orm.attributes import OP_BULK_REPLACE
+
+            @event.listens_for(SomeObject.collection, "bulk_replace")
+            def process_collection(target, values, initiator):
+                values[:] = [_make_value(value) for value in values]
+
+            @event.listens_for(SomeObject.collection, "append", retval=True)
+            def process_collection(target, value, initiator):
+                # make sure bulk_replace didn't already do it
+                if initiator is None or initiator.op is not OP_BULK_REPLACE:
+                    return _make_value(value)
+                else:
+                    return value
+
+        .. versionadded:: 1.2
+
+        :param target: the object instance receiving the event.
+         If the listener is registered with ``raw=True``, this will
+         be the :class:`.InstanceState` object.
+        :param values: a sequence (e.g. a list) of the values being set.  The
+         handler can modify this list in place.
+        :param initiator: An instance of :class:`.attributes.Event`
+         representing the initiation of the event.
+        :param keys: When the event is established using the
+         :paramref:`.AttributeEvents.include_key` parameter set to
+         True, this will be the sequence of keys used in the operation,
+         typically only for a dictionary update.  The parameter is not passed
+         to the event at all if the
+         :paramref:`.AttributeEvents.include_key` parameter
+         was not used to set up the event; this is to allow backwards
+         compatibility with existing event handlers that don't include the
+         ``key`` parameter.
+
+         .. versionadded:: 2.0
+
+        .. seealso::
+
+            :class:`.AttributeEvents` - background on listener options such
+            as propagation to subclasses.
+
+        """
+
+    def remove(
+        self,
+        target: _O,
+        value: _T,
+        initiator: Event,
+        *,
+        key: EventConstants = NO_KEY,
+    ) -> None:
+        """Receive a collection remove event.
+
+        :param target: the object instance receiving the event.
+         If the listener is registered with ``raw=True``, this will
+         be the :class:`.InstanceState` object.
+        :param value: the value being removed.
+        :param initiator: An instance of :class:`.attributes.Event`
+         representing the initiation of the event.  May be modified
+         from its original value by backref handlers in order to control
+         chained event propagation.
+
+        :param key: When the event is established using the
+         :paramref:`.AttributeEvents.include_key` parameter set to
+         True, this will be the key used in the operation, such as
+         ``del collection[some_key_or_index]``.  The parameter is not passed
+         to the event at all if the
+         :paramref:`.AttributeEvents.include_key` parameter
+         was not used to set up the event; this is to allow backwards
+         compatibility with existing event handlers that don't include the
+         ``key`` parameter.
+
+         .. versionadded:: 2.0
+
+        :return: No return value is defined for this event.
+
+        .. seealso::
+
+            :class:`.AttributeEvents` - background on listener options such
+            as propagation to subclasses.
+
+        """
+
+    def set(
+        self, target: _O, value: _T, oldvalue: _T, initiator: Event
+    ) -> None:
+        """Receive a scalar set event.
+
+        :param target: the object instance receiving the event.
+         If the listener is registered with ``raw=True``, this will
+         be the :class:`.InstanceState` object.
+        :param value: the value being set.  If this listener
+         is registered with ``retval=True``, the listener
+         function must return this value, or a new value which
+         replaces it.
+        :param oldvalue: the previous value being replaced.
This + may also be the symbol ``NEVER_SET`` or ``NO_VALUE``. + If the listener is registered with ``active_history=True``, + the previous value of the attribute will be loaded from + the database if the existing value is currently unloaded + or expired. + :param initiator: An instance of :class:`.attributes.Event` + representing the initiation of the event. May be modified + from its original value by backref handlers in order to control + chained event propagation. + + :return: if the event was registered with ``retval=True``, + the given value, or a new effective value, should be returned. + + .. seealso:: + + :class:`.AttributeEvents` - background on listener options such + as propagation to subclasses. + + """ + + def init_scalar( + self, target: _O, value: _T, dict_: Dict[Any, Any] + ) -> None: + r"""Receive a scalar "init" event. + + This event is invoked when an uninitialized, unpersisted scalar + attribute is accessed, e.g. read:: + + + x = my_object.some_attribute + + The ORM's default behavior when this occurs for an un-initialized + attribute is to return the value ``None``; note this differs from + Python's usual behavior of raising ``AttributeError``. The + event here can be used to customize what value is actually returned, + with the assumption that the event listener would be mirroring + a default generator that is configured on the Core + :class:`_schema.Column` + object as well. + + Since a default generator on a :class:`_schema.Column` + might also produce + a changing value such as a timestamp, the + :meth:`.AttributeEvents.init_scalar` + event handler can also be used to **set** the newly returned value, so + that a Core-level default generation function effectively fires off + only once, but at the moment the attribute is accessed on the + non-persisted object. Normally, no change to the object's state + is made when an uninitialized attribute is accessed (much older + SQLAlchemy versions did in fact change the object's state). + + If a default generator on a column returned a particular constant, + a handler might be used as follows:: + + SOME_CONSTANT = 3.1415926 + + class MyClass(Base): + # ... + + some_attribute = Column(Numeric, default=SOME_CONSTANT) + + @event.listens_for( + MyClass.some_attribute, "init_scalar", + retval=True, propagate=True) + def _init_some_attribute(target, dict_, value): + dict_['some_attribute'] = SOME_CONSTANT + return SOME_CONSTANT + + Above, we initialize the attribute ``MyClass.some_attribute`` to the + value of ``SOME_CONSTANT``. The above code includes the following + features: + + * By setting the value ``SOME_CONSTANT`` in the given ``dict_``, + we indicate that this value is to be persisted to the database. + This supersedes the use of ``SOME_CONSTANT`` in the default generator + for the :class:`_schema.Column`. The ``active_column_defaults.py`` + example given at :ref:`examples_instrumentation` illustrates using + the same approach for a changing default, e.g. a timestamp + generator. In this particular example, it is not strictly + necessary to do this since ``SOME_CONSTANT`` would be part of the + INSERT statement in either case. + + * By establishing the ``retval=True`` flag, the value we return + from the function will be returned by the attribute getter. + Without this flag, the event is assumed to be a passive observer + and the return value of our function is ignored. + + * The ``propagate=True`` flag is significant if the mapped class + includes inheriting subclasses, which would also make use of this + event listener. 
+          Without this flag, an inheriting subclass will
+          not use our event handler.
+
+        In the above example, the attribute set event
+        :meth:`.AttributeEvents.set` as well as the related validation feature
+        provided by :obj:`_orm.validates` is **not** invoked when we apply our
+        value to the given ``dict_``.  To have these events invoke in
+        response to our newly generated value, apply the value to the given
+        object as a normal attribute set operation::
+
+            SOME_CONSTANT = 3.1415926
+
+            @event.listens_for(
+                MyClass.some_attribute, "init_scalar",
+                retval=True, propagate=True)
+            def _init_some_attribute(target, dict_, value):
+                # will also fire off attribute set events
+                target.some_attribute = SOME_CONSTANT
+                return SOME_CONSTANT
+
+        When multiple listeners are set up, the generation of the value
+        is "chained" from one listener to the next by passing the value
+        returned by the previous listener that specifies ``retval=True``
+        as the ``value`` argument of the next listener.
+
+        :param target: the object instance receiving the event.
+         If the listener is registered with ``raw=True``, this will
+         be the :class:`.InstanceState` object.
+        :param value: the value that is to be returned before this event
+         listener was invoked.  This value begins as the value ``None``,
+         however it will be the return value of the previous event handler
+         function if multiple listeners are present.
+        :param dict\_: the attribute dictionary of this mapped object.
+         This is normally the ``__dict__`` of the object, but in all cases
+         represents the destination that the attribute system uses to get
+         at the actual value of this attribute.  Placing the value in this
+         dictionary has the effect that the value will be used in the
+         INSERT statement generated by the unit of work.
+
+        .. seealso::
+
+            :meth:`.AttributeEvents.init_collection` - collection version
+            of this event
+
+            :class:`.AttributeEvents` - background on listener options such
+            as propagation to subclasses.
+
+            :ref:`examples_instrumentation` - see the
+            ``active_column_defaults.py`` example.
+
+        """
+
+    def init_collection(
+        self,
+        target: _O,
+        collection: Type[Collection[Any]],
+        collection_adapter: CollectionAdapter,
+    ) -> None:
+        """Receive a 'collection init' event.
+
+        This event is triggered for a collection-based attribute, when
+        the initial "empty collection" is first generated for a blank
+        attribute, as well as when the collection is replaced with
+        a new one, such as via a set event.
+
+        E.g., given that ``User.addresses`` is a relationship-based
+        collection, the event is triggered here::
+
+            u1 = User()
+            u1.addresses.append(a1)  # <- new collection
+
+        and also during replace operations::
+
+            u1.addresses = [a2, a3]  # <- new collection
+
+        :param target: the object instance receiving the event.
+         If the listener is registered with ``raw=True``, this will
+         be the :class:`.InstanceState` object.
+        :param collection: the new collection.  This will always be generated
+         from what was specified as
+         :paramref:`_orm.relationship.collection_class`, and will always
+         be empty.
+        :param collection_adapter: the :class:`.CollectionAdapter` that will
+         mediate internal access to the collection.
+
+        .. seealso::
+
+            :class:`.AttributeEvents` - background on listener options such
+            as propagation to subclasses.
+
+            :meth:`.AttributeEvents.init_scalar` - "scalar" version of this
+            event.
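+
+        For example, to observe each new collection as it is created
+        (a minimal sketch; ``User.addresses`` is assumed to be a mapped
+        relationship collection)::
+
+            from sqlalchemy import event
+
+            @event.listens_for(User.addresses, "init_collection")
+            def receive_init(target, collection, collection_adapter):
+                print("new collection %r for %r" % (collection, target))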
+ + """ + + def dispose_collection( + self, + target: _O, + collection: Collection[Any], + collection_adapter: CollectionAdapter, + ) -> None: + """Receive a 'collection dispose' event. + + This event is triggered for a collection-based attribute when + a collection is replaced, that is:: + + u1.addresses.append(a1) + + u1.addresses = [a2, a3] # <- old collection is disposed + + The old collection received will contain its previous contents. + + .. versionchanged:: 1.2 The collection passed to + :meth:`.AttributeEvents.dispose_collection` will now have its + contents before the dispose intact; previously, the collection + would be empty. + + .. seealso:: + + :class:`.AttributeEvents` - background on listener options such + as propagation to subclasses. + + """ + + def modified(self, target: _O, initiator: Event) -> None: + """Receive a 'modified' event. + + This event is triggered when the :func:`.attributes.flag_modified` + function is used to trigger a modify event on an attribute without + any specific value being set. + + .. versionadded:: 1.2 + + :param target: the object instance receiving the event. + If the listener is registered with ``raw=True``, this will + be the :class:`.InstanceState` object. + + :param initiator: An instance of :class:`.attributes.Event` + representing the initiation of the event. + + .. seealso:: + + :class:`.AttributeEvents` - background on listener options such + as propagation to subclasses. + + """ + + +class QueryEvents(event.Events[Query[Any]]): + """Represent events within the construction of a :class:`_query.Query` + object. + + .. legacy:: The :class:`_orm.QueryEvents` event methods are legacy + as of SQLAlchemy 2.0, and only apply to direct use of the + :class:`_orm.Query` object. They are not used for :term:`2.0 style` + statements. For events to intercept and modify 2.0 style ORM use, + use the :meth:`_orm.SessionEvents.do_orm_execute` hook. + + + The :class:`_orm.QueryEvents` hooks are now superseded by the + :meth:`_orm.SessionEvents.do_orm_execute` event hook. + + """ + + _target_class_doc = "SomeQuery" + _dispatch_target = Query + + def before_compile(self, query: Query[Any]) -> None: + """Receive the :class:`_query.Query` + object before it is composed into a + core :class:`_expression.Select` object. + + .. deprecated:: 1.4 The :meth:`_orm.QueryEvents.before_compile` event + is superseded by the much more capable + :meth:`_orm.SessionEvents.do_orm_execute` hook. In version 1.4, + the :meth:`_orm.QueryEvents.before_compile` event is **no longer + used** for ORM-level attribute loads, such as loads of deferred + or expired attributes as well as relationship loaders. See the + new examples in :ref:`examples_session_orm_events` which + illustrate new ways of intercepting and modifying ORM queries + for the most common purpose of adding arbitrary filter criteria. + + + This event is intended to allow changes to the query given:: + + @event.listens_for(Query, "before_compile", retval=True) + def no_deleted(query): + for desc in query.column_descriptions: + if desc['type'] is User: + entity = desc['entity'] + query = query.filter(entity.deleted == False) + return query + + The event should normally be listened with the ``retval=True`` + parameter set, so that the modified query may be returned. + + The :meth:`.QueryEvents.before_compile` event by default + will disallow "baked" queries from caching a query, if the event + hook returns a new :class:`_query.Query` object. 
+ This affects both direct + use of the baked query extension as well as its operation within + lazy loaders and eager loaders for relationships. In order to + re-establish the query being cached, apply the event adding the + ``bake_ok`` flag:: + + @event.listens_for( + Query, "before_compile", retval=True, bake_ok=True) + def my_event(query): + for desc in query.column_descriptions: + if desc['type'] is User: + entity = desc['entity'] + query = query.filter(entity.deleted == False) + return query + + When ``bake_ok`` is set to True, the event hook will only be invoked + once, and not called for subsequent invocations of a particular query + that is being cached. + + .. versionadded:: 1.3.11 - added the "bake_ok" flag to the + :meth:`.QueryEvents.before_compile` event and disallowed caching via + the "baked" extension from occurring for event handlers that + return a new :class:`_query.Query` object if this flag is not set. + + .. seealso:: + + :meth:`.QueryEvents.before_compile_update` + + :meth:`.QueryEvents.before_compile_delete` + + :ref:`baked_with_before_compile` + + """ + + def before_compile_update( + self, query: Query[Any], update_context: BulkUpdate + ) -> None: + """Allow modifications to the :class:`_query.Query` object within + :meth:`_query.Query.update`. + + .. deprecated:: 1.4 The :meth:`_orm.QueryEvents.before_compile_update` + event is superseded by the much more capable + :meth:`_orm.SessionEvents.do_orm_execute` hook. + + Like the :meth:`.QueryEvents.before_compile` event, if the event + is to be used to alter the :class:`_query.Query` object, it should + be configured with ``retval=True``, and the modified + :class:`_query.Query` object returned, as in :: + + @event.listens_for(Query, "before_compile_update", retval=True) + def no_deleted(query, update_context): + for desc in query.column_descriptions: + if desc['type'] is User: + entity = desc['entity'] + query = query.filter(entity.deleted == False) + + update_context.values['timestamp'] = ( + datetime.datetime.now(datetime.UTC) + ) + return query + + The ``.values`` dictionary of the "update context" object can also + be modified in place as illustrated above. + + :param query: a :class:`_query.Query` instance; this is also + the ``.query`` attribute of the given "update context" + object. + + :param update_context: an "update context" object which is + the same kind of object as described in + :paramref:`.QueryEvents.after_bulk_update.update_context`. + The object has a ``.values`` attribute in an UPDATE context which is + the dictionary of parameters passed to :meth:`_query.Query.update`. + This + dictionary can be modified to alter the VALUES clause of the + resulting UPDATE statement. + + .. versionadded:: 1.2.17 + + .. seealso:: + + :meth:`.QueryEvents.before_compile` + + :meth:`.QueryEvents.before_compile_delete` + + + """ + + def before_compile_delete( + self, query: Query[Any], delete_context: BulkDelete + ) -> None: + """Allow modifications to the :class:`_query.Query` object within + :meth:`_query.Query.delete`. + + .. deprecated:: 1.4 The :meth:`_orm.QueryEvents.before_compile_delete` + event is superseded by the much more capable + :meth:`_orm.SessionEvents.do_orm_execute` hook. 
+ + Like the :meth:`.QueryEvents.before_compile` event, this event + should be configured with ``retval=True``, and the modified + :class:`_query.Query` object returned, as in :: + + @event.listens_for(Query, "before_compile_delete", retval=True) + def no_deleted(query, delete_context): + for desc in query.column_descriptions: + if desc['type'] is User: + entity = desc['entity'] + query = query.filter(entity.deleted == False) + return query + + :param query: a :class:`_query.Query` instance; this is also + the ``.query`` attribute of the given "delete context" + object. + + :param delete_context: a "delete context" object which is + the same kind of object as described in + :paramref:`.QueryEvents.after_bulk_delete.delete_context`. + + .. versionadded:: 1.2.17 + + .. seealso:: + + :meth:`.QueryEvents.before_compile` + + :meth:`.QueryEvents.before_compile_update` + + + """ + + @classmethod + def _listen( + cls, + event_key: _EventKey[_ET], + retval: bool = False, + bake_ok: bool = False, + **kw: Any, + ) -> None: + fn = event_key._listen_fn + + if not retval: + + def wrap(*arg: Any, **kw: Any) -> Any: + if not retval: + query = arg[0] + fn(*arg, **kw) + return query + else: + return fn(*arg, **kw) + + event_key = event_key.with_wrapper(wrap) + else: + # don't assume we can apply an attribute to the callable + def wrap(*arg: Any, **kw: Any) -> Any: + return fn(*arg, **kw) + + event_key = event_key.with_wrapper(wrap) + + wrap._bake_ok = bake_ok # type: ignore [attr-defined] + + event_key.base_listen(**kw) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/exc.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/exc.py new file mode 100644 index 00000000..39dd5401 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/exc.py @@ -0,0 +1,228 @@ +# orm/exc.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""SQLAlchemy ORM exceptions.""" + +from __future__ import annotations + +from typing import Any +from typing import Optional +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar + +from .util import _mapper_property_as_plain_name +from .. import exc as sa_exc +from .. import util +from ..exc import MultipleResultsFound # noqa +from ..exc import NoResultFound # noqa + +if TYPE_CHECKING: + from .interfaces import LoaderStrategy + from .interfaces import MapperProperty + from .state import InstanceState + +_T = TypeVar("_T", bound=Any) + +NO_STATE = (AttributeError, KeyError) +"""Exception types that may be raised by instrumentation implementations.""" + + +class StaleDataError(sa_exc.SQLAlchemyError): + """An operation encountered database state that is unaccounted for. + + Conditions which cause this to happen include: + + * A flush may have attempted to update or delete rows + and an unexpected number of rows were matched during + the UPDATE or DELETE statement. Note that when + version_id_col is used, rows in UPDATE or DELETE statements + are also matched against the current known version + identifier. + + * A mapped object with version_id_col was refreshed, + and the version number coming back from the database does + not match that of the object itself. 
+
+    * An object is detached from its parent object, however
+      the object was previously attached to a different parent
+      identity which was garbage collected, and a decision
+      cannot be made whether the new parent was really the most
+      recent "parent".
+
+    """
+
+
+ConcurrentModificationError = StaleDataError
+
+
+class FlushError(sa_exc.SQLAlchemyError):
+    """An invalid condition was detected during flush()."""
+
+
+class UnmappedError(sa_exc.InvalidRequestError):
+    """Base for exceptions that involve expected mappings not present."""
+
+
+class ObjectDereferencedError(sa_exc.SQLAlchemyError):
+    """An operation cannot complete due to an object being garbage
+    collected.
+
+    """
+
+
+class DetachedInstanceError(sa_exc.SQLAlchemyError):
+    """An attempt to access unloaded attributes on a
+    mapped instance that is detached."""
+
+    code = "bhk3"
+
+
+class UnmappedInstanceError(UnmappedError):
+    """A mapping operation was requested for an unknown instance."""
+
+    @util.preload_module("sqlalchemy.orm.base")
+    def __init__(self, obj: object, msg: Optional[str] = None):
+        base = util.preloaded.orm_base
+
+        if not msg:
+            try:
+                base.class_mapper(type(obj))
+                name = _safe_cls_name(type(obj))
+                msg = (
+                    "Class %r is mapped, but this instance lacks "
+                    "instrumentation.  This occurs when the instance "
+                    "is created before sqlalchemy.orm.mapper(%s) "
+                    "was called." % (name, name)
+                )
+            except UnmappedClassError:
+                msg = f"Class '{_safe_cls_name(type(obj))}' is not mapped"
+                if isinstance(obj, type):
+                    msg += (
+                        "; was a class (%s) supplied where an instance was "
+                        "required?" % _safe_cls_name(obj)
+                    )
+        UnmappedError.__init__(self, msg)
+
+    def __reduce__(self) -> Any:
+        return self.__class__, (None, self.args[0])
+
+
+class UnmappedClassError(UnmappedError):
+    """A mapping operation was requested for an unknown class."""
+
+    def __init__(self, cls: Type[_T], msg: Optional[str] = None):
+        if not msg:
+            msg = _default_unmapped(cls)
+        UnmappedError.__init__(self, msg)
+
+    def __reduce__(self) -> Any:
+        return self.__class__, (None, self.args[0])
+
+
+class ObjectDeletedError(sa_exc.InvalidRequestError):
+    """A refresh operation failed to retrieve the database
+    row corresponding to an object's known primary key identity.
+
+    A refresh operation proceeds when an expired attribute is
+    accessed on an object, or when :meth:`_query.Query.get` is
+    used to retrieve an object which is, upon retrieval, detected
+    as expired.  A SELECT is emitted for the target row
+    based on primary key; if no row is returned, this
+    exception is raised.
+
+    The true meaning of this exception is simply that
+    no row exists for the primary key identifier associated
+    with a persistent object.  The row may have been
+    deleted, or in some cases the primary key updated
+    to a new value, outside of the ORM's management of the target
+    object.
+
+    """
+
+    @util.preload_module("sqlalchemy.orm.base")
+    def __init__(self, state: InstanceState[Any], msg: Optional[str] = None):
+        base = util.preloaded.orm_base
+
+        if not msg:
+            msg = (
+                "Instance '%s' has been deleted, or its "
+                "row is otherwise not present."
% base.state_str(state) + ) + + sa_exc.InvalidRequestError.__init__(self, msg) + + def __reduce__(self) -> Any: + return self.__class__, (None, self.args[0]) + + +class UnmappedColumnError(sa_exc.InvalidRequestError): + """Mapping operation was requested on an unknown column.""" + + +class LoaderStrategyException(sa_exc.InvalidRequestError): + """A loader strategy for an attribute does not exist.""" + + def __init__( + self, + applied_to_property_type: Type[Any], + requesting_property: MapperProperty[Any], + applies_to: Optional[Type[MapperProperty[Any]]], + actual_strategy_type: Optional[Type[LoaderStrategy]], + strategy_key: Tuple[Any, ...], + ): + if actual_strategy_type is None: + sa_exc.InvalidRequestError.__init__( + self, + "Can't find strategy %s for %s" + % (strategy_key, requesting_property), + ) + else: + assert applies_to is not None + sa_exc.InvalidRequestError.__init__( + self, + 'Can\'t apply "%s" strategy to property "%s", ' + 'which is a "%s"; this loader strategy is intended ' + 'to be used with a "%s".' + % ( + util.clsname_as_plain_name(actual_strategy_type), + requesting_property, + _mapper_property_as_plain_name(applied_to_property_type), + _mapper_property_as_plain_name(applies_to), + ), + ) + + +def _safe_cls_name(cls: Type[Any]) -> str: + cls_name: Optional[str] + try: + cls_name = ".".join((cls.__module__, cls.__name__)) + except AttributeError: + cls_name = getattr(cls, "__name__", None) + if cls_name is None: + cls_name = repr(cls) + return cls_name + + +@util.preload_module("sqlalchemy.orm.base") +def _default_unmapped(cls: Type[Any]) -> Optional[str]: + base = util.preloaded.orm_base + + try: + mappers = base.manager_of_class(cls).mappers # type: ignore + except ( + UnmappedClassError, + TypeError, + ) + NO_STATE: + mappers = {} + name = _safe_cls_name(cls) + + if not mappers: + return f"Class '{name}' is not mapped" + else: + return None diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/identity.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/identity.py new file mode 100644 index 00000000..23682f7e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/identity.py @@ -0,0 +1,302 @@ +# orm/identity.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +from typing import Any +from typing import cast +from typing import Dict +from typing import Iterable +from typing import Iterator +from typing import List +from typing import NoReturn +from typing import Optional +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +import weakref + +from . import util as orm_util +from .. 
import exc as sa_exc + +if TYPE_CHECKING: + from ._typing import _IdentityKeyType + from .state import InstanceState + + +_T = TypeVar("_T", bound=Any) + +_O = TypeVar("_O", bound=object) + + +class IdentityMap: + _wr: weakref.ref[IdentityMap] + + _dict: Dict[_IdentityKeyType[Any], Any] + _modified: Set[InstanceState[Any]] + + def __init__(self) -> None: + self._dict = {} + self._modified = set() + self._wr = weakref.ref(self) + + def _kill(self) -> None: + self._add_unpresent = _killed # type: ignore + + def all_states(self) -> List[InstanceState[Any]]: + raise NotImplementedError() + + def contains_state(self, state: InstanceState[Any]) -> bool: + raise NotImplementedError() + + def __contains__(self, key: _IdentityKeyType[Any]) -> bool: + raise NotImplementedError() + + def safe_discard(self, state: InstanceState[Any]) -> None: + raise NotImplementedError() + + def __getitem__(self, key: _IdentityKeyType[_O]) -> _O: + raise NotImplementedError() + + def get( + self, key: _IdentityKeyType[_O], default: Optional[_O] = None + ) -> Optional[_O]: + raise NotImplementedError() + + def fast_get_state( + self, key: _IdentityKeyType[_O] + ) -> Optional[InstanceState[_O]]: + raise NotImplementedError() + + def keys(self) -> Iterable[_IdentityKeyType[Any]]: + return self._dict.keys() + + def values(self) -> Iterable[object]: + raise NotImplementedError() + + def replace(self, state: InstanceState[_O]) -> Optional[InstanceState[_O]]: + raise NotImplementedError() + + def add(self, state: InstanceState[Any]) -> bool: + raise NotImplementedError() + + def _fast_discard(self, state: InstanceState[Any]) -> None: + raise NotImplementedError() + + def _add_unpresent( + self, state: InstanceState[Any], key: _IdentityKeyType[Any] + ) -> None: + """optional inlined form of add() which can assume item isn't present + in the map""" + self.add(state) + + def _manage_incoming_state(self, state: InstanceState[Any]) -> None: + state._instance_dict = self._wr + + if state.modified: + self._modified.add(state) + + def _manage_removed_state(self, state: InstanceState[Any]) -> None: + del state._instance_dict + if state.modified: + self._modified.discard(state) + + def _dirty_states(self) -> Set[InstanceState[Any]]: + return self._modified + + def check_modified(self) -> bool: + """return True if any InstanceStates present have been marked + as 'modified'. 
+ + """ + return bool(self._modified) + + def has_key(self, key: _IdentityKeyType[Any]) -> bool: + return key in self + + def __len__(self) -> int: + return len(self._dict) + + +class WeakInstanceDict(IdentityMap): + _dict: Dict[_IdentityKeyType[Any], InstanceState[Any]] + + def __getitem__(self, key: _IdentityKeyType[_O]) -> _O: + state = cast("InstanceState[_O]", self._dict[key]) + o = state.obj() + if o is None: + raise KeyError(key) + return o + + def __contains__(self, key: _IdentityKeyType[Any]) -> bool: + try: + if key in self._dict: + state = self._dict[key] + o = state.obj() + else: + return False + except KeyError: + return False + else: + return o is not None + + def contains_state(self, state: InstanceState[Any]) -> bool: + if state.key in self._dict: + if TYPE_CHECKING: + assert state.key is not None + try: + return self._dict[state.key] is state + except KeyError: + return False + else: + return False + + def replace( + self, state: InstanceState[Any] + ) -> Optional[InstanceState[Any]]: + assert state.key is not None + if state.key in self._dict: + try: + existing = existing_non_none = self._dict[state.key] + except KeyError: + # catch gc removed the key after we just checked for it + existing = None + else: + if existing_non_none is not state: + self._manage_removed_state(existing_non_none) + else: + return None + else: + existing = None + + self._dict[state.key] = state + self._manage_incoming_state(state) + return existing + + def add(self, state: InstanceState[Any]) -> bool: + key = state.key + assert key is not None + # inline of self.__contains__ + if key in self._dict: + try: + existing_state = self._dict[key] + except KeyError: + # catch gc removed the key after we just checked for it + pass + else: + if existing_state is not state: + o = existing_state.obj() + if o is not None: + raise sa_exc.InvalidRequestError( + "Can't attach instance " + "%s; another instance with key %s is already " + "present in this session." 
+ % (orm_util.state_str(state), state.key) + ) + else: + return False + self._dict[key] = state + self._manage_incoming_state(state) + return True + + def _add_unpresent( + self, state: InstanceState[Any], key: _IdentityKeyType[Any] + ) -> None: + # inlined form of add() called by loading.py + self._dict[key] = state + state._instance_dict = self._wr + + def fast_get_state( + self, key: _IdentityKeyType[_O] + ) -> Optional[InstanceState[_O]]: + return self._dict.get(key) + + def get( + self, key: _IdentityKeyType[_O], default: Optional[_O] = None + ) -> Optional[_O]: + if key not in self._dict: + return default + try: + state = cast("InstanceState[_O]", self._dict[key]) + except KeyError: + # catch gc removed the key after we just checked for it + return default + else: + o = state.obj() + if o is None: + return default + return o + + def items(self) -> List[Tuple[_IdentityKeyType[Any], InstanceState[Any]]]: + values = self.all_states() + result = [] + for state in values: + value = state.obj() + key = state.key + assert key is not None + if value is not None: + result.append((key, value)) + return result + + def values(self) -> List[object]: + values = self.all_states() + result = [] + for state in values: + value = state.obj() + if value is not None: + result.append(value) + + return result + + def __iter__(self) -> Iterator[_IdentityKeyType[Any]]: + return iter(self.keys()) + + def all_states(self) -> List[InstanceState[Any]]: + return list(self._dict.values()) + + def _fast_discard(self, state: InstanceState[Any]) -> None: + # used by InstanceState for state being + # GC'ed, inlines _managed_removed_state + key = state.key + assert key is not None + try: + st = self._dict[key] + except KeyError: + # catch gc removed the key after we just checked for it + pass + else: + if st is state: + self._dict.pop(key, None) + + def discard(self, state: InstanceState[Any]) -> None: + self.safe_discard(state) + + def safe_discard(self, state: InstanceState[Any]) -> None: + key = state.key + if key in self._dict: + assert key is not None + try: + st = self._dict[key] + except KeyError: + # catch gc removed the key after we just checked for it + pass + else: + if st is state: + self._dict.pop(key, None) + self._manage_removed_state(state) + + +def _killed(state: InstanceState[Any], key: _IdentityKeyType[Any]) -> NoReturn: + # external function to avoid creating cycles when assigned to + # the IdentityMap + raise sa_exc.InvalidRequestError( + "Object %s cannot be converted to 'persistent' state, as this " + "identity map is no longer valid. Has the owning Session " + "been closed?" % orm_util.state_str(state), + code="lkrp", + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/instrumentation.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/instrumentation.py new file mode 100644 index 00000000..e9fe8433 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/instrumentation.py @@ -0,0 +1,754 @@ +# orm/instrumentation.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +"""Defines SQLAlchemy's system of class instrumentation. + +This module is usually not directly visible to user applications, but +defines a large part of the ORM's interactivity. + +instrumentation.py deals with registration of end-user classes +for state tracking. 
It interacts closely with state.py +and attributes.py which establish per-instance and per-class-attribute +instrumentation, respectively. + +The class instrumentation system can be customized on a per-class +or global basis using the :mod:`sqlalchemy.ext.instrumentation` +module, which provides the means to build and specify +alternate instrumentation forms. + +.. versionchanged: 0.8 + The instrumentation extension system was moved out of the + ORM and into the external :mod:`sqlalchemy.ext.instrumentation` + package. When that package is imported, it installs + itself within sqlalchemy.orm so that its more comprehensive + resolution mechanics take effect. + +""" + + +from __future__ import annotations + +from typing import Any +from typing import Callable +from typing import cast +from typing import Collection +from typing import Dict +from typing import Generic +from typing import Iterable +from typing import List +from typing import Optional +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import weakref + +from . import base +from . import collections +from . import exc +from . import interfaces +from . import state +from ._typing import _O +from .attributes import _is_collection_attribute_impl +from .. import util +from ..event import EventTarget +from ..util import HasMemoized +from ..util.typing import Literal +from ..util.typing import Protocol + +if TYPE_CHECKING: + from ._typing import _RegistryType + from .attributes import AttributeImpl + from .attributes import QueryableAttribute + from .collections import _AdaptedCollectionProtocol + from .collections import _CollectionFactoryType + from .decl_base import _MapperConfig + from .events import InstanceEvents + from .mapper import Mapper + from .state import InstanceState + from ..event import dispatcher + +_T = TypeVar("_T", bound=Any) +DEL_ATTR = util.symbol("DEL_ATTR") + + +class _ExpiredAttributeLoaderProto(Protocol): + def __call__( + self, + state: state.InstanceState[Any], + toload: Set[str], + passive: base.PassiveFlag, + ) -> None: ... + + +class _ManagerFactory(Protocol): + def __call__(self, class_: Type[_O]) -> ClassManager[_O]: ... 
+ + +class ClassManager( + HasMemoized, + Dict[str, "QueryableAttribute[Any]"], + Generic[_O], + EventTarget, +): + """Tracks state information at the class level.""" + + dispatch: dispatcher[ClassManager[_O]] + + MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR + STATE_ATTR = base.DEFAULT_STATE_ATTR + + _state_setter = staticmethod(util.attrsetter(STATE_ATTR)) + + expired_attribute_loader: _ExpiredAttributeLoaderProto + "previously known as deferred_scalar_loader" + + init_method: Optional[Callable[..., None]] + original_init: Optional[Callable[..., None]] = None + + factory: Optional[_ManagerFactory] + + declarative_scan: Optional[weakref.ref[_MapperConfig]] = None + + registry: _RegistryType + + if not TYPE_CHECKING: + # starts as None during setup + registry = None + + class_: Type[_O] + + _bases: List[ClassManager[Any]] + + @property + @util.deprecated( + "1.4", + message="The ClassManager.deferred_scalar_loader attribute is now " + "named expired_attribute_loader", + ) + def deferred_scalar_loader(self): + return self.expired_attribute_loader + + @deferred_scalar_loader.setter + @util.deprecated( + "1.4", + message="The ClassManager.deferred_scalar_loader attribute is now " + "named expired_attribute_loader", + ) + def deferred_scalar_loader(self, obj): + self.expired_attribute_loader = obj + + def __init__(self, class_): + self.class_ = class_ + self.info = {} + self.new_init = None + self.local_attrs = {} + self.originals = {} + self._finalized = False + self.factory = None + self.init_method = None + + self._bases = [ + mgr + for mgr in cast( + "List[Optional[ClassManager[Any]]]", + [ + opt_manager_of_class(base) + for base in self.class_.__bases__ + if isinstance(base, type) + ], + ) + if mgr is not None + ] + + for base_ in self._bases: + self.update(base_) + + cast( + "InstanceEvents", self.dispatch._events + )._new_classmanager_instance(class_, self) + + for basecls in class_.__mro__: + mgr = opt_manager_of_class(basecls) + if mgr is not None: + self.dispatch._update(mgr.dispatch) + + self.manage() + + if "__del__" in class_.__dict__: + util.warn( + "__del__() method on class %s will " + "cause unreachable cycles and memory leaks, " + "as SQLAlchemy instrumentation often creates " + "reference cycles. Please remove this method." 
% class_ + ) + + def _update_state( + self, + finalize: bool = False, + mapper: Optional[Mapper[_O]] = None, + registry: Optional[_RegistryType] = None, + declarative_scan: Optional[_MapperConfig] = None, + expired_attribute_loader: Optional[ + _ExpiredAttributeLoaderProto + ] = None, + init_method: Optional[Callable[..., None]] = None, + ) -> None: + if mapper: + self.mapper = mapper # + if registry: + registry._add_manager(self) + if declarative_scan: + self.declarative_scan = weakref.ref(declarative_scan) + if expired_attribute_loader: + self.expired_attribute_loader = expired_attribute_loader + + if init_method: + assert not self._finalized, ( + "class is already instrumented, " + "init_method %s can't be applied" % init_method + ) + self.init_method = init_method + + if not self._finalized: + self.original_init = ( + self.init_method + if self.init_method is not None + and self.class_.__init__ is object.__init__ + else self.class_.__init__ + ) + + if finalize and not self._finalized: + self._finalize() + + def _finalize(self) -> None: + if self._finalized: + return + self._finalized = True + + self._instrument_init() + + _instrumentation_factory.dispatch.class_instrument(self.class_) + + def __hash__(self) -> int: # type: ignore[override] + return id(self) + + def __eq__(self, other: Any) -> bool: + return other is self + + @property + def is_mapped(self) -> bool: + return "mapper" in self.__dict__ + + @HasMemoized.memoized_attribute + def _all_key_set(self): + return frozenset(self) + + @HasMemoized.memoized_attribute + def _collection_impl_keys(self): + return frozenset( + [attr.key for attr in self.values() if attr.impl.collection] + ) + + @HasMemoized.memoized_attribute + def _scalar_loader_impls(self): + return frozenset( + [ + attr.impl + for attr in self.values() + if attr.impl.accepts_scalar_loader + ] + ) + + @HasMemoized.memoized_attribute + def _loader_impls(self): + return frozenset([attr.impl for attr in self.values()]) + + @util.memoized_property + def mapper(self) -> Mapper[_O]: + # raises unless self.mapper has been assigned + raise exc.UnmappedClassError(self.class_) + + def _all_sqla_attributes(self, exclude=None): + """return an iterator of all classbound attributes that are + implement :class:`.InspectionAttr`. + + This includes :class:`.QueryableAttribute` as well as extension + types such as :class:`.hybrid_property` and + :class:`.AssociationProxy`. + + """ + + found: Dict[str, Any] = {} + + # constraints: + # 1. yield keys in cls.__dict__ order + # 2. if a subclass has the same key as a superclass, include that + # key as part of the ordering of the superclass, because an + # overridden key is usually installed by the mapper which is going + # on a different ordering + # 3. don't use getattr() as this fires off descriptors + + for supercls in self.class_.__mro__[0:-1]: + inherits = supercls.__mro__[1] + for key in supercls.__dict__: + found.setdefault(key, supercls) + if key in inherits.__dict__: + continue + val = found[key].__dict__[key] + if ( + isinstance(val, interfaces.InspectionAttr) + and val.is_attribute + ): + yield key, val + + def _get_class_attr_mro(self, key, default=None): + """return an attribute on the class without tripping it.""" + + for supercls in self.class_.__mro__: + if key in supercls.__dict__: + return supercls.__dict__[key] + else: + return default + + def _attr_has_impl(self, key: str) -> bool: + """Return True if the given attribute is fully initialized. + + i.e. has an impl. 
+ """ + + return key in self and self[key].impl is not None + + def _subclass_manager(self, cls: Type[_T]) -> ClassManager[_T]: + """Create a new ClassManager for a subclass of this ClassManager's + class. + + This is called automatically when attributes are instrumented so that + the attributes can be propagated to subclasses against their own + class-local manager, without the need for mappers etc. to have already + pre-configured managers for the full class hierarchy. Mappers + can post-configure the auto-generated ClassManager when needed. + + """ + return register_class(cls, finalize=False) + + def _instrument_init(self): + self.new_init = _generate_init(self.class_, self, self.original_init) + self.install_member("__init__", self.new_init) + + @util.memoized_property + def _state_constructor(self) -> Type[state.InstanceState[_O]]: + self.dispatch.first_init(self, self.class_) + return state.InstanceState + + def manage(self): + """Mark this instance as the manager for its class.""" + + setattr(self.class_, self.MANAGER_ATTR, self) + + @util.hybridmethod + def manager_getter(self): + return _default_manager_getter + + @util.hybridmethod + def state_getter(self): + """Return a (instance) -> InstanceState callable. + + "state getter" callables should raise either KeyError or + AttributeError if no InstanceState could be found for the + instance. + """ + + return _default_state_getter + + @util.hybridmethod + def dict_getter(self): + return _default_dict_getter + + def instrument_attribute( + self, + key: str, + inst: QueryableAttribute[Any], + propagated: bool = False, + ) -> None: + if propagated: + if key in self.local_attrs: + return # don't override local attr with inherited attr + else: + self.local_attrs[key] = inst + self.install_descriptor(key, inst) + self._reset_memoizations() + self[key] = inst + + for cls in self.class_.__subclasses__(): + manager = self._subclass_manager(cls) + manager.instrument_attribute(key, inst, True) + + def subclass_managers(self, recursive): + for cls in self.class_.__subclasses__(): + mgr = opt_manager_of_class(cls) + if mgr is not None and mgr is not self: + yield mgr + if recursive: + yield from mgr.subclass_managers(True) + + def post_configure_attribute(self, key): + _instrumentation_factory.dispatch.attribute_instrument( + self.class_, key, self[key] + ) + + def uninstrument_attribute(self, key, propagated=False): + if key not in self: + return + if propagated: + if key in self.local_attrs: + return # don't get rid of local attr + else: + del self.local_attrs[key] + self.uninstall_descriptor(key) + self._reset_memoizations() + del self[key] + for cls in self.class_.__subclasses__(): + manager = opt_manager_of_class(cls) + if manager: + manager.uninstrument_attribute(key, True) + + def unregister(self) -> None: + """remove all instrumentation established by this ClassManager.""" + + for key in list(self.originals): + self.uninstall_member(key) + + self.mapper = None + self.dispatch = None # type: ignore + self.new_init = None + self.info.clear() + + for key in list(self): + if key in self.local_attrs: + self.uninstrument_attribute(key) + + if self.MANAGER_ATTR in self.class_.__dict__: + delattr(self.class_, self.MANAGER_ATTR) + + def install_descriptor( + self, key: str, inst: QueryableAttribute[Any] + ) -> None: + if key in (self.STATE_ATTR, self.MANAGER_ATTR): + raise KeyError( + "%r: requested attribute name conflicts with " + "instrumentation attribute of the same name." 
% key + ) + setattr(self.class_, key, inst) + + def uninstall_descriptor(self, key: str) -> None: + delattr(self.class_, key) + + def install_member(self, key: str, implementation: Any) -> None: + if key in (self.STATE_ATTR, self.MANAGER_ATTR): + raise KeyError( + "%r: requested attribute name conflicts with " + "instrumentation attribute of the same name." % key + ) + self.originals.setdefault(key, self.class_.__dict__.get(key, DEL_ATTR)) + setattr(self.class_, key, implementation) + + def uninstall_member(self, key: str) -> None: + original = self.originals.pop(key, None) + if original is not DEL_ATTR: + setattr(self.class_, key, original) + else: + delattr(self.class_, key) + + def instrument_collection_class( + self, key: str, collection_class: Type[Collection[Any]] + ) -> _CollectionFactoryType: + return collections.prepare_instrumentation(collection_class) + + def initialize_collection( + self, + key: str, + state: InstanceState[_O], + factory: _CollectionFactoryType, + ) -> Tuple[collections.CollectionAdapter, _AdaptedCollectionProtocol]: + user_data = factory() + impl = self.get_impl(key) + assert _is_collection_attribute_impl(impl) + adapter = collections.CollectionAdapter(impl, state, user_data) + return adapter, user_data + + def is_instrumented(self, key: str, search: bool = False) -> bool: + if search: + return key in self + else: + return key in self.local_attrs + + def get_impl(self, key: str) -> AttributeImpl: + return self[key].impl + + @property + def attributes(self) -> Iterable[Any]: + return iter(self.values()) + + # InstanceState management + + def new_instance(self, state: Optional[InstanceState[_O]] = None) -> _O: + # here, we would prefer _O to be bound to "object" + # so that mypy sees that __new__ is present. currently + # it's bound to Any as there were other problems not having + # it that way but these can be revisited + instance = self.class_.__new__(self.class_) + if state is None: + state = self._state_constructor(instance, self) + self._state_setter(instance, state) + return instance + + def setup_instance( + self, instance: _O, state: Optional[InstanceState[_O]] = None + ) -> None: + if state is None: + state = self._state_constructor(instance, self) + self._state_setter(instance, state) + + def teardown_instance(self, instance: _O) -> None: + delattr(instance, self.STATE_ATTR) + + def _serialize( + self, state: InstanceState[_O], state_dict: Dict[str, Any] + ) -> _SerializeManager: + return _SerializeManager(state, state_dict) + + def _new_state_if_none( + self, instance: _O + ) -> Union[Literal[False], InstanceState[_O]]: + """Install a default InstanceState if none is present. + + A private convenience method used by the __init__ decorator. + + """ + if hasattr(instance, self.STATE_ATTR): + return False + elif self.class_ is not instance.__class__ and self.is_mapped: + # this will create a new ClassManager for the + # subclass, without a mapper. This is likely a + # user error situation but allow the object + # to be constructed, so that it is usable + # in a non-ORM context at least. 
+ return self._subclass_manager( + instance.__class__ + )._new_state_if_none(instance) + else: + state = self._state_constructor(instance, self) + self._state_setter(instance, state) + return state + + def has_state(self, instance: _O) -> bool: + return hasattr(instance, self.STATE_ATTR) + + def has_parent( + self, state: InstanceState[_O], key: str, optimistic: bool = False + ) -> bool: + """TODO""" + return self.get_impl(key).hasparent(state, optimistic=optimistic) + + def __bool__(self) -> bool: + """All ClassManagers are non-zero regardless of attribute state.""" + return True + + def __repr__(self) -> str: + return "<%s of %r at %x>" % ( + self.__class__.__name__, + self.class_, + id(self), + ) + + +class _SerializeManager: + """Provide serialization of a :class:`.ClassManager`. + + The :class:`.InstanceState` uses ``__init__()`` on serialize + and ``__call__()`` on deserialize. + + """ + + def __init__(self, state: state.InstanceState[Any], d: Dict[str, Any]): + self.class_ = state.class_ + manager = state.manager + manager.dispatch.pickle(state, d) + + def __call__(self, state, inst, state_dict): + state.manager = manager = opt_manager_of_class(self.class_) + if manager is None: + raise exc.UnmappedInstanceError( + inst, + "Cannot deserialize object of type %r - " + "no mapper() has " + "been configured for this class within the current " + "Python process!" % self.class_, + ) + elif manager.is_mapped and not manager.mapper.configured: + manager.mapper._check_configure() + + # setup _sa_instance_state ahead of time so that + # unpickle events can access the object normally. + # see [ticket:2362] + if inst is not None: + manager.setup_instance(inst, state) + manager.dispatch.unpickle(state, state_dict) + + +class InstrumentationFactory(EventTarget): + """Factory for new ClassManager instances.""" + + dispatch: dispatcher[InstrumentationFactory] + + def create_manager_for_cls(self, class_: Type[_O]) -> ClassManager[_O]: + assert class_ is not None + assert opt_manager_of_class(class_) is None + + # give a more complicated subclass + # a chance to do what it wants here + manager, factory = self._locate_extended_factory(class_) + + if factory is None: + factory = ClassManager + manager = ClassManager(class_) + else: + assert manager is not None + + self._check_conflicts(class_, factory) + + manager.factory = factory + + return manager + + def _locate_extended_factory( + self, class_: Type[_O] + ) -> Tuple[Optional[ClassManager[_O]], Optional[_ManagerFactory]]: + """Overridden by a subclass to do an extended lookup.""" + return None, None + + def _check_conflicts( + self, class_: Type[_O], factory: Callable[[Type[_O]], ClassManager[_O]] + ) -> None: + """Overridden by a subclass to test for conflicting factories.""" + + def unregister(self, class_: Type[_O]) -> None: + manager = manager_of_class(class_) + manager.unregister() + self.dispatch.class_uninstrument(class_) + + +# this attribute is replaced by sqlalchemy.ext.instrumentation +# when imported. +_instrumentation_factory = InstrumentationFactory() + +# these attributes are replaced by sqlalchemy.ext.instrumentation +# when a non-standard InstrumentationManager class is first +# used to instrument a class. 
+instance_state = _default_state_getter = base.instance_state + +instance_dict = _default_dict_getter = base.instance_dict + +manager_of_class = _default_manager_getter = base.manager_of_class +opt_manager_of_class = _default_opt_manager_getter = base.opt_manager_of_class + + +def register_class( + class_: Type[_O], + finalize: bool = True, + mapper: Optional[Mapper[_O]] = None, + registry: Optional[_RegistryType] = None, + declarative_scan: Optional[_MapperConfig] = None, + expired_attribute_loader: Optional[_ExpiredAttributeLoaderProto] = None, + init_method: Optional[Callable[..., None]] = None, +) -> ClassManager[_O]: + """Register class instrumentation. + + Returns the existing or newly created class manager. + + """ + + manager = opt_manager_of_class(class_) + if manager is None: + manager = _instrumentation_factory.create_manager_for_cls(class_) + manager._update_state( + mapper=mapper, + registry=registry, + declarative_scan=declarative_scan, + expired_attribute_loader=expired_attribute_loader, + init_method=init_method, + finalize=finalize, + ) + + return manager + + +def unregister_class(class_): + """Unregister class instrumentation.""" + + _instrumentation_factory.unregister(class_) + + +def is_instrumented(instance, key): + """Return True if the given attribute on the given instance is + instrumented by the attributes package. + + This function may be used regardless of instrumentation + applied directly to the class, i.e. no descriptors are required. + + """ + return manager_of_class(instance.__class__).is_instrumented( + key, search=True + ) + + +def _generate_init(class_, class_manager, original_init): + """Build an __init__ decorator that triggers ClassManager events.""" + + # TODO: we should use the ClassManager's notion of the + # original '__init__' method, once ClassManager is fixed + # to always reference that. + + if original_init is None: + original_init = class_.__init__ + + # Go through some effort here and don't change the user's __init__ + # calling signature, including the unlikely case that it has + # a return value. + # FIXME: need to juggle local names to avoid constructor argument + # clashes. 
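+    # For illustration (hypothetical original signature
+    # ``def __init__(self, name, value=None)``), the template below expands
+    # to approximately the following, with the default for ``value``
+    # re-attached afterwards via ``__init__.__defaults__``:
+    #
+    #     def __init__(self, name, value):
+    #         new_state = class_manager._new_state_if_none(self)
+    #         if new_state:
+    #             return new_state._initialize_instance(self, name, value=value)
+    #         else:
+    #             return original_init(self, name, value=value)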
+ func_body = """\ +def __init__(%(apply_pos)s): + new_state = class_manager._new_state_if_none(%(self_arg)s) + if new_state: + return new_state._initialize_instance(%(apply_kw)s) + else: + return original_init(%(apply_kw)s) +""" + func_vars = util.format_argspec_init(original_init, grouped=False) + func_text = func_body % func_vars + + func_defaults = getattr(original_init, "__defaults__", None) + func_kw_defaults = getattr(original_init, "__kwdefaults__", None) + + env = locals().copy() + env["__name__"] = __name__ + exec(func_text, env) + __init__ = env["__init__"] + __init__.__doc__ = original_init.__doc__ + __init__._sa_original_init = original_init + + if func_defaults: + __init__.__defaults__ = func_defaults + if func_kw_defaults: + __init__.__kwdefaults__ = func_kw_defaults + + return __init__ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/interfaces.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/interfaces.py new file mode 100644 index 00000000..36336e7a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/interfaces.py @@ -0,0 +1,1469 @@ +# orm/interfaces.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +""" + +Contains various base classes used throughout the ORM. + +Defines some key base classes prominent within the internals. + +This module and the classes within are mostly private, though some attributes +are exposed when inspecting mappings. + +""" + +from __future__ import annotations + +import collections +import dataclasses +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import ClassVar +from typing import Dict +from typing import Generic +from typing import Iterator +from typing import List +from typing import NamedTuple +from typing import NoReturn +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import exc as orm_exc +from . import path_registry +from .base import _MappedAttribute as _MappedAttribute +from .base import EXT_CONTINUE as EXT_CONTINUE # noqa: F401 +from .base import EXT_SKIP as EXT_SKIP # noqa: F401 +from .base import EXT_STOP as EXT_STOP # noqa: F401 +from .base import InspectionAttr as InspectionAttr # noqa: F401 +from .base import InspectionAttrInfo as InspectionAttrInfo +from .base import MANYTOMANY as MANYTOMANY # noqa: F401 +from .base import MANYTOONE as MANYTOONE # noqa: F401 +from .base import NO_KEY as NO_KEY # noqa: F401 +from .base import NO_VALUE as NO_VALUE # noqa: F401 +from .base import NotExtension as NotExtension # noqa: F401 +from .base import ONETOMANY as ONETOMANY # noqa: F401 +from .base import RelationshipDirection as RelationshipDirection # noqa: F401 +from .base import SQLORMOperations +from .. import ColumnElement +from .. import exc as sa_exc +from .. import inspection +from .. 
import util +from ..sql import operators +from ..sql import roles +from ..sql import visitors +from ..sql.base import _NoArg +from ..sql.base import ExecutableOption +from ..sql.cache_key import HasCacheKey +from ..sql.operators import ColumnOperators +from ..sql.schema import Column +from ..sql.type_api import TypeEngine +from ..util import warn_deprecated +from ..util.typing import RODescriptorReference +from ..util.typing import TypedDict + +if typing.TYPE_CHECKING: + from ._typing import _EntityType + from ._typing import _IdentityKeyType + from ._typing import _InstanceDict + from ._typing import _InternalEntityType + from ._typing import _ORMAdapterProto + from .attributes import InstrumentedAttribute + from .base import Mapped + from .context import _MapperEntity + from .context import ORMCompileState + from .context import QueryContext + from .decl_api import RegistryType + from .decl_base import _ClassScanMapperConfig + from .loading import _PopulatorDict + from .mapper import Mapper + from .path_registry import AbstractEntityRegistry + from .query import Query + from .session import Session + from .state import InstanceState + from .strategy_options import _LoadElement + from .util import AliasedInsp + from .util import ORMAdapter + from ..engine.result import Result + from ..sql._typing import _ColumnExpressionArgument + from ..sql._typing import _ColumnsClauseArgument + from ..sql._typing import _DMLColumnArgument + from ..sql._typing import _InfoType + from ..sql.operators import OperatorType + from ..sql.visitors import _TraverseInternalsType + from ..util.typing import _AnnotationScanType + +_StrategyKey = Tuple[Any, ...] + +_T = TypeVar("_T", bound=Any) +_T_co = TypeVar("_T_co", bound=Any, covariant=True) + +_TLS = TypeVar("_TLS", bound="Type[LoaderStrategy]") + + +class ORMStatementRole(roles.StatementRole): + __slots__ = () + _role_name = ( + "Executable SQL or text() construct, including ORM aware objects" + ) + + +class ORMColumnsClauseRole( + roles.ColumnsClauseRole, roles.TypedColumnsClauseRole[_T] +): + __slots__ = () + _role_name = "ORM mapped entity, aliased entity, or Column expression" + + +class ORMEntityColumnsClauseRole(ORMColumnsClauseRole[_T]): + __slots__ = () + _role_name = "ORM mapped or aliased entity" + + +class ORMFromClauseRole(roles.StrictFromClauseRole): + __slots__ = () + _role_name = "ORM mapped entity, aliased entity, or FROM expression" + + +class ORMColumnDescription(TypedDict): + name: str + # TODO: add python_type and sql_type here; combining them + # into "type" is a bad idea + type: Union[Type[Any], TypeEngine[Any]] + aliased: bool + expr: _ColumnsClauseArgument[Any] + entity: Optional[_ColumnsClauseArgument[Any]] + + +class _IntrospectsAnnotations: + __slots__ = () + + @classmethod + def _mapper_property_name(cls) -> str: + return cls.__name__ + + def found_in_pep593_annotated(self) -> Any: + """return a copy of this object to use in declarative when the + object is found inside of an Annotated object.""" + + raise NotImplementedError( + f"Use of the {self._mapper_property_name()!r} " + "construct inside of an Annotated object is not yet supported." 
+        )
+
+    def declarative_scan(
+        self,
+        decl_scan: _ClassScanMapperConfig,
+        registry: RegistryType,
+        cls: Type[Any],
+        originating_module: Optional[str],
+        key: str,
+        mapped_container: Optional[Type[Mapped[Any]]],
+        annotation: Optional[_AnnotationScanType],
+        extracted_mapped_annotation: Optional[_AnnotationScanType],
+        is_dataclass_field: bool,
+    ) -> None:
+        """Perform class-specific initialization at early declarative scanning
+        time.
+
+        .. versionadded:: 2.0
+
+        """
+
+    def _raise_for_required(self, key: str, cls: Type[Any]) -> NoReturn:
+        raise sa_exc.ArgumentError(
+            f"Python typing annotation is required for attribute "
+            f'"{cls.__name__}.{key}" when primary argument(s) for '
+            f'"{self._mapper_property_name()}" '
+            "construct are None or not present"
+        )
+
+
+class _AttributeOptions(NamedTuple):
+    """define Python-local attribute behavior options common to all
+    :class:`.MapperProperty` objects.
+
+    Currently this includes dataclass-generation arguments.
+
+    .. versionadded:: 2.0
+
+    """
+
+    dataclasses_init: Union[_NoArg, bool]
+    dataclasses_repr: Union[_NoArg, bool]
+    dataclasses_default: Union[_NoArg, Any]
+    dataclasses_default_factory: Union[_NoArg, Callable[[], Any]]
+    dataclasses_compare: Union[_NoArg, bool]
+    dataclasses_kw_only: Union[_NoArg, bool]
+
+    def _as_dataclass_field(self, key: str) -> Any:
+        """Return a ``dataclasses.Field`` object given these arguments."""
+
+        kw: Dict[str, Any] = {}
+        if self.dataclasses_default_factory is not _NoArg.NO_ARG:
+            kw["default_factory"] = self.dataclasses_default_factory
+        if self.dataclasses_default is not _NoArg.NO_ARG:
+            kw["default"] = self.dataclasses_default
+        if self.dataclasses_init is not _NoArg.NO_ARG:
+            kw["init"] = self.dataclasses_init
+        if self.dataclasses_repr is not _NoArg.NO_ARG:
+            kw["repr"] = self.dataclasses_repr
+        if self.dataclasses_compare is not _NoArg.NO_ARG:
+            kw["compare"] = self.dataclasses_compare
+        if self.dataclasses_kw_only is not _NoArg.NO_ARG:
+            kw["kw_only"] = self.dataclasses_kw_only
+
+        if "default" in kw and callable(kw["default"]):
+            # callable defaults are ambiguous. deprecate them in favour of
+            # insert_default or default_factory. #9936
+            warn_deprecated(
+                f"Callable object passed to the ``default`` parameter for "
+                f"attribute {key!r} in an ORM-mapped Dataclasses context is "
+                "ambiguous, "
+                "and this use will raise an error in a future release. "
+                "If this callable is intended to produce Core level INSERT "
+                "default values for an underlying ``Column``, use "
+                "the ``mapped_column.insert_default`` parameter instead. "
+                "To establish this callable as providing a default value "
+                "for instances of the dataclass itself, use the "
+                "``default_factory`` dataclasses parameter.",
+                "2.0",
+            )
+
+        if (
+            "init" in kw
+            and not kw["init"]
+            and "default" in kw
+            and not callable(kw["default"])  # ignore callable defaults. #9936
+            and "default_factory" not in kw  # illegal but let dc.field raise
+        ):
+            # fix for #9879
+            default = kw.pop("default")
+            kw["default_factory"] = lambda: default
+
+        return dataclasses.field(**kw)
+
+    @classmethod
+    def _get_arguments_for_make_dataclass(
+        cls,
+        key: str,
+        annotation: _AnnotationScanType,
+        mapped_container: Optional[Any],
+        elem: _T,
+    ) -> Union[
+        Tuple[str, _AnnotationScanType],
+        Tuple[str, _AnnotationScanType, dataclasses.Field[Any]],
+    ]:
+        """given attribute key, annotation, and value from a class, return
+        the argument tuple we would pass to dataclasses.make_dataclass()
+        for this attribute.
+ + """ + if isinstance(elem, _DCAttributeOptions): + dc_field = elem._attribute_options._as_dataclass_field(key) + + return (key, annotation, dc_field) + elif elem is not _NoArg.NO_ARG: + # why is typing not erroring on this? + return (key, annotation, elem) + elif mapped_container is not None: + # it's Mapped[], but there's no "element", which means declarative + # did not actually do anything for this field. this shouldn't + # happen. + # previously, this would occur because _scan_attributes would + # skip a field that's on an already mapped superclass, but it + # would still include it in the annotations, leading + # to issue #8718 + + assert False, "Mapped[] received without a mapping declaration" + + else: + # plain dataclass field, not mapped. Is only possible + # if __allow_unmapped__ is set up. I can see this mode causing + # problems... + return (key, annotation) + + +_DEFAULT_ATTRIBUTE_OPTIONS = _AttributeOptions( + _NoArg.NO_ARG, + _NoArg.NO_ARG, + _NoArg.NO_ARG, + _NoArg.NO_ARG, + _NoArg.NO_ARG, + _NoArg.NO_ARG, +) + +_DEFAULT_READONLY_ATTRIBUTE_OPTIONS = _AttributeOptions( + False, + _NoArg.NO_ARG, + _NoArg.NO_ARG, + _NoArg.NO_ARG, + _NoArg.NO_ARG, + _NoArg.NO_ARG, +) + + +class _DCAttributeOptions: + """mixin for descriptors or configurational objects that include dataclass + field options. + + This includes :class:`.MapperProperty`, :class:`._MapsColumn` within + the ORM, but also includes :class:`.AssociationProxy` within ext. + Can in theory be used for other descriptors that serve a similar role + as association proxy. (*maybe* hybrids, not sure yet.) + + """ + + __slots__ = () + + _attribute_options: _AttributeOptions + """behavioral options for ORM-enabled Python attributes + + .. versionadded:: 2.0 + + """ + + _has_dataclass_arguments: bool + + +class _MapsColumns(_DCAttributeOptions, _MappedAttribute[_T]): + """interface for declarative-capable construct that delivers one or more + Column objects to the declarative process to be part of a Table. + """ + + __slots__ = () + + @property + def mapper_property_to_assign(self) -> Optional[MapperProperty[_T]]: + """return a MapperProperty to be assigned to the declarative mapping""" + raise NotImplementedError() + + @property + def columns_to_assign(self) -> List[Tuple[Column[_T], int]]: + """A list of Column objects that should be declaratively added to the + new Table object. + + """ + raise NotImplementedError() + + +# NOTE: MapperProperty needs to extend _MappedAttribute so that declarative +# typing works, i.e. "Mapped[A] = relationship()". This introduces an +# inconvenience which is that all the MapperProperty objects are treated +# as descriptors by typing tools, which are misled by this as assignment / +# access to a descriptor attribute wants to move through __get__. +# Therefore, references to MapperProperty as an instance variable, such +# as in PropComparator, may have some special typing workarounds such as the +# use of sqlalchemy.util.typing.DescriptorReference to avoid mis-interpretation +# by typing tools +@inspection._self_inspects +class MapperProperty( + HasCacheKey, + _DCAttributeOptions, + _MappedAttribute[_T], + InspectionAttrInfo, + util.MemoizedSlots, +): + """Represent a particular class attribute mapped by :class:`_orm.Mapper`. 
+ + The most common occurrences of :class:`.MapperProperty` are the + mapped :class:`_schema.Column`, which is represented in a mapping as + an instance of :class:`.ColumnProperty`, + and a reference to another class produced by :func:`_orm.relationship`, + represented in the mapping as an instance of + :class:`.Relationship`. + + """ + + __slots__ = ( + "_configure_started", + "_configure_finished", + "_attribute_options", + "_has_dataclass_arguments", + "parent", + "key", + "info", + "doc", + ) + + _cache_key_traversal: _TraverseInternalsType = [ + ("parent", visitors.ExtendedInternalTraversal.dp_has_cache_key), + ("key", visitors.ExtendedInternalTraversal.dp_string), + ] + + if not TYPE_CHECKING: + cascade = None + + is_property = True + """Part of the InspectionAttr interface; states this object is a + mapper property. + + """ + + comparator: PropComparator[_T] + """The :class:`_orm.PropComparator` instance that implements SQL + expression construction on behalf of this mapped attribute.""" + + key: str + """name of class attribute""" + + parent: Mapper[Any] + """the :class:`.Mapper` managing this property.""" + + _is_relationship = False + + _links_to_entity: bool + """True if this MapperProperty refers to a mapped entity. + + Should only be True for Relationship, False for all others. + + """ + + doc: Optional[str] + """optional documentation string""" + + info: _InfoType + """Info dictionary associated with the object, allowing user-defined + data to be associated with this :class:`.InspectionAttr`. + + The dictionary is generated when first accessed. Alternatively, + it can be specified as a constructor argument to the + :func:`.column_property`, :func:`_orm.relationship`, or :func:`.composite` + functions. + + .. seealso:: + + :attr:`.QueryableAttribute.info` + + :attr:`.SchemaItem.info` + + """ + + def _memoized_attr_info(self) -> _InfoType: + """Info dictionary associated with the object, allowing user-defined + data to be associated with this :class:`.InspectionAttr`. + + The dictionary is generated when first accessed. Alternatively, + it can be specified as a constructor argument to the + :func:`.column_property`, :func:`_orm.relationship`, or + :func:`.composite` + functions. + + .. seealso:: + + :attr:`.QueryableAttribute.info` + + :attr:`.SchemaItem.info` + + """ + return {} + + def setup( + self, + context: ORMCompileState, + query_entity: _MapperEntity, + path: AbstractEntityRegistry, + adapter: Optional[ORMAdapter], + **kwargs: Any, + ) -> None: + """Called by Query for the purposes of constructing a SQL statement. + + Each MapperProperty associated with the target mapper processes the + statement referenced by the query context, adding columns and/or + criterion as appropriate. + + """ + + def create_row_processor( + self, + context: ORMCompileState, + query_entity: _MapperEntity, + path: AbstractEntityRegistry, + mapper: Mapper[Any], + result: Result[Any], + adapter: Optional[ORMAdapter], + populators: _PopulatorDict, + ) -> None: + """Produce row processing functions and append to the given + set of populators lists. + + """ + + def cascade_iterator( + self, + type_: str, + state: InstanceState[Any], + dict_: _InstanceDict, + visited_states: Set[InstanceState[Any]], + halt_on: Optional[Callable[[InstanceState[Any]], bool]] = None, + ) -> Iterator[ + Tuple[object, Mapper[Any], InstanceState[Any], _InstanceDict] + ]: + """Iterate through instances related to the given instance for + a particular 'cascade', starting with this MapperProperty. 
+
+        Return an iterator of (instance, mapper, state, dict) tuples.
+
+        Note that the 'cascade' collection on this MapperProperty is
+        checked first for the given type before cascade_iterator is called.
+
+        This method typically only applies to Relationship.
+
+        """
+
+        return iter(())
+
+    def set_parent(self, parent: Mapper[Any], init: bool) -> None:
+        """Set the parent mapper that references this MapperProperty.
+
+        This method is overridden by some subclasses to perform extra
+        setup when the mapper is first known.
+
+        """
+        self.parent = parent
+
+    def instrument_class(self, mapper: Mapper[Any]) -> None:
+        """Hook called by the Mapper to the property to initiate
+        instrumentation of the class attribute managed by this
+        MapperProperty.
+
+        The MapperProperty here will typically call out to the
+        attributes module to set up an InstrumentedAttribute.
+
+        This step is the first of two steps to set up an
+        InstrumentedAttribute, and is called early in the mapper setup
+        process.
+
+        The second step is typically the init_class_attribute step,
+        called from StrategizedProperty via the post_instrument_class()
+        hook.  This step assigns additional state to the
+        InstrumentedAttribute (specifically the "impl") which has been
+        determined after the MapperProperty has determined what kind of
+        persistence management it needs to do (e.g. scalar, object,
+        collection, etc).
+
+        """
+
+    def __init__(
+        self,
+        attribute_options: Optional[_AttributeOptions] = None,
+        _assume_readonly_dc_attributes: bool = False,
+    ) -> None:
+        self._configure_started = False
+        self._configure_finished = False
+
+        if _assume_readonly_dc_attributes:
+            default_attrs = _DEFAULT_READONLY_ATTRIBUTE_OPTIONS
+        else:
+            default_attrs = _DEFAULT_ATTRIBUTE_OPTIONS
+
+        if attribute_options and attribute_options != default_attrs:
+            self._has_dataclass_arguments = True
+            self._attribute_options = attribute_options
+        else:
+            self._has_dataclass_arguments = False
+            self._attribute_options = default_attrs
+
+    def init(self) -> None:
+        """Called after all mappers are created to assemble
+        relationships between mappers and perform other
+        post-mapper-creation initialization steps.
+
+        """
+        self._configure_started = True
+        self.do_init()
+        self._configure_finished = True
+
+    @property
+    def class_attribute(self) -> InstrumentedAttribute[_T]:
+        """Return the class-bound descriptor corresponding to this
+        :class:`.MapperProperty`.
+
+        This is basically a ``getattr()`` call::
+
+            return getattr(self.parent.class_, self.key)
+
+        I.e. if this :class:`.MapperProperty` were named ``addresses``,
+        and the class to which it is mapped is ``User``, this sequence
+        is possible::
+
+            >>> from sqlalchemy import inspect
+            >>> mapper = inspect(User)
+            >>> addresses_property = mapper.attrs.addresses
+            >>> addresses_property.class_attribute is User.addresses
+            True
+            >>> User.addresses.property is addresses_property
+            True
+
+
+        """
+
+        return getattr(self.parent.class_, self.key)  # type: ignore
+
+    def do_init(self) -> None:
+        """Perform subclass-specific initialization post-mapper-creation
+        steps.
+
+        This is a template method called by the ``MapperProperty``
+        object's init() method.
+
+        """
+
+    def post_instrument_class(self, mapper: Mapper[Any]) -> None:
+        """Perform instrumentation adjustments that need to occur
+        after init() has completed.
+
+        The given Mapper is the Mapper invoking the operation, which
+        may not be the same Mapper as self.parent in an inheritance
+        scenario; however, Mapper will always at least be a sub-mapper of
+        self.parent.
+ + This method is typically used by StrategizedProperty, which delegates + it to LoaderStrategy.init_class_attribute() to perform final setup + on the class-bound InstrumentedAttribute. + + """ + + def merge( + self, + session: Session, + source_state: InstanceState[Any], + source_dict: _InstanceDict, + dest_state: InstanceState[Any], + dest_dict: _InstanceDict, + load: bool, + _recursive: Dict[Any, object], + _resolve_conflict_map: Dict[_IdentityKeyType[Any], object], + ) -> None: + """Merge the attribute represented by this ``MapperProperty`` + from source to destination object. + + """ + + def __repr__(self) -> str: + return "<%s at 0x%x; %s>" % ( + self.__class__.__name__, + id(self), + getattr(self, "key", "no key"), + ) + + +@inspection._self_inspects +class PropComparator(SQLORMOperations[_T_co], Generic[_T_co], ColumnOperators): + r"""Defines SQL operations for ORM mapped attributes. + + SQLAlchemy allows for operators to + be redefined at both the Core and ORM level. :class:`.PropComparator` + is the base class of operator redefinition for ORM-level operations, + including those of :class:`.ColumnProperty`, + :class:`.Relationship`, and :class:`.Composite`. + + User-defined subclasses of :class:`.PropComparator` may be created. The + built-in Python comparison and math operator methods, such as + :meth:`.operators.ColumnOperators.__eq__`, + :meth:`.operators.ColumnOperators.__lt__`, and + :meth:`.operators.ColumnOperators.__add__`, can be overridden to provide + new operator behavior. The custom :class:`.PropComparator` is passed to + the :class:`.MapperProperty` instance via the ``comparator_factory`` + argument. In each case, + the appropriate subclass of :class:`.PropComparator` should be used:: + + # definition of custom PropComparator subclasses + + from sqlalchemy.orm.properties import \ + ColumnProperty,\ + Composite,\ + Relationship + + class MyColumnComparator(ColumnProperty.Comparator): + def __eq__(self, other): + return self.__clause_element__() == other + + class MyRelationshipComparator(Relationship.Comparator): + def any(self, expression): + "define the 'any' operation" + # ... + + class MyCompositeComparator(Composite.Comparator): + def __gt__(self, other): + "redefine the 'greater than' operation" + + return sql.and_(*[a>b for a, b in + zip(self.__clause_element__().clauses, + other.__composite_values__())]) + + + # application of custom PropComparator subclasses + + from sqlalchemy.orm import column_property, relationship, composite + from sqlalchemy import Column, String + + class SomeMappedClass(Base): + some_column = column_property(Column("some_column", String), + comparator_factory=MyColumnComparator) + + some_relationship = relationship(SomeOtherClass, + comparator_factory=MyRelationshipComparator) + + some_composite = composite( + Column("a", String), Column("b", String), + comparator_factory=MyCompositeComparator + ) + + Note that for column-level operator redefinition, it's usually + simpler to define the operators at the Core level, using the + :attr:`.TypeEngine.comparator_factory` attribute. See + :ref:`types_operators` for more detail. + + .. 
seealso:: + + :class:`.ColumnProperty.Comparator` + + :class:`.Relationship.Comparator` + + :class:`.Composite.Comparator` + + :class:`.ColumnOperators` + + :ref:`types_operators` + + :attr:`.TypeEngine.comparator_factory` + + """ + + __slots__ = "prop", "_parententity", "_adapt_to_entity" + + __visit_name__ = "orm_prop_comparator" + + _parententity: _InternalEntityType[Any] + _adapt_to_entity: Optional[AliasedInsp[Any]] + prop: RODescriptorReference[MapperProperty[_T_co]] + + def __init__( + self, + prop: MapperProperty[_T], + parentmapper: _InternalEntityType[Any], + adapt_to_entity: Optional[AliasedInsp[Any]] = None, + ): + self.prop = prop + self._parententity = adapt_to_entity or parentmapper + self._adapt_to_entity = adapt_to_entity + + @util.non_memoized_property + def property(self) -> MapperProperty[_T_co]: + """Return the :class:`.MapperProperty` associated with this + :class:`.PropComparator`. + + + Return values here will commonly be instances of + :class:`.ColumnProperty` or :class:`.Relationship`. + + + """ + return self.prop + + def __clause_element__(self) -> roles.ColumnsClauseRole: + raise NotImplementedError("%r" % self) + + def _bulk_update_tuples( + self, value: Any + ) -> Sequence[Tuple[_DMLColumnArgument, Any]]: + """Receive a SQL expression that represents a value in the SET + clause of an UPDATE statement. + + Return a tuple that can be passed to a :class:`_expression.Update` + construct. + + """ + + return [(cast("_DMLColumnArgument", self.__clause_element__()), value)] + + def adapt_to_entity( + self, adapt_to_entity: AliasedInsp[Any] + ) -> PropComparator[_T_co]: + """Return a copy of this PropComparator which will use the given + :class:`.AliasedInsp` to produce corresponding expressions. + """ + return self.__class__(self.prop, self._parententity, adapt_to_entity) + + @util.ro_non_memoized_property + def _parentmapper(self) -> Mapper[Any]: + """legacy; this is renamed to _parententity to be + compatible with QueryableAttribute.""" + return self._parententity.mapper + + def _criterion_exists( + self, + criterion: Optional[_ColumnExpressionArgument[bool]] = None, + **kwargs: Any, + ) -> ColumnElement[Any]: + return self.prop.comparator._criterion_exists(criterion, **kwargs) + + @util.ro_non_memoized_property + def adapter(self) -> Optional[_ORMAdapterProto]: + """Produce a callable that adapts column expressions + to suit an aliased version of this comparator. + + """ + if self._adapt_to_entity is None: + return None + else: + return self._adapt_to_entity._orm_adapt_element + + @util.ro_non_memoized_property + def info(self) -> _InfoType: + return self.prop.info + + @staticmethod + def _any_op(a: Any, b: Any, **kwargs: Any) -> Any: + return a.any(b, **kwargs) + + @staticmethod + def _has_op(left: Any, other: Any, **kwargs: Any) -> Any: + return left.has(other, **kwargs) + + @staticmethod + def _of_type_op(a: Any, class_: Any) -> Any: + return a.of_type(class_) + + any_op = cast(operators.OperatorType, _any_op) + has_op = cast(operators.OperatorType, _has_op) + of_type_op = cast(operators.OperatorType, _of_type_op) + + if typing.TYPE_CHECKING: + + def operate( + self, op: OperatorType, *other: Any, **kwargs: Any + ) -> ColumnElement[Any]: ... + + def reverse_operate( + self, op: OperatorType, other: Any, **kwargs: Any + ) -> ColumnElement[Any]: ... + + def of_type(self, class_: _EntityType[Any]) -> PropComparator[_T_co]: + r"""Redefine this object in terms of a polymorphic subclass, + :func:`_orm.with_polymorphic` construct, or :func:`_orm.aliased` + construct. 
+ + Returns a new PropComparator from which further criterion can be + evaluated. + + e.g.:: + + query.join(Company.employees.of_type(Engineer)).\ + filter(Engineer.name=='foo') + + :param \class_: a class or mapper indicating that criterion will be + against this specific subclass. + + .. seealso:: + + :ref:`orm_queryguide_joining_relationships_aliased` - in the + :ref:`queryguide_toplevel` + + :ref:`inheritance_of_type` + + """ + + return self.operate(PropComparator.of_type_op, class_) # type: ignore + + def and_( + self, *criteria: _ColumnExpressionArgument[bool] + ) -> PropComparator[bool]: + """Add additional criteria to the ON clause that's represented by this + relationship attribute. + + E.g.:: + + + stmt = select(User).join( + User.addresses.and_(Address.email_address != 'foo') + ) + + stmt = select(User).options( + joinedload(User.addresses.and_(Address.email_address != 'foo')) + ) + + .. versionadded:: 1.4 + + .. seealso:: + + :ref:`orm_queryguide_join_on_augmented` + + :ref:`loader_option_criteria` + + :func:`.with_loader_criteria` + + """ + return self.operate(operators.and_, *criteria) # type: ignore + + def any( + self, + criterion: Optional[_ColumnExpressionArgument[bool]] = None, + **kwargs: Any, + ) -> ColumnElement[bool]: + r"""Return a SQL expression representing true if this element + references a member which meets the given criterion. + + The usual implementation of ``any()`` is + :meth:`.Relationship.Comparator.any`. + + :param criterion: an optional ClauseElement formulated against the + member class' table or attributes. + + :param \**kwargs: key/value pairs corresponding to member class + attribute names which will be compared via equality to the + corresponding values. + + """ + + return self.operate(PropComparator.any_op, criterion, **kwargs) + + def has( + self, + criterion: Optional[_ColumnExpressionArgument[bool]] = None, + **kwargs: Any, + ) -> ColumnElement[bool]: + r"""Return a SQL expression representing true if this element + references a member which meets the given criterion. + + The usual implementation of ``has()`` is + :meth:`.Relationship.Comparator.has`. + + :param criterion: an optional ClauseElement formulated against the + member class' table or attributes. + + :param \**kwargs: key/value pairs corresponding to member class + attribute names which will be compared via equality to the + corresponding values. + + """ + + return self.operate(PropComparator.has_op, criterion, **kwargs) + + +class StrategizedProperty(MapperProperty[_T]): + """A MapperProperty which uses selectable strategies to affect + loading behavior. + + There is a single strategy selected by default. Alternate + strategies can be selected at Query time through the usage of + ``StrategizedOption`` objects via the Query.options() method. + + The mechanics of StrategizedProperty are used for every Query + invocation for every mapped attribute participating in that Query, + to determine first how the attribute will be rendered in SQL + and secondly how the attribute will retrieve a value from a result + row and apply it to a mapped object. The routines here are very + performance-critical. 
+ + """ + + __slots__ = ( + "_strategies", + "strategy", + "_wildcard_token", + "_default_path_loader_key", + "strategy_key", + ) + inherit_cache = True + strategy_wildcard_key: ClassVar[str] + + strategy_key: _StrategyKey + + _strategies: Dict[_StrategyKey, LoaderStrategy] + + def _memoized_attr__wildcard_token(self) -> Tuple[str]: + return ( + f"{self.strategy_wildcard_key}:{path_registry._WILDCARD_TOKEN}", + ) + + def _memoized_attr__default_path_loader_key( + self, + ) -> Tuple[str, Tuple[str]]: + return ( + "loader", + (f"{self.strategy_wildcard_key}:{path_registry._DEFAULT_TOKEN}",), + ) + + def _get_context_loader( + self, context: ORMCompileState, path: AbstractEntityRegistry + ) -> Optional[_LoadElement]: + load: Optional[_LoadElement] = None + + search_path = path[self] + + # search among: exact match, "attr.*", "default" strategy + # if any. + for path_key in ( + search_path._loader_key, + search_path._wildcard_path_loader_key, + search_path._default_path_loader_key, + ): + if path_key in context.attributes: + load = context.attributes[path_key] + break + + # note that if strategy_options.Load is placing non-actionable + # objects in the context like defaultload(), we would + # need to continue the loop here if we got such an + # option as below. + # if load.strategy or load.local_opts: + # break + + return load + + def _get_strategy(self, key: _StrategyKey) -> LoaderStrategy: + try: + return self._strategies[key] + except KeyError: + pass + + # run outside to prevent transfer of exception context + cls = self._strategy_lookup(self, *key) + # this previously was setting self._strategies[cls], that's + # a bad idea; should use strategy key at all times because every + # strategy has multiple keys at this point + self._strategies[key] = strategy = cls(self, key) + return strategy + + def setup( + self, + context: ORMCompileState, + query_entity: _MapperEntity, + path: AbstractEntityRegistry, + adapter: Optional[ORMAdapter], + **kwargs: Any, + ) -> None: + loader = self._get_context_loader(context, path) + if loader and loader.strategy: + strat = self._get_strategy(loader.strategy) + else: + strat = self.strategy + strat.setup_query( + context, query_entity, path, loader, adapter, **kwargs + ) + + def create_row_processor( + self, + context: ORMCompileState, + query_entity: _MapperEntity, + path: AbstractEntityRegistry, + mapper: Mapper[Any], + result: Result[Any], + adapter: Optional[ORMAdapter], + populators: _PopulatorDict, + ) -> None: + loader = self._get_context_loader(context, path) + if loader and loader.strategy: + strat = self._get_strategy(loader.strategy) + else: + strat = self.strategy + strat.create_row_processor( + context, + query_entity, + path, + loader, + mapper, + result, + adapter, + populators, + ) + + def do_init(self) -> None: + self._strategies = {} + self.strategy = self._get_strategy(self.strategy_key) + + def post_instrument_class(self, mapper: Mapper[Any]) -> None: + if ( + not self.parent.non_primary + and not mapper.class_manager._attr_has_impl(self.key) + ): + self.strategy.init_class_attribute(mapper) + + _all_strategies: collections.defaultdict[ + Type[MapperProperty[Any]], Dict[_StrategyKey, Type[LoaderStrategy]] + ] = collections.defaultdict(dict) + + @classmethod + def strategy_for(cls, **kw: Any) -> Callable[[_TLS], _TLS]: + def decorate(dec_cls: _TLS) -> _TLS: + # ensure each subclass of the strategy has its + # own _strategy_keys collection + if "_strategy_keys" not in dec_cls.__dict__: + dec_cls._strategy_keys = [] + key = 
tuple(sorted(kw.items())) + cls._all_strategies[cls][key] = dec_cls + dec_cls._strategy_keys.append(key) + return dec_cls + + return decorate + + @classmethod + def _strategy_lookup( + cls, requesting_property: MapperProperty[Any], *key: Any + ) -> Type[LoaderStrategy]: + requesting_property.parent._with_polymorphic_mappers + + for prop_cls in cls.__mro__: + if prop_cls in cls._all_strategies: + if TYPE_CHECKING: + assert issubclass(prop_cls, MapperProperty) + strategies = cls._all_strategies[prop_cls] + try: + return strategies[key] + except KeyError: + pass + + for property_type, strats in cls._all_strategies.items(): + if key in strats: + intended_property_type = property_type + actual_strategy = strats[key] + break + else: + intended_property_type = None + actual_strategy = None + + raise orm_exc.LoaderStrategyException( + cls, + requesting_property, + intended_property_type, + actual_strategy, + key, + ) + + +class ORMOption(ExecutableOption): + """Base class for option objects that are passed to ORM queries. + + These options may be consumed by :meth:`.Query.options`, + :meth:`.Select.options`, or in a more general sense by any + :meth:`.Executable.options` method. They are interpreted at + statement compile time or execution time in modern use. The + deprecated :class:`.MapperOption` is consumed at ORM query construction + time. + + .. versionadded:: 1.4 + + """ + + __slots__ = () + + _is_legacy_option = False + + propagate_to_loaders = False + """if True, indicate this option should be carried along + to "secondary" SELECT statements that occur for relationship + lazy loaders as well as attribute load / refresh operations. + + """ + + _is_core = False + + _is_user_defined = False + + _is_compile_state = False + + _is_criteria_option = False + + _is_strategy_option = False + + def _adapt_cached_option_to_uncached_option( + self, context: QueryContext, uncached_opt: ORMOption + ) -> ORMOption: + """adapt this option to the "uncached" version of itself in a + loader strategy context. + + given "self" which is an option from a cached query, as well as the + corresponding option from the uncached version of the same query, + return the option we should use in a new query, in the context of a + loader strategy being asked to load related rows on behalf of that + cached query, which is assumed to be building a new query based on + entities passed to us from the cached query. + + Currently this routine chooses between "self" and "uncached" without + manufacturing anything new. If the option is itself a loader strategy + option which has a path, that path needs to match to the entities being + passed to us by the cached query, so the :class:`_orm.Load` subclass + overrides this to return "self". For all other options, we return the + uncached form which may have changing state, such as a + with_loader_criteria() option which will very often have new state. + + This routine could in the future involve + generating a new option based on both inputs if use cases arise, + such as if with_loader_criteria() needed to match up to + ``AliasedClass`` instances given in the parent query. + + However, longer term it might be better to restructure things such that + ``AliasedClass`` entities are always matched up on their cache key, + instead of identity, in things like paths and such, so that this whole + issue of "the uncached option does not match the entities" goes away. 
+ However this would make ``PathRegistry`` more complicated and difficult + to debug as well as potentially less performant in that it would be + hashing enormous cache keys rather than a simple AliasedInsp. UNLESS, + we could get cache keys overall to be reliably hashed into something + like an md5 key. + + .. versionadded:: 1.4.41 + + """ + if uncached_opt is not None: + return uncached_opt + else: + return self + + +class CompileStateOption(HasCacheKey, ORMOption): + """base for :class:`.ORMOption` classes that affect the compilation of + a SQL query and therefore need to be part of the cache key. + + .. note:: :class:`.CompileStateOption` is generally non-public and + should not be used as a base class for user-defined options; instead, + use :class:`.UserDefinedOption`, which is easier to use as it does not + interact with ORM compilation internals or caching. + + :class:`.CompileStateOption` defines an internal attribute + ``_is_compile_state=True`` which has the effect of the ORM compilation + routines for SELECT and other statements will call upon these options when + a SQL string is being compiled. As such, these classes implement + :class:`.HasCacheKey` and need to provide robust ``_cache_key_traversal`` + structures. + + The :class:`.CompileStateOption` class is used to implement the ORM + :class:`.LoaderOption` and :class:`.CriteriaOption` classes. + + .. versionadded:: 1.4.28 + + + """ + + __slots__ = () + + _is_compile_state = True + + def process_compile_state(self, compile_state: ORMCompileState) -> None: + """Apply a modification to a given :class:`.ORMCompileState`. + + This method is part of the implementation of a particular + :class:`.CompileStateOption` and is only invoked internally + when an ORM query is compiled. + + """ + + def process_compile_state_replaced_entities( + self, + compile_state: ORMCompileState, + mapper_entities: Sequence[_MapperEntity], + ) -> None: + """Apply a modification to a given :class:`.ORMCompileState`, + given entities that were replaced by with_only_columns() or + with_entities(). + + This method is part of the implementation of a particular + :class:`.CompileStateOption` and is only invoked internally + when an ORM query is compiled. + + .. versionadded:: 1.4.19 + + """ + + +class LoaderOption(CompileStateOption): + """Describe a loader modification to an ORM statement at compilation time. + + .. versionadded:: 1.4 + + """ + + __slots__ = () + + def process_compile_state_replaced_entities( + self, + compile_state: ORMCompileState, + mapper_entities: Sequence[_MapperEntity], + ) -> None: + self.process_compile_state(compile_state) + + +class CriteriaOption(CompileStateOption): + """Describe a WHERE criteria modification to an ORM statement at + compilation time. + + .. versionadded:: 1.4 + + """ + + __slots__ = () + + _is_criteria_option = True + + def get_global_criteria(self, attributes: Dict[str, Any]) -> None: + """update additional entity criteria options in the given + attributes dictionary. + + """ + + +class UserDefinedOption(ORMOption): + """Base class for a user-defined option that can be consumed from the + :meth:`.SessionEvents.do_orm_execute` event hook. + + """ + + __slots__ = ("payload",) + + _is_legacy_option = False + + _is_user_defined = True + + propagate_to_loaders = False + """if True, indicate this option should be carried along + to "secondary" Query objects produced during lazy loads + or refresh operations. 
+
+    """
+
+    def __init__(self, payload: Optional[Any] = None):
+        self.payload = payload
+
+
+@util.deprecated_cls(
+    "1.4",
+    "The :class:`.MapperOption` class is deprecated and will be removed "
+    "in a future release. For "
+    "modifications to queries on a per-execution basis, use the "
+    ":class:`.UserDefinedOption` class to establish state within a "
+    ":class:`.Query` or other Core statement, then use the "
+    ":meth:`.SessionEvents.do_orm_execute` hook to consume them.",
+    constructor=None,
+)
+class MapperOption(ORMOption):
+    """Describe a modification to a Query"""
+
+    __slots__ = ()
+
+    _is_legacy_option = True
+
+    propagate_to_loaders = False
+    """if True, indicate this option should be carried along
+    to "secondary" Query objects produced during lazy loads
+    or refresh operations.
+
+    """
+
+    def process_query(self, query: Query[Any]) -> None:
+        """Apply a modification to the given :class:`_query.Query`."""
+
+    def process_query_conditionally(self, query: Query[Any]) -> None:
+        """same as process_query(), except that this option may not
+        apply to the given query.
+
+        This is typically applied during a lazy load or scalar refresh
+        operation to propagate options stated in the original Query to the
+        new Query being used for the load.  It occurs for those options
+        that specify propagate_to_loaders=True.
+
+        """
+
+        self.process_query(query)
+
+
+class LoaderStrategy:
+    """Describe the loading behavior of a StrategizedProperty object.
+
+    The ``LoaderStrategy`` interacts with the querying process in three
+    ways:
+
+    * it controls the configuration of the ``InstrumentedAttribute``
+      placed on a class to handle the behavior of the attribute.  this
+      may involve setting up class-level callable functions to fire
+      off a select operation when the attribute is first accessed
+      (i.e. a lazy load)
+
+    * it processes the ``QueryContext`` at statement construction time,
+      where it can modify the SQL statement that is being produced.
+      For example, simple column attributes will add their represented
+      column to the list of selected columns, a joined eager loader
+      may establish join clauses to add to the statement.
+
+    * It produces "row processor" functions at result fetching time.
+      These "row processor" functions populate a particular attribute
+      on a particular mapped instance.
+
+    """
+
+    __slots__ = (
+        "parent_property",
+        "is_class_level",
+        "parent",
+        "key",
+        "strategy_key",
+        "strategy_opts",
+    )
+
+    _strategy_keys: ClassVar[List[_StrategyKey]]
+
+    def __init__(
+        self, parent: MapperProperty[Any], strategy_key: _StrategyKey
+    ):
+        self.parent_property = parent
+        self.is_class_level = False
+        self.parent = self.parent_property.parent
+        self.key = self.parent_property.key
+        self.strategy_key = strategy_key
+        self.strategy_opts = dict(strategy_key)
+
+    def init_class_attribute(self, mapper: Mapper[Any]) -> None:
+        pass
+
+    def setup_query(
+        self,
+        compile_state: ORMCompileState,
+        query_entity: _MapperEntity,
+        path: AbstractEntityRegistry,
+        loadopt: Optional[_LoadElement],
+        adapter: Optional[ORMAdapter],
+        **kwargs: Any,
+    ) -> None:
+        """Establish column and other state for a given QueryContext.
+
+        This method fulfills the contract specified by
+        MapperProperty.setup().
+
+        StrategizedProperty delegates its setup() method
+        directly to this method.
+ + """ + + def create_row_processor( + self, + context: ORMCompileState, + query_entity: _MapperEntity, + path: AbstractEntityRegistry, + loadopt: Optional[_LoadElement], + mapper: Mapper[Any], + result: Result[Any], + adapter: Optional[ORMAdapter], + populators: _PopulatorDict, + ) -> None: + """Establish row processing functions for a given QueryContext. + + This method fulfills the contract specified by + MapperProperty.create_row_processor(). + + StrategizedProperty delegates its create_row_processor() method + directly to this method. + + """ + + def __str__(self) -> str: + return str(self.parent_property) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/loading.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/loading.py new file mode 100644 index 00000000..6176d72a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/loading.py @@ -0,0 +1,1682 @@ +# orm/loading.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +"""private module containing functions used to convert database +rows into object instances and associated state. + +the functions here are called primarily by Query, Mapper, +as well as some of the attribute loading strategies. + +""" + +from __future__ import annotations + +from typing import Any +from typing import Dict +from typing import Iterable +from typing import List +from typing import Mapping +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import attributes +from . import exc as orm_exc +from . import path_registry +from .base import _DEFER_FOR_STATE +from .base import _RAISE_FOR_STATE +from .base import _SET_DEFERRED_EXPIRED +from .base import PassiveFlag +from .context import FromStatement +from .context import ORMCompileState +from .context import QueryContext +from .util import _none_set +from .util import state_str +from .. import exc as sa_exc +from .. import util +from ..engine import result_tuple +from ..engine.result import ChunkedIteratorResult +from ..engine.result import FrozenResult +from ..engine.result import SimpleResultMetaData +from ..sql import select +from ..sql import util as sql_util +from ..sql.selectable import ForUpdateArg +from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL +from ..sql.selectable import SelectState +from ..util import EMPTY_DICT + +if TYPE_CHECKING: + from ._typing import _IdentityKeyType + from .base import LoaderCallableStatus + from .interfaces import ORMOption + from .mapper import Mapper + from .query import Query + from .session import Session + from .state import InstanceState + from ..engine.cursor import CursorResult + from ..engine.interfaces import _ExecuteOptions + from ..engine.result import Result + from ..sql import Select + +_T = TypeVar("_T", bound=Any) +_O = TypeVar("_O", bound=object) +_new_runid = util.counter() + + +_PopulatorDict = Dict[str, List[Tuple[str, Any]]] + + +def instances(cursor: CursorResult[Any], context: QueryContext) -> Result[Any]: + """Return a :class:`.Result` given an ORM query context. + + :param cursor: a :class:`.CursorResult`, generated by a statement + which came from :class:`.ORMCompileState` + + :param context: a :class:`.QueryContext` object + + :return: a :class:`.Result` object representing ORM results + + .. 
versionchanged:: 1.4 The instances() function now uses + :class:`.Result` objects and has an all new interface. + + """ + + context.runid = _new_runid() + + if context.top_level_context: + is_top_level = False + context.post_load_paths = context.top_level_context.post_load_paths + else: + is_top_level = True + context.post_load_paths = {} + + compile_state = context.compile_state + filtered = compile_state._has_mapper_entities + single_entity = ( + not context.load_options._only_return_tuples + and len(compile_state._entities) == 1 + and compile_state._entities[0].supports_single_entity + ) + + try: + (process, labels, extra) = list( + zip( + *[ + query_entity.row_processor(context, cursor) + for query_entity in context.compile_state._entities + ] + ) + ) + + if context.yield_per and ( + context.loaders_require_buffering + or context.loaders_require_uniquing + ): + raise sa_exc.InvalidRequestError( + "Can't use yield_per with eager loaders that require uniquing " + "or row buffering, e.g. joinedload() against collections " + "or subqueryload(). Consider the selectinload() strategy " + "for better flexibility in loading objects." + ) + + except Exception: + with util.safe_reraise(): + cursor.close() + + def _no_unique(entry): + raise sa_exc.InvalidRequestError( + "Can't use the ORM yield_per feature in conjunction with unique()" + ) + + def _not_hashable(datatype, *, legacy=False, uncertain=False): + if not legacy: + + def go(obj): + if uncertain: + try: + return hash(obj) + except: + pass + + raise sa_exc.InvalidRequestError( + "Can't apply uniqueness to row tuple containing value of " + f"""type {datatype!r}; { + 'the values returned appear to be' + if uncertain + else 'this datatype produces' + } non-hashable values""" + ) + + return go + elif not uncertain: + return id + else: + _use_id = False + + def go(obj): + nonlocal _use_id + + if not _use_id: + try: + return hash(obj) + except: + pass + + # in #10459, we considered using a warning here, however + # as legacy query uses result.unique() in all cases, this + # would lead to too many warning cases. + _use_id = True + + return id(obj) + + return go + + unique_filters = [ + ( + _no_unique + if context.yield_per + else ( + _not_hashable( + ent.column.type, # type: ignore + legacy=context.load_options._legacy_uniquing, + uncertain=ent._null_column_type, + ) + if ( + not ent.use_id_for_hash + and (ent._non_hashable_value or ent._null_column_type) + ) + else id if ent.use_id_for_hash else None + ) + ) + for ent in context.compile_state._entities + ] + + row_metadata = SimpleResultMetaData( + labels, extra, _unique_filters=unique_filters + ) + + def chunks(size): # type: ignore + while True: + yield_per = size + + context.partials = {} + + if yield_per: + fetch = cursor.fetchmany(yield_per) + + if not fetch: + break + else: + fetch = cursor._raw_all_rows() + + if single_entity: + proc = process[0] + rows = [proc(row) for row in fetch] + else: + rows = [ + tuple([proc(row) for proc in process]) for row in fetch + ] + + # if we are the originating load from a query, meaning we + # aren't being called as a result of a nested "post load", + # iterate through all the collected post loaders and fire them + # off. 
Previously this used to work recursively, however that + # prevented deeply nested structures from being loadable + if is_top_level: + if yield_per: + # if using yield per, memoize the state of the + # collection so that it can be restored + top_level_post_loads = list( + context.post_load_paths.items() + ) + + while context.post_load_paths: + post_loads = list(context.post_load_paths.items()) + context.post_load_paths.clear() + for path, post_load in post_loads: + post_load.invoke(context, path) + + if yield_per: + context.post_load_paths.clear() + context.post_load_paths.update(top_level_post_loads) + + yield rows + + if not yield_per: + break + + if context.execution_options.get("prebuffer_rows", False): + # this is a bit of a hack at the moment. + # I would rather have some option in the result to pre-buffer + # internally. + _prebuffered = list(chunks(None)) + + def chunks(size): + return iter(_prebuffered) + + result = ChunkedIteratorResult( + row_metadata, + chunks, + source_supports_scalars=single_entity, + raw=cursor, + dynamic_yield_per=cursor.context._is_server_side, + ) + + # filtered and single_entity are used to indicate to legacy Query that the + # query has ORM entities, so legacy deduping and scalars should be called + # on the result. + result._attributes = result._attributes.union( + dict(filtered=filtered, is_single_entity=single_entity) + ) + + # multi_row_eager_loaders OTOH is specific to joinedload. + if context.compile_state.multi_row_eager_loaders: + + def require_unique(obj): + raise sa_exc.InvalidRequestError( + "The unique() method must be invoked on this Result, " + "as it contains results that include joined eager loads " + "against collections" + ) + + result._unique_filter_state = (None, require_unique) + + if context.yield_per: + result.yield_per(context.yield_per) + + return result + + +@util.preload_module("sqlalchemy.orm.context") +def merge_frozen_result(session, statement, frozen_result, load=True): + """Merge a :class:`_engine.FrozenResult` back into a :class:`_orm.Session`, + returning a new :class:`_engine.Result` object with :term:`persistent` + objects. + + See the section :ref:`do_orm_execute_re_executing` for an example. + + .. 
seealso:: + + :ref:`do_orm_execute_re_executing` + + :meth:`_engine.Result.freeze` + + :class:`_engine.FrozenResult` + + """ + querycontext = util.preloaded.orm_context + + if load: + # flush current contents if we expect to load data + session._autoflush() + + ctx = querycontext.ORMSelectCompileState._create_entities_collection( + statement, legacy=False + ) + + autoflush = session.autoflush + try: + session.autoflush = False + mapped_entities = [ + i + for i, e in enumerate(ctx._entities) + if isinstance(e, querycontext._MapperEntity) + ] + keys = [ent._label_name for ent in ctx._entities] + + keyed_tuple = result_tuple( + keys, [ent._extra_entities for ent in ctx._entities] + ) + + result = [] + for newrow in frozen_result.rewrite_rows(): + for i in mapped_entities: + if newrow[i] is not None: + newrow[i] = session._merge( + attributes.instance_state(newrow[i]), + attributes.instance_dict(newrow[i]), + load=load, + _recursive={}, + _resolve_conflict_map={}, + ) + + result.append(keyed_tuple(newrow)) + + return frozen_result.with_new_rows(result) + finally: + session.autoflush = autoflush + + +@util.became_legacy_20( + ":func:`_orm.merge_result`", + alternative="The function as well as the method on :class:`_orm.Query` " + "is superseded by the :func:`_orm.merge_frozen_result` function.", +) +@util.preload_module("sqlalchemy.orm.context") +def merge_result( + query: Query[Any], + iterator: Union[FrozenResult, Iterable[Sequence[Any]], Iterable[object]], + load: bool = True, +) -> Union[FrozenResult, Iterable[Any]]: + """Merge a result into the given :class:`.Query` object's Session. + + See :meth:`_orm.Query.merge_result` for top-level documentation on this + function. + + """ + + querycontext = util.preloaded.orm_context + + session = query.session + if load: + # flush current contents if we expect to load data + session._autoflush() + + # TODO: need test coverage and documentation for the FrozenResult + # use case. 
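+    # Sketch of the legacy calling pattern this function serves (``User``
+    # is a hypothetical mapped class; the detached instances typically
+    # come from a cache or another Session):
+    #
+    #     q = session.query(User)
+    #     merged = q.merge_result(iter(detached_users), load=False)
+    #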
+ if isinstance(iterator, FrozenResult): + frozen_result = iterator + iterator = iter(frozen_result.data) + else: + frozen_result = None + + ctx = querycontext.ORMSelectCompileState._create_entities_collection( + query, legacy=True + ) + + autoflush = session.autoflush + try: + session.autoflush = False + single_entity = not frozen_result and len(ctx._entities) == 1 + + if single_entity: + if isinstance(ctx._entities[0], querycontext._MapperEntity): + result = [ + session._merge( + attributes.instance_state(instance), + attributes.instance_dict(instance), + load=load, + _recursive={}, + _resolve_conflict_map={}, + ) + for instance in iterator + ] + else: + result = list(iterator) + else: + mapped_entities = [ + i + for i, e in enumerate(ctx._entities) + if isinstance(e, querycontext._MapperEntity) + ] + result = [] + keys = [ent._label_name for ent in ctx._entities] + + keyed_tuple = result_tuple( + keys, [ent._extra_entities for ent in ctx._entities] + ) + + for row in iterator: + newrow = list(row) + for i in mapped_entities: + if newrow[i] is not None: + newrow[i] = session._merge( + attributes.instance_state(newrow[i]), + attributes.instance_dict(newrow[i]), + load=load, + _recursive={}, + _resolve_conflict_map={}, + ) + result.append(keyed_tuple(newrow)) + + if frozen_result: + return frozen_result.with_new_rows(result) + else: + return iter(result) + finally: + session.autoflush = autoflush + + +def get_from_identity( + session: Session, + mapper: Mapper[_O], + key: _IdentityKeyType[_O], + passive: PassiveFlag, +) -> Union[LoaderCallableStatus, Optional[_O]]: + """Look up the given key in the given session's identity map, + check the object for expired state if found. + + """ + instance = session.identity_map.get(key) + if instance is not None: + state = attributes.instance_state(instance) + + if mapper.inherits and not state.mapper.isa(mapper): + return attributes.PASSIVE_CLASS_MISMATCH + + # expired - ensure it still exists + if state.expired: + if not passive & attributes.SQL_OK: + # TODO: no coverage here + return attributes.PASSIVE_NO_RESULT + elif not passive & attributes.RELATED_OBJECT_OK: + # this mode is used within a flush and the instance's + # expired state will be checked soon enough, if necessary. 
+ # also used by immediateloader for a mutually-dependent + # o2m->m2m load, :ticket:`6301` + return instance + try: + state._load_expired(state, passive) + except orm_exc.ObjectDeletedError: + session._remove_newly_deleted([state]) + return None + return instance + else: + return None + + +def load_on_ident( + session: Session, + statement: Union[Select, FromStatement], + key: Optional[_IdentityKeyType], + *, + load_options: Optional[Sequence[ORMOption]] = None, + refresh_state: Optional[InstanceState[Any]] = None, + with_for_update: Optional[ForUpdateArg] = None, + only_load_props: Optional[Iterable[str]] = None, + no_autoflush: bool = False, + bind_arguments: Mapping[str, Any] = util.EMPTY_DICT, + execution_options: _ExecuteOptions = util.EMPTY_DICT, + require_pk_cols: bool = False, + is_user_refresh: bool = False, +): + """Load the given identity key from the database.""" + if key is not None: + ident = key[1] + identity_token = key[2] + else: + ident = identity_token = None + + return load_on_pk_identity( + session, + statement, + ident, + load_options=load_options, + refresh_state=refresh_state, + with_for_update=with_for_update, + only_load_props=only_load_props, + identity_token=identity_token, + no_autoflush=no_autoflush, + bind_arguments=bind_arguments, + execution_options=execution_options, + require_pk_cols=require_pk_cols, + is_user_refresh=is_user_refresh, + ) + + +def load_on_pk_identity( + session: Session, + statement: Union[Select, FromStatement], + primary_key_identity: Optional[Tuple[Any, ...]], + *, + load_options: Optional[Sequence[ORMOption]] = None, + refresh_state: Optional[InstanceState[Any]] = None, + with_for_update: Optional[ForUpdateArg] = None, + only_load_props: Optional[Iterable[str]] = None, + identity_token: Optional[Any] = None, + no_autoflush: bool = False, + bind_arguments: Mapping[str, Any] = util.EMPTY_DICT, + execution_options: _ExecuteOptions = util.EMPTY_DICT, + require_pk_cols: bool = False, + is_user_refresh: bool = False, +): + """Load the given primary key identity from the database.""" + + query = statement + q = query._clone() + + assert not q._is_lambda_element + + if load_options is None: + load_options = QueryContext.default_load_options + + if ( + statement._compile_options + is SelectState.default_select_compile_options + ): + compile_options = ORMCompileState.default_compile_options + else: + compile_options = statement._compile_options + + if primary_key_identity is not None: + mapper = query._propagate_attrs["plugin_subject"] + + (_get_clause, _get_params) = mapper._get_clause + + # None present in ident - turn those comparisons + # into "IS NULL" + if None in primary_key_identity: + nones = { + _get_params[col].key + for col, value in zip(mapper.primary_key, primary_key_identity) + if value is None + } + + _get_clause = sql_util.adapt_criterion_to_null(_get_clause, nones) + + if len(nones) == len(primary_key_identity): + util.warn( + "fully NULL primary key identity cannot load any " + "object. This condition may raise an error in a future " + "release." 
+ ) + + q._where_criteria = ( + sql_util._deep_annotate(_get_clause, {"_orm_adapt": True}), + ) + + params = { + _get_params[primary_key].key: id_val + for id_val, primary_key in zip( + primary_key_identity, mapper.primary_key + ) + } + else: + params = None + + if with_for_update is not None: + version_check = True + q._for_update_arg = with_for_update + elif query._for_update_arg is not None: + version_check = True + q._for_update_arg = query._for_update_arg + else: + version_check = False + + if require_pk_cols and only_load_props: + if not refresh_state: + raise sa_exc.ArgumentError( + "refresh_state is required when require_pk_cols is present" + ) + + refresh_state_prokeys = refresh_state.mapper._primary_key_propkeys + has_changes = { + key + for key in refresh_state_prokeys.difference(only_load_props) + if refresh_state.attrs[key].history.has_changes() + } + if has_changes: + # raise if pending pk changes are present. + # technically, this could be limited to the case where we have + # relationships in the only_load_props collection to be refreshed + # also (and only ones that have a secondary eager loader, at that). + # however, the error is in place across the board so that behavior + # here is easier to predict. The use case it prevents is one + # of mutating PK attrs, leaving them unflushed, + # calling session.refresh(), and expecting those attrs to remain + # still unflushed. It seems likely someone doing all those + # things would be better off having the PK attributes flushed + # to the database before tinkering like that (session.refresh() is + # tinkering). + raise sa_exc.InvalidRequestError( + f"Please flush pending primary key changes on " + "attributes " + f"{has_changes} for mapper {refresh_state.mapper} before " + "proceeding with a refresh" + ) + + # overall, the ORM has no internal flow right now for "dont load the + # primary row of an object at all, but fire off + # selectinload/subqueryload/immediateload for some relationships". + # It would probably be a pretty big effort to add such a flow. So + # here, the case for #8703 is introduced; user asks to refresh some + # relationship attributes only which are + # selectinload/subqueryload/immediateload/ etc. (not joinedload). + # ORM complains there's no columns in the primary row to load. + # So here, we just add the PK cols if that + # case is detected, so that there is a SELECT emitted for the primary + # row. + # + # Let's just state right up front, for this one little case, + # the ORM here is adding a whole extra SELECT just to satisfy + # limitations in the internal flow. This is really not a thing + # SQLAlchemy finds itself doing like, ever, obviously, we are + # constantly working to *remove* SELECTs we don't need. We + # rationalize this for now based on 1. session.refresh() is not + # commonly used 2. session.refresh() with only relationship attrs is + # even less commonly used 3. the SELECT in question is very low + # latency. + # + # to add the flow to not include the SELECT, the quickest way + # might be to just manufacture a single-row result set to send off to + # instances(), but we'd have to weave that into context.py and all + # that. For 2.0.0, we have enough big changes to navigate for now. 
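+        # as a concrete illustration of the case described above
+        # (hypothetical mapping; "addresses" is assumed to be a
+        # selectinload-style relationship on the mapped object):
+        #
+        #     session.refresh(obj, ["addresses"])
+        #
+        # without the PK columns added below, the SELECT for the
+        # primary row would have no columns to fetch.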
+ # + mp = refresh_state.mapper._props + for p in only_load_props: + if mp[p]._is_relationship: + only_load_props = refresh_state_prokeys.union(only_load_props) + break + + if refresh_state and refresh_state.load_options: + compile_options += {"_current_path": refresh_state.load_path.parent} + q = q.options(*refresh_state.load_options) + + new_compile_options, load_options = _set_get_options( + compile_options, + load_options, + version_check=version_check, + only_load_props=only_load_props, + refresh_state=refresh_state, + identity_token=identity_token, + is_user_refresh=is_user_refresh, + ) + + q._compile_options = new_compile_options + q._order_by = None + + if no_autoflush: + load_options += {"_autoflush": False} + + execution_options = util.EMPTY_DICT.merge_with( + execution_options, {"_sa_orm_load_options": load_options} + ) + result = ( + session.execute( + q, + params=params, + execution_options=execution_options, + bind_arguments=bind_arguments, + ) + .unique() + .scalars() + ) + + try: + return result.one() + except orm_exc.NoResultFound: + return None + + +def _set_get_options( + compile_opt, + load_opt, + populate_existing=None, + version_check=None, + only_load_props=None, + refresh_state=None, + identity_token=None, + is_user_refresh=None, +): + compile_options = {} + load_options = {} + if version_check: + load_options["_version_check"] = version_check + if populate_existing: + load_options["_populate_existing"] = populate_existing + if refresh_state: + load_options["_refresh_state"] = refresh_state + compile_options["_for_refresh_state"] = True + if only_load_props: + compile_options["_only_load_props"] = frozenset(only_load_props) + if identity_token: + load_options["_identity_token"] = identity_token + + if is_user_refresh: + load_options["_is_user_refresh"] = is_user_refresh + if load_options: + load_opt += load_options + if compile_options: + compile_opt += compile_options + + return compile_opt, load_opt + + +def _setup_entity_query( + compile_state, + mapper, + query_entity, + path, + adapter, + column_collection, + with_polymorphic=None, + only_load_props=None, + polymorphic_discriminator=None, + **kw, +): + if with_polymorphic: + poly_properties = mapper._iterate_polymorphic_properties( + with_polymorphic + ) + else: + poly_properties = mapper._polymorphic_properties + + quick_populators = {} + + path.set(compile_state.attributes, "memoized_setups", quick_populators) + + # for the lead entities in the path, e.g. not eager loads, and + # assuming a user-passed aliased class, e.g. not a from_self() or any + # implicit aliasing, don't add columns to the SELECT that aren't + # in the thing that's aliased. + check_for_adapt = adapter and len(path) == 1 and path[-1].is_aliased_class + + for value in poly_properties: + if only_load_props and value.key not in only_load_props: + continue + value.setup( + compile_state, + query_entity, + path, + adapter, + only_load_props=only_load_props, + column_collection=column_collection, + memoized_populators=quick_populators, + check_for_adapt=check_for_adapt, + **kw, + ) + + if ( + polymorphic_discriminator is not None + and polymorphic_discriminator is not mapper.polymorphic_on + ): + if adapter: + pd = adapter.columns[polymorphic_discriminator] + else: + pd = polymorphic_discriminator + column_collection.append(pd) + + +def _warn_for_runid_changed(state): + util.warn( + "Loading context for %s has changed within a load/refresh " + "handler, suggesting a row refresh operation took place. 
If this " + "event handler is expected to be " + "emitting row refresh operations within an existing load or refresh " + "operation, set restore_load_context=True when establishing the " + "listener to ensure the context remains unchanged when the event " + "handler completes." % (state_str(state),) + ) + + +def _instance_processor( + query_entity, + mapper, + context, + result, + path, + adapter, + only_load_props=None, + refresh_state=None, + polymorphic_discriminator=None, + _polymorphic_from=None, +): + """Produce a mapper level row processor callable + which processes rows into mapped instances.""" + + # note that this method, most of which exists in a closure + # called _instance(), resists being broken out, as + # attempts to do so tend to add significant function + # call overhead. _instance() is the most + # performance-critical section in the whole ORM. + + identity_class = mapper._identity_class + compile_state = context.compile_state + + # look for "row getter" functions that have been assigned along + # with the compile state that were cached from a previous load. + # these are operator.itemgetter() objects that each will extract a + # particular column from each row. + + getter_key = ("getters", mapper) + getters = path.get(compile_state.attributes, getter_key, None) + + if getters is None: + # no getters, so go through a list of attributes we are loading for, + # and the ones that are column based will have already put information + # for us in another collection "memoized_setups", which represents the + # output of the LoaderStrategy.setup_query() method. We can just as + # easily call LoaderStrategy.create_row_processor for each, but by + # getting it all at once from setup_query we save another method call + # per attribute. + props = mapper._prop_set + if only_load_props is not None: + props = props.intersection( + mapper._props[k] for k in only_load_props + ) + + quick_populators = path.get( + context.attributes, "memoized_setups", EMPTY_DICT + ) + + todo = [] + cached_populators = { + "new": [], + "quick": [], + "deferred": [], + "expire": [], + "existing": [], + "eager": [], + } + + if refresh_state is None: + # we can also get the "primary key" tuple getter function + pk_cols = mapper.primary_key + + if adapter: + pk_cols = [adapter.columns[c] for c in pk_cols] + primary_key_getter = result._tuple_getter(pk_cols) + else: + primary_key_getter = None + + getters = { + "cached_populators": cached_populators, + "todo": todo, + "primary_key_getter": primary_key_getter, + } + for prop in props: + if prop in quick_populators: + # this is an inlined path just for column-based attributes. + col = quick_populators[prop] + if col is _DEFER_FOR_STATE: + cached_populators["new"].append( + (prop.key, prop._deferred_column_loader) + ) + elif col is _SET_DEFERRED_EXPIRED: + # note that in this path, we are no longer + # searching in the result to see if the column might + # be present in some unexpected way. + cached_populators["expire"].append((prop.key, False)) + elif col is _RAISE_FOR_STATE: + cached_populators["new"].append( + (prop.key, prop._raise_column_loader) + ) + else: + getter = None + if adapter: + # this logic had been removed for all 1.4 releases + # up until 1.4.18; the adapter here is particularly + # the compound eager adapter which isn't accommodated + # in the quick_populators right now. The "fallback" + # logic below instead took over in many more cases + # until issue #6596 was identified. 
+ + # note there is still an issue where this codepath + # produces no "getter" for cases where a joined-inh + # mapping includes a labeled column property, meaning + # KeyError is caught internally and we fall back to + # _getter(col), which works anyway. The adapter + # here for joined inh without any aliasing might not + # be useful. Tests which see this include + # test.orm.inheritance.test_basic -> + # EagerTargetingTest.test_adapt_stringency + # OptimizedLoadTest.test_column_expression_joined + # PolymorphicOnNotLocalTest.test_polymorphic_on_column_prop # noqa: E501 + # + + adapted_col = adapter.columns[col] + if adapted_col is not None: + getter = result._getter(adapted_col, False) + if not getter: + getter = result._getter(col, False) + if getter: + cached_populators["quick"].append((prop.key, getter)) + else: + # fall back to the ColumnProperty itself, which + # will iterate through all of its columns + # to see if one fits + prop.create_row_processor( + context, + query_entity, + path, + mapper, + result, + adapter, + cached_populators, + ) + else: + # loader strategies like subqueryload, selectinload, + # joinedload, basically relationships, these need to interact + # with the context each time to work correctly. + todo.append(prop) + + path.set(compile_state.attributes, getter_key, getters) + + cached_populators = getters["cached_populators"] + + populators = {key: list(value) for key, value in cached_populators.items()} + for prop in getters["todo"]: + prop.create_row_processor( + context, query_entity, path, mapper, result, adapter, populators + ) + + propagated_loader_options = context.propagated_loader_options + load_path = ( + context.compile_state.current_path + path + if context.compile_state.current_path.path + else path + ) + + session_identity_map = context.session.identity_map + + populate_existing = context.populate_existing or mapper.always_refresh + load_evt = bool(mapper.class_manager.dispatch.load) + refresh_evt = bool(mapper.class_manager.dispatch.refresh) + persistent_evt = bool(context.session.dispatch.loaded_as_persistent) + if persistent_evt: + loaded_as_persistent = context.session.dispatch.loaded_as_persistent + instance_state = attributes.instance_state + instance_dict = attributes.instance_dict + session_id = context.session.hash_key + runid = context.runid + identity_token = context.identity_token + + version_check = context.version_check + if version_check: + version_id_col = mapper.version_id_col + if version_id_col is not None: + if adapter: + version_id_col = adapter.columns[version_id_col] + version_id_getter = result._getter(version_id_col) + else: + version_id_getter = None + + if not refresh_state and _polymorphic_from is not None: + key = ("loader", path.path) + + if key in context.attributes and context.attributes[key].strategy == ( + ("selectinload_polymorphic", True), + ): + option_entities = context.attributes[key].local_opts["entities"] + else: + option_entities = None + selectin_load_via = mapper._should_selectin_load( + option_entities, + _polymorphic_from, + ) + + if selectin_load_via and selectin_load_via is not _polymorphic_from: + # only_load_props goes w/ refresh_state only, and in a refresh + # we are a single row query for the exact entity; polymorphic + # loading does not apply + assert only_load_props is None + + if selectin_load_via.is_mapper: + _load_supers = [] + _endmost_mapper = selectin_load_via + while ( + _endmost_mapper + and _endmost_mapper is not _polymorphic_from + ): + _load_supers.append(_endmost_mapper) + 
_endmost_mapper = _endmost_mapper.inherits + else: + _load_supers = [selectin_load_via] + + for _selectinload_entity in _load_supers: + if PostLoad.path_exists( + context, load_path, _selectinload_entity + ): + continue + callable_ = _load_subclass_via_in( + context, + path, + _selectinload_entity, + _polymorphic_from, + option_entities, + ) + PostLoad.callable_for_path( + context, + load_path, + _selectinload_entity.mapper, + _selectinload_entity, + callable_, + _selectinload_entity, + ) + + post_load = PostLoad.for_context(context, load_path, only_load_props) + + if refresh_state: + refresh_identity_key = refresh_state.key + if refresh_identity_key is None: + # super-rare condition; a refresh is being called + # on a non-instance-key instance; this is meant to only + # occur within a flush() + refresh_identity_key = mapper._identity_key_from_state( + refresh_state + ) + else: + refresh_identity_key = None + + primary_key_getter = getters["primary_key_getter"] + + if mapper.allow_partial_pks: + is_not_primary_key = _none_set.issuperset + else: + is_not_primary_key = _none_set.intersection + + def _instance(row): + # determine the state that we'll be populating + if refresh_identity_key: + # fixed state that we're refreshing + state = refresh_state + instance = state.obj() + dict_ = instance_dict(instance) + isnew = state.runid != runid + currentload = True + loaded_instance = False + else: + # look at the row, see if that identity is in the + # session, or we have to create a new one + identitykey = ( + identity_class, + primary_key_getter(row), + identity_token, + ) + + instance = session_identity_map.get(identitykey) + + if instance is not None: + # existing instance + state = instance_state(instance) + dict_ = instance_dict(instance) + + isnew = state.runid != runid + currentload = not isnew + loaded_instance = False + + if version_check and version_id_getter and not currentload: + _validate_version_id( + mapper, state, dict_, row, version_id_getter + ) + + else: + # create a new instance + + # check for non-NULL values in the primary key columns, + # else no entity is returned for the row + if is_not_primary_key(identitykey[1]): + return None + + isnew = True + currentload = True + loaded_instance = True + + instance = mapper.class_manager.new_instance() + + dict_ = instance_dict(instance) + state = instance_state(instance) + state.key = identitykey + state.identity_token = identity_token + + # attach instance to session. + state.session_id = session_id + session_identity_map._add_unpresent(state, identitykey) + + effective_populate_existing = populate_existing + if refresh_state is state: + effective_populate_existing = True + + # populate. this looks at whether this state is new + # for this load or was existing, and whether or not this + # row is the first row with this identity. + if currentload or effective_populate_existing: + # full population routines. Objects here are either + # just created, or we are doing a populate_existing + + # be conservative about setting load_path when populate_existing + # is in effect; want to maintain options from the original + # load. 
see test_expire->test_refresh_maintains_deferred_options + if isnew and ( + propagated_loader_options or not effective_populate_existing + ): + state.load_options = propagated_loader_options + state.load_path = load_path + + _populate_full( + context, + row, + state, + dict_, + isnew, + load_path, + loaded_instance, + effective_populate_existing, + populators, + ) + + if isnew: + # state.runid should be equal to context.runid / runid + # here, however for event checks we are being more conservative + # and checking against existing run id + # assert state.runid == runid + + existing_runid = state.runid + + if loaded_instance: + if load_evt: + state.manager.dispatch.load(state, context) + if state.runid != existing_runid: + _warn_for_runid_changed(state) + if persistent_evt: + loaded_as_persistent(context.session, state) + if state.runid != existing_runid: + _warn_for_runid_changed(state) + elif refresh_evt: + state.manager.dispatch.refresh( + state, context, only_load_props + ) + if state.runid != runid: + _warn_for_runid_changed(state) + + if effective_populate_existing or state.modified: + if refresh_state and only_load_props: + state._commit(dict_, only_load_props) + else: + state._commit_all(dict_, session_identity_map) + + if post_load: + post_load.add_state(state, True) + + else: + # partial population routines, for objects that were already + # in the Session, but a row matches them; apply eager loaders + # on existing objects, etc. + unloaded = state.unloaded + isnew = state not in context.partials + + if not isnew or unloaded or populators["eager"]: + # state is having a partial set of its attributes + # refreshed. Populate those attributes, + # and add to the "context.partials" collection. + + to_load = _populate_partial( + context, + row, + state, + dict_, + isnew, + load_path, + unloaded, + populators, + ) + + if isnew: + if refresh_evt: + existing_runid = state.runid + state.manager.dispatch.refresh(state, context, to_load) + if state.runid != existing_runid: + _warn_for_runid_changed(state) + + state._commit(dict_, to_load) + + if post_load and context.invoke_all_eagers: + post_load.add_state(state, False) + + return instance + + if mapper.polymorphic_map and not _polymorphic_from and not refresh_state: + # if we are doing polymorphic, dispatch to a different _instance() + # method specific to the subclass mapper + def ensure_no_pk(row): + identitykey = ( + identity_class, + primary_key_getter(row), + identity_token, + ) + if not is_not_primary_key(identitykey[1]): + return identitykey + else: + return None + + _instance = _decorate_polymorphic_switch( + _instance, + context, + query_entity, + mapper, + result, + path, + polymorphic_discriminator, + adapter, + ensure_no_pk, + ) + + return _instance + + +def _load_subclass_via_in( + context, path, entity, polymorphic_from, option_entities +): + mapper = entity.mapper + + # TODO: polymorphic_from seems to be a Mapper in all cases. 
+ # this is likely not needed, but as we dont have typing in loading.py + # yet, err on the safe side + polymorphic_from_mapper = polymorphic_from.mapper + not_against_basemost = polymorphic_from_mapper.inherits is not None + + zero_idx = len(mapper.base_mapper.primary_key) == 1 + + if entity.is_aliased_class or not_against_basemost: + q, enable_opt, disable_opt = mapper._subclass_load_via_in( + entity, polymorphic_from + ) + else: + q, enable_opt, disable_opt = mapper._subclass_load_via_in_mapper + + def do_load(context, path, states, load_only, effective_entity): + if not option_entities: + # filter out states for those that would have selectinloaded + # from another loader + # TODO: we are currently ignoring the case where the + # "selectin_polymorphic" option is used, as this is much more + # complex / specific / very uncommon API use + states = [ + (s, v) + for s, v in states + if s.mapper._would_selectin_load_only_from_given_mapper(mapper) + ] + + if not states: + return + + orig_query = context.query + + if path.parent: + enable_opt_lcl = enable_opt._prepend_path(path) + disable_opt_lcl = disable_opt._prepend_path(path) + else: + enable_opt_lcl = enable_opt + disable_opt_lcl = disable_opt + options = ( + (enable_opt_lcl,) + orig_query._with_options + (disable_opt_lcl,) + ) + + q2 = q.options(*options) + + q2._compile_options = context.compile_state.default_compile_options + q2._compile_options += {"_current_path": path.parent} + + if context.populate_existing: + q2 = q2.execution_options(populate_existing=True) + + context.session.execute( + q2, + dict( + primary_keys=[ + state.key[1][0] if zero_idx else state.key[1] + for state, load_attrs in states + ] + ), + ).unique().scalars().all() + + return do_load + + +def _populate_full( + context, + row, + state, + dict_, + isnew, + load_path, + loaded_instance, + populate_existing, + populators, +): + if isnew: + # first time we are seeing a row with this identity. + state.runid = context.runid + + for key, getter in populators["quick"]: + dict_[key] = getter(row) + if populate_existing: + for key, set_callable in populators["expire"]: + dict_.pop(key, None) + if set_callable: + state.expired_attributes.add(key) + else: + for key, set_callable in populators["expire"]: + if set_callable: + state.expired_attributes.add(key) + + for key, populator in populators["new"]: + populator(state, dict_, row) + + elif load_path != state.load_path: + # new load path, e.g. object is present in more than one + # column position in a series of rows + state.load_path = load_path + + # if we have data, and the data isn't in the dict, OK, let's put + # it in. + for key, getter in populators["quick"]: + if key not in dict_: + dict_[key] = getter(row) + + # otherwise treat like an "already seen" row + for key, populator in populators["existing"]: + populator(state, dict_, row) + # TODO: allow "existing" populator to know this is + # a new path for the state: + # populator(state, dict_, row, new_path=True) + + else: + # have already seen rows with this identity in this same path. 
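+        # (e.g. a joined eager load against a collection, where the
+        # parent row repeats once per child row; only the "existing"
+        # populators run here, letting eager loaders keep appending to
+        # collections)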
+ for key, populator in populators["existing"]: + populator(state, dict_, row) + + # TODO: same path + # populator(state, dict_, row, new_path=False) + + +def _populate_partial( + context, row, state, dict_, isnew, load_path, unloaded, populators +): + if not isnew: + if unloaded: + # extra pass, see #8166 + for key, getter in populators["quick"]: + if key in unloaded: + dict_[key] = getter(row) + + to_load = context.partials[state] + for key, populator in populators["existing"]: + if key in to_load: + populator(state, dict_, row) + else: + to_load = unloaded + context.partials[state] = to_load + + for key, getter in populators["quick"]: + if key in to_load: + dict_[key] = getter(row) + for key, set_callable in populators["expire"]: + if key in to_load: + dict_.pop(key, None) + if set_callable: + state.expired_attributes.add(key) + for key, populator in populators["new"]: + if key in to_load: + populator(state, dict_, row) + + for key, populator in populators["eager"]: + if key not in unloaded: + populator(state, dict_, row) + + return to_load + + +def _validate_version_id(mapper, state, dict_, row, getter): + if mapper._get_state_attr_by_column( + state, dict_, mapper.version_id_col + ) != getter(row): + raise orm_exc.StaleDataError( + "Instance '%s' has version id '%s' which " + "does not match database-loaded version id '%s'." + % ( + state_str(state), + mapper._get_state_attr_by_column( + state, dict_, mapper.version_id_col + ), + getter(row), + ) + ) + + +def _decorate_polymorphic_switch( + instance_fn, + context, + query_entity, + mapper, + result, + path, + polymorphic_discriminator, + adapter, + ensure_no_pk, +): + if polymorphic_discriminator is not None: + polymorphic_on = polymorphic_discriminator + else: + polymorphic_on = mapper.polymorphic_on + if polymorphic_on is None: + return instance_fn + + if adapter: + polymorphic_on = adapter.columns[polymorphic_on] + + def configure_subclass_mapper(discriminator): + try: + sub_mapper = mapper.polymorphic_map[discriminator] + except KeyError: + raise AssertionError( + "No such polymorphic_identity %r is defined" % discriminator + ) + else: + if sub_mapper is mapper: + return None + elif not sub_mapper.isa(mapper): + return False + + return _instance_processor( + query_entity, + sub_mapper, + context, + result, + path, + adapter, + _polymorphic_from=mapper, + ) + + polymorphic_instances = util.PopulateDict(configure_subclass_mapper) + + getter = result._getter(polymorphic_on) + + def polymorphic_instance(row): + discriminator = getter(row) + if discriminator is not None: + _instance = polymorphic_instances[discriminator] + if _instance: + return _instance(row) + elif _instance is False: + identitykey = ensure_no_pk(row) + + if identitykey: + raise sa_exc.InvalidRequestError( + "Row with identity key %s can't be loaded into an " + "object; the polymorphic discriminator column '%s' " + "refers to %s, which is not a sub-mapper of " + "the requested %s" + % ( + identitykey, + polymorphic_on, + mapper.polymorphic_map[discriminator], + mapper, + ) + ) + else: + return None + else: + return instance_fn(row) + else: + identitykey = ensure_no_pk(row) + + if identitykey: + raise sa_exc.InvalidRequestError( + "Row with identity key %s can't be loaded into an " + "object; the polymorphic discriminator column '%s' is " + "NULL" % (identitykey, polymorphic_on) + ) + else: + return None + + return polymorphic_instance + + +class PostLoad: + """Track loaders and states for "post load" operations.""" + + __slots__ = "loaders", "states", "load_keys" + + 
def __init__(self): + self.loaders = {} + self.states = util.OrderedDict() + self.load_keys = None + + def add_state(self, state, overwrite): + # the states for a polymorphic load here are all shared + # within a single PostLoad object among multiple subtypes. + # Filtering of callables on a per-subclass basis needs to be done at + # the invocation level + self.states[state] = overwrite + + def invoke(self, context, path): + if not self.states: + return + path = path_registry.PathRegistry.coerce(path) + for ( + effective_context, + token, + limit_to_mapper, + loader, + arg, + kw, + ) in self.loaders.values(): + states = [ + (state, overwrite) + for state, overwrite in self.states.items() + if state.manager.mapper.isa(limit_to_mapper) + ] + if states: + loader( + effective_context, path, states, self.load_keys, *arg, **kw + ) + self.states.clear() + + @classmethod + def for_context(cls, context, path, only_load_props): + pl = context.post_load_paths.get(path.path) + if pl is not None and only_load_props: + pl.load_keys = only_load_props + return pl + + @classmethod + def path_exists(self, context, path, key): + return ( + path.path in context.post_load_paths + and key in context.post_load_paths[path.path].loaders + ) + + @classmethod + def callable_for_path( + cls, context, path, limit_to_mapper, token, loader_callable, *arg, **kw + ): + if path.path in context.post_load_paths: + pl = context.post_load_paths[path.path] + else: + pl = context.post_load_paths[path.path] = PostLoad() + pl.loaders[token] = ( + context, + token, + limit_to_mapper, + loader_callable, + arg, + kw, + ) + + +def load_scalar_attributes(mapper, state, attribute_names, passive): + """initiate a column-based attribute refresh operation.""" + + # assert mapper is _state_mapper(state) + session = state.session + if not session: + raise orm_exc.DetachedInstanceError( + "Instance %s is not bound to a Session; " + "attribute refresh operation cannot proceed" % (state_str(state)) + ) + + no_autoflush = bool(passive & attributes.NO_AUTOFLUSH) + + # in the case of inheritance, particularly concrete and abstract + # concrete inheritance, the class manager might have some keys + # of attributes on the superclass that we didn't actually map. + # These could be mapped as "concrete, don't load" or could be completely + # excluded from the mapping and we know nothing about them. Filter them + # here to prevent them from coming through. + if attribute_names: + attribute_names = attribute_names.intersection(mapper.attrs.keys()) + + if mapper.inherits and not mapper.concrete: + # load based on committed attributes in the object, formed into + # a truncated SELECT that only includes relevant tables. does not + # currently use state.key + statement = mapper._optimized_get_statement(state, attribute_names) + if statement is not None: + # undefer() isn't needed here because statement has the + # columns needed already, this implicitly undefers that column + stmt = FromStatement(mapper, statement) + + return load_on_ident( + session, + stmt, + None, + only_load_props=attribute_names, + refresh_state=state, + no_autoflush=no_autoflush, + ) + + # normal load, use state.key as the identity to SELECT + has_key = bool(state.key) + + if has_key: + identity_key = state.key + else: + # this codepath is rare - only valid when inside a flush, and the + # object is becoming persistent but hasn't yet been assigned + # an identity_key. + # check here to ensure we have the attrs we need. 
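+        # (illustrative: an object made persistent within an ongoing
+        # flush may have its PK attributes populated while state.key is
+        # still unset)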
+ pk_attrs = [ + mapper._columntoproperty[col].key for col in mapper.primary_key + ] + if state.expired_attributes.intersection(pk_attrs): + raise sa_exc.InvalidRequestError( + "Instance %s cannot be refreshed - it's not " + " persistent and does not " + "contain a full primary key." % state_str(state) + ) + identity_key = mapper._identity_key_from_state(state) + + if ( + _none_set.issubset(identity_key) and not mapper.allow_partial_pks + ) or _none_set.issuperset(identity_key): + util.warn_limited( + "Instance %s to be refreshed doesn't " + "contain a full primary key - can't be refreshed " + "(and shouldn't be expired, either).", + state_str(state), + ) + return + + result = load_on_ident( + session, + select(mapper).set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL), + identity_key, + refresh_state=state, + only_load_props=attribute_names, + no_autoflush=no_autoflush, + ) + + # if instance is pending, a refresh operation + # may not complete (even if PK attributes are assigned) + if has_key and result is None: + raise orm_exc.ObjectDeletedError(state) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/mapped_collection.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/mapped_collection.py new file mode 100644 index 00000000..0d3079fb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/mapped_collection.py @@ -0,0 +1,557 @@ +# orm/mapped_collection.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import operator +from typing import Any +from typing import Callable +from typing import Dict +from typing import Generic +from typing import List +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import base +from .collections import collection +from .collections import collection_adapter +from .. import exc as sa_exc +from .. import util +from ..sql import coercions +from ..sql import expression +from ..sql import roles +from ..util.langhelpers import Missing +from ..util.langhelpers import MissingOr +from ..util.typing import Literal + +if TYPE_CHECKING: + from . import AttributeEventToken + from . import Mapper + from .collections import CollectionAdapter + from ..sql.elements import ColumnElement + +_KT = TypeVar("_KT", bound=Any) +_VT = TypeVar("_VT", bound=Any) + + +class _PlainColumnGetter(Generic[_KT]): + """Plain column getter, stores collection of Column objects + directly. + + Serializes to a :class:`._SerializableColumnGetterV2` + which has more expensive __call__() performance + and some rare caveats. 
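+
+    For example (an illustrative description, not an API contract): with a
+    single mapped column, calling the getter on an instance returns that
+    column's loaded value, with the ``Missing`` sentinel standing in for
+    ``None``; with multiple columns, a tuple of the values is returned.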
+ + """ + + __slots__ = ("cols", "composite") + + def __init__(self, cols: Sequence[ColumnElement[_KT]]) -> None: + self.cols = cols + self.composite = len(cols) > 1 + + def __reduce__( + self, + ) -> Tuple[ + Type[_SerializableColumnGetterV2[_KT]], + Tuple[Sequence[Tuple[Optional[str], Optional[str]]]], + ]: + return _SerializableColumnGetterV2._reduce_from_cols(self.cols) + + def _cols(self, mapper: Mapper[_KT]) -> Sequence[ColumnElement[_KT]]: + return self.cols + + def __call__(self, value: _KT) -> MissingOr[Union[_KT, Tuple[_KT, ...]]]: + state = base.instance_state(value) + m = base._state_mapper(state) + + key: List[_KT] = [ + m._get_state_attr_by_column(state, state.dict, col) + for col in self._cols(m) + ] + if self.composite: + return tuple(key) + else: + obj = key[0] + if obj is None: + return Missing + else: + return obj + + +class _SerializableColumnGetterV2(_PlainColumnGetter[_KT]): + """Updated serializable getter which deals with + multi-table mapped classes. + + Two extremely unusual cases are not supported. + Mappings which have tables across multiple metadata + objects, or which are mapped to non-Table selectables + linked across inheriting mappers may fail to function + here. + + """ + + __slots__ = ("colkeys",) + + def __init__( + self, colkeys: Sequence[Tuple[Optional[str], Optional[str]]] + ) -> None: + self.colkeys = colkeys + self.composite = len(colkeys) > 1 + + def __reduce__( + self, + ) -> Tuple[ + Type[_SerializableColumnGetterV2[_KT]], + Tuple[Sequence[Tuple[Optional[str], Optional[str]]]], + ]: + return self.__class__, (self.colkeys,) + + @classmethod + def _reduce_from_cols(cls, cols: Sequence[ColumnElement[_KT]]) -> Tuple[ + Type[_SerializableColumnGetterV2[_KT]], + Tuple[Sequence[Tuple[Optional[str], Optional[str]]]], + ]: + def _table_key(c: ColumnElement[_KT]) -> Optional[str]: + if not isinstance(c.table, expression.TableClause): + return None + else: + return c.table.key # type: ignore + + colkeys = [(c.key, _table_key(c)) for c in cols] + return _SerializableColumnGetterV2, (colkeys,) + + def _cols(self, mapper: Mapper[_KT]) -> Sequence[ColumnElement[_KT]]: + cols: List[ColumnElement[_KT]] = [] + metadata = getattr(mapper.local_table, "metadata", None) + for ckey, tkey in self.colkeys: + if tkey is None or metadata is None or tkey not in metadata: + cols.append(mapper.local_table.c[ckey]) # type: ignore + else: + cols.append(metadata.tables[tkey].c[ckey]) + return cols + + +def column_keyed_dict( + mapping_spec: Union[Type[_KT], Callable[[_KT], _VT]], + *, + ignore_unpopulated_attribute: bool = False, +) -> Type[KeyFuncDict[_KT, _KT]]: + """A dictionary-based collection type with column-based keying. + + .. versionchanged:: 2.0 Renamed :data:`.column_mapped_collection` to + :class:`.column_keyed_dict`. + + Returns a :class:`.KeyFuncDict` factory which will produce new + dictionary keys based on the value of a particular :class:`.Column`-mapped + attribute on ORM mapped instances to be added to the dictionary. + + .. note:: the value of the target attribute must be assigned with its + value at the time that the object is being added to the + dictionary collection. Additionally, changes to the key attribute + are **not tracked**, which means the key in the dictionary is not + automatically synchronized with the key value on the target object + itself. See :ref:`key_collections_mutations` for further details. + + .. 
seealso:: + + :ref:`orm_dictionary_collection` - background on use + + :param mapping_spec: a :class:`_schema.Column` object that is expected + to be mapped by the target mapper to a particular attribute on the + mapped class, the value of which on a particular instance is to be used + as the key for a new dictionary entry for that instance. + :param ignore_unpopulated_attribute: if True, and the mapped attribute + indicated by the given :class:`_schema.Column` target attribute + on an object is not populated at all, the operation will be silently + skipped. By default, an error is raised. + + .. versionadded:: 2.0 an error is raised by default if the attribute + being used for the dictionary key is determined that it was never + populated with any value. The + :paramref:`_orm.column_keyed_dict.ignore_unpopulated_attribute` + parameter may be set which will instead indicate that this condition + should be ignored, and the append operation silently skipped. + This is in contrast to the behavior of the 1.x series which would + erroneously populate the value in the dictionary with an arbitrary key + value of ``None``. + + + """ + cols = [ + coercions.expect(roles.ColumnArgumentRole, q, argname="mapping_spec") + for q in util.to_list(mapping_spec) + ] + keyfunc = _PlainColumnGetter(cols) + return _mapped_collection_cls( + keyfunc, + ignore_unpopulated_attribute=ignore_unpopulated_attribute, + ) + + +class _AttrGetter: + __slots__ = ("attr_name", "getter") + + def __init__(self, attr_name: str): + self.attr_name = attr_name + self.getter = operator.attrgetter(attr_name) + + def __call__(self, mapped_object: Any) -> Any: + obj = self.getter(mapped_object) + if obj is None: + state = base.instance_state(mapped_object) + mp = state.mapper + if self.attr_name in mp.attrs: + dict_ = state.dict + obj = dict_.get(self.attr_name, base.NO_VALUE) + if obj is None: + return Missing + else: + return Missing + + return obj + + def __reduce__(self) -> Tuple[Type[_AttrGetter], Tuple[str]]: + return _AttrGetter, (self.attr_name,) + + +def attribute_keyed_dict( + attr_name: str, *, ignore_unpopulated_attribute: bool = False +) -> Type[KeyFuncDict[Any, Any]]: + """A dictionary-based collection type with attribute-based keying. + + .. versionchanged:: 2.0 Renamed :data:`.attribute_mapped_collection` to + :func:`.attribute_keyed_dict`. + + Returns a :class:`.KeyFuncDict` factory which will produce new + dictionary keys based on the value of a particular named attribute on + ORM mapped instances to be added to the dictionary. + + .. note:: the value of the target attribute must be assigned with its + value at the time that the object is being added to the + dictionary collection. Additionally, changes to the key attribute + are **not tracked**, which means the key in the dictionary is not + automatically synchronized with the key value on the target object + itself. See :ref:`key_collections_mutations` for further details. + + .. seealso:: + + :ref:`orm_dictionary_collection` - background on use + + :param attr_name: string name of an ORM-mapped attribute + on the mapped class, the value of which on a particular instance + is to be used as the key for a new dictionary entry for that instance. + :param ignore_unpopulated_attribute: if True, and the target attribute + on an object is not populated at all, the operation will be silently + skipped. By default, an error is raised. + + .. 
versionadded:: 2.0 an error is raised by default if the attribute
+       being used for the dictionary key is determined to have never been
+       populated with any value. The
+       :paramref:`_orm.attribute_keyed_dict.ignore_unpopulated_attribute`
+       parameter may be set which will instead indicate that this condition
+       should be ignored, and the append operation silently skipped.
+       This is in contrast to the behavior of the 1.x series which would
+       erroneously populate the value in the dictionary with an arbitrary key
+       value of ``None``.
+
+
+    """
+
+    return _mapped_collection_cls(
+        _AttrGetter(attr_name),
+        ignore_unpopulated_attribute=ignore_unpopulated_attribute,
+    )
+
+
+def keyfunc_mapping(
+    keyfunc: Callable[[Any], Any],
+    *,
+    ignore_unpopulated_attribute: bool = False,
+) -> Type[KeyFuncDict[_KT, Any]]:
+    """A dictionary-based collection type with arbitrary keying.
+
+    .. versionchanged:: 2.0 Renamed :data:`.mapped_collection` to
+       :func:`.keyfunc_mapping`.
+
+    Returns a :class:`.KeyFuncDict` factory with a keying function
+    generated from keyfunc, a callable that takes an entity and returns a
+    key value.
+
+    .. note:: the given keyfunc is called only once at the time that the
+       target object is being added to the collection. Changes to the
+       effective value returned by the function are not tracked.
+
+
+    .. seealso::
+
+       :ref:`orm_dictionary_collection` - background on use
+
+    :param keyfunc: a callable that will be passed the ORM-mapped instance
+     which should then generate a new key to use in the dictionary.
+     If the value returned is :attr:`.LoaderCallableStatus.NO_VALUE`, an error
+     is raised.
+    :param ignore_unpopulated_attribute: if True, and the callable returns
+     :attr:`.LoaderCallableStatus.NO_VALUE` for a particular instance, the
+     operation will be silently skipped. By default, an error is raised.
+
+    .. versionadded:: 2.0 an error is raised by default if the callable
+       being used for the dictionary key returns
+       :attr:`.LoaderCallableStatus.NO_VALUE`, which in an ORM attribute
+       context indicates an attribute that was never populated with any value.
+       The :paramref:`_orm.mapped_collection.ignore_unpopulated_attribute`
+       parameter may be set which will instead indicate that this condition
+       should be ignored, and the append operation silently skipped. This is
+       in contrast to the behavior of the 1.x series which would erroneously
+       populate the value in the dictionary with an arbitrary key value of
+       ``None``.
+
+
+    """
+    return _mapped_collection_cls(
+        keyfunc, ignore_unpopulated_attribute=ignore_unpopulated_attribute
+    )
+
+
+class KeyFuncDict(Dict[_KT, _VT]):
+    """Base for ORM mapped dictionary classes.
+
+    Extends the ``dict`` type with additional methods needed by SQLAlchemy ORM
+    collection classes. Use of :class:`_orm.KeyFuncDict` is most directly
+    achieved by using the :func:`.attribute_keyed_dict` or
+    :func:`.column_keyed_dict` class factories.
+    :class:`_orm.KeyFuncDict` may also serve as the base for user-defined
+    custom dictionary classes.
+
+    .. versionchanged:: 2.0 Renamed :class:`.MappedCollection` to
+       :class:`.KeyFuncDict`.
+
+    .. seealso::
+
+       :func:`_orm.attribute_keyed_dict`
+
+       :func:`_orm.column_keyed_dict`
+
+       :ref:`orm_dictionary_collection`
+
+       :ref:`orm_custom_collection`
+
+
+    """
+
+    def __init__(
+        self,
+        keyfunc: Callable[[Any], Any],
+        *dict_args: Any,
+        ignore_unpopulated_attribute: bool = False,
+    ) -> None:
+        """Create a new collection with keying provided by keyfunc.
+ + keyfunc may be any callable that takes an object and returns an object + for use as a dictionary key. + + The keyfunc will be called every time the ORM needs to add a member by + value-only (such as when loading instances from the database) or + remove a member. The usual cautions about dictionary keying apply- + ``keyfunc(object)`` should return the same output for the life of the + collection. Keying based on mutable properties can result in + unreachable instances "lost" in the collection. + + """ + self.keyfunc = keyfunc + self.ignore_unpopulated_attribute = ignore_unpopulated_attribute + super().__init__(*dict_args) + + @classmethod + def _unreduce( + cls, + keyfunc: Callable[[Any], Any], + values: Dict[_KT, _KT], + adapter: Optional[CollectionAdapter] = None, + ) -> "KeyFuncDict[_KT, _KT]": + mp: KeyFuncDict[_KT, _KT] = KeyFuncDict(keyfunc) + mp.update(values) + # note that the adapter sets itself up onto this collection + # when its `__setstate__` method is called + return mp + + def __reduce__( + self, + ) -> Tuple[ + Callable[[_KT, _KT], KeyFuncDict[_KT, _KT]], + Tuple[Any, Union[Dict[_KT, _KT], Dict[_KT, _KT]], CollectionAdapter], + ]: + return ( + KeyFuncDict._unreduce, + ( + self.keyfunc, + dict(self), + collection_adapter(self), + ), + ) + + @util.preload_module("sqlalchemy.orm.attributes") + def _raise_for_unpopulated( + self, + value: _KT, + initiator: Union[AttributeEventToken, Literal[None, False]] = None, + *, + warn_only: bool, + ) -> None: + mapper = base.instance_state(value).mapper + + attributes = util.preloaded.orm_attributes + + if not isinstance(initiator, attributes.AttributeEventToken): + relationship = "unknown relationship" + elif initiator.key in mapper.attrs: + relationship = f"{mapper.attrs[initiator.key]}" + else: + relationship = initiator.key + + if warn_only: + util.warn( + f"Attribute keyed dictionary value for " + f"attribute '{relationship}' was None; this will raise " + "in a future release. " + f"To skip this assignment entirely, " + f'Set the "ignore_unpopulated_attribute=True" ' + f"parameter on the mapped collection factory." + ) + else: + raise sa_exc.InvalidRequestError( + "In event triggered from population of " + f"attribute '{relationship}' " + "(potentially from a backref), " + f"can't populate value in KeyFuncDict; " + "dictionary key " + f"derived from {base.instance_str(value)} is not " + f"populated. Ensure appropriate state is set up on " + f"the {base.instance_str(value)} object " + f"before assigning to the {relationship} attribute. " + f"To skip this assignment entirely, " + f'Set the "ignore_unpopulated_attribute=True" ' + f"parameter on the mapped collection factory." 
+            )
+
+    @collection.appender  # type: ignore[misc]
+    @collection.internally_instrumented  # type: ignore[misc]
+    def set(
+        self,
+        value: _KT,
+        _sa_initiator: Union[AttributeEventToken, Literal[None, False]] = None,
+    ) -> None:
+        """Add an item by value, consulting the keyfunc for the key."""
+
+        key = self.keyfunc(value)
+
+        if key is base.NO_VALUE:
+            if not self.ignore_unpopulated_attribute:
+                self._raise_for_unpopulated(
+                    value, _sa_initiator, warn_only=False
+                )
+            else:
+                return
+        elif key is Missing:
+            if not self.ignore_unpopulated_attribute:
+                self._raise_for_unpopulated(
+                    value, _sa_initiator, warn_only=True
+                )
+                key = None
+            else:
+                return
+
+        self.__setitem__(key, value, _sa_initiator)  # type: ignore[call-arg]
+
+    @collection.remover  # type: ignore[misc]
+    @collection.internally_instrumented  # type: ignore[misc]
+    def remove(
+        self,
+        value: _KT,
+        _sa_initiator: Union[AttributeEventToken, Literal[None, False]] = None,
+    ) -> None:
+        """Remove an item by value, consulting the keyfunc for the key."""
+
+        key = self.keyfunc(value)
+
+        if key is base.NO_VALUE:
+            if not self.ignore_unpopulated_attribute:
+                self._raise_for_unpopulated(
+                    value, _sa_initiator, warn_only=False
+                )
+            return
+        elif key is Missing:
+            if not self.ignore_unpopulated_attribute:
+                self._raise_for_unpopulated(
+                    value, _sa_initiator, warn_only=True
+                )
+                key = None
+            else:
+                return
+
+        # Let self[key] raise if key is not in this collection
+        # testlib.pragma exempt:__ne__
+        if self[key] != value:
+            raise sa_exc.InvalidRequestError(
+                "Cannot remove '%s': collection holds '%s' for key '%s'. "
+                "Possible cause: is the KeyFuncDict key function "
+                "based on mutable properties or properties that only obtain "
+                "values after flush?" % (value, self[key], key)
+            )
+        self.__delitem__(key, _sa_initiator)  # type: ignore[call-arg]
+
+
+def _mapped_collection_cls(
+    keyfunc: Callable[[Any], Any], ignore_unpopulated_attribute: bool
+) -> Type[KeyFuncDict[_KT, _KT]]:
+    class _MKeyfuncMapped(KeyFuncDict[_KT, _KT]):
+        def __init__(self, *dict_args: Any) -> None:
+            super().__init__(
+                keyfunc,
+                *dict_args,
+                ignore_unpopulated_attribute=ignore_unpopulated_attribute,
+            )
+
+    return _MKeyfuncMapped
+
+
+MappedCollection = KeyFuncDict
+"""A synonym for :class:`.KeyFuncDict`.
+
+.. versionchanged:: 2.0 Renamed :class:`.MappedCollection` to
+   :class:`.KeyFuncDict`.
+
+"""
+
+mapped_collection = keyfunc_mapping
+"""A synonym for :func:`_orm.keyfunc_mapping`.
+
+.. versionchanged:: 2.0 Renamed :data:`.mapped_collection` to
+   :func:`_orm.keyfunc_mapping`
+
+"""
+
+attribute_mapped_collection = attribute_keyed_dict
+"""A synonym for :func:`_orm.attribute_keyed_dict`.
+
+.. versionchanged:: 2.0 Renamed :data:`.attribute_mapped_collection` to
+   :func:`_orm.attribute_keyed_dict`
+
+"""
+
+column_mapped_collection = column_keyed_dict
+"""A synonym for :func:`_orm.column_keyed_dict`.
+
+.. versionchanged:: 2.0 Renamed :func:`.column_mapped_collection` to
+   :func:`_orm.column_keyed_dict`
+
+"""
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/mapper.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/mapper.py
new file mode 100644
index 00000000..06e3884b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/mapper.py
@@ -0,0 +1,4421 @@
+# orm/mapper.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""Logic to map Python classes to and from selectables.
+
+Defines the :class:`~sqlalchemy.orm.mapper.Mapper` class, the central
+configurational unit which associates a class with a database table.
+
+This is a semi-private module; the main configurational API of the ORM is
+available in :mod:`sqlalchemy.orm`.
+
+"""
+from __future__ import annotations
+
+from collections import deque
+from functools import reduce
+from itertools import chain
+import sys
+import threading
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Collection
+from typing import Deque
+from typing import Dict
+from typing import FrozenSet
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+import weakref
+
+from . import attributes
+from . import exc as orm_exc
+from . import instrumentation
+from . import loading
+from . import properties
+from . import util as orm_util
+from ._typing import _O
+from .base import _class_to_mapper
+from .base import _parse_mapper_argument
+from .base import _state_mapper
+from .base import PassiveFlag
+from .base import state_str
+from .interfaces import _MappedAttribute
+from .interfaces import EXT_SKIP
+from .interfaces import InspectionAttr
+from .interfaces import MapperProperty
+from .interfaces import ORMEntityColumnsClauseRole
+from .interfaces import ORMFromClauseRole
+from .interfaces import StrategizedProperty
+from .path_registry import PathRegistry
+from .. import event
+from .. import exc as sa_exc
+from .. import inspection
+from .. import log
+from .. import schema
+from .. import sql
+from .. 
import util +from ..event import dispatcher +from ..event import EventTarget +from ..sql import base as sql_base +from ..sql import coercions +from ..sql import expression +from ..sql import operators +from ..sql import roles +from ..sql import TableClause +from ..sql import util as sql_util +from ..sql import visitors +from ..sql.cache_key import MemoizedHasCacheKey +from ..sql.elements import KeyedColumnElement +from ..sql.schema import Column +from ..sql.schema import Table +from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL +from ..util import HasMemoized +from ..util import HasMemoized_ro_memoized_attribute +from ..util.typing import Literal + +if TYPE_CHECKING: + from ._typing import _IdentityKeyType + from ._typing import _InstanceDict + from ._typing import _ORMColumnExprArgument + from ._typing import _RegistryType + from .decl_api import registry + from .dependency import DependencyProcessor + from .descriptor_props import CompositeProperty + from .descriptor_props import SynonymProperty + from .events import MapperEvents + from .instrumentation import ClassManager + from .path_registry import CachingEntityRegistry + from .properties import ColumnProperty + from .relationships import RelationshipProperty + from .state import InstanceState + from .util import ORMAdapter + from ..engine import Row + from ..engine import RowMapping + from ..sql._typing import _ColumnExpressionArgument + from ..sql._typing import _EquivalentColumnMap + from ..sql.base import ReadOnlyColumnCollection + from ..sql.elements import ColumnClause + from ..sql.elements import ColumnElement + from ..sql.selectable import FromClause + from ..util import OrderedSet + + +_T = TypeVar("_T", bound=Any) +_MP = TypeVar("_MP", bound="MapperProperty[Any]") +_Fn = TypeVar("_Fn", bound="Callable[..., Any]") + + +_WithPolymorphicArg = Union[ + Literal["*"], + Tuple[ + Union[Literal["*"], Sequence[Union["Mapper[Any]", Type[Any]]]], + Optional["FromClause"], + ], + Sequence[Union["Mapper[Any]", Type[Any]]], +] + + +_mapper_registries: weakref.WeakKeyDictionary[_RegistryType, bool] = ( + weakref.WeakKeyDictionary() +) + + +def _all_registries() -> Set[registry]: + with _CONFIGURE_MUTEX: + return set(_mapper_registries) + + +def _unconfigured_mappers() -> Iterator[Mapper[Any]]: + for reg in _all_registries(): + yield from reg._mappers_to_configure() + + +_already_compiling = False + + +# a constant returned by _get_attr_by_column to indicate +# this mapper is not handling an attribute for a particular +# column +NO_ATTRIBUTE = util.symbol("NO_ATTRIBUTE") + +# lock used to synchronize the "mapper configure" step +_CONFIGURE_MUTEX = threading.RLock() + + +@inspection._self_inspects +@log.class_logger +class Mapper( + ORMFromClauseRole, + ORMEntityColumnsClauseRole[_O], + MemoizedHasCacheKey, + InspectionAttr, + log.Identified, + inspection.Inspectable["Mapper[_O]"], + EventTarget, + Generic[_O], +): + """Defines an association between a Python class and a database table or + other relational structure, so that ORM operations against the class may + proceed. + + The :class:`_orm.Mapper` object is instantiated using mapping methods + present on the :class:`_orm.registry` object. For information + about instantiating new :class:`_orm.Mapper` objects, see + :ref:`orm_mapping_classes_toplevel`. 
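+
+    An illustrative sketch of imperative mapping (``User`` and
+    ``user_table`` are assumed to be defined elsewhere)::
+
+        from sqlalchemy.orm import registry
+
+        mapper_registry = registry()
+        mapper_registry.map_imperatively(User, user_table)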
+ + """ + + dispatch: dispatcher[Mapper[_O]] + + _dispose_called = False + _configure_failed: Any = False + _ready_for_configure = False + + @util.deprecated_params( + non_primary=( + "1.3", + "The :paramref:`.mapper.non_primary` parameter is deprecated, " + "and will be removed in a future release. The functionality " + "of non primary mappers is now better suited using the " + ":class:`.AliasedClass` construct, which can also be used " + "as the target of a :func:`_orm.relationship` in 1.3.", + ), + ) + def __init__( + self, + class_: Type[_O], + local_table: Optional[FromClause] = None, + properties: Optional[Mapping[str, MapperProperty[Any]]] = None, + primary_key: Optional[Iterable[_ORMColumnExprArgument[Any]]] = None, + non_primary: bool = False, + inherits: Optional[Union[Mapper[Any], Type[Any]]] = None, + inherit_condition: Optional[_ColumnExpressionArgument[bool]] = None, + inherit_foreign_keys: Optional[ + Sequence[_ORMColumnExprArgument[Any]] + ] = None, + always_refresh: bool = False, + version_id_col: Optional[_ORMColumnExprArgument[Any]] = None, + version_id_generator: Optional[ + Union[Literal[False], Callable[[Any], Any]] + ] = None, + polymorphic_on: Optional[ + Union[_ORMColumnExprArgument[Any], str, MapperProperty[Any]] + ] = None, + _polymorphic_map: Optional[Dict[Any, Mapper[Any]]] = None, + polymorphic_identity: Optional[Any] = None, + concrete: bool = False, + with_polymorphic: Optional[_WithPolymorphicArg] = None, + polymorphic_abstract: bool = False, + polymorphic_load: Optional[Literal["selectin", "inline"]] = None, + allow_partial_pks: bool = True, + batch: bool = True, + column_prefix: Optional[str] = None, + include_properties: Optional[Sequence[str]] = None, + exclude_properties: Optional[Sequence[str]] = None, + passive_updates: bool = True, + passive_deletes: bool = False, + confirm_deleted_rows: bool = True, + eager_defaults: Literal[True, False, "auto"] = "auto", + legacy_is_orphan: bool = False, + _compiled_cache_size: int = 100, + ): + r"""Direct constructor for a new :class:`_orm.Mapper` object. + + The :class:`_orm.Mapper` constructor is not called directly, and + is normally invoked through the + use of the :class:`_orm.registry` object through either the + :ref:`Declarative ` or + :ref:`Imperative ` mapping styles. + + .. versionchanged:: 2.0 The public facing ``mapper()`` function is + removed; for a classical mapping configuration, use the + :meth:`_orm.registry.map_imperatively` method. + + Parameters documented below may be passed to either the + :meth:`_orm.registry.map_imperatively` method, or may be passed in the + ``__mapper_args__`` declarative class attribute described at + :ref:`orm_declarative_mapper_options`. + + :param class\_: The class to be mapped. When using Declarative, + this argument is automatically passed as the declared class + itself. + + :param local_table: The :class:`_schema.Table` or other + :class:`_sql.FromClause` (i.e. selectable) to which the class is + mapped. May be ``None`` if this mapper inherits from another mapper + using single-table inheritance. When using Declarative, this + argument is automatically passed by the extension, based on what is + configured via the :attr:`_orm.DeclarativeBase.__table__` attribute + or via the :class:`_schema.Table` produced as a result of + the :attr:`_orm.DeclarativeBase.__tablename__` attribute being + present. + + :param polymorphic_abstract: Indicates this class will be mapped in a + polymorphic hierarchy, but not directly instantiated. 
The class is
+          mapped normally, except that it has no requirement for a
+          :paramref:`_orm.Mapper.polymorphic_identity` within an inheritance
+          hierarchy. The class, however, must be part of a polymorphic
+          inheritance scheme which uses
+          :paramref:`_orm.Mapper.polymorphic_on` at the base.
+
+          .. versionadded:: 2.0
+
+          .. seealso::
+
+            :ref:`orm_inheritance_abstract_poly`
+
+        :param always_refresh: If True, all query operations for this mapped
+          class will overwrite all data within object instances that already
+          exist within the session, erasing any in-memory changes with
+          whatever information was loaded from the database. Usage of this
+          flag is highly discouraged; as an alternative, see the method
+          :meth:`_query.Query.populate_existing`.
+
+        :param allow_partial_pks: Defaults to True. Indicates that a
+          composite primary key with some NULL values should be considered as
+          possibly existing within the database. This affects whether a
+          mapper will assign an incoming row to an existing identity, as well
+          as if :meth:`.Session.merge` will check the database first for a
+          particular primary key value. A "partial primary key" can occur if
+          one has mapped to an OUTER JOIN, for example.
+
+        :param batch: Defaults to ``True``, indicating that save operations
+          of multiple entities can be batched together for efficiency.
+          Setting to False indicates
+          that an instance will be fully saved before saving the next
+          instance. This is used in the extremely rare case that a
+          :class:`.MapperEvents` listener requires being called
+          in between individual row persistence operations.
+
+        :param column_prefix: A string which will be prepended
+          to the mapped attribute name when :class:`_schema.Column`
+          objects are automatically assigned as attributes to the
+          mapped class. Does not affect :class:`.Column` objects that
+          are mapped explicitly in the :paramref:`.Mapper.properties`
+          dictionary.
+
+          This parameter is typically useful with imperative mappings
+          that keep the :class:`.Table` object separate. Below, assuming
+          the ``user_table`` :class:`.Table` object has columns named
+          ``user_id``, ``user_name``, and ``password``::
+
+                class User(Base):
+                    __table__ = user_table
+                    __mapper_args__ = {'column_prefix':'_'}
+
+          The above mapping will assign the ``user_id``, ``user_name``, and
+          ``password`` columns to attributes named ``_user_id``,
+          ``_user_name``, and ``_password`` on the mapped ``User`` class.
+
+          The :paramref:`.Mapper.column_prefix` parameter is uncommon in
+          modern use. For dealing with reflected tables, a more flexible
+          approach to automating a naming scheme is to intercept the
+          :class:`.Column` objects as they are reflected; see the section
+          :ref:`mapper_automated_reflection_schemes` for notes on this usage
+          pattern.
+
+        :param concrete: If True, indicates this mapper should use concrete
+          table inheritance with its parent mapper.
+
+          See the section :ref:`concrete_inheritance` for an example.
+
+        :param confirm_deleted_rows: defaults to True; when a DELETE occurs
+          of one or more rows based on specific primary keys, a warning is
+          emitted when the number of rows matched does not equal the number
+          of rows expected. This parameter may be set to False to handle the
+          case where database ON DELETE CASCADE rules may be deleting some of
+          those rows automatically. The warning may be changed to an
+          exception in a future release.
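+
+          As a brief illustration (an editor's sketch; the ``Parent`` class
+          below is hypothetical and not part of this module), the parameter
+          may be set through the ``__mapper_args__`` attribute described
+          earlier, for a table whose rows may also be removed by the
+          database itself through ON DELETE CASCADE::
+
+                class Parent(Base):
+                    __tablename__ = "parent"
+
+                    id: Mapped[int] = mapped_column(primary_key=True)
+
+                    # rows here may be deleted by the database's own
+                    # ON DELETE CASCADE rules; don't warn when the DELETE
+                    # rowcount differs from the expected number of rows
+                    __mapper_args__ = {"confirm_deleted_rows": False}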
+
+        :param eager_defaults: if True, the ORM will immediately fetch the
+          value of server-generated default values after an INSERT or UPDATE,
+          rather than leaving them as expired to be fetched on next access.
+          This can be used for event schemes where the server-generated values
+          are needed immediately before the flush completes.
+
+          The fetch of values occurs either by using ``RETURNING`` inline
+          with the ``INSERT`` or ``UPDATE`` statement, or by adding an
+          additional ``SELECT`` statement subsequent to the ``INSERT`` or
+          ``UPDATE``, if the backend does not support ``RETURNING``.
+
+          The use of ``RETURNING`` is extremely performant in particular for
+          ``INSERT`` statements where SQLAlchemy can take advantage of
+          :ref:`insertmanyvalues <engine_insertmanyvalues>`, whereas the use
+          of an additional ``SELECT`` is relatively poor performing, adding
+          additional SQL round trips which would be unnecessary if these new
+          attributes are not to be accessed in any case.
+
+          For this reason, :paramref:`.Mapper.eager_defaults` defaults to the
+          string value ``"auto"``, which indicates that server defaults for
+          INSERT should be fetched using ``RETURNING`` if the backing database
+          supports it and if the dialect in use supports "insertmanyreturning"
+          for an INSERT statement. If the backing database does not support
+          ``RETURNING`` or "insertmanyreturning" is not available, server
+          defaults will not be fetched.
+
+          .. versionchanged:: 2.0.0rc1 added the "auto" option for
+             :paramref:`.Mapper.eager_defaults`
+
+          .. seealso::
+
+            :ref:`orm_server_defaults`
+
+          .. versionchanged:: 2.0.0 RETURNING now works with multiple rows
+             INSERTed at once using the
+             :ref:`insertmanyvalues <engine_insertmanyvalues>` feature, which
+             among other things allows the :paramref:`.Mapper.eager_defaults`
+             feature to be very performant on supporting backends.
+
+        :param exclude_properties: A list or set of string column names to
+          be excluded from mapping.
+
+          .. seealso::
+
+            :ref:`include_exclude_cols`
+
+        :param include_properties: An inclusive list or set of string column
+          names to map.
+
+          .. seealso::
+
+            :ref:`include_exclude_cols`
+
+        :param inherits: A mapped class or the corresponding
+          :class:`_orm.Mapper`
+          of one indicating a superclass from which this :class:`_orm.Mapper`
+          should *inherit*. The mapped class here must be a subclass
+          of the other mapper's class. When using Declarative, this argument
+          is passed automatically as a result of the natural class
+          hierarchy of the declared classes.
+
+          .. seealso::
+
+            :ref:`inheritance_toplevel`
+
+        :param inherit_condition: For joined table inheritance, a SQL
+          expression which will
+          define how the two tables are joined; defaults to a natural join
+          between the two tables.
+
+        :param inherit_foreign_keys: When ``inherit_condition`` is used and
+          the columns present are missing a :class:`_schema.ForeignKey`
+          configuration, this parameter can be used to specify which columns
+          are "foreign". In most cases this can be left as ``None``.
+
+        :param legacy_is_orphan: Boolean, defaults to ``False``.
+          When ``True``, specifies that "legacy" orphan consideration
+          is to be applied to objects mapped by this mapper, which means
+          that a pending (that is, not persistent) object is auto-expunged
+          from an owning :class:`.Session` only when it is de-associated
+          from *all* parents that specify a ``delete-orphan`` cascade towards
+          this mapper. The new default behavior is that the object is
+          auto-expunged when it is de-associated with *any* of its parents
+          that specify ``delete-orphan`` cascade.
This behavior is more + consistent with that of a persistent object, and allows behavior to + be consistent in more scenarios independently of whether or not an + orphan object has been flushed yet or not. + + See the change note and example at :ref:`legacy_is_orphan_addition` + for more detail on this change. + + :param non_primary: Specify that this :class:`_orm.Mapper` + is in addition + to the "primary" mapper, that is, the one used for persistence. + The :class:`_orm.Mapper` created here may be used for ad-hoc + mapping of the class to an alternate selectable, for loading + only. + + .. seealso:: + + :ref:`relationship_aliased_class` - the new pattern that removes + the need for the :paramref:`_orm.Mapper.non_primary` flag. + + :param passive_deletes: Indicates DELETE behavior of foreign key + columns when a joined-table inheritance entity is being deleted. + Defaults to ``False`` for a base mapper; for an inheriting mapper, + defaults to ``False`` unless the value is set to ``True`` + on the superclass mapper. + + When ``True``, it is assumed that ON DELETE CASCADE is configured + on the foreign key relationships that link this mapper's table + to its superclass table, so that when the unit of work attempts + to delete the entity, it need only emit a DELETE statement for the + superclass table, and not this table. + + When ``False``, a DELETE statement is emitted for this mapper's + table individually. If the primary key attributes local to this + table are unloaded, then a SELECT must be emitted in order to + validate these attributes; note that the primary key columns + of a joined-table subclass are not part of the "primary key" of + the object as a whole. + + Note that a value of ``True`` is **always** forced onto the + subclass mappers; that is, it's not possible for a superclass + to specify passive_deletes without this taking effect for + all subclass mappers. + + .. seealso:: + + :ref:`passive_deletes` - description of similar feature as + used with :func:`_orm.relationship` + + :paramref:`.mapper.passive_updates` - supporting ON UPDATE + CASCADE for joined-table inheritance mappers + + :param passive_updates: Indicates UPDATE behavior of foreign key + columns when a primary key column changes on a joined-table + inheritance mapping. Defaults to ``True``. + + When True, it is assumed that ON UPDATE CASCADE is configured on + the foreign key in the database, and that the database will handle + propagation of an UPDATE from a source column to dependent columns + on joined-table rows. + + When False, it is assumed that the database does not enforce + referential integrity and will not be issuing its own CASCADE + operation for an update. The unit of work process will + emit an UPDATE statement for the dependent columns during a + primary key change. + + .. seealso:: + + :ref:`passive_updates` - description of a similar feature as + used with :func:`_orm.relationship` + + :paramref:`.mapper.passive_deletes` - supporting ON DELETE + CASCADE for joined-table inheritance mappers + + :param polymorphic_load: Specifies "polymorphic loading" behavior + for a subclass in an inheritance hierarchy (joined and single + table inheritance only). Valid values are: + + * "'inline'" - specifies this class should be part of + the "with_polymorphic" mappers, e.g. its columns will be included + in a SELECT query against the base. + + * "'selectin'" - specifies that when instances of this class + are loaded, an additional SELECT will be emitted to retrieve + the columns specific to this subclass. 
The SELECT uses + IN to fetch multiple subclasses at once. + + .. versionadded:: 1.2 + + .. seealso:: + + :ref:`with_polymorphic_mapper_config` + + :ref:`polymorphic_selectin` + + :param polymorphic_on: Specifies the column, attribute, or + SQL expression used to determine the target class for an + incoming row, when inheriting classes are present. + + May be specified as a string attribute name, or as a SQL + expression such as a :class:`_schema.Column` or in a Declarative + mapping a :func:`_orm.mapped_column` object. It is typically + expected that the SQL expression corresponds to a column in the + base-most mapped :class:`.Table`:: + + class Employee(Base): + __tablename__ = 'employee' + + id: Mapped[int] = mapped_column(primary_key=True) + discriminator: Mapped[str] = mapped_column(String(50)) + + __mapper_args__ = { + "polymorphic_on":discriminator, + "polymorphic_identity":"employee" + } + + It may also be specified + as a SQL expression, as in this example where we + use the :func:`.case` construct to provide a conditional + approach:: + + class Employee(Base): + __tablename__ = 'employee' + + id: Mapped[int] = mapped_column(primary_key=True) + discriminator: Mapped[str] = mapped_column(String(50)) + + __mapper_args__ = { + "polymorphic_on":case( + (discriminator == "EN", "engineer"), + (discriminator == "MA", "manager"), + else_="employee"), + "polymorphic_identity":"employee" + } + + It may also refer to any attribute using its string name, + which is of particular use when using annotated column + configurations:: + + class Employee(Base): + __tablename__ = 'employee' + + id: Mapped[int] = mapped_column(primary_key=True) + discriminator: Mapped[str] + + __mapper_args__ = { + "polymorphic_on": "discriminator", + "polymorphic_identity": "employee" + } + + When setting ``polymorphic_on`` to reference an + attribute or expression that's not present in the + locally mapped :class:`_schema.Table`, yet the value + of the discriminator should be persisted to the database, + the value of the + discriminator is not automatically set on new + instances; this must be handled by the user, + either through manual means or via event listeners. + A typical approach to establishing such a listener + looks like:: + + from sqlalchemy import event + from sqlalchemy.orm import object_mapper + + @event.listens_for(Employee, "init", propagate=True) + def set_identity(instance, *arg, **kw): + mapper = object_mapper(instance) + instance.discriminator = mapper.polymorphic_identity + + Where above, we assign the value of ``polymorphic_identity`` + for the mapped class to the ``discriminator`` attribute, + thus persisting the value to the ``discriminator`` column + in the database. + + .. warning:: + + Currently, **only one discriminator column may be set**, typically + on the base-most class in the hierarchy. "Cascading" polymorphic + columns are not yet supported. + + .. seealso:: + + :ref:`inheritance_toplevel` + + :param polymorphic_identity: Specifies the value which + identifies this particular class as returned by the column expression + referred to by the :paramref:`_orm.Mapper.polymorphic_on` setting. As + rows are received, the value corresponding to the + :paramref:`_orm.Mapper.polymorphic_on` column expression is compared + to this value, indicating which subclass should be used for the newly + reconstructed object. + + .. 
seealso::
+
+            :ref:`inheritance_toplevel`
+
+        :param properties: A dictionary mapping the string names of object
+          attributes to :class:`.MapperProperty` instances, which define the
+          persistence behavior of that attribute. Note that
+          :class:`_schema.Column`
+          objects present in
+          the mapped :class:`_schema.Table` are automatically placed into
+          ``ColumnProperty`` instances upon mapping, unless overridden.
+          When using Declarative, this argument is passed automatically,
+          based on all those :class:`.MapperProperty` instances declared
+          in the declared class body.
+
+          .. seealso::
+
+            :ref:`orm_mapping_properties` - in the
+            :ref:`orm_mapping_classes_toplevel`
+
+        :param primary_key: A list of :class:`_schema.Column`
+          objects, or alternatively string names of attribute names which
+          refer to :class:`_schema.Column`, which define
+          the primary key to be used against this mapper's selectable unit.
+          This is normally simply the primary key of the ``local_table``, but
+          can be overridden here.
+
+          .. versionchanged:: 2.0.2 :paramref:`_orm.Mapper.primary_key`
+             arguments may be indicated as string attribute names as well.
+
+          .. seealso::
+
+            :ref:`mapper_primary_key` - background and example use
+
+        :param version_id_col: A :class:`_schema.Column`
+          that will be used to keep a running version id of rows
+          in the table. This is used to detect concurrent updates or
+          the presence of stale data in a flush. The methodology is to
+          detect when an UPDATE statement does not match the last known
+          version id; in that case, a
+          :class:`~sqlalchemy.orm.exc.StaleDataError` exception is
+          thrown.
+          By default, the column must be of :class:`.Integer` type,
+          unless ``version_id_generator`` specifies an alternative version
+          generator.
+
+          .. seealso::
+
+            :ref:`mapper_version_counter` - discussion of version counting
+            and rationale.
+
+        :param version_id_generator: Define how new version ids should
+          be generated. Defaults to ``None``, which indicates that
+          a simple integer counting scheme be employed. To provide a custom
+          versioning scheme, provide a callable function of the form::
+
+              def generate_version(version):
+                  return next_version
+
+          Alternatively, server-side versioning functions such as triggers,
+          or programmatic versioning schemes outside of the version id
+          generator may be used, by specifying the value ``False``.
+          Please see :ref:`server_side_version_counter` for a discussion
+          of important points when using this option.
+
+          .. seealso::
+
+            :ref:`custom_version_counter`
+
+            :ref:`server_side_version_counter`
+
+
+        :param with_polymorphic: A tuple in the form ``(<classes>,
+          <selectable>)`` indicating the default style of "polymorphic"
+          loading, that is, which tables are queried at once. <classes> is
+          any single or list of mappers and/or classes indicating the
+          inherited classes that should be loaded at once. The special value
+          ``'*'`` may be used to indicate all descending classes should be
+          loaded immediately. The second tuple argument <selectable>
+          indicates a selectable that will be used to query for multiple
+          classes.
+
+          The :paramref:`_orm.Mapper.polymorphic_load` parameter may be
+          preferable over the use of :paramref:`_orm.Mapper.with_polymorphic`
+          in modern mappings to indicate a per-subclass technique of
+          indicating polymorphic loading styles.
+
+        .. 
seealso:: + + :ref:`with_polymorphic_mapper_config` + + """ + self.class_ = util.assert_arg_type(class_, type, "class_") + self._sort_key = "%s.%s" % ( + self.class_.__module__, + self.class_.__name__, + ) + + self._primary_key_argument = util.to_list(primary_key) + self.non_primary = non_primary + + self.always_refresh = always_refresh + + if isinstance(version_id_col, MapperProperty): + self.version_id_prop = version_id_col + self.version_id_col = None + else: + self.version_id_col = ( + coercions.expect( + roles.ColumnArgumentOrKeyRole, + version_id_col, + argname="version_id_col", + ) + if version_id_col is not None + else None + ) + + if version_id_generator is False: + self.version_id_generator = False + elif version_id_generator is None: + self.version_id_generator = lambda x: (x or 0) + 1 + else: + self.version_id_generator = version_id_generator + + self.concrete = concrete + self.single = False + + if inherits is not None: + self.inherits = _parse_mapper_argument(inherits) + else: + self.inherits = None + + if local_table is not None: + self.local_table = coercions.expect( + roles.StrictFromClauseRole, + local_table, + disable_inspection=True, + argname="local_table", + ) + elif self.inherits: + # note this is a new flow as of 2.0 so that + # .local_table need not be Optional + self.local_table = self.inherits.local_table + self.single = True + else: + raise sa_exc.ArgumentError( + f"Mapper[{self.class_.__name__}(None)] has None for a " + "primary table argument and does not specify 'inherits'" + ) + + if inherit_condition is not None: + self.inherit_condition = coercions.expect( + roles.OnClauseRole, inherit_condition + ) + else: + self.inherit_condition = None + + self.inherit_foreign_keys = inherit_foreign_keys + self._init_properties = dict(properties) if properties else {} + self._delete_orphans = [] + self.batch = batch + self.eager_defaults = eager_defaults + self.column_prefix = column_prefix + + # interim - polymorphic_on is further refined in + # _configure_polymorphic_setter + self.polymorphic_on = ( + coercions.expect( # type: ignore + roles.ColumnArgumentOrKeyRole, + polymorphic_on, + argname="polymorphic_on", + ) + if polymorphic_on is not None + else None + ) + self.polymorphic_abstract = polymorphic_abstract + self._dependency_processors = [] + self.validators = util.EMPTY_DICT + self.passive_updates = passive_updates + self.passive_deletes = passive_deletes + self.legacy_is_orphan = legacy_is_orphan + self._clause_adapter = None + self._requires_row_aliasing = False + self._inherits_equated_pairs = None + self._memoized_values = {} + self._compiled_cache_size = _compiled_cache_size + self._reconstructor = None + self.allow_partial_pks = allow_partial_pks + + if self.inherits and not self.concrete: + self.confirm_deleted_rows = False + else: + self.confirm_deleted_rows = confirm_deleted_rows + + self._set_with_polymorphic(with_polymorphic) + self.polymorphic_load = polymorphic_load + + # our 'polymorphic identity', a string name that when located in a + # result set row indicates this Mapper should be used to construct + # the object instance for that row. + self.polymorphic_identity = polymorphic_identity + + # a dictionary of 'polymorphic identity' names, associating those + # names with Mappers that will be used to construct object instances + # upon a select operation. 
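+        # note the same dictionary is shared across an entire inheritance
+        # hierarchy (see _configure_inheritance()); any mapper in the
+        # hierarchy may use it to locate the target sub-mapper for a row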
+ if _polymorphic_map is None: + self.polymorphic_map = {} + else: + self.polymorphic_map = _polymorphic_map + + if include_properties is not None: + self.include_properties = util.to_set(include_properties) + else: + self.include_properties = None + if exclude_properties: + self.exclude_properties = util.to_set(exclude_properties) + else: + self.exclude_properties = None + + # prevent this mapper from being constructed + # while a configure_mappers() is occurring (and defer a + # configure_mappers() until construction succeeds) + with _CONFIGURE_MUTEX: + cast("MapperEvents", self.dispatch._events)._new_mapper_instance( + class_, self + ) + self._configure_inheritance() + self._configure_class_instrumentation() + self._configure_properties() + self._configure_polymorphic_setter() + self._configure_pks() + self.registry._flag_new_mapper(self) + self._log("constructed") + self._expire_memoizations() + + self.dispatch.after_mapper_constructed(self, self.class_) + + def _prefer_eager_defaults(self, dialect, table): + if self.eager_defaults == "auto": + if not table.implicit_returning: + return False + + return ( + table in self._server_default_col_keys + and dialect.insert_executemany_returning + ) + else: + return self.eager_defaults + + def _gen_cache_key(self, anon_map, bindparams): + return (self,) + + # ### BEGIN + # ATTRIBUTE DECLARATIONS START HERE + + is_mapper = True + """Part of the inspection API.""" + + represents_outer_join = False + + registry: _RegistryType + + @property + def mapper(self) -> Mapper[_O]: + """Part of the inspection API. + + Returns self. + + """ + return self + + @property + def entity(self): + r"""Part of the inspection API. + + Returns self.class\_. + + """ + return self.class_ + + class_: Type[_O] + """The class to which this :class:`_orm.Mapper` is mapped.""" + + _identity_class: Type[_O] + + _delete_orphans: List[Tuple[str, Type[Any]]] + _dependency_processors: List[DependencyProcessor] + _memoized_values: Dict[Any, Callable[[], Any]] + _inheriting_mappers: util.WeakSequence[Mapper[Any]] + _all_tables: Set[TableClause] + _polymorphic_attr_key: Optional[str] + + _pks_by_table: Dict[FromClause, OrderedSet[ColumnClause[Any]]] + _cols_by_table: Dict[FromClause, OrderedSet[ColumnElement[Any]]] + + _props: util.OrderedDict[str, MapperProperty[Any]] + _init_properties: Dict[str, MapperProperty[Any]] + + _columntoproperty: _ColumnMapping + + _set_polymorphic_identity: Optional[Callable[[InstanceState[_O]], None]] + _validate_polymorphic_identity: Optional[ + Callable[[Mapper[_O], InstanceState[_O], _InstanceDict], None] + ] + + tables: Sequence[TableClause] + """A sequence containing the collection of :class:`_schema.Table` + or :class:`_schema.TableClause` objects which this :class:`_orm.Mapper` + is aware of. + + If the mapper is mapped to a :class:`_expression.Join`, or an + :class:`_expression.Alias` + representing a :class:`_expression.Select`, the individual + :class:`_schema.Table` + objects that comprise the full construct will be represented here. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + validators: util.immutabledict[str, Tuple[str, Dict[str, Any]]] + """An immutable dictionary of attributes which have been decorated + using the :func:`_orm.validates` decorator. + + The dictionary contains string attribute names as keys + mapped to the actual validation method. 
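+
+    For example, a minimal illustrative sketch (an editor's example; the
+    ``User`` class and ``validate_email`` method are hypothetical) of a
+    mapping that would populate this dictionary::
+
+        from sqlalchemy.orm import validates
+
+        class User(Base):
+            __tablename__ = "user"
+
+            id: Mapped[int] = mapped_column(primary_key=True)
+            email: Mapped[str]
+
+            @validates("email")
+            def validate_email(self, key, value):
+                # reject values that are clearly not email addresses
+                if "@" not in value:
+                    raise ValueError(f"invalid email: {value!r}")
+                return value
+
+    Here, :attr:`.Mapper.validators` would contain a single entry keyed on
+    ``"email"``, referring to ``validate_email`` along with its options.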
+ + """ + + always_refresh: bool + allow_partial_pks: bool + version_id_col: Optional[ColumnElement[Any]] + + with_polymorphic: Optional[ + Tuple[ + Union[Literal["*"], Sequence[Union[Mapper[Any], Type[Any]]]], + Optional[FromClause], + ] + ] + + version_id_generator: Optional[Union[Literal[False], Callable[[Any], Any]]] + + local_table: FromClause + """The immediate :class:`_expression.FromClause` to which this + :class:`_orm.Mapper` refers. + + Typically is an instance of :class:`_schema.Table`, may be any + :class:`.FromClause`. + + The "local" table is the + selectable that the :class:`_orm.Mapper` is directly responsible for + managing from an attribute access and flush perspective. For + non-inheriting mappers, :attr:`.Mapper.local_table` will be the same + as :attr:`.Mapper.persist_selectable`. For inheriting mappers, + :attr:`.Mapper.local_table` refers to the specific portion of + :attr:`.Mapper.persist_selectable` that includes the columns to which + this :class:`.Mapper` is loading/persisting, such as a particular + :class:`.Table` within a join. + + .. seealso:: + + :attr:`_orm.Mapper.persist_selectable`. + + :attr:`_orm.Mapper.selectable`. + + """ + + persist_selectable: FromClause + """The :class:`_expression.FromClause` to which this :class:`_orm.Mapper` + is mapped. + + Typically is an instance of :class:`_schema.Table`, may be any + :class:`.FromClause`. + + The :attr:`_orm.Mapper.persist_selectable` is similar to + :attr:`.Mapper.local_table`, but represents the :class:`.FromClause` that + represents the inheriting class hierarchy overall in an inheritance + scenario. + + :attr.`.Mapper.persist_selectable` is also separate from the + :attr:`.Mapper.selectable` attribute, the latter of which may be an + alternate subquery used for selecting columns. + :attr.`.Mapper.persist_selectable` is oriented towards columns that + will be written on a persist operation. + + .. seealso:: + + :attr:`_orm.Mapper.selectable`. + + :attr:`_orm.Mapper.local_table`. + + """ + + inherits: Optional[Mapper[Any]] + """References the :class:`_orm.Mapper` which this :class:`_orm.Mapper` + inherits from, if any. + + """ + + inherit_condition: Optional[ColumnElement[bool]] + + configured: bool = False + """Represent ``True`` if this :class:`_orm.Mapper` has been configured. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + .. seealso:: + + :func:`.configure_mappers`. + + """ + + concrete: bool + """Represent ``True`` if this :class:`_orm.Mapper` is a concrete + inheritance mapper. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + primary_key: Tuple[Column[Any], ...] + """An iterable containing the collection of :class:`_schema.Column` + objects + which comprise the 'primary key' of the mapped table, from the + perspective of this :class:`_orm.Mapper`. + + This list is against the selectable in + :attr:`_orm.Mapper.persist_selectable`. + In the case of inheriting mappers, some columns may be managed by a + superclass mapper. For example, in the case of a + :class:`_expression.Join`, the + primary key is determined by all of the primary key columns across all + tables referenced by the :class:`_expression.Join`. 
+
+    The list is also not necessarily the same as the primary key column
+    collection associated with the underlying tables; the :class:`_orm.Mapper`
+    features a ``primary_key`` argument that can override what the
+    :class:`_orm.Mapper` considers as primary key columns.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    class_manager: ClassManager[_O]
+    """The :class:`.ClassManager` which maintains event listeners
+    and class-bound descriptors for this :class:`_orm.Mapper`.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    single: bool
+    """Represent ``True`` if this :class:`_orm.Mapper` is a single table
+    inheritance mapper.
+
+    When this flag is set, :attr:`_orm.Mapper.local_table` refers to the
+    same selectable as that of the inherited mapper.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    non_primary: bool
+    """Represent ``True`` if this :class:`_orm.Mapper` is a "non-primary"
+    mapper, e.g. a mapper that is used only to select rows but not for
+    persistence management.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    polymorphic_on: Optional[KeyedColumnElement[Any]]
+    """The :class:`_schema.Column` or SQL expression specified as the
+    ``polymorphic_on`` argument
+    for this :class:`_orm.Mapper`, within an inheritance scenario.
+
+    This attribute is normally a :class:`_schema.Column` instance but
+    may also be an expression, such as one derived from
+    :func:`.cast`.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    polymorphic_map: Dict[Any, Mapper[Any]]
+    """A mapping of "polymorphic identity" identifiers mapped to
+    :class:`_orm.Mapper` instances, within an inheritance scenario.
+
+    The identifiers can be of any type which is comparable to the
+    type of column represented by :attr:`_orm.Mapper.polymorphic_on`.
+
+    An inheritance chain of mappers will all reference the same
+    polymorphic map object. The object is used to correlate incoming
+    result rows to target mappers.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    polymorphic_identity: Optional[Any]
+    """Represent an identifier which is matched against the
+    :attr:`_orm.Mapper.polymorphic_on` column during result row loading.
+
+    Used only with inheritance, this object can be of any type which is
+    comparable to the type of column represented by
+    :attr:`_orm.Mapper.polymorphic_on`.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    base_mapper: Mapper[Any]
+    """The base-most :class:`_orm.Mapper` in an inheritance chain.
+
+    In a non-inheriting scenario, this attribute will always be this
+    :class:`_orm.Mapper`. In an inheritance scenario, it references
+    the :class:`_orm.Mapper` which is parent to all other :class:`_orm.Mapper`
+    objects in the inheritance chain.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    columns: ReadOnlyColumnCollection[str, Column[Any]]
+    """A collection of :class:`_schema.Column` or other scalar expression
+    objects maintained by this :class:`_orm.Mapper`.
+ + The collection behaves the same as that of the ``c`` attribute on + any :class:`_schema.Table` object, + except that only those columns included in + this mapping are present, and are keyed based on the attribute name + defined in the mapping, not necessarily the ``key`` attribute of the + :class:`_schema.Column` itself. Additionally, scalar expressions mapped + by :func:`.column_property` are also present here. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + c: ReadOnlyColumnCollection[str, Column[Any]] + """A synonym for :attr:`_orm.Mapper.columns`.""" + + @util.non_memoized_property + @util.deprecated("1.3", "Use .persist_selectable") + def mapped_table(self): + return self.persist_selectable + + @util.memoized_property + def _path_registry(self) -> CachingEntityRegistry: + return PathRegistry.per_mapper(self) + + def _configure_inheritance(self): + """Configure settings related to inheriting and/or inherited mappers + being present.""" + + # a set of all mappers which inherit from this one. + self._inheriting_mappers = util.WeakSequence() + + if self.inherits: + if not issubclass(self.class_, self.inherits.class_): + raise sa_exc.ArgumentError( + "Class '%s' does not inherit from '%s'" + % (self.class_.__name__, self.inherits.class_.__name__) + ) + + self.dispatch._update(self.inherits.dispatch) + + if self.non_primary != self.inherits.non_primary: + np = not self.non_primary and "primary" or "non-primary" + raise sa_exc.ArgumentError( + "Inheritance of %s mapper for class '%s' is " + "only allowed from a %s mapper" + % (np, self.class_.__name__, np) + ) + + if self.single: + self.persist_selectable = self.inherits.persist_selectable + elif self.local_table is not self.inherits.local_table: + if self.concrete: + self.persist_selectable = self.local_table + for mapper in self.iterate_to_root(): + if mapper.polymorphic_on is not None: + mapper._requires_row_aliasing = True + else: + if self.inherit_condition is None: + # figure out inherit condition from our table to the + # immediate table of the inherited mapper, not its + # full table which could pull in other stuff we don't + # want (allows test/inheritance.InheritTest4 to pass) + try: + self.inherit_condition = sql_util.join_condition( + self.inherits.local_table, self.local_table + ) + except sa_exc.NoForeignKeysError as nfe: + assert self.inherits.local_table is not None + assert self.local_table is not None + raise sa_exc.NoForeignKeysError( + "Can't determine the inherit condition " + "between inherited table '%s' and " + "inheriting " + "table '%s'; tables have no " + "foreign key relationships established. " + "Please ensure the inheriting table has " + "a foreign key relationship to the " + "inherited " + "table, or provide an " + "'on clause' using " + "the 'inherit_condition' mapper argument." + % ( + self.inherits.local_table.description, + self.local_table.description, + ) + ) from nfe + except sa_exc.AmbiguousForeignKeysError as afe: + assert self.inherits.local_table is not None + assert self.local_table is not None + raise sa_exc.AmbiguousForeignKeysError( + "Can't determine the inherit condition " + "between inherited table '%s' and " + "inheriting " + "table '%s'; tables have more than one " + "foreign key relationship established. " + "Please specify the 'on clause' using " + "the 'inherit_condition' mapper argument." 
+ % ( + self.inherits.local_table.description, + self.local_table.description, + ) + ) from afe + assert self.inherits.persist_selectable is not None + self.persist_selectable = sql.join( + self.inherits.persist_selectable, + self.local_table, + self.inherit_condition, + ) + + fks = util.to_set(self.inherit_foreign_keys) + self._inherits_equated_pairs = sql_util.criterion_as_pairs( + self.persist_selectable.onclause, + consider_as_foreign_keys=fks, + ) + else: + self.persist_selectable = self.local_table + + if self.polymorphic_identity is None: + self._identity_class = self.class_ + + if ( + not self.polymorphic_abstract + and self.inherits.base_mapper.polymorphic_on is not None + ): + util.warn( + f"{self} does not indicate a 'polymorphic_identity', " + "yet is part of an inheritance hierarchy that has a " + f"'polymorphic_on' column of " + f"'{self.inherits.base_mapper.polymorphic_on}'. " + "If this is an intermediary class that should not be " + "instantiated, the class may either be left unmapped, " + "or may include the 'polymorphic_abstract=True' " + "parameter in its Mapper arguments. To leave the " + "class unmapped when using Declarative, set the " + "'__abstract__ = True' attribute on the class." + ) + elif self.concrete: + self._identity_class = self.class_ + else: + self._identity_class = self.inherits._identity_class + + if self.version_id_col is None: + self.version_id_col = self.inherits.version_id_col + self.version_id_generator = self.inherits.version_id_generator + elif ( + self.inherits.version_id_col is not None + and self.version_id_col is not self.inherits.version_id_col + ): + util.warn( + "Inheriting version_id_col '%s' does not match inherited " + "version_id_col '%s' and will not automatically populate " + "the inherited versioning column. " + "version_id_col should only be specified on " + "the base-most mapper that includes versioning." + % ( + self.version_id_col.description, + self.inherits.version_id_col.description, + ) + ) + + self.polymorphic_map = self.inherits.polymorphic_map + self.batch = self.inherits.batch + self.inherits._inheriting_mappers.append(self) + self.base_mapper = self.inherits.base_mapper + self.passive_updates = self.inherits.passive_updates + self.passive_deletes = ( + self.inherits.passive_deletes or self.passive_deletes + ) + self._all_tables = self.inherits._all_tables + + if self.polymorphic_identity is not None: + if self.polymorphic_identity in self.polymorphic_map: + util.warn( + "Reassigning polymorphic association for identity %r " + "from %r to %r: Check for duplicate use of %r as " + "value for polymorphic_identity." 
+ % ( + self.polymorphic_identity, + self.polymorphic_map[self.polymorphic_identity], + self, + self.polymorphic_identity, + ) + ) + self.polymorphic_map[self.polymorphic_identity] = self + + if self.polymorphic_load and self.concrete: + raise sa_exc.ArgumentError( + "polymorphic_load is not currently supported " + "with concrete table inheritance" + ) + if self.polymorphic_load == "inline": + self.inherits._add_with_polymorphic_subclass(self) + elif self.polymorphic_load == "selectin": + pass + elif self.polymorphic_load is not None: + raise sa_exc.ArgumentError( + "unknown argument for polymorphic_load: %r" + % self.polymorphic_load + ) + + else: + self._all_tables = set() + self.base_mapper = self + assert self.local_table is not None + self.persist_selectable = self.local_table + if self.polymorphic_identity is not None: + self.polymorphic_map[self.polymorphic_identity] = self + self._identity_class = self.class_ + + if self.persist_selectable is None: + raise sa_exc.ArgumentError( + "Mapper '%s' does not have a persist_selectable specified." + % self + ) + + def _set_with_polymorphic( + self, with_polymorphic: Optional[_WithPolymorphicArg] + ) -> None: + if with_polymorphic == "*": + self.with_polymorphic = ("*", None) + elif isinstance(with_polymorphic, (tuple, list)): + if isinstance(with_polymorphic[0], (str, tuple, list)): + self.with_polymorphic = cast( + """Tuple[ + Union[ + Literal["*"], + Sequence[Union["Mapper[Any]", Type[Any]]], + ], + Optional["FromClause"], + ]""", + with_polymorphic, + ) + else: + self.with_polymorphic = (with_polymorphic, None) + elif with_polymorphic is not None: + raise sa_exc.ArgumentError( + f"Invalid setting for with_polymorphic: {with_polymorphic!r}" + ) + else: + self.with_polymorphic = None + + if self.with_polymorphic and self.with_polymorphic[1] is not None: + self.with_polymorphic = ( + self.with_polymorphic[0], + coercions.expect( + roles.StrictFromClauseRole, + self.with_polymorphic[1], + allow_select=True, + ), + ) + + if self.configured: + self._expire_memoizations() + + def _add_with_polymorphic_subclass(self, mapper): + subcl = mapper.class_ + if self.with_polymorphic is None: + self._set_with_polymorphic((subcl,)) + elif self.with_polymorphic[0] != "*": + assert isinstance(self.with_polymorphic[0], tuple) + self._set_with_polymorphic( + (self.with_polymorphic[0] + (subcl,), self.with_polymorphic[1]) + ) + + def _set_concrete_base(self, mapper): + """Set the given :class:`_orm.Mapper` as the 'inherits' for this + :class:`_orm.Mapper`, assuming this :class:`_orm.Mapper` is concrete + and does not already have an inherits.""" + + assert self.concrete + assert not self.inherits + assert isinstance(mapper, Mapper) + self.inherits = mapper + self.inherits.polymorphic_map.update(self.polymorphic_map) + self.polymorphic_map = self.inherits.polymorphic_map + for mapper in self.iterate_to_root(): + if mapper.polymorphic_on is not None: + mapper._requires_row_aliasing = True + self.batch = self.inherits.batch + for mp in self.self_and_descendants: + mp.base_mapper = self.inherits.base_mapper + self.inherits._inheriting_mappers.append(self) + self.passive_updates = self.inherits.passive_updates + self._all_tables = self.inherits._all_tables + + for key, prop in mapper._props.items(): + if key not in self._props and not self._should_exclude( + key, key, local=False, column=None + ): + self._adapt_inherited_property(key, prop, False) + + def _set_polymorphic_on(self, polymorphic_on): + self.polymorphic_on = polymorphic_on + 
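+        # reconfigure the polymorphic identity setter so that it targets
+        # the newly assigned polymorphic_on column / expression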
self._configure_polymorphic_setter(True) + + def _configure_class_instrumentation(self): + """If this mapper is to be a primary mapper (i.e. the + non_primary flag is not set), associate this Mapper with the + given class and entity name. + + Subsequent calls to ``class_mapper()`` for the ``class_`` / ``entity`` + name combination will return this mapper. Also decorate the + `__init__` method on the mapped class to include optional + auto-session attachment logic. + + """ + + # we expect that declarative has applied the class manager + # already and set up a registry. if this is None, + # this raises as of 2.0. + manager = attributes.opt_manager_of_class(self.class_) + + if self.non_primary: + if not manager or not manager.is_mapped: + raise sa_exc.InvalidRequestError( + "Class %s has no primary mapper configured. Configure " + "a primary mapper first before setting up a non primary " + "Mapper." % self.class_ + ) + self.class_manager = manager + + assert manager.registry is not None + self.registry = manager.registry + self._identity_class = manager.mapper._identity_class + manager.registry._add_non_primary_mapper(self) + return + + if manager is None or not manager.registry: + raise sa_exc.InvalidRequestError( + "The _mapper() function and Mapper() constructor may not be " + "invoked directly outside of a declarative registry." + " Please use the sqlalchemy.orm.registry.map_imperatively() " + "function for a classical mapping." + ) + + self.dispatch.instrument_class(self, self.class_) + + # this invokes the class_instrument event and sets up + # the __init__ method. documented behavior is that this must + # occur after the instrument_class event above. + # yes two events with the same two words reversed and different APIs. + # :( + + manager = instrumentation.register_class( + self.class_, + mapper=self, + expired_attribute_loader=util.partial( + loading.load_scalar_attributes, self + ), + # finalize flag means instrument the __init__ method + # and call the class_instrument event + finalize=True, + ) + + self.class_manager = manager + + assert manager.registry is not None + self.registry = manager.registry + + # The remaining members can be added by any mapper, + # e_name None or not. + if manager.mapper is None: + return + + event.listen(manager, "init", _event_on_init, raw=True) + + for key, method in util.iterate_attributes(self.class_): + if key == "__init__" and hasattr(method, "_sa_original_init"): + method = method._sa_original_init + if hasattr(method, "__func__"): + method = method.__func__ + if callable(method): + if hasattr(method, "__sa_reconstructor__"): + self._reconstructor = method + event.listen(manager, "load", _event_on_load, raw=True) + elif hasattr(method, "__sa_validators__"): + validation_opts = method.__sa_validation_opts__ + for name in method.__sa_validators__: + if name in self.validators: + raise sa_exc.InvalidRequestError( + "A validation function for mapped " + "attribute %r on mapper %s already exists." + % (name, self) + ) + self.validators = self.validators.union( + {name: (method, validation_opts)} + ) + + def _set_dispose_flags(self) -> None: + self.configured = True + self._ready_for_configure = True + self._dispose_called = True + + self.__dict__.pop("_configure_failed", None) + + def _str_arg_to_mapped_col(self, argname: str, key: str) -> Column[Any]: + try: + prop = self._props[key] + except KeyError as err: + raise sa_exc.ArgumentError( + f"Can't determine {argname} column '{key}' - " + "no attribute is mapped to this name." 
+ ) from err + try: + expr = prop.expression + except AttributeError as ae: + raise sa_exc.ArgumentError( + f"Can't determine {argname} column '{key}'; " + "property does not refer to a single mapped Column" + ) from ae + if not isinstance(expr, Column): + raise sa_exc.ArgumentError( + f"Can't determine {argname} column '{key}'; " + "property does not refer to a single " + "mapped Column" + ) + return expr + + def _configure_pks(self) -> None: + self.tables = sql_util.find_tables(self.persist_selectable) + + self._all_tables.update(t for t in self.tables) + + self._pks_by_table = {} + self._cols_by_table = {} + + all_cols = util.column_set( + chain(*[col.proxy_set for col in self._columntoproperty]) + ) + + pk_cols = util.column_set(c for c in all_cols if c.primary_key) + + # identify primary key columns which are also mapped by this mapper. + for fc in set(self.tables).union([self.persist_selectable]): + if fc.primary_key and pk_cols.issuperset(fc.primary_key): + # ordering is important since it determines the ordering of + # mapper.primary_key (and therefore query.get()) + self._pks_by_table[fc] = util.ordered_column_set( # type: ignore # noqa: E501 + fc.primary_key + ).intersection( + pk_cols + ) + self._cols_by_table[fc] = util.ordered_column_set(fc.c).intersection( # type: ignore # noqa: E501 + all_cols + ) + + if self._primary_key_argument: + coerced_pk_arg = [ + ( + self._str_arg_to_mapped_col("primary_key", c) + if isinstance(c, str) + else c + ) + for c in ( + coercions.expect( + roles.DDLConstraintColumnRole, + coerce_pk, + argname="primary_key", + ) + for coerce_pk in self._primary_key_argument + ) + ] + else: + coerced_pk_arg = None + + # if explicit PK argument sent, add those columns to the + # primary key mappings + if coerced_pk_arg: + for k in coerced_pk_arg: + if k.table not in self._pks_by_table: + self._pks_by_table[k.table] = util.OrderedSet() + self._pks_by_table[k.table].add(k) + + # otherwise, see that we got a full PK for the mapped table + elif ( + self.persist_selectable not in self._pks_by_table + or len(self._pks_by_table[self.persist_selectable]) == 0 + ): + raise sa_exc.ArgumentError( + "Mapper %s could not assemble any primary " + "key columns for mapped table '%s'" + % (self, self.persist_selectable.description) + ) + elif self.local_table not in self._pks_by_table and isinstance( + self.local_table, schema.Table + ): + util.warn( + "Could not assemble any primary " + "keys for locally mapped table '%s' - " + "no rows will be persisted in this Table." + % self.local_table.description + ) + + if ( + self.inherits + and not self.concrete + and not self._primary_key_argument + ): + # if inheriting, the "primary key" for this mapper is + # that of the inheriting (unless concrete or explicit) + self.primary_key = self.inherits.primary_key + else: + # determine primary key from argument or persist_selectable pks + primary_key: Collection[ColumnElement[Any]] + + if coerced_pk_arg: + primary_key = [ + cc if cc is not None else c + for cc, c in ( + (self.persist_selectable.corresponding_column(c), c) + for c in coerced_pk_arg + ) + ] + else: + # if heuristically determined PKs, reduce to the minimal set + # of columns by eliminating FK->PK pairs for a multi-table + # expression. 
May over-reduce for some kinds of UNIONs + # / CTEs; use explicit PK argument for these special cases + primary_key = sql_util.reduce_columns( + self._pks_by_table[self.persist_selectable], + ignore_nonexistent_tables=True, + ) + + if len(primary_key) == 0: + raise sa_exc.ArgumentError( + "Mapper %s could not assemble any primary " + "key columns for mapped table '%s'" + % (self, self.persist_selectable.description) + ) + + self.primary_key = tuple(primary_key) + self._log("Identified primary key columns: %s", primary_key) + + # determine cols that aren't expressed within our tables; mark these + # as "read only" properties which are refreshed upon INSERT/UPDATE + self._readonly_props = { + self._columntoproperty[col] + for col in self._columntoproperty + if self._columntoproperty[col] not in self._identity_key_props + and ( + not hasattr(col, "table") + or col.table not in self._cols_by_table + ) + } + + def _configure_properties(self) -> None: + self.columns = self.c = sql_base.ColumnCollection() # type: ignore + + # object attribute names mapped to MapperProperty objects + self._props = util.OrderedDict() + + # table columns mapped to MapperProperty + self._columntoproperty = _ColumnMapping(self) + + explicit_col_props_by_column: Dict[ + KeyedColumnElement[Any], Tuple[str, ColumnProperty[Any]] + ] = {} + explicit_col_props_by_key: Dict[str, ColumnProperty[Any]] = {} + + # step 1: go through properties that were explicitly passed + # in the properties dictionary. For Columns that are local, put them + # aside in a separate collection we will reconcile with the Table + # that's given. For other properties, set them up in _props now. + if self._init_properties: + for key, prop_arg in self._init_properties.items(): + if not isinstance(prop_arg, MapperProperty): + possible_col_prop = self._make_prop_from_column( + key, prop_arg + ) + else: + possible_col_prop = prop_arg + + # issue #8705. if the explicit property is actually a + # Column that is local to the local Table, don't set it up + # in ._props yet, integrate it into the order given within + # the Table. + + _map_as_property_now = True + if isinstance(possible_col_prop, properties.ColumnProperty): + for given_col in possible_col_prop.columns: + if self.local_table.c.contains_column(given_col): + _map_as_property_now = False + explicit_col_props_by_key[key] = possible_col_prop + explicit_col_props_by_column[given_col] = ( + key, + possible_col_prop, + ) + + if _map_as_property_now: + self._configure_property( + key, + possible_col_prop, + init=False, + ) + + # step 2: pull properties from the inherited mapper. reconcile + # columns with those which are explicit above. for properties that + # are only in the inheriting mapper, set them up as local props + if self.inherits: + for key, inherited_prop in self.inherits._props.items(): + if self._should_exclude(key, key, local=False, column=None): + continue + + incoming_prop = explicit_col_props_by_key.get(key) + if incoming_prop: + new_prop = self._reconcile_prop_with_incoming_columns( + key, + inherited_prop, + warn_only=False, + incoming_prop=incoming_prop, + ) + explicit_col_props_by_key[key] = new_prop + + for inc_col in incoming_prop.columns: + explicit_col_props_by_column[inc_col] = ( + key, + new_prop, + ) + elif key not in self._props: + self._adapt_inherited_property(key, inherited_prop, False) + + # step 3. Iterate through all columns in the persist selectable. 
+ # this includes not only columns in the local table / fromclause, + # but also those columns in the superclass table if we are joined + # inh or single inh mapper. map these columns as well. additional + # reconciliation against inherited columns occurs here also. + + for column in self.persist_selectable.columns: + if column in explicit_col_props_by_column: + # column was explicitly passed to properties; configure + # it now in the order in which it corresponds to the + # Table / selectable + key, prop = explicit_col_props_by_column[column] + self._configure_property(key, prop, init=False) + continue + + elif column in self._columntoproperty: + continue + + column_key = (self.column_prefix or "") + column.key + if self._should_exclude( + column.key, + column_key, + local=self.local_table.c.contains_column(column), + column=column, + ): + continue + + # adjust the "key" used for this column to that + # of the inheriting mapper + for mapper in self.iterate_to_root(): + if column in mapper._columntoproperty: + column_key = mapper._columntoproperty[column].key + + self._configure_property( + column_key, + column, + init=False, + setparent=True, + ) + + def _configure_polymorphic_setter(self, init=False): + """Configure an attribute on the mapper representing the + 'polymorphic_on' column, if applicable, and not + already generated by _configure_properties (which is typical). + + Also create a setter function which will assign this + attribute to the value of the 'polymorphic_identity' + upon instance construction, also if applicable. This + routine will run when an instance is created. + + """ + setter = False + polymorphic_key: Optional[str] = None + + if self.polymorphic_on is not None: + setter = True + + if isinstance(self.polymorphic_on, str): + # polymorphic_on specified as a string - link + # it to mapped ColumnProperty + try: + self.polymorphic_on = self._props[self.polymorphic_on] + except KeyError as err: + raise sa_exc.ArgumentError( + "Can't determine polymorphic_on " + "value '%s' - no attribute is " + "mapped to this name." % self.polymorphic_on + ) from err + + if self.polymorphic_on in self._columntoproperty: + # polymorphic_on is a column that is already mapped + # to a ColumnProperty + prop = self._columntoproperty[self.polymorphic_on] + elif isinstance(self.polymorphic_on, MapperProperty): + # polymorphic_on is directly a MapperProperty, + # ensure it's a ColumnProperty + if not isinstance( + self.polymorphic_on, properties.ColumnProperty + ): + raise sa_exc.ArgumentError( + "Only direct column-mapped " + "property or SQL expression " + "can be passed for polymorphic_on" + ) + prop = self.polymorphic_on + else: + # polymorphic_on is a Column or SQL expression and + # doesn't appear to be mapped. this means it can be 1. + # only present in the with_polymorphic selectable or + # 2. a totally standalone SQL expression which we'd + # hope is compatible with this mapper's persist_selectable + col = self.persist_selectable.corresponding_column( + self.polymorphic_on + ) + if col is None: + # polymorphic_on doesn't derive from any + # column/expression isn't present in the mapped + # table. we will make a "hidden" ColumnProperty + # for it. Just check that if it's directly a + # schema.Column and we have with_polymorphic, it's + # likely a user error if the schema.Column isn't + # represented somehow in either persist_selectable or + # with_polymorphic. Otherwise as of 0.7.4 we + # just go with it and assume the user wants it + # that way (i.e. 
a CASE statement)
+                    setter = False
+                    instrument = False
+                    col = self.polymorphic_on
+                    if isinstance(col, schema.Column) and (
+                        self.with_polymorphic is None
+                        or self.with_polymorphic[1] is None
+                        or self.with_polymorphic[1].corresponding_column(col)
+                        is None
+                    ):
+                        raise sa_exc.InvalidRequestError(
+                            "Could not map polymorphic_on column "
+                            "'%s' to the mapped table - polymorphic "
+                            "loads will not function properly"
+                            % col.description
+                        )
+                else:
+                    # column/expression that polymorphic_on derives from
+                    # is present in our mapped table
+                    # and is probably mapped, but polymorphic_on itself
+                    # is not. This happens when
+                    # the polymorphic_on is only directly present in the
+                    # with_polymorphic selectable, as when using
+                    # polymorphic_union.
+                    # we'll make a separate ColumnProperty for it.
+                    instrument = True
+                key = getattr(col, "key", None)
+                if key:
+                    if self._should_exclude(key, key, False, col):
+                        raise sa_exc.InvalidRequestError(
+                            "Cannot exclude or override the "
+                            "discriminator column %r" % key
+                        )
+                else:
+                    self.polymorphic_on = col = col.label("_sa_polymorphic_on")
+                    key = col.key
+
+                prop = properties.ColumnProperty(col, _instrument=instrument)
+                self._configure_property(key, prop, init=init, setparent=True)
+
+                # the actual polymorphic_on should be the first public-facing
+                # column in the property
+                self.polymorphic_on = prop.columns[0]
+                polymorphic_key = prop.key
+        else:
+            # no polymorphic_on was set.
+            # check inheriting mappers for one.
+            for mapper in self.iterate_to_root():
+                # determine if polymorphic_on of the parent
+                # should be propagated here. If the col
+                # is present in our mapped table, or if our mapped
+                # table is the same as the parent (i.e. single table
+                # inheritance), we can use it
+                if mapper.polymorphic_on is not None:
+                    if self.persist_selectable is mapper.persist_selectable:
+                        self.polymorphic_on = mapper.polymorphic_on
+                    else:
+                        self.polymorphic_on = (
+                            self.persist_selectable
+                        ).corresponding_column(mapper.polymorphic_on)
+                    # we can use the parent mapper's _set_polymorphic_identity
+                    # directly; it ensures the polymorphic_identity of the
+                    # instance's mapper is used so it is portable to
+                    # subclasses.
+                    if self.polymorphic_on is not None:
+                        self._set_polymorphic_identity = (
+                            mapper._set_polymorphic_identity
+                        )
+                        self._polymorphic_attr_key = (
+                            mapper._polymorphic_attr_key
+                        )
+                        self._validate_polymorphic_identity = (
+                            mapper._validate_polymorphic_identity
+                        )
+                    else:
+                        self._set_polymorphic_identity = None
+                        self._polymorphic_attr_key = None
+                    return
+
+        if self.polymorphic_abstract and self.polymorphic_on is None:
+            raise sa_exc.InvalidRequestError(
+                "The Mapper.polymorphic_abstract parameter may only be used "
+                "on a mapper hierarchy which includes the "
+                "Mapper.polymorphic_on parameter at the base of the hierarchy."
+            )
+
+        if setter:
+
+            def _set_polymorphic_identity(state):
+                dict_ = state.dict
+                # TODO: what happens if polymorphic_on column attribute name
+                # does not match .key?
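+
+                # Editor's sketch (hypothetical classes, not part of this
+                # module): given
+                #
+                #     class Employee(Base):
+                #         __mapper_args__ = {
+                #             "polymorphic_on": type,
+                #             "polymorphic_identity": "employee",
+                #         }
+                #
+                #     class Manager(Employee):
+                #         __mapper_args__ = {"polymorphic_identity": "manager"}
+                #
+                # constructing Manager() runs this setter, which writes
+                # "manager" into the new instance's discriminator attribute
+                # via the polymorphic_key impl below.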
+ + polymorphic_identity = ( + state.manager.mapper.polymorphic_identity + ) + if ( + polymorphic_identity is None + and state.manager.mapper.polymorphic_abstract + ): + raise sa_exc.InvalidRequestError( + f"Can't instantiate class for {state.manager.mapper}; " + "mapper is marked polymorphic_abstract=True" + ) + + state.get_impl(polymorphic_key).set( + state, + dict_, + polymorphic_identity, + None, + ) + + self._polymorphic_attr_key = polymorphic_key + + def _validate_polymorphic_identity(mapper, state, dict_): + if ( + polymorphic_key in dict_ + and dict_[polymorphic_key] + not in mapper._acceptable_polymorphic_identities + ): + util.warn_limited( + "Flushing object %s with " + "incompatible polymorphic identity %r; the " + "object may not refresh and/or load correctly", + (state_str(state), dict_[polymorphic_key]), + ) + + self._set_polymorphic_identity = _set_polymorphic_identity + self._validate_polymorphic_identity = ( + _validate_polymorphic_identity + ) + else: + self._polymorphic_attr_key = None + self._set_polymorphic_identity = None + + _validate_polymorphic_identity = None + + @HasMemoized.memoized_attribute + def _version_id_prop(self): + if self.version_id_col is not None: + return self._columntoproperty[self.version_id_col] + else: + return None + + @HasMemoized.memoized_attribute + def _acceptable_polymorphic_identities(self): + identities = set() + + stack = deque([self]) + while stack: + item = stack.popleft() + if item.persist_selectable is self.persist_selectable: + identities.add(item.polymorphic_identity) + stack.extend(item._inheriting_mappers) + + return identities + + @HasMemoized.memoized_attribute + def _prop_set(self): + return frozenset(self._props.values()) + + @util.preload_module("sqlalchemy.orm.descriptor_props") + def _adapt_inherited_property(self, key, prop, init): + descriptor_props = util.preloaded.orm_descriptor_props + + if not self.concrete: + self._configure_property(key, prop, init=False, setparent=False) + elif key not in self._props: + # determine if the class implements this attribute; if not, + # or if it is implemented by the attribute that is handling the + # given superclass-mapped property, then we need to report that we + # can't use this at the instance level since we are a concrete + # mapper and we don't map this. don't trip user-defined + # descriptors that might have side effects when invoked. 
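+            # Editor's sketch (hypothetical classes): for a concrete mapping
+            # such as
+            #
+            #     class Manager(Employee):
+            #         __mapper_args__ = {"concrete": True}
+            #
+            # a superclass attribute like Employee.name that Manager does not
+            # re-map is handed a ConcreteInheritedProperty below, so that
+            # accessing Manager.name raises informatively rather than
+            # silently using the superclass mapping.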
+ implementing_attribute = self.class_manager._get_class_attr_mro( + key, prop + ) + if implementing_attribute is prop or ( + isinstance( + implementing_attribute, attributes.InstrumentedAttribute + ) + and implementing_attribute._parententity is prop.parent + ): + self._configure_property( + key, + descriptor_props.ConcreteInheritedProperty(), + init=init, + setparent=True, + ) + + @util.preload_module("sqlalchemy.orm.descriptor_props") + def _configure_property( + self, + key: str, + prop_arg: Union[KeyedColumnElement[Any], MapperProperty[Any]], + *, + init: bool = True, + setparent: bool = True, + warn_for_existing: bool = False, + ) -> MapperProperty[Any]: + descriptor_props = util.preloaded.orm_descriptor_props + self._log( + "_configure_property(%s, %s)", key, prop_arg.__class__.__name__ + ) + + if not isinstance(prop_arg, MapperProperty): + prop: MapperProperty[Any] = self._property_from_column( + key, prop_arg + ) + else: + prop = prop_arg + + if isinstance(prop, properties.ColumnProperty): + col = self.persist_selectable.corresponding_column(prop.columns[0]) + + # if the column is not present in the mapped table, + # test if a column has been added after the fact to the + # parent table (or their parent, etc.) [ticket:1570] + if col is None and self.inherits: + path = [self] + for m in self.inherits.iterate_to_root(): + col = m.local_table.corresponding_column(prop.columns[0]) + if col is not None: + for m2 in path: + m2.persist_selectable._refresh_for_new_column(col) + col = self.persist_selectable.corresponding_column( + prop.columns[0] + ) + break + path.append(m) + + # subquery expression, column not present in the mapped + # selectable. + if col is None: + col = prop.columns[0] + + # column is coming in after _readonly_props was + # initialized; check for 'readonly' + if hasattr(self, "_readonly_props") and ( + not hasattr(col, "table") + or col.table not in self._cols_by_table + ): + self._readonly_props.add(prop) + + else: + # if column is coming in after _cols_by_table was + # initialized, ensure the col is in the right set + if ( + hasattr(self, "_cols_by_table") + and col.table in self._cols_by_table + and col not in self._cols_by_table[col.table] + ): + self._cols_by_table[col.table].add(col) + + # if this properties.ColumnProperty represents the "polymorphic + # discriminator" column, mark it. We'll need this when rendering + # columns in SELECT statements. + if not hasattr(prop, "_is_polymorphic_discriminator"): + prop._is_polymorphic_discriminator = ( + col is self.polymorphic_on + or prop.columns[0] is self.polymorphic_on + ) + + if isinstance(col, expression.Label): + # new in 1.4, get column property against expressions + # to be addressable in subqueries + col.key = col._tq_key_label = key + + self.columns.add(col, key) + + for col in prop.columns: + for proxy_col in col.proxy_set: + self._columntoproperty[proxy_col] = prop + + if getattr(prop, "key", key) != key: + util.warn( + f"ORM mapped property {self.class_.__name__}.{prop.key} being " + "assigned to attribute " + f"{key!r} is already associated with " + f"attribute {prop.key!r}. The attribute will be de-associated " + f"from {prop.key!r}." 
+ ) + + prop.key = key + + if setparent: + prop.set_parent(self, init) + + if key in self._props and getattr( + self._props[key], "_mapped_by_synonym", False + ): + syn = self._props[key]._mapped_by_synonym + raise sa_exc.ArgumentError( + "Can't call map_column=True for synonym %r=%r, " + "a ColumnProperty already exists keyed to the name " + "%r for column %r" % (syn, key, key, syn) + ) + + # replacement cases + + # case one: prop is replacing a prop that we have mapped. this is + # independent of whatever might be in the actual class dictionary + if ( + key in self._props + and not isinstance( + self._props[key], descriptor_props.ConcreteInheritedProperty + ) + and not isinstance(prop, descriptor_props.SynonymProperty) + ): + if warn_for_existing: + util.warn_deprecated( + f"User-placed attribute {self.class_.__name__}.{key} on " + f"{self} is replacing an existing ORM-mapped attribute. " + "Behavior is not fully defined in this case. This " + "use is deprecated and will raise an error in a future " + "release", + "2.0", + ) + oldprop = self._props[key] + self._path_registry.pop(oldprop, None) + + # case two: prop is replacing an attribute on the class of some kind. + # we have to be more careful here since it's normal when using + # Declarative that all the "declared attributes" on the class + # get replaced. + elif ( + warn_for_existing + and self.class_.__dict__.get(key, None) is not None + and not isinstance(prop, descriptor_props.SynonymProperty) + and not isinstance( + self._props.get(key, None), + descriptor_props.ConcreteInheritedProperty, + ) + ): + util.warn_deprecated( + f"User-placed attribute {self.class_.__name__}.{key} on " + f"{self} is replacing an existing class-bound " + "attribute of the same name. " + "Behavior is not fully defined in this case. This " + "use is deprecated and will raise an error in a future " + "release", + "2.0", + ) + + self._props[key] = prop + + if not self.non_primary: + prop.instrument_class(self) + + for mapper in self._inheriting_mappers: + mapper._adapt_inherited_property(key, prop, init) + + if init: + prop.init() + prop.post_instrument_class(self) + + if self.configured: + self._expire_memoizations() + + return prop + + def _make_prop_from_column( + self, + key: str, + column: Union[ + Sequence[KeyedColumnElement[Any]], KeyedColumnElement[Any] + ], + ) -> ColumnProperty[Any]: + columns = util.to_list(column) + mapped_column = [] + for c in columns: + mc = self.persist_selectable.corresponding_column(c) + if mc is None: + mc = self.local_table.corresponding_column(c) + if mc is not None: + # if the column is in the local table but not the + # mapped table, this corresponds to adding a + # column after the fact to the local table. + # [ticket:1523] + self.persist_selectable._refresh_for_new_column(mc) + mc = self.persist_selectable.corresponding_column(c) + if mc is None: + raise sa_exc.ArgumentError( + "When configuring property '%s' on %s, " + "column '%s' is not represented in the mapper's " + "table. Use the `column_property()` function to " + "force this column to be mapped as a read-only " + "attribute." 
+                        % (key, self, c)
+                    )
+            mapped_column.append(mc)
+        return properties.ColumnProperty(*mapped_column)
+
+    def _reconcile_prop_with_incoming_columns(
+        self,
+        key: str,
+        existing_prop: MapperProperty[Any],
+        warn_only: bool,
+        incoming_prop: Optional[ColumnProperty[Any]] = None,
+        single_column: Optional[KeyedColumnElement[Any]] = None,
+    ) -> ColumnProperty[Any]:
+        if incoming_prop and (
+            self.concrete
+            or not isinstance(existing_prop, properties.ColumnProperty)
+        ):
+            return incoming_prop
+
+        existing_column = existing_prop.columns[0]
+
+        if incoming_prop and existing_column in incoming_prop.columns:
+            return incoming_prop
+
+        if incoming_prop is None:
+            assert single_column is not None
+            incoming_column = single_column
+            equated_pair_key = (existing_prop.columns[0], incoming_column)
+        else:
+            assert single_column is None
+            incoming_column = incoming_prop.columns[0]
+            equated_pair_key = (incoming_column, existing_prop.columns[0])
+
+        if (
+            (
+                not self._inherits_equated_pairs
+                or (equated_pair_key not in self._inherits_equated_pairs)
+            )
+            and not existing_column.shares_lineage(incoming_column)
+            and existing_column is not self.version_id_col
+            and incoming_column is not self.version_id_col
+        ):
+            msg = (
+                "Implicitly combining column %s with column "
+                "%s under attribute '%s'. Please configure one "
+                "or more attributes for these same-named columns "
+                "explicitly."
+                % (
+                    existing_prop.columns[-1],
+                    incoming_column,
+                    key,
+                )
+            )
+            if warn_only:
+                util.warn(msg)
+            else:
+                raise sa_exc.InvalidRequestError(msg)
+
+        # existing properties.ColumnProperty from an inheriting
+        # mapper. make a copy and append our column to it
+        new_prop = existing_prop.copy()
+
+        new_prop.columns.insert(0, incoming_column)
+        self._log(
+            "inserting column to existing list "
+            "in properties.ColumnProperty %s",
+            key,
+        )
+        return new_prop  # type: ignore
+
+    @util.preload_module("sqlalchemy.orm.descriptor_props")
+    def _property_from_column(
+        self,
+        key: str,
+        column: KeyedColumnElement[Any],
+    ) -> ColumnProperty[Any]:
+        """generate/update a :class:`.ColumnProperty` given a
+        :class:`_schema.Column` or other SQL expression object."""
+
+        descriptor_props = util.preloaded.orm_descriptor_props
+
+        prop = self._props.get(key)
+
+        if isinstance(prop, properties.ColumnProperty):
+            return self._reconcile_prop_with_incoming_columns(
+                key,
+                prop,
+                single_column=column,
+                warn_only=prop.parent is not self,
+            )
+        elif prop is None or isinstance(
+            prop, descriptor_props.ConcreteInheritedProperty
+        ):
+            return self._make_prop_from_column(key, column)
+        else:
+            raise sa_exc.ArgumentError(
+                "WARNING: when configuring property '%s' on %s, "
+                "column '%s' conflicts with property '%r'. "
+                "To resolve this, map the column to the class under a "
+                "different name in the 'properties' dictionary. Or, "
+                "to remove all awareness of the column entirely "
+                "(including its availability as a foreign key), "
+                "use the 'include_properties' or 'exclude_properties' "
+                "mapper arguments to control specifically which table "
+                "columns get mapped."
+                % (key, self, column.key, prop)
+            )
+
+    @util.langhelpers.tag_method_for_warnings(
+        "This warning originated from the `configure_mappers()` process, "
+        "which was invoked automatically in response to a user-initiated "
+        "operation.",
+        sa_exc.SAWarning,
+    )
+    def _check_configure(self) -> None:
+        if self.registry._new_mappers:
+            _configure_registries({self.registry}, cascade=True)
+
+    def _post_configure_properties(self) -> None:
+        """Call the ``init()`` method on all ``MapperProperties``
+        attached to this mapper.
+
+        This is a deferred configuration step which is intended
+        to execute once all mappers have been constructed.
+
+        """
+
+        self._log("_post_configure_properties() started")
+        l = [(key, prop) for key, prop in self._props.items()]
+        for key, prop in l:
+            self._log("initialize prop %s", key)
+
+            if prop.parent is self and not prop._configure_started:
+                prop.init()
+
+            if prop._configure_finished:
+                prop.post_instrument_class(self)
+
+        self._log("_post_configure_properties() complete")
+        self.configured = True
+
+    def add_properties(self, dict_of_properties):
+        """Add the given dictionary of properties to this mapper,
+        using `add_property`.
+
+        """
+        for key, value in dict_of_properties.items():
+            self.add_property(key, value)
+
+    def add_property(
+        self, key: str, prop: Union[Column[Any], MapperProperty[Any]]
+    ) -> None:
+        """Add an individual MapperProperty to this mapper.
+
+        If the mapper has not been configured yet, just adds the
+        property to the initial properties dictionary sent to the
+        constructor. If this Mapper has already been configured, then
+        the given MapperProperty is configured immediately.
+
+        """
+        prop = self._configure_property(
+            key, prop, init=self.configured, warn_for_existing=True
+        )
+        assert isinstance(prop, MapperProperty)
+        self._init_properties[key] = prop
+
+    def _expire_memoizations(self) -> None:
+        for mapper in self.iterate_to_root():
+            mapper._reset_memoizations()
+
+    @property
+    def _log_desc(self) -> str:
+        return (
+            "("
+            + self.class_.__name__
+            + "|"
+            + (
+                self.local_table is not None
+                and self.local_table.description
+                or str(self.local_table)
+            )
+            + (self.non_primary and "|non-primary" or "")
+            + ")"
+        )
+
+    def _log(self, msg: str, *args: Any) -> None:
+        self.logger.info("%s " + msg, *((self._log_desc,) + args))
+
+    def _log_debug(self, msg: str, *args: Any) -> None:
+        self.logger.debug("%s " + msg, *((self._log_desc,) + args))
+
+    def __repr__(self) -> str:
+        return "<Mapper at 0x%x; %s>" % (id(self), self.class_.__name__)
+
+    def __str__(self) -> str:
+        return "Mapper[%s%s(%s)]" % (
+            self.class_.__name__,
+            self.non_primary and " (non-primary)" or "",
+            (
+                self.local_table.description
+                if self.local_table is not None
+                else self.persist_selectable.description
+            ),
+        )
+
+    def _is_orphan(self, state: InstanceState[_O]) -> bool:
+        orphan_possible = False
+        for mapper in self.iterate_to_root():
+            for key, cls in mapper._delete_orphans:
+                orphan_possible = True
+
+                has_parent = attributes.manager_of_class(cls).has_parent(
+                    state, key, optimistic=state.has_identity
+                )
+
+                if self.legacy_is_orphan and has_parent:
+                    return False
+                elif not self.legacy_is_orphan and not has_parent:
+                    return True
+
+        if self.legacy_is_orphan:
+            return orphan_possible
+        else:
+            return False
+
+    def has_property(self, key: str) -> bool:
+        return key in self._props
+
+    def get_property(
+        self, key: str, _configure_mappers: bool = False
+    ) -> MapperProperty[Any]:
+        """return a MapperProperty associated with the given key."""
+
+        if _configure_mappers:
self._check_configure() + + try: + return self._props[key] + except KeyError as err: + raise sa_exc.InvalidRequestError( + f"Mapper '{self}' has no property '{key}'. If this property " + "was indicated from other mappers or configure events, ensure " + "registry.configure() has been called." + ) from err + + def get_property_by_column( + self, column: ColumnElement[_T] + ) -> MapperProperty[_T]: + """Given a :class:`_schema.Column` object, return the + :class:`.MapperProperty` which maps this column.""" + + return self._columntoproperty[column] + + @property + def iterate_properties(self): + """return an iterator of all MapperProperty objects.""" + + return iter(self._props.values()) + + def _mappers_from_spec( + self, spec: Any, selectable: Optional[FromClause] + ) -> Sequence[Mapper[Any]]: + """given a with_polymorphic() argument, return the set of mappers it + represents. + + Trims the list of mappers to just those represented within the given + selectable, if present. This helps some more legacy-ish mappings. + + """ + if spec == "*": + mappers = list(self.self_and_descendants) + elif spec: + mapper_set = set() + for m in util.to_list(spec): + m = _class_to_mapper(m) + if not m.isa(self): + raise sa_exc.InvalidRequestError( + "%r does not inherit from %r" % (m, self) + ) + + if selectable is None: + mapper_set.update(m.iterate_to_root()) + else: + mapper_set.add(m) + mappers = [m for m in self.self_and_descendants if m in mapper_set] + else: + mappers = [] + + if selectable is not None: + tables = set( + sql_util.find_tables(selectable, include_aliases=True) + ) + mappers = [m for m in mappers if m.local_table in tables] + return mappers + + def _selectable_from_mappers( + self, mappers: Iterable[Mapper[Any]], innerjoin: bool + ) -> FromClause: + """given a list of mappers (assumed to be within this mapper's + inheritance hierarchy), construct an outerjoin amongst those mapper's + mapped tables. + + """ + from_obj = self.persist_selectable + for m in mappers: + if m is self: + continue + if m.concrete: + raise sa_exc.InvalidRequestError( + "'with_polymorphic()' requires 'selectable' argument " + "when concrete-inheriting mappers are used." + ) + elif not m.single: + if innerjoin: + from_obj = from_obj.join( + m.local_table, m.inherit_condition + ) + else: + from_obj = from_obj.outerjoin( + m.local_table, m.inherit_condition + ) + + return from_obj + + @HasMemoized.memoized_attribute + def _version_id_has_server_side_value(self) -> bool: + vid_col = self.version_id_col + + if vid_col is None: + return False + + elif not isinstance(vid_col, Column): + return True + else: + return vid_col.server_default is not None or ( + vid_col.default is not None + and ( + not vid_col.default.is_scalar + and not vid_col.default.is_callable + ) + ) + + @HasMemoized.memoized_attribute + def _single_table_criterion(self): + if self.single and self.inherits and self.polymorphic_on is not None: + return self.polymorphic_on._annotate( + {"parententity": self, "parentmapper": self} + ).in_( + [ + m.polymorphic_identity + for m in self.self_and_descendants + if not m.polymorphic_abstract + ] + ) + else: + return None + + @HasMemoized.memoized_attribute + def _has_aliased_polymorphic_fromclause(self): + """return True if with_polymorphic[1] is an aliased fromclause, + like a subquery. + + As of #8168, polymorphic adaption with ORMAdapter is used only + if this is present. 
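+
+        For example (editor's sketch; names are hypothetical), a mapping
+        configured as ``with_polymorphic=("*", punion)``, where ``punion``
+        is a subquery or the aliased result of ``polymorphic_union()``,
+        sets this flag; a plain ``with_polymorphic="*"`` that renders a
+        flattened JOIN does not.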
+
+        """
+        return self.with_polymorphic and isinstance(
+            self.with_polymorphic[1],
+            expression.AliasedReturnsRows,
+        )
+
+    @HasMemoized.memoized_attribute
+    def _should_select_with_poly_adapter(self):
+        """determine if _MapperEntity or _ORMColumnEntity will need to use
+        polymorphic adaption when setting up a SELECT as well as fetching
+        rows for mapped classes and subclasses against this Mapper.
+
+        moved here from context.py for #8456 to generalize the ruleset
+        for this condition.
+
+        """
+
+        # this has been simplified as of #8456.
+        # rule is: if we have a with_polymorphic or a concrete-style
+        # polymorphic selectable, *or* if the base mapper has either of those,
+        # we turn on the adaption thing. if not, we do *no* adaption.
+        #
+        # (UPDATE for #8168: the above comment was not accurate, as we were
+        # still saying "do polymorphic" if we were using an auto-generated
+        # flattened JOIN for with_polymorphic.)
+        #
+        # this splits the behavior among the "regular" joined inheritance
+        # and single inheritance mappers, vs. the "weird / difficult"
+        # concrete and joined inh mappings that use a with_polymorphic of
+        # some kind or polymorphic_union.
+        #
+        # note we have some tests in test_polymorphic_rel that query against
+        # a subclass, then refer to the superclass that has a with_polymorphic
+        # on it (such as test_join_from_polymorphic_explicit_aliased_three).
+        # these tests actually adapt the polymorphic selectable (like, the
+        # UNION or the SELECT subquery with JOIN in it) to be just the simple
+        # subclass table. Hence even if we are a "plain" inheriting mapper
+        # but our base has a wpoly on it, we turn on adaption. This is a
+        # legacy case we should probably disable.
+        #
+        #
+        # UPDATE: simplified way more as of #8168. polymorphic adaption
+        # is turned off even if with_polymorphic is set, as long as there
+        # is no user-defined aliased selectable / subquery configured.
+        # this scales back the use of polymorphic adaption in practice
+        # to basically no cases except for concrete inheritance with a
+        # polymorphic base class.
+        #
+        return (
+            self._has_aliased_polymorphic_fromclause
+            or self._requires_row_aliasing
+            or (self.base_mapper._has_aliased_polymorphic_fromclause)
+            or self.base_mapper._requires_row_aliasing
+        )
+
+    @HasMemoized.memoized_attribute
+    def _with_polymorphic_mappers(self) -> Sequence[Mapper[Any]]:
+        self._check_configure()
+
+        if not self.with_polymorphic:
+            return []
+        return self._mappers_from_spec(*self.with_polymorphic)
+
+    @HasMemoized.memoized_attribute
+    def _post_inspect(self):
+        """This hook is invoked by attribute inspection.
+
+        E.g. when Query calls:
+
+            coercions.expect(roles.ColumnsClauseRole, ent, keep_inspect=True)
+
+        This allows the inspection process to run a configure mappers hook.
+
+        """
+        self._check_configure()
+
+    @HasMemoized_ro_memoized_attribute
+    def _with_polymorphic_selectable(self) -> FromClause:
+        if not self.with_polymorphic:
+            return self.persist_selectable
+
+        spec, selectable = self.with_polymorphic
+        if selectable is not None:
+            return selectable
+        else:
+            return self._selectable_from_mappers(
+                self._mappers_from_spec(spec, selectable), False
+            )
+
+    with_polymorphic_mappers = _with_polymorphic_mappers
+    """The list of :class:`_orm.Mapper` objects included in the
+    default "polymorphic" query.
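+
+    For example (editor's sketch; assumes a hypothetical ``Employee``
+    hierarchy mapped with ``with_polymorphic="*"``)::
+
+        from sqlalchemy import inspect
+
+        inspect(Employee).with_polymorphic_mappers
+        # [Mapper[Employee(employee)], Mapper[Manager(manager)]]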
+ + """ + + @HasMemoized_ro_memoized_attribute + def _insert_cols_evaluating_none(self): + return { + table: frozenset( + col for col in columns if col.type.should_evaluate_none + ) + for table, columns in self._cols_by_table.items() + } + + @HasMemoized.memoized_attribute + def _insert_cols_as_none(self): + return { + table: frozenset( + col.key + for col in columns + if not col.primary_key + and not col.server_default + and not col.default + and not col.type.should_evaluate_none + ) + for table, columns in self._cols_by_table.items() + } + + @HasMemoized.memoized_attribute + def _propkey_to_col(self): + return { + table: {self._columntoproperty[col].key: col for col in columns} + for table, columns in self._cols_by_table.items() + } + + @HasMemoized.memoized_attribute + def _pk_keys_by_table(self): + return { + table: frozenset([col.key for col in pks]) + for table, pks in self._pks_by_table.items() + } + + @HasMemoized.memoized_attribute + def _pk_attr_keys_by_table(self): + return { + table: frozenset([self._columntoproperty[col].key for col in pks]) + for table, pks in self._pks_by_table.items() + } + + @HasMemoized.memoized_attribute + def _server_default_cols( + self, + ) -> Mapping[FromClause, FrozenSet[Column[Any]]]: + return { + table: frozenset( + [ + col + for col in cast("Iterable[Column[Any]]", columns) + if col.server_default is not None + or ( + col.default is not None + and col.default.is_clause_element + ) + ] + ) + for table, columns in self._cols_by_table.items() + } + + @HasMemoized.memoized_attribute + def _server_onupdate_default_cols( + self, + ) -> Mapping[FromClause, FrozenSet[Column[Any]]]: + return { + table: frozenset( + [ + col + for col in cast("Iterable[Column[Any]]", columns) + if col.server_onupdate is not None + or ( + col.onupdate is not None + and col.onupdate.is_clause_element + ) + ] + ) + for table, columns in self._cols_by_table.items() + } + + @HasMemoized.memoized_attribute + def _server_default_col_keys(self) -> Mapping[FromClause, FrozenSet[str]]: + return { + table: frozenset(col.key for col in cols if col.key is not None) + for table, cols in self._server_default_cols.items() + } + + @HasMemoized.memoized_attribute + def _server_onupdate_default_col_keys( + self, + ) -> Mapping[FromClause, FrozenSet[str]]: + return { + table: frozenset(col.key for col in cols if col.key is not None) + for table, cols in self._server_onupdate_default_cols.items() + } + + @HasMemoized.memoized_attribute + def _server_default_plus_onupdate_propkeys(self) -> Set[str]: + result: Set[str] = set() + + col_to_property = self._columntoproperty + for table, columns in self._server_default_cols.items(): + result.update( + col_to_property[col].key + for col in columns.intersection(col_to_property) + ) + for table, columns in self._server_onupdate_default_cols.items(): + result.update( + col_to_property[col].key + for col in columns.intersection(col_to_property) + ) + return result + + @HasMemoized.memoized_instancemethod + def __clause_element__(self): + annotations: Dict[str, Any] = { + "entity_namespace": self, + "parententity": self, + "parentmapper": self, + } + if self.persist_selectable is not self.local_table: + # joined table inheritance, with polymorphic selectable, + # etc. 
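+            # (editor's note) e.g. for a joined-inheritance mapping, an
+            # UPDATE or DELETE against the class must target the class-local
+            # table rather than the full JOIN; the "dml_table" annotation
+            # carries that table, while SELECTs continue to use
+            # self.selectable as annotated below.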
+ annotations["dml_table"] = self.local_table._annotate( + { + "entity_namespace": self, + "parententity": self, + "parentmapper": self, + } + )._set_propagate_attrs( + {"compile_state_plugin": "orm", "plugin_subject": self} + ) + + return self.selectable._annotate(annotations)._set_propagate_attrs( + {"compile_state_plugin": "orm", "plugin_subject": self} + ) + + @util.memoized_property + def select_identity_token(self): + return ( + expression.null() + ._annotate( + { + "entity_namespace": self, + "parententity": self, + "parentmapper": self, + "identity_token": True, + } + ) + ._set_propagate_attrs( + {"compile_state_plugin": "orm", "plugin_subject": self} + ) + ) + + @property + def selectable(self) -> FromClause: + """The :class:`_schema.FromClause` construct this + :class:`_orm.Mapper` selects from by default. + + Normally, this is equivalent to :attr:`.persist_selectable`, unless + the ``with_polymorphic`` feature is in use, in which case the + full "polymorphic" selectable is returned. + + """ + return self._with_polymorphic_selectable + + def _with_polymorphic_args( + self, + spec: Any = None, + selectable: Union[Literal[False, None], FromClause] = False, + innerjoin: bool = False, + ) -> Tuple[Sequence[Mapper[Any]], FromClause]: + if selectable not in (None, False): + selectable = coercions.expect( + roles.StrictFromClauseRole, selectable, allow_select=True + ) + + if self.with_polymorphic: + if not spec: + spec = self.with_polymorphic[0] + if selectable is False: + selectable = self.with_polymorphic[1] + elif selectable is False: + selectable = None + mappers = self._mappers_from_spec(spec, selectable) + if selectable is not None: + return mappers, selectable + else: + return mappers, self._selectable_from_mappers(mappers, innerjoin) + + @HasMemoized.memoized_attribute + def _polymorphic_properties(self): + return list( + self._iterate_polymorphic_properties( + self._with_polymorphic_mappers + ) + ) + + @property + def _all_column_expressions(self): + poly_properties = self._polymorphic_properties + adapter = self._polymorphic_adapter + + return [ + adapter.columns[c] if adapter else c + for prop in poly_properties + if isinstance(prop, properties.ColumnProperty) + and prop._renders_in_subqueries + for c in prop.columns + ] + + def _columns_plus_keys(self, polymorphic_mappers=()): + if polymorphic_mappers: + poly_properties = self._iterate_polymorphic_properties( + polymorphic_mappers + ) + else: + poly_properties = self._polymorphic_properties + + return [ + (prop.key, prop.columns[0]) + for prop in poly_properties + if isinstance(prop, properties.ColumnProperty) + ] + + @HasMemoized.memoized_attribute + def _polymorphic_adapter(self) -> Optional[orm_util.ORMAdapter]: + if self._has_aliased_polymorphic_fromclause: + return orm_util.ORMAdapter( + orm_util._TraceAdaptRole.MAPPER_POLYMORPHIC_ADAPTER, + self, + selectable=self.selectable, + equivalents=self._equivalent_columns, + limit_on_entity=False, + ) + else: + return None + + def _iterate_polymorphic_properties(self, mappers=None): + """Return an iterator of MapperProperty objects which will render into + a SELECT.""" + if mappers is None: + mappers = self._with_polymorphic_mappers + + if not mappers: + for c in self.iterate_properties: + yield c + else: + # in the polymorphic case, filter out discriminator columns + # from other mappers, as these are sometimes dependent on that + # mapper's polymorphic selectable (which we don't want rendered) + for c in util.unique_list( + chain( + *[ + list(mapper.iterate_properties) + for 
mapper in [self] + mappers
+                    ]
+                )
+            ):
+                if getattr(c, "_is_polymorphic_discriminator", False) and (
+                    self.polymorphic_on is None
+                    or c.columns[0] is not self.polymorphic_on
+                ):
+                    continue
+                yield c
+
+    @HasMemoized.memoized_attribute
+    def attrs(self) -> util.ReadOnlyProperties[MapperProperty[Any]]:
+        """A namespace of all :class:`.MapperProperty` objects
+        associated with this mapper.
+
+        This is an object that provides each property based on
+        its key name. For instance, the mapper for a
+        ``User`` class which has ``User.name`` attribute would
+        provide ``mapper.attrs.name``, which would be the
+        :class:`.ColumnProperty` representing the ``name``
+        column. The namespace object can also be iterated,
+        which would yield each :class:`.MapperProperty`.
+
+        :class:`_orm.Mapper` has several pre-filtered views
+        of this attribute which limit the types of properties
+        returned, including :attr:`.synonyms`, :attr:`.column_attrs`,
+        :attr:`.relationships`, and :attr:`.composites`.
+
+        .. warning::
+
+            The :attr:`_orm.Mapper.attrs` accessor namespace is an
+            instance of :class:`.OrderedProperties`. This is
+            a dictionary-like object which includes a small number of
+            named methods such as :meth:`.OrderedProperties.items`
+            and :meth:`.OrderedProperties.values`. When
+            accessing attributes dynamically, favor using the dict-access
+            scheme, e.g. ``mapper.attrs[somename]`` over
+            ``getattr(mapper.attrs, somename)`` to avoid name collisions.
+
+        .. seealso::
+
+            :attr:`_orm.Mapper.all_orm_descriptors`
+
+        """
+
+        self._check_configure()
+        return util.ReadOnlyProperties(self._props)
+
+    @HasMemoized.memoized_attribute
+    def all_orm_descriptors(self) -> util.ReadOnlyProperties[InspectionAttr]:
+        """A namespace of all :class:`.InspectionAttr` attributes associated
+        with the mapped class.
+
+        These attributes are in all cases Python :term:`descriptors`
+        associated with the mapped class or its superclasses.
+
+        This namespace includes attributes that are mapped to the class
+        as well as attributes declared by extension modules.
+        It includes any Python descriptor type that inherits from
+        :class:`.InspectionAttr`. This includes
+        :class:`.QueryableAttribute`, as well as extension types such as
+        :class:`.hybrid_property`, :class:`.hybrid_method` and
+        :class:`.AssociationProxy`.
+
+        To distinguish between mapped attributes and extension attributes,
+        the attribute :attr:`.InspectionAttr.extension_type` will refer
+        to a constant that distinguishes between different extension types.
+
+        The sorting of the attributes is based on the following rules:
+
+        1. Iterate through the class and its superclasses in order from
+           subclass to superclass (i.e. iterate through ``cls.__mro__``)
+
+        2. For each class, yield the attributes in the order in which they
+           appear in ``__dict__``, with the exception of those in step
+           3 below. In Python 3.6 and above this ordering will be the
+           same as that of the class' construction, with the exception
+           of attributes that were added after the fact by the application
+           or the mapper.
+
+        3. If a certain attribute key is also in the superclass ``__dict__``,
+           then it's included in the iteration for that class, and not the
+           class in which it first appeared.
+
+        The above process produces an ordering that is deterministic in terms
+        of the order in which attributes were assigned to the class.
+
+        .. versionchanged:: 1.3.19 ensured deterministic ordering for
+           :meth:`_orm.Mapper.all_orm_descriptors`.
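+
+        For example (editor's sketch; ``User`` is a hypothetical mapped
+        class)::
+
+            from sqlalchemy import inspect
+
+            for key, attr in inspect(User).all_orm_descriptors.items():
+                print(key, attr.extension_type)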
+ + When dealing with a :class:`.QueryableAttribute`, the + :attr:`.QueryableAttribute.property` attribute refers to the + :class:`.MapperProperty` property, which is what you get when + referring to the collection of mapped properties via + :attr:`_orm.Mapper.attrs`. + + .. warning:: + + The :attr:`_orm.Mapper.all_orm_descriptors` + accessor namespace is an + instance of :class:`.OrderedProperties`. This is + a dictionary-like object which includes a small number of + named methods such as :meth:`.OrderedProperties.items` + and :meth:`.OrderedProperties.values`. When + accessing attributes dynamically, favor using the dict-access + scheme, e.g. ``mapper.all_orm_descriptors[somename]`` over + ``getattr(mapper.all_orm_descriptors, somename)`` to avoid name + collisions. + + .. seealso:: + + :attr:`_orm.Mapper.attrs` + + """ + return util.ReadOnlyProperties( + dict(self.class_manager._all_sqla_attributes()) + ) + + @HasMemoized.memoized_attribute + @util.preload_module("sqlalchemy.orm.descriptor_props") + def _pk_synonyms(self) -> Dict[str, str]: + """return a dictionary of {syn_attribute_name: pk_attr_name} for + all synonyms that refer to primary key columns + + """ + descriptor_props = util.preloaded.orm_descriptor_props + + pk_keys = {prop.key for prop in self._identity_key_props} + + return { + syn.key: syn.name + for k, syn in self._props.items() + if isinstance(syn, descriptor_props.SynonymProperty) + and syn.name in pk_keys + } + + @HasMemoized.memoized_attribute + @util.preload_module("sqlalchemy.orm.descriptor_props") + def synonyms(self) -> util.ReadOnlyProperties[SynonymProperty[Any]]: + """Return a namespace of all :class:`.Synonym` + properties maintained by this :class:`_orm.Mapper`. + + .. seealso:: + + :attr:`_orm.Mapper.attrs` - namespace of all + :class:`.MapperProperty` + objects. + + """ + descriptor_props = util.preloaded.orm_descriptor_props + + return self._filter_properties(descriptor_props.SynonymProperty) + + @property + def entity_namespace(self): + return self.class_ + + @HasMemoized.memoized_attribute + def column_attrs(self) -> util.ReadOnlyProperties[ColumnProperty[Any]]: + """Return a namespace of all :class:`.ColumnProperty` + properties maintained by this :class:`_orm.Mapper`. + + .. seealso:: + + :attr:`_orm.Mapper.attrs` - namespace of all + :class:`.MapperProperty` + objects. + + """ + return self._filter_properties(properties.ColumnProperty) + + @HasMemoized.memoized_attribute + @util.preload_module("sqlalchemy.orm.relationships") + def relationships( + self, + ) -> util.ReadOnlyProperties[RelationshipProperty[Any]]: + """A namespace of all :class:`.Relationship` properties + maintained by this :class:`_orm.Mapper`. + + .. warning:: + + the :attr:`_orm.Mapper.relationships` accessor namespace is an + instance of :class:`.OrderedProperties`. This is + a dictionary-like object which includes a small number of + named methods such as :meth:`.OrderedProperties.items` + and :meth:`.OrderedProperties.values`. When + accessing attributes dynamically, favor using the dict-access + scheme, e.g. ``mapper.relationships[somename]`` over + ``getattr(mapper.relationships, somename)`` to avoid name + collisions. + + .. seealso:: + + :attr:`_orm.Mapper.attrs` - namespace of all + :class:`.MapperProperty` + objects. 
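+
+        For example (editor's sketch; assumes a hypothetical ``User`` class
+        with an ``addresses`` relationship)::
+
+            from sqlalchemy import inspect
+
+            rel = inspect(User).relationships["addresses"]
+            print(rel.key, rel.direction)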
+ + """ + return self._filter_properties( + util.preloaded.orm_relationships.RelationshipProperty + ) + + @HasMemoized.memoized_attribute + @util.preload_module("sqlalchemy.orm.descriptor_props") + def composites(self) -> util.ReadOnlyProperties[CompositeProperty[Any]]: + """Return a namespace of all :class:`.Composite` + properties maintained by this :class:`_orm.Mapper`. + + .. seealso:: + + :attr:`_orm.Mapper.attrs` - namespace of all + :class:`.MapperProperty` + objects. + + """ + return self._filter_properties( + util.preloaded.orm_descriptor_props.CompositeProperty + ) + + def _filter_properties( + self, type_: Type[_MP] + ) -> util.ReadOnlyProperties[_MP]: + self._check_configure() + return util.ReadOnlyProperties( + util.OrderedDict( + (k, v) for k, v in self._props.items() if isinstance(v, type_) + ) + ) + + @HasMemoized.memoized_attribute + def _get_clause(self): + """create a "get clause" based on the primary key. this is used + by query.get() and many-to-one lazyloads to load this item + by primary key. + + """ + params = [ + ( + primary_key, + sql.bindparam("pk_%d" % idx, type_=primary_key.type), + ) + for idx, primary_key in enumerate(self.primary_key, 1) + ] + return ( + sql.and_(*[k == v for (k, v) in params]), + util.column_dict(params), + ) + + @HasMemoized.memoized_attribute + def _equivalent_columns(self) -> _EquivalentColumnMap: + """Create a map of all equivalent columns, based on + the determination of column pairs that are equated to + one another based on inherit condition. This is designed + to work with the queries that util.polymorphic_union + comes up with, which often don't include the columns from + the base table directly (including the subclass table columns + only). + + The resulting structure is a dictionary of columns mapped + to lists of equivalent columns, e.g.:: + + { + tablea.col1: + {tableb.col1, tablec.col1}, + tablea.col2: + {tabled.col2} + } + + """ + result: _EquivalentColumnMap = {} + + def visit_binary(binary): + if binary.operator == operators.eq: + if binary.left in result: + result[binary.left].add(binary.right) + else: + result[binary.left] = {binary.right} + if binary.right in result: + result[binary.right].add(binary.left) + else: + result[binary.right] = {binary.left} + + for mapper in self.base_mapper.self_and_descendants: + if mapper.inherit_condition is not None: + visitors.traverse( + mapper.inherit_condition, {}, {"binary": visit_binary} + ) + + return result + + def _is_userland_descriptor(self, assigned_name: str, obj: Any) -> bool: + if isinstance( + obj, + ( + _MappedAttribute, + instrumentation.ClassManager, + expression.ColumnElement, + ), + ): + return False + else: + return assigned_name not in self._dataclass_fields + + @HasMemoized.memoized_attribute + def _dataclass_fields(self): + return [f.name for f in util.dataclass_fields(self.class_)] + + def _should_exclude(self, name, assigned_name, local, column): + """determine whether a particular property should be implicitly + present on the class. + + This occurs when properties are propagated from an inherited class, or + are applied from the columns present in the mapped table. 
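+
+        E.g. (editor's sketch): a user-defined descriptor named ``name`` on
+        the mapped class, or an ``exclude_properties=["name"]`` mapper
+        argument, would each cause a ``name`` column of the mapped table to
+        be skipped by this check.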
+
+        """
+
+        if column is not None and sql_base._never_select_column(column):
+            return True
+
+        # check for class-bound attributes and/or descriptors,
+        # either local or from an inherited class
+        # ignore dataclass field default values
+        if local:
+            if self.class_.__dict__.get(
+                assigned_name, None
+            ) is not None and self._is_userland_descriptor(
+                assigned_name, self.class_.__dict__[assigned_name]
+            ):
+                return True
+        else:
+            attr = self.class_manager._get_class_attr_mro(assigned_name, None)
+            if attr is not None and self._is_userland_descriptor(
+                assigned_name, attr
+            ):
+                return True
+
+        if (
+            self.include_properties is not None
+            and name not in self.include_properties
+            and (column is None or column not in self.include_properties)
+        ):
+            self._log("not including property %s" % (name))
+            return True
+
+        if self.exclude_properties is not None and (
+            name in self.exclude_properties
+            or (column is not None and column in self.exclude_properties)
+        ):
+            self._log("excluding property %s" % (name))
+            return True
+
+        return False
+
+    def common_parent(self, other: Mapper[Any]) -> bool:
+        """Return true if the given mapper shares a
+        common inherited parent with this mapper."""
+
+        return self.base_mapper is other.base_mapper
+
+    def is_sibling(self, other: Mapper[Any]) -> bool:
+        """return true if the other mapper is an inheriting sibling to this
+        one; common parent, but different branch.
+
+        """
+        return (
+            self.base_mapper is other.base_mapper
+            and not self.isa(other)
+            and not other.isa(self)
+        )
+
+    def _canload(
+        self, state: InstanceState[Any], allow_subtypes: bool
+    ) -> bool:
+        s = self.primary_mapper()
+        if self.polymorphic_on is not None or allow_subtypes:
+            return _state_mapper(state).isa(s)
+        else:
+            return _state_mapper(state) is s
+
+    def isa(self, other: Mapper[Any]) -> bool:
+        """Return True if this mapper inherits from the given mapper."""
+
+        m: Optional[Mapper[Any]] = self
+        while m and m is not other:
+            m = m.inherits
+        return bool(m)
+
+    def iterate_to_root(self) -> Iterator[Mapper[Any]]:
+        m: Optional[Mapper[Any]] = self
+        while m:
+            yield m
+            m = m.inherits
+
+    @HasMemoized.memoized_attribute
+    def self_and_descendants(self) -> Sequence[Mapper[Any]]:
+        """The collection including this mapper and all descendant mappers.
+
+        This includes not just the immediately inheriting mappers but
+        all their inheriting mappers as well.
+
+        """
+        descendants = []
+        stack = deque([self])
+        while stack:
+            item = stack.popleft()
+            descendants.append(item)
+            stack.extend(item._inheriting_mappers)
+        return util.WeakSequence(descendants)
+
+    def polymorphic_iterator(self) -> Iterator[Mapper[Any]]:
+        """Iterate through the collection including this mapper and
+        all descendant mappers.
+
+        This includes not just the immediately inheriting mappers but
+        all their inheriting mappers as well.
+
+        To iterate through an entire hierarchy, use
+        ``mapper.base_mapper.polymorphic_iterator()``.
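+
+        For example (editor's sketch; assumes a hypothetical ``Employee``
+        hierarchy)::
+
+            from sqlalchemy import inspect
+
+            for submapper in inspect(Employee).polymorphic_iterator():
+                print(submapper.class_.__name__)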
+ + """ + return iter(self.self_and_descendants) + + def primary_mapper(self) -> Mapper[Any]: + """Return the primary mapper corresponding to this mapper's class key + (class).""" + + return self.class_manager.mapper + + @property + def primary_base_mapper(self) -> Mapper[Any]: + return self.class_manager.mapper.base_mapper + + def _result_has_identity_key(self, result, adapter=None): + pk_cols: Sequence[ColumnClause[Any]] = self.primary_key + if adapter: + pk_cols = [adapter.columns[c] for c in pk_cols] + rk = result.keys() + for col in pk_cols: + if col not in rk: + return False + else: + return True + + def identity_key_from_row( + self, + row: Optional[Union[Row[Any], RowMapping]], + identity_token: Optional[Any] = None, + adapter: Optional[ORMAdapter] = None, + ) -> _IdentityKeyType[_O]: + """Return an identity-map key for use in storing/retrieving an + item from the identity map. + + :param row: A :class:`.Row` or :class:`.RowMapping` produced from a + result set that selected from the ORM mapped primary key columns. + + .. versionchanged:: 2.0 + :class:`.Row` or :class:`.RowMapping` are accepted + for the "row" argument + + """ + pk_cols: Sequence[ColumnClause[Any]] = self.primary_key + if adapter: + pk_cols = [adapter.columns[c] for c in pk_cols] + + if hasattr(row, "_mapping"): + mapping = row._mapping # type: ignore + else: + mapping = cast("Mapping[Any, Any]", row) + + return ( + self._identity_class, + tuple(mapping[column] for column in pk_cols), # type: ignore + identity_token, + ) + + def identity_key_from_primary_key( + self, + primary_key: Tuple[Any, ...], + identity_token: Optional[Any] = None, + ) -> _IdentityKeyType[_O]: + """Return an identity-map key for use in storing/retrieving an + item from an identity map. + + :param primary_key: A list of values indicating the identifier. + + """ + return ( + self._identity_class, + tuple(primary_key), + identity_token, + ) + + def identity_key_from_instance(self, instance: _O) -> _IdentityKeyType[_O]: + """Return the identity key for the given instance, based on + its primary key attributes. + + If the instance's state is expired, calling this method + will result in a database check to see if the object has been deleted. + If the row no longer exists, + :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised. + + This value is typically also found on the instance state under the + attribute name `key`. + + """ + state = attributes.instance_state(instance) + return self._identity_key_from_state(state, PassiveFlag.PASSIVE_OFF) + + def _identity_key_from_state( + self, + state: InstanceState[_O], + passive: PassiveFlag = PassiveFlag.PASSIVE_RETURN_NO_VALUE, + ) -> _IdentityKeyType[_O]: + dict_ = state.dict + manager = state.manager + return ( + self._identity_class, + tuple( + [ + manager[prop.key].impl.get(state, dict_, passive) + for prop in self._identity_key_props + ] + ), + state.identity_token, + ) + + def primary_key_from_instance(self, instance: _O) -> Tuple[Any, ...]: + """Return the list of primary key values for the given + instance. + + If the instance's state is expired, calling this method + will result in a database check to see if the object has been deleted. + If the row no longer exists, + :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised. 
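+
+        For example (editor's sketch; assumes a hypothetical ``User`` class
+        with a single-column integer primary key)::
+
+            from sqlalchemy import inspect
+
+            user = session.get(User, 5)
+            inspect(User).primary_key_from_instance(user)  # (5,)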
+ + """ + state = attributes.instance_state(instance) + identity_key = self._identity_key_from_state( + state, PassiveFlag.PASSIVE_OFF + ) + return identity_key[1] + + @HasMemoized.memoized_attribute + def _persistent_sortkey_fn(self): + key_fns = [col.type.sort_key_function for col in self.primary_key] + + if set(key_fns).difference([None]): + + def key(state): + return tuple( + key_fn(val) if key_fn is not None else val + for key_fn, val in zip(key_fns, state.key[1]) + ) + + else: + + def key(state): + return state.key[1] + + return key + + @HasMemoized.memoized_attribute + def _identity_key_props(self): + return [self._columntoproperty[col] for col in self.primary_key] + + @HasMemoized.memoized_attribute + def _all_pk_cols(self): + collection: Set[ColumnClause[Any]] = set() + for table in self.tables: + collection.update(self._pks_by_table[table]) + return collection + + @HasMemoized.memoized_attribute + def _should_undefer_in_wildcard(self): + cols: Set[ColumnElement[Any]] = set(self.primary_key) + if self.polymorphic_on is not None: + cols.add(self.polymorphic_on) + return cols + + @HasMemoized.memoized_attribute + def _primary_key_propkeys(self): + return {self._columntoproperty[col].key for col in self._all_pk_cols} + + def _get_state_attr_by_column( + self, + state: InstanceState[_O], + dict_: _InstanceDict, + column: ColumnElement[Any], + passive: PassiveFlag = PassiveFlag.PASSIVE_RETURN_NO_VALUE, + ) -> Any: + prop = self._columntoproperty[column] + return state.manager[prop.key].impl.get(state, dict_, passive=passive) + + def _set_committed_state_attr_by_column(self, state, dict_, column, value): + prop = self._columntoproperty[column] + state.manager[prop.key].impl.set_committed_value(state, dict_, value) + + def _set_state_attr_by_column(self, state, dict_, column, value): + prop = self._columntoproperty[column] + state.manager[prop.key].impl.set(state, dict_, value, None) + + def _get_committed_attr_by_column(self, obj, column): + state = attributes.instance_state(obj) + dict_ = attributes.instance_dict(obj) + return self._get_committed_state_attr_by_column( + state, dict_, column, passive=PassiveFlag.PASSIVE_OFF + ) + + def _get_committed_state_attr_by_column( + self, state, dict_, column, passive=PassiveFlag.PASSIVE_RETURN_NO_VALUE + ): + prop = self._columntoproperty[column] + return state.manager[prop.key].impl.get_committed_value( + state, dict_, passive=passive + ) + + def _optimized_get_statement(self, state, attribute_names): + """assemble a WHERE clause which retrieves a given state by primary + key, using a minimized set of tables. + + Applies to a joined-table inheritance mapper where the + requested attribute names are only present on joined tables, + not the base table. The WHERE clause attempts to include + only those tables to minimize joins. 
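+
+        E.g. (editor's sketch): for a joined-inheritance ``Manager(Employee)``
+        mapping where only ``Manager.bonus`` is unloaded, the statement
+        produced is roughly::
+
+            SELECT manager.bonus FROM manager WHERE manager.id = :param
+
+        with no JOIN to the base ``employee`` table.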
+ + """ + props = self._props + + col_attribute_names = set(attribute_names).intersection( + state.mapper.column_attrs.keys() + ) + tables: Set[FromClause] = set( + chain( + *[ + sql_util.find_tables(c, check_columns=True) + for key in col_attribute_names + for c in props[key].columns + ] + ) + ) + + if self.base_mapper.local_table in tables: + return None + + def visit_binary(binary): + leftcol = binary.left + rightcol = binary.right + if leftcol is None or rightcol is None: + return + + if leftcol.table not in tables: + leftval = self._get_committed_state_attr_by_column( + state, + state.dict, + leftcol, + passive=PassiveFlag.PASSIVE_NO_INITIALIZE, + ) + if leftval in orm_util._none_set: + raise _OptGetColumnsNotAvailable() + binary.left = sql.bindparam( + None, leftval, type_=binary.right.type + ) + elif rightcol.table not in tables: + rightval = self._get_committed_state_attr_by_column( + state, + state.dict, + rightcol, + passive=PassiveFlag.PASSIVE_NO_INITIALIZE, + ) + if rightval in orm_util._none_set: + raise _OptGetColumnsNotAvailable() + binary.right = sql.bindparam( + None, rightval, type_=binary.right.type + ) + + allconds: List[ColumnElement[bool]] = [] + + start = False + + # as of #7507, from the lowest base table on upwards, + # we include all intermediary tables. + + for mapper in reversed(list(self.iterate_to_root())): + if mapper.local_table in tables: + start = True + elif not isinstance(mapper.local_table, expression.TableClause): + return None + if start and not mapper.single: + assert mapper.inherits + assert not mapper.concrete + assert mapper.inherit_condition is not None + allconds.append(mapper.inherit_condition) + tables.add(mapper.local_table) + + # only the bottom table needs its criteria to be altered to fit + # the primary key ident - the rest of the tables upwards to the + # descendant-most class should all be present and joined to each + # other. + try: + _traversed = visitors.cloned_traverse( + allconds[0], {}, {"binary": visit_binary} + ) + except _OptGetColumnsNotAvailable: + return None + else: + allconds[0] = _traversed + + cond = sql.and_(*allconds) + + cols = [] + for key in col_attribute_names: + cols.extend(props[key].columns) + return ( + sql.select(*cols) + .where(cond) + .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL) + ) + + def _iterate_to_target_viawpoly(self, mapper): + if self.isa(mapper): + prev = self + for m in self.iterate_to_root(): + yield m + + if m is not prev and prev not in m._with_polymorphic_mappers: + break + + prev = m + if m is mapper: + break + + @HasMemoized.memoized_attribute + def _would_selectinload_combinations_cache(self): + return {} + + def _would_selectin_load_only_from_given_mapper(self, super_mapper): + """return True if this mapper would "selectin" polymorphic load based + on the given super mapper, and not from a setting from a subclass. + + given:: + + class A: + ... + + class B(A): + __mapper_args__ = {"polymorphic_load": "selectin"} + + class C(B): + ... + + class D(B): + __mapper_args__ = {"polymorphic_load": "selectin"} + + ``inspect(C)._would_selectin_load_only_from_given_mapper(inspect(B))`` + returns True, because C does selectin loading because of B's setting. + + OTOH, ``inspect(D) + ._would_selectin_load_only_from_given_mapper(inspect(B))`` + returns False, because D does selectin loading because of its own + setting; when we are doing a selectin poly load from B, we want to + filter out D because it would already have its own selectin poly load + set up separately. + + Added as part of #9373. 
+
+        """
+        cache = self._would_selectinload_combinations_cache
+
+        try:
+            return cache[super_mapper]
+        except KeyError:
+            pass
+
+        # assert that given object is a supermapper, meaning we already
+        # strong reference it directly or indirectly. this allows us
+        # to not worry that we are creating new strongrefs to unrelated
+        # mappers or other objects.
+        assert self.isa(super_mapper)
+
+        mapper = super_mapper
+        for m in self._iterate_to_target_viawpoly(mapper):
+            if m.polymorphic_load == "selectin":
+                retval = m is super_mapper
+                break
+        else:
+            retval = False
+
+        cache[super_mapper] = retval
+        return retval
+
+    def _should_selectin_load(self, enabled_via_opt, polymorphic_from):
+        if not enabled_via_opt:
+            # common case, takes place for all polymorphic loads
+            mapper = polymorphic_from
+            for m in self._iterate_to_target_viawpoly(mapper):
+                if m.polymorphic_load == "selectin":
+                    return m
+        else:
+            # uncommon case, selectin load options were used
+            enabled_via_opt = set(enabled_via_opt)
+            enabled_via_opt_mappers = {e.mapper: e for e in enabled_via_opt}
+            for entity in enabled_via_opt.union([polymorphic_from]):
+                mapper = entity.mapper
+                for m in self._iterate_to_target_viawpoly(mapper):
+                    if (
+                        m.polymorphic_load == "selectin"
+                        or m in enabled_via_opt_mappers
+                    ):
+                        return enabled_via_opt_mappers.get(m, m)
+
+        return None
+
+    @util.preload_module("sqlalchemy.orm.strategy_options")
+    def _subclass_load_via_in(self, entity, polymorphic_from):
+        """Assemble a query that can load the columns local to
+        this subclass as a SELECT with IN.
+
+        """
+
+        strategy_options = util.preloaded.orm_strategy_options
+
+        assert self.inherits
+
+        if self.polymorphic_on is not None:
+            polymorphic_prop = self._columntoproperty[self.polymorphic_on]
+            keep_props = set([polymorphic_prop] + self._identity_key_props)
+        else:
+            keep_props = set(self._identity_key_props)
+
+        disable_opt = strategy_options.Load(entity)
+        enable_opt = strategy_options.Load(entity)
+
+        classes_to_include = {self}
+        m: Optional[Mapper[Any]] = self.inherits
+        while (
+            m is not None
+            and m is not polymorphic_from
+            and m.polymorphic_load == "selectin"
+        ):
+            classes_to_include.add(m)
+            m = m.inherits
+
+        for prop in self.column_attrs + self.relationships:
+            # skip prop keys that are not instrumented on the mapped class.
+            # this is primarily the "_sa_polymorphic_on" property that gets
+            # created for an ad-hoc polymorphic_on SQL expression, issue #8704
+            if prop.key not in self.class_manager:
+                continue
+
+            if prop.parent in classes_to_include or prop in keep_props:
+                # "enable" options, to turn on the properties that we want to
+                # load by default (subject to options from the query)
+                if not isinstance(prop, StrategizedProperty):
+                    continue
+
+                enable_opt = enable_opt._set_generic_strategy(
+                    # convert string name to an attribute before passing
+                    # to loader strategy. note this must be in terms
+                    # of given entity, such as AliasedClass, etc.
+                    (getattr(entity.entity_namespace, prop.key),),
+                    dict(prop.strategy_key),
+                    _reconcile_to_other=True,
+                )
+            else:
+                # "disable" options, to turn off the properties from the
+                # superclass that we *don't* want to load, applied after
+                # the options from the query to override them
+                disable_opt = disable_opt._set_generic_strategy(
+                    # convert string name to an attribute before passing
+                    # to loader strategy. note this must be in terms
+                    # of given entity, such as AliasedClass, etc.
+ (getattr(entity.entity_namespace, prop.key),), + {"do_nothing": True}, + _reconcile_to_other=False, + ) + + primary_key = [ + sql_util._deep_annotate(pk, {"_orm_adapt": True}) + for pk in self.primary_key + ] + + in_expr: ColumnElement[Any] + + if len(primary_key) > 1: + in_expr = sql.tuple_(*primary_key) + else: + in_expr = primary_key[0] + + if entity.is_aliased_class: + assert entity.mapper is self + + q = sql.select(entity).set_label_style( + LABEL_STYLE_TABLENAME_PLUS_COL + ) + + in_expr = entity._adapter.traverse(in_expr) + primary_key = [entity._adapter.traverse(k) for k in primary_key] + q = q.where( + in_expr.in_(sql.bindparam("primary_keys", expanding=True)) + ).order_by(*primary_key) + else: + q = sql.select(self).set_label_style( + LABEL_STYLE_TABLENAME_PLUS_COL + ) + q = q.where( + in_expr.in_(sql.bindparam("primary_keys", expanding=True)) + ).order_by(*primary_key) + + return q, enable_opt, disable_opt + + @HasMemoized.memoized_attribute + def _subclass_load_via_in_mapper(self): + # the default is loading this mapper against the basemost mapper + return self._subclass_load_via_in(self, self.base_mapper) + + def cascade_iterator( + self, + type_: str, + state: InstanceState[_O], + halt_on: Optional[Callable[[InstanceState[Any]], bool]] = None, + ) -> Iterator[ + Tuple[object, Mapper[Any], InstanceState[Any], _InstanceDict] + ]: + r"""Iterate each element and its mapper in an object graph, + for all relationships that meet the given cascade rule. + + :param type\_: + The name of the cascade rule (i.e. ``"save-update"``, ``"delete"``, + etc.). + + .. note:: the ``"all"`` cascade is not accepted here. For a generic + object traversal function, see :ref:`faq_walk_objects`. + + :param state: + The lead InstanceState. child items will be processed per + the relationships defined for this object's mapper. + + :return: the method yields individual object instances. + + .. seealso:: + + :ref:`unitofwork_cascades` + + :ref:`faq_walk_objects` - illustrates a generic function to + traverse all objects without relying on cascades. 
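+
+        As a minimal sketch (``obj`` stands in for any mapped instance
+        and is illustrative only)::
+
+            from sqlalchemy import inspect
+
+            state = inspect(obj)
+            for instance, sub_mapper, sub_state, sub_dict in (
+                state.mapper.cascade_iterator("save-update", state)
+            ):
+                print(instance)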
+ + """ + visited_states: Set[InstanceState[Any]] = set() + prp, mpp = object(), object() + + assert state.mapper.isa(self) + + # this is actually a recursive structure, fully typing it seems + # a little too difficult for what it's worth here + visitables: Deque[ + Tuple[ + Deque[Any], + object, + Optional[InstanceState[Any]], + Optional[_InstanceDict], + ] + ] + + visitables = deque( + [(deque(state.mapper._props.values()), prp, state, state.dict)] + ) + + while visitables: + iterator, item_type, parent_state, parent_dict = visitables[-1] + if not iterator: + visitables.pop() + continue + + if item_type is prp: + prop = iterator.popleft() + if not prop.cascade or type_ not in prop.cascade: + continue + assert parent_state is not None + assert parent_dict is not None + queue = deque( + prop.cascade_iterator( + type_, + parent_state, + parent_dict, + visited_states, + halt_on, + ) + ) + if queue: + visitables.append((queue, mpp, None, None)) + elif item_type is mpp: + ( + instance, + instance_mapper, + corresponding_state, + corresponding_dict, + ) = iterator.popleft() + yield ( + instance, + instance_mapper, + corresponding_state, + corresponding_dict, + ) + visitables.append( + ( + deque(instance_mapper._props.values()), + prp, + corresponding_state, + corresponding_dict, + ) + ) + + @HasMemoized.memoized_attribute + def _compiled_cache(self): + return util.LRUCache(self._compiled_cache_size) + + @HasMemoized.memoized_attribute + def _multiple_persistence_tables(self): + return len(self.tables) > 1 + + @HasMemoized.memoized_attribute + def _sorted_tables(self): + table_to_mapper: Dict[TableClause, Mapper[Any]] = {} + + for mapper in self.base_mapper.self_and_descendants: + for t in mapper.tables: + table_to_mapper.setdefault(t, mapper) + + extra_dependencies = [] + for table, mapper in table_to_mapper.items(): + super_ = mapper.inherits + if super_: + extra_dependencies.extend( + [(super_table, table) for super_table in super_.tables] + ) + + def skip(fk): + # attempt to skip dependencies that are not + # significant to the inheritance chain + # for two tables that are related by inheritance. + # while that dependency may be important, it's technically + # not what we mean to sort on here. 
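+            # concretely: the dependency is skipped only when the FK's
+            # columns play no part in the inherit_condition(s) joining
+            # the two mappers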
+ parent = table_to_mapper.get(fk.parent.table) + dep = table_to_mapper.get(fk.column.table) + if ( + parent is not None + and dep is not None + and dep is not parent + and dep.inherit_condition is not None + ): + cols = set(sql_util._find_columns(dep.inherit_condition)) + if parent.inherit_condition is not None: + cols = cols.union( + sql_util._find_columns(parent.inherit_condition) + ) + return fk.parent not in cols and fk.column not in cols + else: + return fk.parent not in cols + return False + + sorted_ = sql_util.sort_tables( + table_to_mapper, + skip_fn=skip, + extra_dependencies=extra_dependencies, + ) + + ret = util.OrderedDict() + for t in sorted_: + ret[t] = table_to_mapper[t] + return ret + + def _memo(self, key: Any, callable_: Callable[[], _T]) -> _T: + if key in self._memoized_values: + return cast(_T, self._memoized_values[key]) + else: + self._memoized_values[key] = value = callable_() + return value + + @util.memoized_property + def _table_to_equated(self): + """memoized map of tables to collections of columns to be + synchronized upwards to the base mapper.""" + + result: util.defaultdict[ + Table, + List[ + Tuple[ + Mapper[Any], + List[Tuple[ColumnElement[Any], ColumnElement[Any]]], + ] + ], + ] = util.defaultdict(list) + + def set_union(x, y): + return x.union(y) + + for table in self._sorted_tables: + cols = set(table.c) + + for m in self.iterate_to_root(): + if m._inherits_equated_pairs and cols.intersection( + reduce( + set_union, + [l.proxy_set for l, r in m._inherits_equated_pairs], + ) + ): + result[table].append((m, m._inherits_equated_pairs)) + + return result + + +class _OptGetColumnsNotAvailable(Exception): + pass + + +def configure_mappers() -> None: + """Initialize the inter-mapper relationships of all mappers that + have been constructed thus far across all :class:`_orm.registry` + collections. + + The configure step is used to reconcile and initialize the + :func:`_orm.relationship` linkages between mapped classes, as well as to + invoke configuration events such as the + :meth:`_orm.MapperEvents.before_configured` and + :meth:`_orm.MapperEvents.after_configured`, which may be used by ORM + extensions or user-defined extension hooks. + + Mapper configuration is normally invoked automatically, the first time + mappings from a particular :class:`_orm.registry` are used, as well as + whenever mappings are used and additional not-yet-configured mappers have + been constructed. The automatic configuration process however is local only + to the :class:`_orm.registry` involving the target mapper and any related + :class:`_orm.registry` objects which it may depend on; this is + equivalent to invoking the :meth:`_orm.registry.configure` method + on a particular :class:`_orm.registry`. + + By contrast, the :func:`_orm.configure_mappers` function will invoke the + configuration process on all :class:`_orm.registry` objects that + exist in memory, and may be useful for scenarios where many individual + :class:`_orm.registry` objects that are nonetheless interrelated are + in use. + + .. versionchanged:: 1.4 + + As of SQLAlchemy 1.4.0b2, this function works on a + per-:class:`_orm.registry` basis, locating all :class:`_orm.registry` + objects present and invoking the :meth:`_orm.registry.configure` method + on each. The :meth:`_orm.registry.configure` method may be preferred to + limit the configuration of mappers to those local to a particular + :class:`_orm.registry` and/or declarative base class. 
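+
+    E.g., as a minimal sketch::
+
+        from sqlalchemy.orm import configure_mappers
+
+        # after all mapped classes have been defined / imported
+        configure_mappers()
+
+    or, scoped to one collection (``my_registry`` being an illustrative
+    :class:`_orm.registry` instance)::
+
+        my_registry.configure()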
+ + Points at which automatic configuration is invoked include when a mapped + class is instantiated into an instance, as well as when ORM queries + are emitted using :meth:`.Session.query` or :meth:`_orm.Session.execute` + with an ORM-enabled statement. + + The mapper configure process, whether invoked by + :func:`_orm.configure_mappers` or from :meth:`_orm.registry.configure`, + provides several event hooks that can be used to augment the mapper + configuration step. These hooks include: + + * :meth:`.MapperEvents.before_configured` - called once before + :func:`.configure_mappers` or :meth:`_orm.registry.configure` does any + work; this can be used to establish additional options, properties, or + related mappings before the operation proceeds. + + * :meth:`.MapperEvents.mapper_configured` - called as each individual + :class:`_orm.Mapper` is configured within the process; will include all + mapper state except for backrefs set up by other mappers that are still + to be configured. + + * :meth:`.MapperEvents.after_configured` - called once after + :func:`.configure_mappers` or :meth:`_orm.registry.configure` is + complete; at this stage, all :class:`_orm.Mapper` objects that fall + within the scope of the configuration operation will be fully configured. + Note that the calling application may still have other mappings that + haven't been produced yet, such as if they are in modules as yet + unimported, and may also have mappings that are still to be configured, + if they are in other :class:`_orm.registry` collections not part of the + current scope of configuration. + + """ + + _configure_registries(_all_registries(), cascade=True) + + +def _configure_registries( + registries: Set[_RegistryType], cascade: bool +) -> None: + for reg in registries: + if reg._new_mappers: + break + else: + return + + with _CONFIGURE_MUTEX: + global _already_compiling + if _already_compiling: + return + _already_compiling = True + try: + # double-check inside mutex + for reg in registries: + if reg._new_mappers: + break + else: + return + + Mapper.dispatch._for_class(Mapper).before_configured() # type: ignore # noqa: E501 + # initialize properties on all mappers + # note that _mapper_registry is unordered, which + # may randomly conceal/reveal issues related to + # the order of mapper compilation + + _do_configure_registries(registries, cascade) + finally: + _already_compiling = False + Mapper.dispatch._for_class(Mapper).after_configured() # type: ignore + + +@util.preload_module("sqlalchemy.orm.decl_api") +def _do_configure_registries( + registries: Set[_RegistryType], cascade: bool +) -> None: + registry = util.preloaded.orm_decl_api.registry + + orig = set(registries) + + for reg in registry._recurse_with_dependencies(registries): + has_skip = False + + for mapper in reg._mappers_to_configure(): + run_configure = None + + for fn in mapper.dispatch.before_mapper_configured: + run_configure = fn(mapper, mapper.class_) + if run_configure is EXT_SKIP: + has_skip = True + break + if run_configure is EXT_SKIP: + continue + + if getattr(mapper, "_configure_failed", False): + e = sa_exc.InvalidRequestError( + "One or more mappers failed to initialize - " + "can't proceed with initialization of other " + "mappers. Triggering mapper: '%s'. 
" + "Original exception was: %s" + % (mapper, mapper._configure_failed) + ) + e._configure_failed = mapper._configure_failed # type: ignore + raise e + + if not mapper.configured: + try: + mapper._post_configure_properties() + mapper._expire_memoizations() + mapper.dispatch.mapper_configured(mapper, mapper.class_) + except Exception: + exc = sys.exc_info()[1] + if not hasattr(exc, "_configure_failed"): + mapper._configure_failed = exc + raise + if not has_skip: + reg._new_mappers = False + + if not cascade and reg._dependencies.difference(orig): + raise sa_exc.InvalidRequestError( + "configure was called with cascade=False but " + "additional registries remain" + ) + + +@util.preload_module("sqlalchemy.orm.decl_api") +def _dispose_registries(registries: Set[_RegistryType], cascade: bool) -> None: + registry = util.preloaded.orm_decl_api.registry + + orig = set(registries) + + for reg in registry._recurse_with_dependents(registries): + if not cascade and reg._dependents.difference(orig): + raise sa_exc.InvalidRequestError( + "Registry has dependent registries that are not disposed; " + "pass cascade=True to clear these also" + ) + + while reg._managers: + try: + manager, _ = reg._managers.popitem() + except KeyError: + # guard against race between while and popitem + pass + else: + reg._dispose_manager_and_mapper(manager) + + reg._non_primary_mappers.clear() + reg._dependents.clear() + for dep in reg._dependencies: + dep._dependents.discard(reg) + reg._dependencies.clear() + # this wasn't done in the 1.3 clear_mappers() and in fact it + # was a bug, as it could cause configure_mappers() to invoke + # the "before_configured" event even though mappers had all been + # disposed. + reg._new_mappers = False + + +def reconstructor(fn): + """Decorate a method as the 'reconstructor' hook. + + Designates a single method as the "reconstructor", an ``__init__``-like + method that will be called by the ORM after the instance has been + loaded from the database or otherwise reconstituted. + + .. tip:: + + The :func:`_orm.reconstructor` decorator makes use of the + :meth:`_orm.InstanceEvents.load` event hook, which can be + used directly. + + The reconstructor will be invoked with no arguments. Scalar + (non-collection) database-mapped attributes of the instance will + be available for use within the function. Eagerly-loaded + collections are generally not yet available and will usually only + contain the first element. ORM state changes made to objects at + this stage will not be recorded for the next flush() operation, so + the activity within a reconstructor should be conservative. + + .. seealso:: + + :meth:`.InstanceEvents.load` + + """ + fn.__sa_reconstructor__ = True + return fn + + +def validates( + *names: str, include_removes: bool = False, include_backrefs: bool = True +) -> Callable[[_Fn], _Fn]: + r"""Decorate a method as a 'validator' for one or more named properties. + + Designates a method as a validator, a method which receives the + name of the attribute as well as a value to be assigned, or in the + case of a collection, the value to be added to the collection. + The function can then raise validation exceptions to halt the + process from continuing (where Python's built-in ``ValueError`` + and ``AssertionError`` exceptions are reasonable choices), or can + modify or replace the value before proceeding. The function should + otherwise return the given value. 
+
+    Note that a validator for a collection **cannot** issue a load of that
+    collection within the validation routine - this usage raises
+    an assertion to avoid recursion overflows.  This is a reentrant
+    condition which is not supported.
+
+    :param \*names: list of attribute names to be validated.
+    :param include_removes: if True, "remove" events will be
+     sent as well - the validation function must accept an additional
+     argument "is_remove" which will be a boolean.
+
+    :param include_backrefs: defaults to ``True``; if ``False``, the
+     validation function will not emit if the originator is an attribute
+     event related via a backref.  This can be used for bi-directional
+     :func:`.validates` usage where only one validator should emit per
+     attribute operation.
+
+    .. versionchanged:: 2.0.16 This parameter inadvertently defaulted to
+        ``False`` for releases 2.0.0 through 2.0.15.  Its correct default
+        of ``True`` is restored in 2.0.16.
+
+    .. seealso::
+
+        :ref:`simple_validators` - usage examples for :func:`.validates`
+
+    """
+
+    def wrap(fn: _Fn) -> _Fn:
+        fn.__sa_validators__ = names  # type: ignore[attr-defined]
+        fn.__sa_validation_opts__ = {  # type: ignore[attr-defined]
+            "include_removes": include_removes,
+            "include_backrefs": include_backrefs,
+        }
+        return fn
+
+    return wrap
+
+
+def _event_on_load(state, ctx):
+    instrumenting_mapper = state.manager.mapper
+
+    if instrumenting_mapper._reconstructor:
+        instrumenting_mapper._reconstructor(state.obj())
+
+
+def _event_on_init(state, args, kwargs):
+    """Run init_instance hooks.
+
+    This also includes mapper compilation, normally not needed
+    here but helps with some piecemeal configuration
+    scenarios (such as in the ORM tutorial).
+
+    """
+
+    instrumenting_mapper = state.manager.mapper
+    if instrumenting_mapper:
+        instrumenting_mapper._check_configure()
+        if instrumenting_mapper._set_polymorphic_identity:
+            instrumenting_mapper._set_polymorphic_identity(state)
+
+
+class _ColumnMapping(Dict["ColumnElement[Any]", "MapperProperty[Any]"]):
+    """Error reporting helper for mapper._columntoproperty."""
+
+    __slots__ = ("mapper",)
+
+    def __init__(self, mapper):
+        # TODO: weakref would be a good idea here
+        self.mapper = mapper
+
+    def __missing__(self, column):
+        prop = self.mapper._props.get(column)
+        if prop:
+            raise orm_exc.UnmappedColumnError(
+                "Column '%s.%s' is not available, due to "
+                "conflicting property '%s':%r"
+                % (column.table.name, column.name, column.key, prop)
+            )
+        raise orm_exc.UnmappedColumnError(
+            "No column %s is configured on mapper %s..."
+            % (column, self.mapper)
+        )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/path_registry.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/path_registry.py
new file mode 100644
index 00000000..4ee8ac71
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/path_registry.py
@@ -0,0 +1,811 @@
+# orm/path_registry.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+"""Path tracking utilities, representing mapper graph traversals.
+
+"""
+
+from __future__ import annotations
+
+from functools import reduce
+from itertools import chain
+import logging
+import operator
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from . import base as orm_base
+from ._typing import insp_is_mapper_property
+from .. import exc
+from .. import util
+from ..sql import visitors
+from ..sql.cache_key import HasCacheKey
+
+if TYPE_CHECKING:
+    from ._typing import _InternalEntityType
+    from .interfaces import StrategizedProperty
+    from .mapper import Mapper
+    from .relationships import RelationshipProperty
+    from .util import AliasedInsp
+    from ..sql.cache_key import _CacheKeyTraversalType
+    from ..sql.elements import BindParameter
+    from ..sql.visitors import anon_map
+    from ..util.typing import _LiteralStar
+    from ..util.typing import TypeGuard
+
+    def is_root(path: PathRegistry) -> TypeGuard[RootRegistry]: ...
+
+    def is_entity(path: PathRegistry) -> TypeGuard[AbstractEntityRegistry]: ...
+
+else:
+    is_root = operator.attrgetter("is_root")
+    is_entity = operator.attrgetter("is_entity")
+
+
+_SerializedPath = List[Any]
+_StrPathToken = str
+_PathElementType = Union[
+    _StrPathToken, "_InternalEntityType[Any]", "StrategizedProperty[Any]"
+]
+
+# the representation is in fact
+# a tuple with alternating:
+# [_InternalEntityType[Any], Union[str, StrategizedProperty[Any]],
+#  _InternalEntityType[Any], Union[str, StrategizedProperty[Any]], ...]
+# this might someday be a tuple of 2-tuples instead, but paths can be
+# chopped at odd intervals as well so this is less flexible
+_PathRepresentation = Tuple[_PathElementType, ...]
+
+# NOTE: these names are weird since the array is 0-indexed,
+# the "_Odd" entries are at 0, 2, 4, etc
+_OddPathRepresentation = Sequence["_InternalEntityType[Any]"]
+_EvenPathRepresentation = Sequence[Union["StrategizedProperty[Any]", str]]
+
+
+log = logging.getLogger(__name__)
+
+
+def _unreduce_path(path: _SerializedPath) -> PathRegistry:
+    return PathRegistry.deserialize(path)
+
+
+_WILDCARD_TOKEN: _LiteralStar = "*"
+_DEFAULT_TOKEN = "_sa_default"
+
+
+class PathRegistry(HasCacheKey):
+    """Represent query load paths and registry functions.
+
+    Basically represents structures like:
+
+    (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)
+
+    These structures are generated by things like
+    query options (joinedload(), subqueryload(), etc.) and are
+    used to compose keys stored in the query._attributes dictionary
+    for various options.
+
+    They are then re-composed at query compile/result row time as
+    the query is formed and as rows are fetched, where they again
+    serve to compose keys to look up options in the context.attributes
+    dictionary, which is copied from query._attributes.
+
+    The path structure has a limited amount of caching, where each
+    "root" ultimately pulls from a fixed registry associated with
+    the first mapper, that also contains elements for each of its
+    property keys. However paths longer than two elements, which
+    are the exception rather than the rule, are generated on an
+    as-needed basis.
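+
+    As an illustrative sketch, with ``User`` and ``Order`` standing in
+    for hypothetical mapped classes linked by a ``User.orders``
+    relationship::
+
+        from sqlalchemy import inspect
+
+        path = inspect(User)._path_registry
+        path = path[User.orders.property]
+        path = path[inspect(Order)]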
+ + """ + + __slots__ = () + + is_token = False + is_root = False + has_entity = False + is_property = False + is_entity = False + + is_unnatural: bool + + path: _PathRepresentation + natural_path: _PathRepresentation + parent: Optional[PathRegistry] + root: RootRegistry + + _cache_key_traversal: _CacheKeyTraversalType = [ + ("path", visitors.ExtendedInternalTraversal.dp_has_cache_key_list) + ] + + def __eq__(self, other: Any) -> bool: + try: + return other is not None and self.path == other._path_for_compare + except AttributeError: + util.warn( + "Comparison of PathRegistry to %r is not supported" + % (type(other)) + ) + return False + + def __ne__(self, other: Any) -> bool: + try: + return other is None or self.path != other._path_for_compare + except AttributeError: + util.warn( + "Comparison of PathRegistry to %r is not supported" + % (type(other)) + ) + return True + + @property + def _path_for_compare(self) -> Optional[_PathRepresentation]: + return self.path + + def odd_element(self, index: int) -> _InternalEntityType[Any]: + return self.path[index] # type: ignore + + def set(self, attributes: Dict[Any, Any], key: Any, value: Any) -> None: + log.debug("set '%s' on path '%s' to '%s'", key, self, value) + attributes[(key, self.natural_path)] = value + + def setdefault( + self, attributes: Dict[Any, Any], key: Any, value: Any + ) -> None: + log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value) + attributes.setdefault((key, self.natural_path), value) + + def get( + self, attributes: Dict[Any, Any], key: Any, value: Optional[Any] = None + ) -> Any: + key = (key, self.natural_path) + if key in attributes: + return attributes[key] + else: + return value + + def __len__(self) -> int: + return len(self.path) + + def __hash__(self) -> int: + return id(self) + + @overload + def __getitem__(self, entity: _StrPathToken) -> TokenRegistry: ... + + @overload + def __getitem__(self, entity: int) -> _PathElementType: ... + + @overload + def __getitem__(self, entity: slice) -> _PathRepresentation: ... + + @overload + def __getitem__( + self, entity: _InternalEntityType[Any] + ) -> AbstractEntityRegistry: ... + + @overload + def __getitem__( + self, entity: StrategizedProperty[Any] + ) -> PropRegistry: ... + + def __getitem__( + self, + entity: Union[ + _StrPathToken, + int, + slice, + _InternalEntityType[Any], + StrategizedProperty[Any], + ], + ) -> Union[ + TokenRegistry, + _PathElementType, + _PathRepresentation, + PropRegistry, + AbstractEntityRegistry, + ]: + raise NotImplementedError() + + # TODO: what are we using this for? 
+ @property + def length(self) -> int: + return len(self.path) + + def pairs( + self, + ) -> Iterator[ + Tuple[_InternalEntityType[Any], Union[str, StrategizedProperty[Any]]] + ]: + odd_path = cast(_OddPathRepresentation, self.path) + even_path = cast(_EvenPathRepresentation, odd_path) + for i in range(0, len(odd_path), 2): + yield odd_path[i], even_path[i + 1] + + def contains_mapper(self, mapper: Mapper[Any]) -> bool: + _m_path = cast(_OddPathRepresentation, self.path) + for path_mapper in [_m_path[i] for i in range(0, len(_m_path), 2)]: + if path_mapper.mapper.isa(mapper): + return True + else: + return False + + def contains(self, attributes: Dict[Any, Any], key: Any) -> bool: + return (key, self.path) in attributes + + def __reduce__(self) -> Any: + return _unreduce_path, (self.serialize(),) + + @classmethod + def _serialize_path(cls, path: _PathRepresentation) -> _SerializedPath: + _m_path = cast(_OddPathRepresentation, path) + _p_path = cast(_EvenPathRepresentation, path) + + return list( + zip( + tuple( + m.class_ if (m.is_mapper or m.is_aliased_class) else str(m) + for m in [_m_path[i] for i in range(0, len(_m_path), 2)] + ), + tuple( + p.key if insp_is_mapper_property(p) else str(p) + for p in [_p_path[i] for i in range(1, len(_p_path), 2)] + ) + + (None,), + ) + ) + + @classmethod + def _deserialize_path(cls, path: _SerializedPath) -> _PathRepresentation: + def _deserialize_mapper_token(mcls: Any) -> Any: + return ( + # note: we likely dont want configure=True here however + # this is maintained at the moment for backwards compatibility + orm_base._inspect_mapped_class(mcls, configure=True) + if mcls not in PathToken._intern + else PathToken._intern[mcls] + ) + + def _deserialize_key_token(mcls: Any, key: Any) -> Any: + if key is None: + return None + elif key in PathToken._intern: + return PathToken._intern[key] + else: + mp = orm_base._inspect_mapped_class(mcls, configure=True) + assert mp is not None + return mp.attrs[key] + + p = tuple( + chain( + *[ + ( + _deserialize_mapper_token(mcls), + _deserialize_key_token(mcls, key), + ) + for mcls, key in path + ] + ) + ) + if p and p[-1] is None: + p = p[0:-1] + return p + + def serialize(self) -> _SerializedPath: + path = self.path + return self._serialize_path(path) + + @classmethod + def deserialize(cls, path: _SerializedPath) -> PathRegistry: + assert path is not None + p = cls._deserialize_path(path) + return cls.coerce(p) + + @overload + @classmethod + def per_mapper(cls, mapper: Mapper[Any]) -> CachingEntityRegistry: ... + + @overload + @classmethod + def per_mapper(cls, mapper: AliasedInsp[Any]) -> SlotsEntityRegistry: ... 
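+
+    # (the implementation below dispatches on mapper.is_mapper: plain
+    # mappers get the dict-based caching registry, aliased entities the
+    # lightweight slots-based one)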
+ + @classmethod + def per_mapper( + cls, mapper: _InternalEntityType[Any] + ) -> AbstractEntityRegistry: + if mapper.is_mapper: + return CachingEntityRegistry(cls.root, mapper) + else: + return SlotsEntityRegistry(cls.root, mapper) + + @classmethod + def coerce(cls, raw: _PathRepresentation) -> PathRegistry: + def _red(prev: PathRegistry, next_: _PathElementType) -> PathRegistry: + return prev[next_] + + # can't quite get mypy to appreciate this one :) + return reduce(_red, raw, cls.root) # type: ignore + + def __add__(self, other: PathRegistry) -> PathRegistry: + def _red(prev: PathRegistry, next_: _PathElementType) -> PathRegistry: + return prev[next_] + + return reduce(_red, other.path, self) + + def __str__(self) -> str: + return f"ORM Path[{' -> '.join(str(elem) for elem in self.path)}]" + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.path!r})" + + +class CreatesToken(PathRegistry): + __slots__ = () + + is_aliased_class: bool + is_root: bool + + def token(self, token: _StrPathToken) -> TokenRegistry: + if token.endswith(f":{_WILDCARD_TOKEN}"): + return TokenRegistry(self, token) + elif token.endswith(f":{_DEFAULT_TOKEN}"): + return TokenRegistry(self.root, token) + else: + raise exc.ArgumentError(f"invalid token: {token}") + + +class RootRegistry(CreatesToken): + """Root registry, defers to mappers so that + paths are maintained per-root-mapper. + + """ + + __slots__ = () + + inherit_cache = True + + path = natural_path = () + has_entity = False + is_aliased_class = False + is_root = True + is_unnatural = False + + def _getitem( + self, entity: Any + ) -> Union[TokenRegistry, AbstractEntityRegistry]: + if entity in PathToken._intern: + if TYPE_CHECKING: + assert isinstance(entity, _StrPathToken) + return TokenRegistry(self, PathToken._intern[entity]) + else: + try: + return entity._path_registry # type: ignore + except AttributeError: + raise IndexError( + f"invalid argument for RootRegistry.__getitem__: {entity}" + ) + + def _truncate_recursive(self) -> RootRegistry: + return self + + if not TYPE_CHECKING: + __getitem__ = _getitem + + +PathRegistry.root = RootRegistry() + + +class PathToken(orm_base.InspectionAttr, HasCacheKey, str): + """cacheable string token""" + + _intern: Dict[str, PathToken] = {} + + def _gen_cache_key( + self, anon_map: anon_map, bindparams: List[BindParameter[Any]] + ) -> Tuple[Any, ...]: + return (str(self),) + + @property + def _path_for_compare(self) -> Optional[_PathRepresentation]: + return None + + @classmethod + def intern(cls, strvalue: str) -> PathToken: + if strvalue in cls._intern: + return cls._intern[strvalue] + else: + cls._intern[strvalue] = result = PathToken(strvalue) + return result + + +class TokenRegistry(PathRegistry): + __slots__ = ("token", "parent", "path", "natural_path") + + inherit_cache = True + + token: _StrPathToken + parent: CreatesToken + + def __init__(self, parent: CreatesToken, token: _StrPathToken): + token = PathToken.intern(token) + + self.token = token + self.parent = parent + self.path = parent.path + (token,) + self.natural_path = parent.natural_path + (token,) + + has_entity = False + + is_token = True + + def generate_for_superclasses(self) -> Iterator[PathRegistry]: + # NOTE: this method is no longer used. 
consider removal + parent = self.parent + if is_root(parent): + yield self + return + + if TYPE_CHECKING: + assert isinstance(parent, AbstractEntityRegistry) + if not parent.is_aliased_class: + for mp_ent in parent.mapper.iterate_to_root(): + yield TokenRegistry(parent.parent[mp_ent], self.token) + elif ( + parent.is_aliased_class + and cast( + "AliasedInsp[Any]", + parent.entity, + )._is_with_polymorphic + ): + yield self + for ent in cast( + "AliasedInsp[Any]", parent.entity + )._with_polymorphic_entities: + yield TokenRegistry(parent.parent[ent], self.token) + else: + yield self + + def _generate_natural_for_superclasses( + self, + ) -> Iterator[_PathRepresentation]: + parent = self.parent + if is_root(parent): + yield self.natural_path + return + + if TYPE_CHECKING: + assert isinstance(parent, AbstractEntityRegistry) + for mp_ent in parent.mapper.iterate_to_root(): + yield TokenRegistry(parent.parent[mp_ent], self.token).natural_path + if ( + parent.is_aliased_class + and cast( + "AliasedInsp[Any]", + parent.entity, + )._is_with_polymorphic + ): + yield self.natural_path + for ent in cast( + "AliasedInsp[Any]", parent.entity + )._with_polymorphic_entities: + yield ( + TokenRegistry(parent.parent[ent], self.token).natural_path + ) + else: + yield self.natural_path + + def _getitem(self, entity: Any) -> Any: + try: + return self.path[entity] + except TypeError as err: + raise IndexError(f"{entity}") from err + + if not TYPE_CHECKING: + __getitem__ = _getitem + + +class PropRegistry(PathRegistry): + __slots__ = ( + "prop", + "parent", + "path", + "natural_path", + "has_entity", + "entity", + "mapper", + "_wildcard_path_loader_key", + "_default_path_loader_key", + "_loader_key", + "is_unnatural", + ) + inherit_cache = True + is_property = True + + prop: StrategizedProperty[Any] + mapper: Optional[Mapper[Any]] + entity: Optional[_InternalEntityType[Any]] + + def __init__( + self, parent: AbstractEntityRegistry, prop: StrategizedProperty[Any] + ): + + # restate this path in terms of the + # given StrategizedProperty's parent. + insp = cast("_InternalEntityType[Any]", parent[-1]) + natural_parent: AbstractEntityRegistry = parent + + # inherit "is_unnatural" from the parent + self.is_unnatural = parent.parent.is_unnatural or bool( + parent.mapper.inherits + ) + + if not insp.is_aliased_class or insp._use_mapper_path: # type: ignore + parent = natural_parent = parent.parent[prop.parent] + elif ( + insp.is_aliased_class + and insp.with_polymorphic_mappers + and prop.parent in insp.with_polymorphic_mappers + ): + subclass_entity: _InternalEntityType[Any] = parent[-1]._entity_for_mapper(prop.parent) # type: ignore # noqa: E501 + parent = parent.parent[subclass_entity] + + # when building a path where with_polymorphic() is in use, + # special logic to determine the "natural path" when subclass + # entities are used. + # + # here we are trying to distinguish between a path that starts + # on a the with_polymorhpic entity vs. 
one that starts on a + # normal entity that introduces a with_polymorphic() in the + # middle using of_type(): + # + # # as in test_polymorphic_rel-> + # # test_subqueryload_on_subclass_uses_path_correctly + # wp = with_polymorphic(RegularEntity, "*") + # sess.query(wp).options(someload(wp.SomeSubEntity.foos)) + # + # vs + # + # # as in test_relationship->JoinedloadWPolyOfTypeContinued + # wp = with_polymorphic(SomeFoo, "*") + # sess.query(RegularEntity).options( + # someload(RegularEntity.foos.of_type(wp)) + # .someload(wp.SubFoo.bar) + # ) + # + # in the former case, the Query as it generates a path that we + # want to match will be in terms of the with_polymorphic at the + # beginning. in the latter case, Query will generate simple + # paths that don't know about this with_polymorphic, so we must + # use a separate natural path. + # + # + if parent.parent: + natural_parent = parent.parent[subclass_entity.mapper] + self.is_unnatural = True + else: + natural_parent = parent + elif ( + natural_parent.parent + and insp.is_aliased_class + and prop.parent # this should always be the case here + is not insp.mapper + and insp.mapper.isa(prop.parent) + ): + natural_parent = parent.parent[prop.parent] + + self.prop = prop + self.parent = parent + self.path = parent.path + (prop,) + self.natural_path = natural_parent.natural_path + (prop,) + + self.has_entity = prop._links_to_entity + if prop._is_relationship: + if TYPE_CHECKING: + assert isinstance(prop, RelationshipProperty) + self.entity = prop.entity + self.mapper = prop.mapper + else: + self.entity = None + self.mapper = None + + self._wildcard_path_loader_key = ( + "loader", + parent.natural_path + self.prop._wildcard_token, + ) + self._default_path_loader_key = self.prop._default_path_loader_key + self._loader_key = ("loader", self.natural_path) + + def _truncate_recursive(self) -> PropRegistry: + earliest = None + for i, token in enumerate(reversed(self.path[:-1])): + if token is self.prop: + earliest = i + + if earliest is None: + return self + else: + return self.coerce(self.path[0 : -(earliest + 1)]) # type: ignore + + @property + def entity_path(self) -> AbstractEntityRegistry: + assert self.entity is not None + return self[self.entity] + + def _getitem( + self, entity: Union[int, slice, _InternalEntityType[Any]] + ) -> Union[AbstractEntityRegistry, _PathElementType, _PathRepresentation]: + if isinstance(entity, (int, slice)): + return self.path[entity] + else: + return SlotsEntityRegistry(self, entity) + + if not TYPE_CHECKING: + __getitem__ = _getitem + + +class AbstractEntityRegistry(CreatesToken): + __slots__ = ( + "key", + "parent", + "is_aliased_class", + "path", + "entity", + "natural_path", + ) + + has_entity = True + is_entity = True + + parent: Union[RootRegistry, PropRegistry] + key: _InternalEntityType[Any] + entity: _InternalEntityType[Any] + is_aliased_class: bool + + def __init__( + self, + parent: Union[RootRegistry, PropRegistry], + entity: _InternalEntityType[Any], + ): + self.key = entity + self.parent = parent + self.is_aliased_class = entity.is_aliased_class + self.entity = entity + self.path = parent.path + (entity,) + + # the "natural path" is the path that we get when Query is traversing + # from the lead entities into the various relationships; it corresponds + # to the structure of mappers and relationships. when we are given a + # path that comes from loader options, as of 1.3 it can have ac-hoc + # with_polymorphic() and other AliasedInsp objects inside of it, which + # are usually not present in mappings. 
So here we track both the + # "enhanced" path in self.path and the "natural" path that doesn't + # include those objects so these two traversals can be matched up. + + # the test here for "(self.is_aliased_class or parent.is_unnatural)" + # are to avoid the more expensive conditional logic that follows if we + # know we don't have to do it. This conditional can just as well be + # "if parent.path:", it just is more function calls. + # + # This is basically the only place that the "is_unnatural" flag + # actually changes behavior. + if parent.path and (self.is_aliased_class or parent.is_unnatural): + # this is an infrequent code path used only for loader strategies + # that also make use of of_type(). + if entity.mapper.isa(parent.natural_path[-1].mapper): # type: ignore # noqa: E501 + self.natural_path = parent.natural_path + (entity.mapper,) + else: + self.natural_path = parent.natural_path + ( + parent.natural_path[-1].entity, # type: ignore + ) + # it seems to make sense that since these paths get mixed up + # with statements that are cached or not, we should make + # sure the natural path is cacheable across different occurrences + # of equivalent AliasedClass objects. however, so far this + # does not seem to be needed for whatever reason. + # elif not parent.path and self.is_aliased_class: + # self.natural_path = (self.entity._generate_cache_key()[0], ) + else: + self.natural_path = self.path + + def _truncate_recursive(self) -> AbstractEntityRegistry: + return self.parent._truncate_recursive()[self.entity] + + @property + def root_entity(self) -> _InternalEntityType[Any]: + return self.odd_element(0) + + @property + def entity_path(self) -> PathRegistry: + return self + + @property + def mapper(self) -> Mapper[Any]: + return self.entity.mapper + + def __bool__(self) -> bool: + return True + + def _getitem( + self, entity: Any + ) -> Union[_PathElementType, _PathRepresentation, PathRegistry]: + if isinstance(entity, (int, slice)): + return self.path[entity] + elif entity in PathToken._intern: + return TokenRegistry(self, PathToken._intern[entity]) + else: + return PropRegistry(self, entity) + + if not TYPE_CHECKING: + __getitem__ = _getitem + + +class SlotsEntityRegistry(AbstractEntityRegistry): + # for aliased class, return lightweight, no-cycles created + # version + inherit_cache = True + + +class _ERDict(Dict[Any, Any]): + def __init__(self, registry: CachingEntityRegistry): + self.registry = registry + + def __missing__(self, key: Any) -> PropRegistry: + self[key] = item = PropRegistry(self.registry, key) + + return item + + +class CachingEntityRegistry(AbstractEntityRegistry): + # for long lived mapper, return dict based caching + # version that creates reference cycles + + __slots__ = ("_cache",) + + inherit_cache = True + + def __init__( + self, + parent: Union[RootRegistry, PropRegistry], + entity: _InternalEntityType[Any], + ): + super().__init__(parent, entity) + self._cache = _ERDict(self) + + def pop(self, key: Any, default: Any) -> Any: + return self._cache.pop(key, default) + + def _getitem(self, entity: Any) -> Any: + if isinstance(entity, (int, slice)): + return self.path[entity] + elif isinstance(entity, PathToken): + return TokenRegistry(self, entity) + else: + return self._cache[entity] + + if not TYPE_CHECKING: + __getitem__ = _getitem + + +if TYPE_CHECKING: + + def path_is_entity( + path: PathRegistry, + ) -> TypeGuard[AbstractEntityRegistry]: ... + + def path_is_property(path: PathRegistry) -> TypeGuard[PropRegistry]: ... 
+ +else: + path_is_entity = operator.attrgetter("is_entity") + path_is_property = operator.attrgetter("is_property") diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/persistence.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/persistence.py new file mode 100644 index 00000000..369fc599 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/persistence.py @@ -0,0 +1,1782 @@ +# orm/persistence.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +"""private module containing functions used to emit INSERT, UPDATE +and DELETE statements on behalf of a :class:`_orm.Mapper` and its descending +mappers. + +The functions here are called only by the unit of work functions +in unitofwork.py. + +""" +from __future__ import annotations + +from itertools import chain +from itertools import groupby +from itertools import zip_longest +import operator + +from . import attributes +from . import exc as orm_exc +from . import loading +from . import sync +from .base import state_str +from .. import exc as sa_exc +from .. import future +from .. import sql +from .. import util +from ..engine import cursor as _cursor +from ..sql import operators +from ..sql.elements import BooleanClauseList +from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL + + +def save_obj(base_mapper, states, uowtransaction, single=False): + """Issue ``INSERT`` and/or ``UPDATE`` statements for a list + of objects. + + This is called within the context of a UOWTransaction during a + flush operation, given a list of states to be flushed. The + base mapper in an inheritance hierarchy handles the inserts/ + updates for all descendant mappers. + + """ + + # if batch=false, call _save_obj separately for each object + if not single and not base_mapper.batch: + for state in _sort_states(base_mapper, states): + save_obj(base_mapper, [state], uowtransaction, single=True) + return + + states_to_update = [] + states_to_insert = [] + + for ( + state, + dict_, + mapper, + connection, + has_identity, + row_switch, + update_version_id, + ) in _organize_states_for_save(base_mapper, states, uowtransaction): + if has_identity or row_switch: + states_to_update.append( + (state, dict_, mapper, connection, update_version_id) + ) + else: + states_to_insert.append((state, dict_, mapper, connection)) + + for table, mapper in base_mapper._sorted_tables.items(): + if table not in mapper._pks_by_table: + continue + insert = _collect_insert_commands(table, states_to_insert) + + update = _collect_update_commands( + uowtransaction, table, states_to_update + ) + + _emit_update_statements( + base_mapper, + uowtransaction, + mapper, + table, + update, + ) + + _emit_insert_statements( + base_mapper, + uowtransaction, + mapper, + table, + insert, + ) + + _finalize_insert_update_commands( + base_mapper, + uowtransaction, + chain( + ( + (state, state_dict, mapper, connection, False) + for (state, state_dict, mapper, connection) in states_to_insert + ), + ( + (state, state_dict, mapper, connection, True) + for ( + state, + state_dict, + mapper, + connection, + update_version_id, + ) in states_to_update + ), + ), + ) + + +def post_update(base_mapper, states, uowtransaction, post_update_cols): + """Issue UPDATE statements on behalf of a relationship() which + specifies post_update. 
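+
+    A mapping that reaches this path would resemble the following
+    sketch (names and the usual Declarative imports are assumed for
+    illustration only)::
+
+        class Widget(Base):
+            __tablename__ = "widget"
+
+            id = mapped_column(Integer, primary_key=True)
+            favorite_entry_id = mapped_column(ForeignKey("entry.id"))
+            favorite_entry = relationship(
+                "Entry",
+                foreign_keys=favorite_entry_id,
+                post_update=True,
+            )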
+ + """ + + states_to_update = list( + _organize_states_for_post_update(base_mapper, states, uowtransaction) + ) + + for table, mapper in base_mapper._sorted_tables.items(): + if table not in mapper._pks_by_table: + continue + + update = ( + ( + state, + state_dict, + sub_mapper, + connection, + ( + mapper._get_committed_state_attr_by_column( + state, state_dict, mapper.version_id_col + ) + if mapper.version_id_col is not None + else None + ), + ) + for state, state_dict, sub_mapper, connection in states_to_update + if table in sub_mapper._pks_by_table + ) + + update = _collect_post_update_commands( + base_mapper, uowtransaction, table, update, post_update_cols + ) + + _emit_post_update_statements( + base_mapper, + uowtransaction, + mapper, + table, + update, + ) + + +def delete_obj(base_mapper, states, uowtransaction): + """Issue ``DELETE`` statements for a list of objects. + + This is called within the context of a UOWTransaction during a + flush operation. + + """ + + states_to_delete = list( + _organize_states_for_delete(base_mapper, states, uowtransaction) + ) + + table_to_mapper = base_mapper._sorted_tables + + for table in reversed(list(table_to_mapper.keys())): + mapper = table_to_mapper[table] + if table not in mapper._pks_by_table: + continue + elif mapper.inherits and mapper.passive_deletes: + continue + + delete = _collect_delete_commands( + base_mapper, uowtransaction, table, states_to_delete + ) + + _emit_delete_statements( + base_mapper, + uowtransaction, + mapper, + table, + delete, + ) + + for ( + state, + state_dict, + mapper, + connection, + update_version_id, + ) in states_to_delete: + mapper.dispatch.after_delete(mapper, connection, state) + + +def _organize_states_for_save(base_mapper, states, uowtransaction): + """Make an initial pass across a set of states for INSERT or + UPDATE. + + This includes splitting out into distinct lists for + each, calling before_insert/before_update, obtaining + key information for each state including its dictionary, + mapper, the connection to use for the execution per state, + and the identity flag. + + """ + + for state, dict_, mapper, connection in _connections_for_states( + base_mapper, uowtransaction, states + ): + has_identity = bool(state.key) + + instance_key = state.key or mapper._identity_key_from_state(state) + + row_switch = update_version_id = None + + # call before_XXX extensions + if not has_identity: + mapper.dispatch.before_insert(mapper, connection, state) + else: + mapper.dispatch.before_update(mapper, connection, state) + + if mapper._validate_polymorphic_identity: + mapper._validate_polymorphic_identity(mapper, state, dict_) + + # detect if we have a "pending" instance (i.e. has + # no instance_key attached to it), and another instance + # with the same identity key already exists as persistent. + # convert to an UPDATE if so. + if ( + not has_identity + and instance_key in uowtransaction.session.identity_map + ): + instance = uowtransaction.session.identity_map[instance_key] + existing = attributes.instance_state(instance) + + if not uowtransaction.was_already_deleted(existing): + if not uowtransaction.is_deleted(existing): + util.warn( + "New instance %s with identity key %s conflicts " + "with persistent instance %s" + % (state_str(state), instance_key, state_str(existing)) + ) + else: + base_mapper._log_debug( + "detected row switch for identity %s. 
" + "will update %s, remove %s from " + "transaction", + instance_key, + state_str(state), + state_str(existing), + ) + + # remove the "delete" flag from the existing element + uowtransaction.remove_state_actions(existing) + row_switch = existing + + if (has_identity or row_switch) and mapper.version_id_col is not None: + update_version_id = mapper._get_committed_state_attr_by_column( + row_switch if row_switch else state, + row_switch.dict if row_switch else dict_, + mapper.version_id_col, + ) + + yield ( + state, + dict_, + mapper, + connection, + has_identity, + row_switch, + update_version_id, + ) + + +def _organize_states_for_post_update(base_mapper, states, uowtransaction): + """Make an initial pass across a set of states for UPDATE + corresponding to post_update. + + This includes obtaining key information for each state + including its dictionary, mapper, the connection to use for + the execution per state. + + """ + return _connections_for_states(base_mapper, uowtransaction, states) + + +def _organize_states_for_delete(base_mapper, states, uowtransaction): + """Make an initial pass across a set of states for DELETE. + + This includes calling out before_delete and obtaining + key information for each state including its dictionary, + mapper, the connection to use for the execution per state. + + """ + for state, dict_, mapper, connection in _connections_for_states( + base_mapper, uowtransaction, states + ): + mapper.dispatch.before_delete(mapper, connection, state) + + if mapper.version_id_col is not None: + update_version_id = mapper._get_committed_state_attr_by_column( + state, dict_, mapper.version_id_col + ) + else: + update_version_id = None + + yield (state, dict_, mapper, connection, update_version_id) + + +def _collect_insert_commands( + table, + states_to_insert, + *, + bulk=False, + return_defaults=False, + render_nulls=False, + include_bulk_keys=(), +): + """Identify sets of values to use in INSERT statements for a + list of states. + + """ + for state, state_dict, mapper, connection in states_to_insert: + if table not in mapper._pks_by_table: + continue + + params = {} + value_params = {} + + propkey_to_col = mapper._propkey_to_col[table] + + eval_none = mapper._insert_cols_evaluating_none[table] + + for propkey in set(propkey_to_col).intersection(state_dict): + value = state_dict[propkey] + col = propkey_to_col[propkey] + if value is None and col not in eval_none and not render_nulls: + continue + elif not bulk and ( + hasattr(value, "__clause_element__") + or isinstance(value, sql.ClauseElement) + ): + value_params[col] = ( + value.__clause_element__() + if hasattr(value, "__clause_element__") + else value + ) + else: + params[col.key] = value + + if not bulk: + # for all the columns that have no default and we don't have + # a value and where "None" is not a special value, add + # explicit None to the INSERT. 
This is a legacy behavior + # which might be worth removing, as it should not be necessary + # and also produces confusion, given that "missing" and None + # now have distinct meanings + for colkey in ( + mapper._insert_cols_as_none[table] + .difference(params) + .difference([c.key for c in value_params]) + ): + params[colkey] = None + + if not bulk or return_defaults: + # params are in terms of Column key objects, so + # compare to pk_keys_by_table + has_all_pks = mapper._pk_keys_by_table[table].issubset(params) + + if mapper.base_mapper._prefer_eager_defaults( + connection.dialect, table + ): + has_all_defaults = mapper._server_default_col_keys[ + table + ].issubset(params) + else: + has_all_defaults = True + else: + has_all_defaults = has_all_pks = True + + if ( + mapper.version_id_generator is not False + and mapper.version_id_col is not None + and mapper.version_id_col in mapper._cols_by_table[table] + ): + params[mapper.version_id_col.key] = mapper.version_id_generator( + None + ) + + if bulk: + if mapper._set_polymorphic_identity: + params.setdefault( + mapper._polymorphic_attr_key, mapper.polymorphic_identity + ) + + if include_bulk_keys: + params.update((k, state_dict[k]) for k in include_bulk_keys) + + yield ( + state, + state_dict, + params, + mapper, + connection, + value_params, + has_all_pks, + has_all_defaults, + ) + + +def _collect_update_commands( + uowtransaction, + table, + states_to_update, + *, + bulk=False, + use_orm_update_stmt=None, + include_bulk_keys=(), +): + """Identify sets of values to use in UPDATE statements for a + list of states. + + This function works intricately with the history system + to determine exactly what values should be updated + as well as how the row should be matched within an UPDATE + statement. Includes some tricky scenarios where the primary + key of an object might have been changed. 
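+
+    E.g. (an illustrative sketch; ``User`` and the session are
+    hypothetical), after a primary key attribute is reassigned on a
+    persistent object::
+
+        user = session.get(User, 1)
+        user.id = 5
+        session.flush()
+
+    the UPDATE produced from these commands locates the row using the
+    *old* primary key value in its WHERE clause, while the new value
+    appears in the SET clause.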
+ + """ + + for ( + state, + state_dict, + mapper, + connection, + update_version_id, + ) in states_to_update: + if table not in mapper._pks_by_table: + continue + + pks = mapper._pks_by_table[table] + + if use_orm_update_stmt is not None: + # TODO: ordered values, etc + value_params = use_orm_update_stmt._values + else: + value_params = {} + + propkey_to_col = mapper._propkey_to_col[table] + + if bulk: + # keys here are mapped attribute keys, so + # look at mapper attribute keys for pk + params = { + propkey_to_col[propkey].key: state_dict[propkey] + for propkey in set(propkey_to_col) + .intersection(state_dict) + .difference(mapper._pk_attr_keys_by_table[table]) + } + has_all_defaults = True + else: + params = {} + for propkey in set(propkey_to_col).intersection( + state.committed_state + ): + value = state_dict[propkey] + col = propkey_to_col[propkey] + + if hasattr(value, "__clause_element__") or isinstance( + value, sql.ClauseElement + ): + value_params[col] = ( + value.__clause_element__() + if hasattr(value, "__clause_element__") + else value + ) + # guard against values that generate non-__nonzero__ + # objects for __eq__() + elif ( + state.manager[propkey].impl.is_equal( + value, state.committed_state[propkey] + ) + is not True + ): + params[col.key] = value + + if mapper.base_mapper.eager_defaults is True: + has_all_defaults = ( + mapper._server_onupdate_default_col_keys[table] + ).issubset(params) + else: + has_all_defaults = True + + if ( + update_version_id is not None + and mapper.version_id_col in mapper._cols_by_table[table] + ): + if not bulk and not (params or value_params): + # HACK: check for history in other tables, in case the + # history is only in a different table than the one + # where the version_id_col is. This logic was lost + # from 0.9 -> 1.0.0 and restored in 1.0.6. + for prop in mapper._columntoproperty.values(): + history = state.manager[prop.key].impl.get_history( + state, state_dict, attributes.PASSIVE_NO_INITIALIZE + ) + if history.added: + break + else: + # no net change, break + continue + + col = mapper.version_id_col + no_params = not params and not value_params + params[col._label] = update_version_id + + if ( + bulk or col.key not in params + ) and mapper.version_id_generator is not False: + val = mapper.version_id_generator(update_version_id) + params[col.key] = val + elif mapper.version_id_generator is False and no_params: + # no version id generator, no values set on the table, + # and version id wasn't manually incremented. 
+ # set version id to itself so we get an UPDATE + # statement + params[col.key] = update_version_id + + elif not (params or value_params): + continue + + has_all_pks = True + expect_pk_cascaded = False + if bulk: + # keys here are mapped attribute keys, so + # look at mapper attribute keys for pk + pk_params = { + propkey_to_col[propkey]._label: state_dict.get(propkey) + for propkey in set(propkey_to_col).intersection( + mapper._pk_attr_keys_by_table[table] + ) + } + if util.NONE_SET.intersection(pk_params.values()): + raise sa_exc.InvalidRequestError( + f"No primary key value supplied for column(s) " + f"""{ + ', '.join( + str(c) for c in pks if pk_params[c._label] is None + ) + }; """ + "per-row ORM Bulk UPDATE by Primary Key requires that " + "records contain primary key values", + code="bupq", + ) + + else: + pk_params = {} + for col in pks: + propkey = mapper._columntoproperty[col].key + + history = state.manager[propkey].impl.get_history( + state, state_dict, attributes.PASSIVE_OFF + ) + + if history.added: + if ( + not history.deleted + or ("pk_cascaded", state, col) + in uowtransaction.attributes + ): + expect_pk_cascaded = True + pk_params[col._label] = history.added[0] + params.pop(col.key, None) + else: + # else, use the old value to locate the row + pk_params[col._label] = history.deleted[0] + if col in value_params: + has_all_pks = False + else: + pk_params[col._label] = history.unchanged[0] + if pk_params[col._label] is None: + raise orm_exc.FlushError( + "Can't update table %s using NULL for primary " + "key value on column %s" % (table, col) + ) + + if include_bulk_keys: + params.update((k, state_dict[k]) for k in include_bulk_keys) + + if params or value_params: + params.update(pk_params) + yield ( + state, + state_dict, + params, + mapper, + connection, + value_params, + has_all_defaults, + has_all_pks, + ) + elif expect_pk_cascaded: + # no UPDATE occurs on this table, but we expect that CASCADE rules + # have changed the primary key of the row; propagate this event to + # other columns that expect to have been modified. this normally + # occurs after the UPDATE is emitted however we invoke it here + # explicitly in the absence of our invoking an UPDATE + for m, equated_pairs in mapper._table_to_equated[table]: + sync.populate( + state, + m, + state, + m, + equated_pairs, + uowtransaction, + mapper.passive_updates, + ) + + +def _collect_post_update_commands( + base_mapper, uowtransaction, table, states_to_update, post_update_cols +): + """Identify sets of values to use in UPDATE statements for a + list of states within a post_update operation. 
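+
+    Yields ``(state, state_dict, mapper, connection, params)`` tuples,
+    where ``params`` carries the primary key values plus only those
+    columns named in ``post_update_cols`` or bearing an ``onupdate``
+    default that show pending changes (and the version id column, when
+    versioning is in effect).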
+ + """ + + for ( + state, + state_dict, + mapper, + connection, + update_version_id, + ) in states_to_update: + # assert table in mapper._pks_by_table + + pks = mapper._pks_by_table[table] + params = {} + hasdata = False + + for col in mapper._cols_by_table[table]: + if col in pks: + params[col._label] = mapper._get_state_attr_by_column( + state, state_dict, col, passive=attributes.PASSIVE_OFF + ) + + elif col in post_update_cols or col.onupdate is not None: + prop = mapper._columntoproperty[col] + history = state.manager[prop.key].impl.get_history( + state, state_dict, attributes.PASSIVE_NO_INITIALIZE + ) + if history.added: + value = history.added[0] + params[col.key] = value + hasdata = True + if hasdata: + if ( + update_version_id is not None + and mapper.version_id_col in mapper._cols_by_table[table] + ): + col = mapper.version_id_col + params[col._label] = update_version_id + + if ( + bool(state.key) + and col.key not in params + and mapper.version_id_generator is not False + ): + val = mapper.version_id_generator(update_version_id) + params[col.key] = val + yield state, state_dict, mapper, connection, params + + +def _collect_delete_commands( + base_mapper, uowtransaction, table, states_to_delete +): + """Identify values to use in DELETE statements for a list of + states to be deleted.""" + + for ( + state, + state_dict, + mapper, + connection, + update_version_id, + ) in states_to_delete: + if table not in mapper._pks_by_table: + continue + + params = {} + for col in mapper._pks_by_table[table]: + params[col.key] = value = ( + mapper._get_committed_state_attr_by_column( + state, state_dict, col + ) + ) + if value is None: + raise orm_exc.FlushError( + "Can't delete from table %s " + "using NULL for primary " + "key value on column %s" % (table, col) + ) + + if ( + update_version_id is not None + and mapper.version_id_col in mapper._cols_by_table[table] + ): + params[mapper.version_id_col.key] = update_version_id + yield params, connection + + +def _emit_update_statements( + base_mapper, + uowtransaction, + mapper, + table, + update, + *, + bookkeeping=True, + use_orm_update_stmt=None, + enable_check_rowcount=True, +): + """Emit UPDATE statements corresponding to value lists collected + by _collect_update_commands().""" + + needs_version_id = ( + mapper.version_id_col is not None + and mapper.version_id_col in mapper._cols_by_table[table] + ) + + execution_options = {"compiled_cache": base_mapper._compiled_cache} + + def update_stmt(existing_stmt=None): + clauses = BooleanClauseList._construct_raw(operators.and_) + + for col in mapper._pks_by_table[table]: + clauses._append_inplace( + col == sql.bindparam(col._label, type_=col.type) + ) + + if needs_version_id: + clauses._append_inplace( + mapper.version_id_col + == sql.bindparam( + mapper.version_id_col._label, + type_=mapper.version_id_col.type, + ) + ) + + if existing_stmt is not None: + stmt = existing_stmt.where(clauses) + else: + stmt = table.update().where(clauses) + return stmt + + if use_orm_update_stmt is not None: + cached_stmt = update_stmt(use_orm_update_stmt) + + else: + cached_stmt = base_mapper._memo(("update", table), update_stmt) + + for ( + (connection, paramkeys, hasvalue, has_all_defaults, has_all_pks), + records, + ) in groupby( + update, + lambda rec: ( + rec[4], # connection + set(rec[2]), # set of parameter keys + bool(rec[5]), # whether or not we have "value" parameters + rec[6], # has_all_defaults + rec[7], # has all pks + ), + ): + rows = 0 + records = list(records) + + statement = cached_stmt + + if 
use_orm_update_stmt is not None: + statement = statement._annotate( + { + "_emit_update_table": table, + "_emit_update_mapper": mapper, + } + ) + + return_defaults = False + + if not has_all_pks: + statement = statement.return_defaults(*mapper._pks_by_table[table]) + return_defaults = True + + if ( + bookkeeping + and not has_all_defaults + and mapper.base_mapper.eager_defaults is True + # change as of #8889 - if RETURNING is not going to be used anyway, + # (applies to MySQL, MariaDB which lack UPDATE RETURNING) ensure + # we can do an executemany UPDATE which is more efficient + and table.implicit_returning + and connection.dialect.update_returning + ): + statement = statement.return_defaults( + *mapper._server_onupdate_default_cols[table] + ) + return_defaults = True + + if mapper._version_id_has_server_side_value: + statement = statement.return_defaults(mapper.version_id_col) + return_defaults = True + + assert_singlerow = connection.dialect.supports_sane_rowcount + + assert_multirow = ( + assert_singlerow + and connection.dialect.supports_sane_multi_rowcount + ) + + # change as of #8889 - if RETURNING is not going to be used anyway, + # (applies to MySQL, MariaDB which lack UPDATE RETURNING) ensure + # we can do an executemany UPDATE which is more efficient + allow_executemany = not return_defaults and not needs_version_id + + if hasvalue: + for ( + state, + state_dict, + params, + mapper, + connection, + value_params, + has_all_defaults, + has_all_pks, + ) in records: + c = connection.execute( + statement.values(value_params), + params, + execution_options=execution_options, + ) + if bookkeeping: + _postfetch( + mapper, + uowtransaction, + table, + state, + state_dict, + c, + c.context.compiled_parameters[0], + value_params, + True, + c.returned_defaults, + ) + rows += c.rowcount + check_rowcount = enable_check_rowcount and assert_singlerow + else: + if not allow_executemany: + check_rowcount = enable_check_rowcount and assert_singlerow + for ( + state, + state_dict, + params, + mapper, + connection, + value_params, + has_all_defaults, + has_all_pks, + ) in records: + c = connection.execute( + statement, params, execution_options=execution_options + ) + + # TODO: why with bookkeeping=False? + if bookkeeping: + _postfetch( + mapper, + uowtransaction, + table, + state, + state_dict, + c, + c.context.compiled_parameters[0], + value_params, + True, + c.returned_defaults, + ) + rows += c.rowcount + else: + multiparams = [rec[2] for rec in records] + + check_rowcount = enable_check_rowcount and ( + assert_multirow + or (assert_singlerow and len(multiparams) == 1) + ) + + c = connection.execute( + statement, multiparams, execution_options=execution_options + ) + + rows += c.rowcount + + for ( + state, + state_dict, + params, + mapper, + connection, + value_params, + has_all_defaults, + has_all_pks, + ) in records: + if bookkeeping: + _postfetch( + mapper, + uowtransaction, + table, + state, + state_dict, + c, + c.context.compiled_parameters[0], + value_params, + True, + ( + c.returned_defaults + if not c.context.executemany + else None + ), + ) + + if check_rowcount: + if rows != len(records): + raise orm_exc.StaleDataError( + "UPDATE statement on table '%s' expected to " + "update %d row(s); %d were matched." + % (table.description, len(records), rows) + ) + + elif needs_version_id: + util.warn( + "Dialect %s does not support updated rowcount " + "- versioning cannot be verified." 
+ % c.dialect.dialect_description + ) + + +def _emit_insert_statements( + base_mapper, + uowtransaction, + mapper, + table, + insert, + *, + bookkeeping=True, + use_orm_insert_stmt=None, + execution_options=None, +): + """Emit INSERT statements corresponding to value lists collected + by _collect_insert_commands().""" + + if use_orm_insert_stmt is not None: + cached_stmt = use_orm_insert_stmt + exec_opt = util.EMPTY_DICT + + # if a user query with RETURNING was passed, we definitely need + # to use RETURNING. + returning_is_required_anyway = bool(use_orm_insert_stmt._returning) + deterministic_results_reqd = ( + returning_is_required_anyway + and use_orm_insert_stmt._sort_by_parameter_order + ) or bookkeeping + else: + returning_is_required_anyway = False + deterministic_results_reqd = bookkeeping + cached_stmt = base_mapper._memo(("insert", table), table.insert) + exec_opt = {"compiled_cache": base_mapper._compiled_cache} + + if execution_options: + execution_options = util.EMPTY_DICT.merge_with( + exec_opt, execution_options + ) + else: + execution_options = exec_opt + + return_result = None + + for ( + (connection, _, hasvalue, has_all_pks, has_all_defaults), + records, + ) in groupby( + insert, + lambda rec: ( + rec[4], # connection + set(rec[2]), # parameter keys + bool(rec[5]), # whether we have "value" parameters + rec[6], + rec[7], + ), + ): + statement = cached_stmt + + if use_orm_insert_stmt is not None: + statement = statement._annotate( + { + "_emit_insert_table": table, + "_emit_insert_mapper": mapper, + } + ) + + if ( + ( + not bookkeeping + or ( + has_all_defaults + or not base_mapper._prefer_eager_defaults( + connection.dialect, table + ) + or not table.implicit_returning + or not connection.dialect.insert_returning + ) + ) + and not returning_is_required_anyway + and has_all_pks + and not hasvalue + ): + # the "we don't need newly generated values back" section. + # here we have all the PKs, all the defaults or we don't want + # to fetch them, or the dialect doesn't support RETURNING at all + # so we have to post-fetch / use lastrowid anyway. 
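+            # Illustrative sketch (added note, not upstream commentary):
+            # this branch reduces to a plain executemany, roughly
+            #     connection.execute(table.insert(), [{...}, {...}, ...])
+            # with primary key values already present in each params dict.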
+ records = list(records) + multiparams = [rec[2] for rec in records] + + result = connection.execute( + statement, multiparams, execution_options=execution_options + ) + if bookkeeping: + for ( + ( + state, + state_dict, + params, + mapper_rec, + conn, + value_params, + has_all_pks, + has_all_defaults, + ), + last_inserted_params, + ) in zip(records, result.context.compiled_parameters): + if state: + _postfetch( + mapper_rec, + uowtransaction, + table, + state, + state_dict, + result, + last_inserted_params, + value_params, + False, + ( + result.returned_defaults + if not result.context.executemany + else None + ), + ) + else: + _postfetch_bulk_save(mapper_rec, state_dict, table) + + else: + # here, we need defaults and/or pk values back or we otherwise + # know that we are using RETURNING in any case + + records = list(records) + + if returning_is_required_anyway or ( + table.implicit_returning and not hasvalue and len(records) > 1 + ): + if ( + deterministic_results_reqd + and connection.dialect.insert_executemany_returning_sort_by_parameter_order # noqa: E501 + ) or ( + not deterministic_results_reqd + and connection.dialect.insert_executemany_returning + ): + do_executemany = True + elif returning_is_required_anyway: + if deterministic_results_reqd: + dt = " with RETURNING and sort by parameter order" + else: + dt = " with RETURNING" + raise sa_exc.InvalidRequestError( + f"Can't use explicit RETURNING for bulk INSERT " + f"operation with " + f"{connection.dialect.dialect_description} backend; " + f"executemany{dt} is not enabled for this dialect." + ) + else: + do_executemany = False + else: + do_executemany = False + + if use_orm_insert_stmt is None: + if ( + not has_all_defaults + and base_mapper._prefer_eager_defaults( + connection.dialect, table + ) + ): + statement = statement.return_defaults( + *mapper._server_default_cols[table], + sort_by_parameter_order=bookkeeping, + ) + + if mapper.version_id_col is not None: + statement = statement.return_defaults( + mapper.version_id_col, + sort_by_parameter_order=bookkeeping, + ) + elif do_executemany: + statement = statement.return_defaults( + *table.primary_key, sort_by_parameter_order=bookkeeping + ) + + if do_executemany: + multiparams = [rec[2] for rec in records] + + result = connection.execute( + statement, multiparams, execution_options=execution_options + ) + + if use_orm_insert_stmt is not None: + if return_result is None: + return_result = result + else: + return_result = return_result.splice_vertically(result) + + if bookkeeping: + for ( + ( + state, + state_dict, + params, + mapper_rec, + conn, + value_params, + has_all_pks, + has_all_defaults, + ), + last_inserted_params, + inserted_primary_key, + returned_defaults, + ) in zip_longest( + records, + result.context.compiled_parameters, + result.inserted_primary_key_rows, + result.returned_defaults_rows or (), + ): + if inserted_primary_key is None: + # this is a real problem and means that we didn't + # get back as many PK rows. we can't continue + # since this indicates PK rows were missing, which + # means we likely mis-populated records starting + # at that point with incorrectly matched PK + # values. + raise orm_exc.FlushError( + "Multi-row INSERT statement for %s did not " + "produce " + "the correct number of INSERTed rows for " + "RETURNING. Ensure there are no triggers or " + "special driver issues preventing INSERT from " + "functioning properly." 
% mapper_rec + ) + + for pk, col in zip( + inserted_primary_key, + mapper._pks_by_table[table], + ): + prop = mapper_rec._columntoproperty[col] + if state_dict.get(prop.key) is None: + state_dict[prop.key] = pk + + if state: + _postfetch( + mapper_rec, + uowtransaction, + table, + state, + state_dict, + result, + last_inserted_params, + value_params, + False, + returned_defaults, + ) + else: + _postfetch_bulk_save(mapper_rec, state_dict, table) + else: + assert not returning_is_required_anyway + + for ( + state, + state_dict, + params, + mapper_rec, + connection, + value_params, + has_all_pks, + has_all_defaults, + ) in records: + if value_params: + result = connection.execute( + statement.values(value_params), + params, + execution_options=execution_options, + ) + else: + result = connection.execute( + statement, + params, + execution_options=execution_options, + ) + + primary_key = result.inserted_primary_key + if primary_key is None: + raise orm_exc.FlushError( + "Single-row INSERT statement for %s " + "did not produce a " + "new primary key result " + "being invoked. Ensure there are no triggers or " + "special driver issues preventing INSERT from " + "functioning properly." % (mapper_rec,) + ) + for pk, col in zip( + primary_key, mapper._pks_by_table[table] + ): + prop = mapper_rec._columntoproperty[col] + if ( + col in value_params + or state_dict.get(prop.key) is None + ): + state_dict[prop.key] = pk + if bookkeeping: + if state: + _postfetch( + mapper_rec, + uowtransaction, + table, + state, + state_dict, + result, + result.context.compiled_parameters[0], + value_params, + False, + ( + result.returned_defaults + if not result.context.executemany + else None + ), + ) + else: + _postfetch_bulk_save(mapper_rec, state_dict, table) + + if use_orm_insert_stmt is not None: + if return_result is None: + return _cursor.null_dml_result() + else: + return return_result + + +def _emit_post_update_statements( + base_mapper, uowtransaction, mapper, table, update +): + """Emit UPDATE statements corresponding to value lists collected + by _collect_post_update_commands().""" + + execution_options = {"compiled_cache": base_mapper._compiled_cache} + + needs_version_id = ( + mapper.version_id_col is not None + and mapper.version_id_col in mapper._cols_by_table[table] + ) + + def update_stmt(): + clauses = BooleanClauseList._construct_raw(operators.and_) + + for col in mapper._pks_by_table[table]: + clauses._append_inplace( + col == sql.bindparam(col._label, type_=col.type) + ) + + if needs_version_id: + clauses._append_inplace( + mapper.version_id_col + == sql.bindparam( + mapper.version_id_col._label, + type_=mapper.version_id_col.type, + ) + ) + + stmt = table.update().where(clauses) + + return stmt + + statement = base_mapper._memo(("post_update", table), update_stmt) + + if mapper._version_id_has_server_side_value: + statement = statement.return_defaults(mapper.version_id_col) + + # execute each UPDATE in the order according to the original + # list of states to guarantee row access order, but + # also group them into common (connection, cols) sets + # to support executemany(). 
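+    # Illustrative (added note): the grouping key below is
+    # (connection, parameter-key-set); records updating the same set of
+    # columns on the same connection fall into one batch and may share a
+    # single executemany() call.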
+ for key, records in groupby( + update, + lambda rec: (rec[3], set(rec[4])), # connection # parameter keys + ): + rows = 0 + + records = list(records) + connection = key[0] + + assert_singlerow = connection.dialect.supports_sane_rowcount + assert_multirow = ( + assert_singlerow + and connection.dialect.supports_sane_multi_rowcount + ) + allow_executemany = not needs_version_id or assert_multirow + + if not allow_executemany: + check_rowcount = assert_singlerow + for state, state_dict, mapper_rec, connection, params in records: + c = connection.execute( + statement, params, execution_options=execution_options + ) + + _postfetch_post_update( + mapper_rec, + uowtransaction, + table, + state, + state_dict, + c, + c.context.compiled_parameters[0], + ) + rows += c.rowcount + else: + multiparams = [ + params + for state, state_dict, mapper_rec, conn, params in records + ] + + check_rowcount = assert_multirow or ( + assert_singlerow and len(multiparams) == 1 + ) + + c = connection.execute( + statement, multiparams, execution_options=execution_options + ) + + rows += c.rowcount + for state, state_dict, mapper_rec, connection, params in records: + _postfetch_post_update( + mapper_rec, + uowtransaction, + table, + state, + state_dict, + c, + c.context.compiled_parameters[0], + ) + + if check_rowcount: + if rows != len(records): + raise orm_exc.StaleDataError( + "UPDATE statement on table '%s' expected to " + "update %d row(s); %d were matched." + % (table.description, len(records), rows) + ) + + elif needs_version_id: + util.warn( + "Dialect %s does not support updated rowcount " + "- versioning cannot be verified." + % c.dialect.dialect_description + ) + + +def _emit_delete_statements( + base_mapper, uowtransaction, mapper, table, delete +): + """Emit DELETE statements corresponding to value lists collected + by _collect_delete_commands().""" + + need_version_id = ( + mapper.version_id_col is not None + and mapper.version_id_col in mapper._cols_by_table[table] + ) + + def delete_stmt(): + clauses = BooleanClauseList._construct_raw(operators.and_) + + for col in mapper._pks_by_table[table]: + clauses._append_inplace( + col == sql.bindparam(col.key, type_=col.type) + ) + + if need_version_id: + clauses._append_inplace( + mapper.version_id_col + == sql.bindparam( + mapper.version_id_col.key, type_=mapper.version_id_col.type + ) + ) + + return table.delete().where(clauses) + + statement = base_mapper._memo(("delete", table), delete_stmt) + for connection, recs in groupby(delete, lambda rec: rec[1]): # connection + del_objects = [params for params, connection in recs] + + execution_options = {"compiled_cache": base_mapper._compiled_cache} + expected = len(del_objects) + rows_matched = -1 + only_warn = False + + if ( + need_version_id + and not connection.dialect.supports_sane_multi_rowcount + ): + if connection.dialect.supports_sane_rowcount: + rows_matched = 0 + # execute deletes individually so that versioned + # rows can be verified + for params in del_objects: + c = connection.execute( + statement, params, execution_options=execution_options + ) + rows_matched += c.rowcount + else: + util.warn( + "Dialect %s does not support deleted rowcount " + "- versioning cannot be verified." 
+ % connection.dialect.dialect_description + ) + connection.execute( + statement, del_objects, execution_options=execution_options + ) + else: + c = connection.execute( + statement, del_objects, execution_options=execution_options + ) + + if not need_version_id: + only_warn = True + + rows_matched = c.rowcount + + if ( + base_mapper.confirm_deleted_rows + and rows_matched > -1 + and expected != rows_matched + and ( + connection.dialect.supports_sane_multi_rowcount + or len(del_objects) == 1 + ) + ): + # TODO: why does this "only warn" if versioning is turned off, + # whereas the UPDATE raises? + if only_warn: + util.warn( + "DELETE statement on table '%s' expected to " + "delete %d row(s); %d were matched. Please set " + "confirm_deleted_rows=False within the mapper " + "configuration to prevent this warning." + % (table.description, expected, rows_matched) + ) + else: + raise orm_exc.StaleDataError( + "DELETE statement on table '%s' expected to " + "delete %d row(s); %d were matched. Please set " + "confirm_deleted_rows=False within the mapper " + "configuration to prevent this warning." + % (table.description, expected, rows_matched) + ) + + +def _finalize_insert_update_commands(base_mapper, uowtransaction, states): + """finalize state on states that have been inserted or updated, + including calling after_insert/after_update events. + + """ + for state, state_dict, mapper, connection, has_identity in states: + if mapper._readonly_props: + readonly = state.unmodified_intersection( + [ + p.key + for p in mapper._readonly_props + if ( + p.expire_on_flush + and (not p.deferred or p.key in state.dict) + ) + or ( + not p.expire_on_flush + and not p.deferred + and p.key not in state.dict + ) + ] + ) + if readonly: + state._expire_attributes(state.dict, readonly) + + # if eager_defaults option is enabled, load + # all expired cols. Else if we have a version_id_col, make sure + # it isn't expired. 
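+        # Illustrative (added note): e.g. a column with a server-side
+        # default such as server_default=func.now(), when its value was
+        # not fetched via RETURNING, is loaded by the second SELECT
+        # emitted below once eager_defaults is True.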
+ toload_now = [] + + # this is specifically to emit a second SELECT for eager_defaults, + # so only if it's set to True, not "auto" + if base_mapper.eager_defaults is True: + toload_now.extend( + state._unloaded_non_object.intersection( + mapper._server_default_plus_onupdate_propkeys + ) + ) + + if ( + mapper.version_id_col is not None + and mapper.version_id_generator is False + ): + if mapper._version_id_prop.key in state.unloaded: + toload_now.extend([mapper._version_id_prop.key]) + + if toload_now: + state.key = base_mapper._identity_key_from_state(state) + stmt = future.select(mapper).set_label_style( + LABEL_STYLE_TABLENAME_PLUS_COL + ) + loading.load_on_ident( + uowtransaction.session, + stmt, + state.key, + refresh_state=state, + only_load_props=toload_now, + ) + + # call after_XXX extensions + if not has_identity: + mapper.dispatch.after_insert(mapper, connection, state) + else: + mapper.dispatch.after_update(mapper, connection, state) + + if ( + mapper.version_id_generator is False + and mapper.version_id_col is not None + ): + if state_dict[mapper._version_id_prop.key] is None: + raise orm_exc.FlushError( + "Instance does not contain a non-NULL version value" + ) + + +def _postfetch_post_update( + mapper, uowtransaction, table, state, dict_, result, params +): + needs_version_id = ( + mapper.version_id_col is not None + and mapper.version_id_col in mapper._cols_by_table[table] + ) + + if not uowtransaction.is_deleted(state): + # post updating after a regular INSERT or UPDATE, do a full postfetch + prefetch_cols = result.context.compiled.prefetch + postfetch_cols = result.context.compiled.postfetch + elif needs_version_id: + # post updating before a DELETE with a version_id_col, need to + # postfetch just version_id_col + prefetch_cols = postfetch_cols = () + else: + # post updating before a DELETE without a version_id_col, + # don't need to postfetch + return + + if needs_version_id: + prefetch_cols = list(prefetch_cols) + [mapper.version_id_col] + + refresh_flush = bool(mapper.class_manager.dispatch.refresh_flush) + if refresh_flush: + load_evt_attrs = [] + + for c in prefetch_cols: + if c.key in params and c in mapper._columntoproperty: + dict_[mapper._columntoproperty[c].key] = params[c.key] + if refresh_flush: + load_evt_attrs.append(mapper._columntoproperty[c].key) + + if refresh_flush and load_evt_attrs: + mapper.class_manager.dispatch.refresh_flush( + state, uowtransaction, load_evt_attrs + ) + + if postfetch_cols: + state._expire_attributes( + state.dict, + [ + mapper._columntoproperty[c].key + for c in postfetch_cols + if c in mapper._columntoproperty + ], + ) + + +def _postfetch( + mapper, + uowtransaction, + table, + state, + dict_, + result, + params, + value_params, + isupdate, + returned_defaults, +): + """Expire attributes in need of newly persisted database state, + after an INSERT or UPDATE statement has proceeded for that + state.""" + + prefetch_cols = result.context.compiled.prefetch + postfetch_cols = result.context.compiled.postfetch + returning_cols = result.context.compiled.effective_returning + + if ( + mapper.version_id_col is not None + and mapper.version_id_col in mapper._cols_by_table[table] + ): + prefetch_cols = list(prefetch_cols) + [mapper.version_id_col] + + refresh_flush = bool(mapper.class_manager.dispatch.refresh_flush) + if refresh_flush: + load_evt_attrs = [] + + if returning_cols: + row = returned_defaults + if row is not None: + for row_value, col in zip(row, returning_cols): + # pk cols returned from insert are handled + # distinctly, 
don't step on the values here + if col.primary_key and result.context.isinsert: + continue + + # note that columns can be in the "return defaults" that are + # not mapped to this mapper, typically because they are + # "excluded", which can be specified directly or also occurs + # when using declarative w/ single table inheritance + prop = mapper._columntoproperty.get(col) + if prop: + dict_[prop.key] = row_value + if refresh_flush: + load_evt_attrs.append(prop.key) + + for c in prefetch_cols: + if c.key in params and c in mapper._columntoproperty: + pkey = mapper._columntoproperty[c].key + + # set prefetched value in dict and also pop from committed_state, + # since this is new database state that replaces whatever might + # have previously been fetched (see #10800). this is essentially a + # shorthand version of set_committed_value(), which could also be + # used here directly (with more overhead) + dict_[pkey] = params[c.key] + state.committed_state.pop(pkey, None) + + if refresh_flush: + load_evt_attrs.append(pkey) + + if refresh_flush and load_evt_attrs: + mapper.class_manager.dispatch.refresh_flush( + state, uowtransaction, load_evt_attrs + ) + + if isupdate and value_params: + # explicitly suit the use case specified by + # [ticket:3801], PK SQL expressions for UPDATE on non-RETURNING + # database which are set to themselves in order to do a version bump. + postfetch_cols.extend( + [ + col + for col in value_params + if col.primary_key and col not in returning_cols + ] + ) + + if postfetch_cols: + state._expire_attributes( + state.dict, + [ + mapper._columntoproperty[c].key + for c in postfetch_cols + if c in mapper._columntoproperty + ], + ) + + # synchronize newly inserted ids from one table to the next + # TODO: this still goes a little too often. would be nice to + # have definitive list of "columns that changed" here + for m, equated_pairs in mapper._table_to_equated[table]: + sync.populate( + state, + m, + state, + m, + equated_pairs, + uowtransaction, + mapper.passive_updates, + ) + + +def _postfetch_bulk_save(mapper, dict_, table): + for m, equated_pairs in mapper._table_to_equated[table]: + sync.bulk_populate_inherit_keys(dict_, m, equated_pairs) + + +def _connections_for_states(base_mapper, uowtransaction, states): + """Return an iterator of (state, state.dict, mapper, connection). + + The states are sorted according to _sort_states, then paired + with the connection they should be using for the given + unit of work transaction. 
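+
+    For example (illustrative): under a horizontally-sharded Session the
+    ``connection_callable`` hook pairs each state with the connection for
+    its shard; otherwise every state shares the one connection obtained
+    for the base mapper's transaction.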
+ + """ + # if session has a connection callable, + # organize individual states with the connection + # to use for update + if uowtransaction.session.connection_callable: + connection_callable = uowtransaction.session.connection_callable + else: + connection = uowtransaction.transaction.connection(base_mapper) + connection_callable = None + + for state in _sort_states(base_mapper, states): + if connection_callable: + connection = connection_callable(base_mapper, state.obj()) + + mapper = state.manager.mapper + + yield state, state.dict, mapper, connection + + +def _sort_states(mapper, states): + pending = set(states) + persistent = {s for s in pending if s.key is not None} + pending.difference_update(persistent) + + try: + persistent_sorted = sorted( + persistent, key=mapper._persistent_sortkey_fn + ) + except TypeError as err: + raise sa_exc.InvalidRequestError( + "Could not sort objects by primary key; primary key " + "values must be sortable in Python (was: %s)" % err + ) from err + return ( + sorted(pending, key=operator.attrgetter("insert_order")) + + persistent_sorted + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/properties.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/properties.py new file mode 100644 index 00000000..5c49222b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/properties.py @@ -0,0 +1,886 @@ +# orm/properties.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""MapperProperty implementations. + +This is a private module which defines the behavior of individual ORM- +mapped attributes. + +""" + +from __future__ import annotations + +from typing import Any +from typing import cast +from typing import Dict +from typing import List +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import attributes +from . import strategy_options +from .base import _DeclarativeMapped +from .base import class_mapper +from .descriptor_props import CompositeProperty +from .descriptor_props import ConcreteInheritedProperty +from .descriptor_props import SynonymProperty +from .interfaces import _AttributeOptions +from .interfaces import _DEFAULT_ATTRIBUTE_OPTIONS +from .interfaces import _IntrospectsAnnotations +from .interfaces import _MapsColumns +from .interfaces import MapperProperty +from .interfaces import PropComparator +from .interfaces import StrategizedProperty +from .relationships import RelationshipProperty +from .util import de_stringify_annotation +from .util import de_stringify_union_elements +from .. import exc as sa_exc +from .. import ForeignKey +from .. import log +from .. 
import util +from ..sql import coercions +from ..sql import roles +from ..sql.base import _NoArg +from ..sql.schema import Column +from ..sql.schema import SchemaConst +from ..sql.type_api import TypeEngine +from ..util.typing import de_optionalize_union_types +from ..util.typing import is_fwd_ref +from ..util.typing import is_optional_union +from ..util.typing import is_pep593 +from ..util.typing import is_pep695 +from ..util.typing import is_union +from ..util.typing import Self +from ..util.typing import typing_get_args + +if TYPE_CHECKING: + from ._typing import _IdentityKeyType + from ._typing import _InstanceDict + from ._typing import _ORMColumnExprArgument + from ._typing import _RegistryType + from .base import Mapped + from .decl_base import _ClassScanMapperConfig + from .mapper import Mapper + from .session import Session + from .state import _InstallLoaderCallableProto + from .state import InstanceState + from ..sql._typing import _InfoType + from ..sql.elements import ColumnElement + from ..sql.elements import NamedColumn + from ..sql.operators import OperatorType + from ..util.typing import _AnnotationScanType + from ..util.typing import RODescriptorReference + +_T = TypeVar("_T", bound=Any) +_PT = TypeVar("_PT", bound=Any) +_NC = TypeVar("_NC", bound="NamedColumn[Any]") + +__all__ = [ + "ColumnProperty", + "CompositeProperty", + "ConcreteInheritedProperty", + "RelationshipProperty", + "SynonymProperty", +] + + +@log.class_logger +class ColumnProperty( + _MapsColumns[_T], + StrategizedProperty[_T], + _IntrospectsAnnotations, + log.Identified, +): + """Describes an object attribute that corresponds to a table column + or other column expression. + + Public constructor is the :func:`_orm.column_property` function. + + """ + + strategy_wildcard_key = strategy_options._COLUMN_TOKEN + inherit_cache = True + """:meta private:""" + + _links_to_entity = False + + columns: List[NamedColumn[Any]] + + _is_polymorphic_discriminator: bool + + _mapped_by_synonym: Optional[str] + + comparator_factory: Type[PropComparator[_T]] + + __slots__ = ( + "columns", + "group", + "deferred", + "instrument", + "comparator_factory", + "active_history", + "expire_on_flush", + "_creation_order", + "_is_polymorphic_discriminator", + "_mapped_by_synonym", + "_deferred_column_loader", + "_raise_column_loader", + "_renders_in_subqueries", + "raiseload", + ) + + def __init__( + self, + column: _ORMColumnExprArgument[_T], + *additional_columns: _ORMColumnExprArgument[Any], + attribute_options: Optional[_AttributeOptions] = None, + group: Optional[str] = None, + deferred: bool = False, + raiseload: bool = False, + comparator_factory: Optional[Type[PropComparator[_T]]] = None, + active_history: bool = False, + expire_on_flush: bool = True, + info: Optional[_InfoType] = None, + doc: Optional[str] = None, + _instrument: bool = True, + _assume_readonly_dc_attributes: bool = False, + ): + super().__init__( + attribute_options=attribute_options, + _assume_readonly_dc_attributes=_assume_readonly_dc_attributes, + ) + columns = (column,) + additional_columns + self.columns = [ + coercions.expect(roles.LabeledColumnExprRole, c) for c in columns + ] + self.group = group + self.deferred = deferred + self.raiseload = raiseload + self.instrument = _instrument + self.comparator_factory = ( + comparator_factory + if comparator_factory is not None + else self.__class__.Comparator + ) + self.active_history = active_history + self.expire_on_flush = expire_on_flush + + if info is not None: + self.info.update(info) + + if doc is not 
None: + self.doc = doc + else: + for col in reversed(self.columns): + doc = getattr(col, "doc", None) + if doc is not None: + self.doc = doc + break + else: + self.doc = None + + util.set_creation_order(self) + + self.strategy_key = ( + ("deferred", self.deferred), + ("instrument", self.instrument), + ) + if self.raiseload: + self.strategy_key += (("raiseload", True),) + + def declarative_scan( + self, + decl_scan: _ClassScanMapperConfig, + registry: _RegistryType, + cls: Type[Any], + originating_module: Optional[str], + key: str, + mapped_container: Optional[Type[Mapped[Any]]], + annotation: Optional[_AnnotationScanType], + extracted_mapped_annotation: Optional[_AnnotationScanType], + is_dataclass_field: bool, + ) -> None: + column = self.columns[0] + if column.key is None: + column.key = key + if column.name is None: + column.name = key + + @property + def mapper_property_to_assign(self) -> Optional[MapperProperty[_T]]: + return self + + @property + def columns_to_assign(self) -> List[Tuple[Column[Any], int]]: + # mypy doesn't care about the isinstance here + return [ + (c, 0) # type: ignore + for c in self.columns + if isinstance(c, Column) and c.table is None + ] + + def _memoized_attr__renders_in_subqueries(self) -> bool: + if ("query_expression", True) in self.strategy_key: + return self.strategy._have_default_expression # type: ignore + + return ("deferred", True) not in self.strategy_key or ( + self not in self.parent._readonly_props # type: ignore + ) + + @util.preload_module("sqlalchemy.orm.state", "sqlalchemy.orm.strategies") + def _memoized_attr__deferred_column_loader( + self, + ) -> _InstallLoaderCallableProto[Any]: + state = util.preloaded.orm_state + strategies = util.preloaded.orm_strategies + return state.InstanceState._instance_level_callable_processor( + self.parent.class_manager, + strategies.LoadDeferredColumns(self.key), + self.key, + ) + + @util.preload_module("sqlalchemy.orm.state", "sqlalchemy.orm.strategies") + def _memoized_attr__raise_column_loader( + self, + ) -> _InstallLoaderCallableProto[Any]: + state = util.preloaded.orm_state + strategies = util.preloaded.orm_strategies + return state.InstanceState._instance_level_callable_processor( + self.parent.class_manager, + strategies.LoadDeferredColumns(self.key, True), + self.key, + ) + + def __clause_element__(self) -> roles.ColumnsClauseRole: + """Allow the ColumnProperty to work in expression before it is turned + into an instrumented attribute. + """ + + return self.expression + + @property + def expression(self) -> roles.ColumnsClauseRole: + """Return the primary column or expression for this ColumnProperty. + + E.g.:: + + + class File(Base): + # ... + + name = Column(String(64)) + extension = Column(String(8)) + filename = column_property(name + '.' + extension) + path = column_property('C:/' + filename.expression) + + .. seealso:: + + :ref:`mapper_column_property_sql_expressions_composed` + + """ + return self.columns[0] + + def instrument_class(self, mapper: Mapper[Any]) -> None: + if not self.instrument: + return + + attributes.register_descriptor( + mapper.class_, + self.key, + comparator=self.comparator_factory(self, mapper), + parententity=mapper, + doc=self.doc, + ) + + def do_init(self) -> None: + super().do_init() + + if len(self.columns) > 1 and set(self.parent.primary_key).issuperset( + self.columns + ): + util.warn( + ( + "On mapper %s, primary key column '%s' is being combined " + "with distinct primary key column '%s' in attribute '%s'. 
" + "Use explicit properties to give each column its own " + "mapped attribute name." + ) + % (self.parent, self.columns[1], self.columns[0], self.key) + ) + + def copy(self) -> ColumnProperty[_T]: + return ColumnProperty( + *self.columns, + deferred=self.deferred, + group=self.group, + active_history=self.active_history, + ) + + def merge( + self, + session: Session, + source_state: InstanceState[Any], + source_dict: _InstanceDict, + dest_state: InstanceState[Any], + dest_dict: _InstanceDict, + load: bool, + _recursive: Dict[Any, object], + _resolve_conflict_map: Dict[_IdentityKeyType[Any], object], + ) -> None: + if not self.instrument: + return + elif self.key in source_dict: + value = source_dict[self.key] + + if not load: + dest_dict[self.key] = value + else: + impl = dest_state.get_impl(self.key) + impl.set(dest_state, dest_dict, value, None) + elif dest_state.has_identity and self.key not in dest_dict: + dest_state._expire_attributes( + dest_dict, [self.key], no_loader=True + ) + + class Comparator(util.MemoizedSlots, PropComparator[_PT]): + """Produce boolean, comparison, and other operators for + :class:`.ColumnProperty` attributes. + + See the documentation for :class:`.PropComparator` for a brief + overview. + + .. seealso:: + + :class:`.PropComparator` + + :class:`.ColumnOperators` + + :ref:`types_operators` + + :attr:`.TypeEngine.comparator_factory` + + """ + + if not TYPE_CHECKING: + # prevent pylance from being clever about slots + __slots__ = "__clause_element__", "info", "expressions" + + prop: RODescriptorReference[ColumnProperty[_PT]] + + expressions: Sequence[NamedColumn[Any]] + """The full sequence of columns referenced by this + attribute, adjusted for any aliasing in progress. + + .. versionadded:: 1.3.17 + + .. seealso:: + + :ref:`maptojoin` - usage example + """ + + def _orm_annotate_column(self, column: _NC) -> _NC: + """annotate and possibly adapt a column to be returned + as the mapped-attribute exposed version of the column. + + The column in this context needs to act as much like the + column in an ORM mapped context as possible, so includes + annotations to give hints to various ORM functions as to + the source entity of this column. It also adapts it + to the mapper's with_polymorphic selectable if one is + present. + + """ + + pe = self._parententity + annotations: Dict[str, Any] = { + "entity_namespace": pe, + "parententity": pe, + "parentmapper": pe, + "proxy_key": self.prop.key, + } + + col = column + + # for a mapper with polymorphic_on and an adapter, return + # the column against the polymorphic selectable. + # see also orm.util._orm_downgrade_polymorphic_columns + # for the reverse operation. + if self._parentmapper._polymorphic_adapter: + mapper_local_col = col + col = self._parentmapper._polymorphic_adapter.traverse(col) + + # this is a clue to the ORM Query etc. that this column + # was adapted to the mapper's polymorphic_adapter. the + # ORM uses this hint to know which column its adapting. + annotations["adapt_column"] = mapper_local_col + + return col._annotate(annotations)._set_propagate_attrs( + {"compile_state_plugin": "orm", "plugin_subject": pe} + ) + + if TYPE_CHECKING: + + def __clause_element__(self) -> NamedColumn[_PT]: ... 
+ + def _memoized_method___clause_element__( + self, + ) -> NamedColumn[_PT]: + if self.adapter: + return self.adapter(self.prop.columns[0], self.prop.key) + else: + return self._orm_annotate_column(self.prop.columns[0]) + + def _memoized_attr_info(self) -> _InfoType: + """The .info dictionary for this attribute.""" + + ce = self.__clause_element__() + try: + return ce.info # type: ignore + except AttributeError: + return self.prop.info + + def _memoized_attr_expressions(self) -> Sequence[NamedColumn[Any]]: + """The full sequence of columns referenced by this + attribute, adjusted for any aliasing in progress. + + .. versionadded:: 1.3.17 + + """ + if self.adapter: + return [ + self.adapter(col, self.prop.key) + for col in self.prop.columns + ] + else: + return [ + self._orm_annotate_column(col) for col in self.prop.columns + ] + + def _fallback_getattr(self, key: str) -> Any: + """proxy attribute access down to the mapped column. + + this allows user-defined comparison methods to be accessed. + """ + return getattr(self.__clause_element__(), key) + + def operate( + self, op: OperatorType, *other: Any, **kwargs: Any + ) -> ColumnElement[Any]: + return op(self.__clause_element__(), *other, **kwargs) # type: ignore[no-any-return] # noqa: E501 + + def reverse_operate( + self, op: OperatorType, other: Any, **kwargs: Any + ) -> ColumnElement[Any]: + col = self.__clause_element__() + return op(col._bind_param(op, other), col, **kwargs) # type: ignore[no-any-return] # noqa: E501 + + def __str__(self) -> str: + if not self.parent or not self.key: + return object.__repr__(self) + return str(self.parent.class_.__name__) + "." + self.key + + +class MappedSQLExpression(ColumnProperty[_T], _DeclarativeMapped[_T]): + """Declarative front-end for the :class:`.ColumnProperty` class. + + Public constructor is the :func:`_orm.column_property` function. + + .. versionchanged:: 2.0 Added :class:`_orm.MappedSQLExpression` as + a Declarative compatible subclass for :class:`_orm.ColumnProperty`. + + .. seealso:: + + :class:`.MappedColumn` + + """ + + inherit_cache = True + """:meta private:""" + + +class MappedColumn( + _IntrospectsAnnotations, + _MapsColumns[_T], + _DeclarativeMapped[_T], +): + """Maps a single :class:`_schema.Column` on a class. + + :class:`_orm.MappedColumn` is a specialization of the + :class:`_orm.ColumnProperty` class and is oriented towards declarative + configuration. + + To construct :class:`_orm.MappedColumn` objects, use the + :func:`_orm.mapped_column` constructor function. + + .. 
versionadded:: 2.0 + + + """ + + __slots__ = ( + "column", + "_creation_order", + "_sort_order", + "foreign_keys", + "_has_nullable", + "_has_insert_default", + "deferred", + "deferred_group", + "deferred_raiseload", + "active_history", + "_attribute_options", + "_has_dataclass_arguments", + "_use_existing_column", + ) + + deferred: Union[_NoArg, bool] + deferred_raiseload: bool + deferred_group: Optional[str] + + column: Column[_T] + foreign_keys: Optional[Set[ForeignKey]] + _attribute_options: _AttributeOptions + + def __init__(self, *arg: Any, **kw: Any): + self._attribute_options = attr_opts = kw.pop( + "attribute_options", _DEFAULT_ATTRIBUTE_OPTIONS + ) + + self._use_existing_column = kw.pop("use_existing_column", False) + + self._has_dataclass_arguments = ( + attr_opts is not None + and attr_opts != _DEFAULT_ATTRIBUTE_OPTIONS + and any( + attr_opts[i] is not _NoArg.NO_ARG + for i, attr in enumerate(attr_opts._fields) + if attr != "dataclasses_default" + ) + ) + + insert_default = kw.pop("insert_default", _NoArg.NO_ARG) + self._has_insert_default = insert_default is not _NoArg.NO_ARG + + if self._has_insert_default: + kw["default"] = insert_default + elif attr_opts.dataclasses_default is not _NoArg.NO_ARG: + kw["default"] = attr_opts.dataclasses_default + + self.deferred_group = kw.pop("deferred_group", None) + self.deferred_raiseload = kw.pop("deferred_raiseload", None) + self.deferred = kw.pop("deferred", _NoArg.NO_ARG) + self.active_history = kw.pop("active_history", False) + + self._sort_order = kw.pop("sort_order", _NoArg.NO_ARG) + self.column = cast("Column[_T]", Column(*arg, **kw)) + self.foreign_keys = self.column.foreign_keys + self._has_nullable = "nullable" in kw and kw.get("nullable") not in ( + None, + SchemaConst.NULL_UNSPECIFIED, + ) + util.set_creation_order(self) + + def _copy(self, **kw: Any) -> Self: + new = self.__class__.__new__(self.__class__) + new.column = self.column._copy(**kw) + new.deferred = self.deferred + new.deferred_group = self.deferred_group + new.deferred_raiseload = self.deferred_raiseload + new.foreign_keys = new.column.foreign_keys + new.active_history = self.active_history + new._has_nullable = self._has_nullable + new._attribute_options = self._attribute_options + new._has_insert_default = self._has_insert_default + new._has_dataclass_arguments = self._has_dataclass_arguments + new._use_existing_column = self._use_existing_column + new._sort_order = self._sort_order + util.set_creation_order(new) + return new + + @property + def name(self) -> str: + return self.column.name + + @property + def mapper_property_to_assign(self) -> Optional[MapperProperty[_T]]: + effective_deferred = self.deferred + if effective_deferred is _NoArg.NO_ARG: + effective_deferred = bool( + self.deferred_group or self.deferred_raiseload + ) + + if effective_deferred or self.active_history: + return ColumnProperty( + self.column, + deferred=effective_deferred, + group=self.deferred_group, + raiseload=self.deferred_raiseload, + attribute_options=self._attribute_options, + active_history=self.active_history, + ) + else: + return None + + @property + def columns_to_assign(self) -> List[Tuple[Column[Any], int]]: + return [ + ( + self.column, + ( + self._sort_order + if self._sort_order is not _NoArg.NO_ARG + else 0 + ), + ) + ] + + def __clause_element__(self) -> Column[_T]: + return self.column + + def operate( + self, op: OperatorType, *other: Any, **kwargs: Any + ) -> ColumnElement[Any]: + return op(self.__clause_element__(), *other, **kwargs) # type: ignore[no-any-return] 
# noqa: E501 + + def reverse_operate( + self, op: OperatorType, other: Any, **kwargs: Any + ) -> ColumnElement[Any]: + col = self.__clause_element__() + return op(col._bind_param(op, other), col, **kwargs) # type: ignore[no-any-return] # noqa: E501 + + def found_in_pep593_annotated(self) -> Any: + # return a blank mapped_column(). This mapped_column()'s + # Column will be merged into it in _init_column_for_annotation(). + return MappedColumn() + + def declarative_scan( + self, + decl_scan: _ClassScanMapperConfig, + registry: _RegistryType, + cls: Type[Any], + originating_module: Optional[str], + key: str, + mapped_container: Optional[Type[Mapped[Any]]], + annotation: Optional[_AnnotationScanType], + extracted_mapped_annotation: Optional[_AnnotationScanType], + is_dataclass_field: bool, + ) -> None: + column = self.column + + if ( + self._use_existing_column + and decl_scan.inherits + and decl_scan.single + ): + if decl_scan.is_deferred: + raise sa_exc.ArgumentError( + "Can't use use_existing_column with deferred mappers" + ) + supercls_mapper = class_mapper(decl_scan.inherits, False) + + colname = column.name if column.name is not None else key + column = self.column = supercls_mapper.local_table.c.get( # type: ignore[assignment] # noqa: E501 + colname, column + ) + + if column.key is None: + column.key = key + if column.name is None: + column.name = key + + sqltype = column.type + + if extracted_mapped_annotation is None: + if sqltype._isnull and not self.column.foreign_keys: + self._raise_for_required(key, cls) + else: + return + + self._init_column_for_annotation( + cls, + registry, + extracted_mapped_annotation, + originating_module, + ) + + @util.preload_module("sqlalchemy.orm.decl_base") + def declarative_scan_for_composite( + self, + registry: _RegistryType, + cls: Type[Any], + originating_module: Optional[str], + key: str, + param_name: str, + param_annotation: _AnnotationScanType, + ) -> None: + decl_base = util.preloaded.orm_decl_base + decl_base._undefer_column_name(param_name, self.column) + self._init_column_for_annotation( + cls, registry, param_annotation, originating_module + ) + + def _init_column_for_annotation( + self, + cls: Type[Any], + registry: _RegistryType, + argument: _AnnotationScanType, + originating_module: Optional[str], + ) -> None: + sqltype = self.column.type + + if isinstance(argument, str) or is_fwd_ref( + argument, check_generic=True + ): + assert originating_module is not None + argument = de_stringify_annotation( + cls, argument, originating_module, include_generic=True + ) + + if is_union(argument): + assert originating_module is not None + argument = de_stringify_union_elements( + cls, argument, originating_module + ) + + nullable = is_optional_union(argument) + + if not self._has_nullable: + self.column.nullable = nullable + + our_type = de_optionalize_union_types(argument) + + use_args_from = None + + our_original_type = our_type + + if is_pep695(our_type): + our_type = our_type.__value__ + + if is_pep593(our_type): + our_type_is_pep593 = True + + pep_593_components = typing_get_args(our_type) + raw_pep_593_type = pep_593_components[0] + if is_optional_union(raw_pep_593_type): + raw_pep_593_type = de_optionalize_union_types(raw_pep_593_type) + + nullable = True + if not self._has_nullable: + self.column.nullable = nullable + for elem in pep_593_components[1:]: + if isinstance(elem, MappedColumn): + use_args_from = elem + break + else: + our_type_is_pep593 = False + raw_pep_593_type = None + + if use_args_from is not None: + if ( + not 
self._has_insert_default + and use_args_from.column.default is not None + ): + self.column.default = None + + use_args_from.column._merge(self.column) + sqltype = self.column.type + + if ( + use_args_from.deferred is not _NoArg.NO_ARG + and self.deferred is _NoArg.NO_ARG + ): + self.deferred = use_args_from.deferred + + if ( + use_args_from.deferred_group is not None + and self.deferred_group is None + ): + self.deferred_group = use_args_from.deferred_group + + if ( + use_args_from.deferred_raiseload is not None + and self.deferred_raiseload is None + ): + self.deferred_raiseload = use_args_from.deferred_raiseload + + if ( + use_args_from._use_existing_column + and not self._use_existing_column + ): + self._use_existing_column = True + + if use_args_from.active_history: + self.active_history = use_args_from.active_history + + if ( + use_args_from._sort_order is not None + and self._sort_order is _NoArg.NO_ARG + ): + self._sort_order = use_args_from._sort_order + + if ( + use_args_from.column.key is not None + or use_args_from.column.name is not None + ): + util.warn_deprecated( + "Can't use the 'key' or 'name' arguments in " + "Annotated with mapped_column(); this will be ignored", + "2.0.22", + ) + + if use_args_from._has_dataclass_arguments: + for idx, arg in enumerate( + use_args_from._attribute_options._fields + ): + if ( + use_args_from._attribute_options[idx] + is not _NoArg.NO_ARG + ): + arg = arg.replace("dataclasses_", "") + util.warn_deprecated( + f"Argument '{arg}' is a dataclass argument and " + "cannot be specified within a mapped_column() " + "bundled inside of an Annotated object", + "2.0.22", + ) + + if sqltype._isnull and not self.column.foreign_keys: + new_sqltype = None + + if our_type_is_pep593: + checks = [our_original_type, raw_pep_593_type] + else: + checks = [our_original_type] + + for check_type in checks: + new_sqltype = registry._resolve_type(check_type) + if new_sqltype is not None: + break + else: + if isinstance(our_type, TypeEngine) or ( + isinstance(our_type, type) + and issubclass(our_type, TypeEngine) + ): + raise sa_exc.ArgumentError( + f"The type provided inside the {self.column.key!r} " + "attribute Mapped annotation is the SQLAlchemy type " + f"{our_type}. Expected a Python type instead" + ) + else: + raise sa_exc.ArgumentError( + "Could not locate SQLAlchemy Core type for Python " + f"type {our_type} inside the {self.column.key!r} " + "attribute Mapped annotation" + ) + + self.column._set_type(new_sqltype) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/query.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/query.py new file mode 100644 index 00000000..5c1a45b5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/query.py @@ -0,0 +1,3396 @@ +# orm/query.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""The Query class and support. + +Defines the :class:`_query.Query` class, the central +construct used by the ORM to construct database queries. + +The :class:`_query.Query` class should not be confused with the +:class:`_expression.Select` class, which defines database +SELECT operations at the SQL (non-ORM) level. ``Query`` differs from +``Select`` in that it returns ORM-mapped objects and interacts with an +ORM session, whereas the ``Select`` construct interacts directly with the +database to return iterable result sets. 
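+
+E.g. (illustrative)::
+
+    session.query(User).filter(User.name == "ed").all()
+
+is the legacy spelling of the 2.0-style
+``session.execute(select(User).where(User.name == "ed")).scalars().all()``.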
+ +""" +from __future__ import annotations + +import collections.abc as collections_abc +import operator +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Mapping +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import attributes +from . import interfaces +from . import loading +from . import util as orm_util +from ._typing import _O +from .base import _assertions +from .context import _column_descriptions +from .context import _determine_last_joined_entity +from .context import _legacy_filter_by_entity_zero +from .context import FromStatement +from .context import ORMCompileState +from .context import QueryContext +from .interfaces import ORMColumnDescription +from .interfaces import ORMColumnsClauseRole +from .util import AliasedClass +from .util import object_mapper +from .util import with_parent +from .. import exc as sa_exc +from .. import inspect +from .. import inspection +from .. import log +from .. import sql +from .. import util +from ..engine import Result +from ..engine import Row +from ..event import dispatcher +from ..event import EventTarget +from ..sql import coercions +from ..sql import expression +from ..sql import roles +from ..sql import Select +from ..sql import util as sql_util +from ..sql import visitors +from ..sql._typing import _FromClauseArgument +from ..sql._typing import _TP +from ..sql.annotation import SupportsCloneAnnotations +from ..sql.base import _entity_namespace_key +from ..sql.base import _generative +from ..sql.base import _NoArg +from ..sql.base import Executable +from ..sql.base import Generative +from ..sql.elements import BooleanClauseList +from ..sql.expression import Exists +from ..sql.selectable import _MemoizedSelectEntities +from ..sql.selectable import _SelectFromElements +from ..sql.selectable import ForUpdateArg +from ..sql.selectable import HasHints +from ..sql.selectable import HasPrefixes +from ..sql.selectable import HasSuffixes +from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL +from ..sql.selectable import SelectLabelStyle +from ..util.typing import Literal +from ..util.typing import Self + + +if TYPE_CHECKING: + from ._typing import _EntityType + from ._typing import _ExternalEntityType + from ._typing import _InternalEntityType + from ._typing import SynchronizeSessionArgument + from .mapper import Mapper + from .path_registry import PathRegistry + from .session import _PKIdentityArgument + from .session import Session + from .state import InstanceState + from ..engine.cursor import CursorResult + from ..engine.interfaces import _ImmutableExecuteOptions + from ..engine.interfaces import CompiledCacheType + from ..engine.interfaces import IsolationLevel + from ..engine.interfaces import SchemaTranslateMapType + from ..engine.result import FrozenResult + from ..engine.result import ScalarResult + from ..sql._typing import _ColumnExpressionArgument + from ..sql._typing import _ColumnExpressionOrStrLabelArgument + from ..sql._typing import _ColumnsClauseArgument + from ..sql._typing import _DMLColumnArgument + from ..sql._typing import _JoinTargetArgument + from ..sql._typing import _LimitOffsetType + from ..sql._typing import _MAYBE_ENTITY + from ..sql._typing 
import _no_kw + from ..sql._typing import _NOT_ENTITY + from ..sql._typing import _OnClauseArgument + from ..sql._typing import _PropagateAttrsType + from ..sql._typing import _T0 + from ..sql._typing import _T1 + from ..sql._typing import _T2 + from ..sql._typing import _T3 + from ..sql._typing import _T4 + from ..sql._typing import _T5 + from ..sql._typing import _T6 + from ..sql._typing import _T7 + from ..sql._typing import _TypedColumnClauseArgument as _TCCA + from ..sql.base import CacheableOptions + from ..sql.base import ExecutableOption + from ..sql.elements import ColumnElement + from ..sql.elements import Label + from ..sql.selectable import _ForUpdateOfArgument + from ..sql.selectable import _JoinTargetElement + from ..sql.selectable import _SetupJoinsElement + from ..sql.selectable import Alias + from ..sql.selectable import CTE + from ..sql.selectable import ExecutableReturnsRows + from ..sql.selectable import FromClause + from ..sql.selectable import ScalarSelect + from ..sql.selectable import Subquery + + +__all__ = ["Query", "QueryContext"] + +_T = TypeVar("_T", bound=Any) + + +@inspection._self_inspects +@log.class_logger +class Query( + _SelectFromElements, + SupportsCloneAnnotations, + HasPrefixes, + HasSuffixes, + HasHints, + EventTarget, + log.Identified, + Generative, + Executable, + Generic[_T], +): + """ORM-level SQL construction object. + + .. legacy:: The ORM :class:`.Query` object is a legacy construct + as of SQLAlchemy 2.0. See the notes at the top of + :ref:`query_api_toplevel` for an overview, including links to migration + documentation. + + :class:`_query.Query` objects are normally initially generated using the + :meth:`~.Session.query` method of :class:`.Session`, and in + less common cases by instantiating the :class:`_query.Query` directly and + associating with a :class:`.Session` using the + :meth:`_query.Query.with_session` + method. + + """ + + # elements that are in Core and can be cached in the same way + _where_criteria: Tuple[ColumnElement[Any], ...] = () + _having_criteria: Tuple[ColumnElement[Any], ...] = () + + _order_by_clauses: Tuple[ColumnElement[Any], ...] = () + _group_by_clauses: Tuple[ColumnElement[Any], ...] = () + _limit_clause: Optional[ColumnElement[Any]] = None + _offset_clause: Optional[ColumnElement[Any]] = None + + _distinct: bool = False + _distinct_on: Tuple[ColumnElement[Any], ...] = () + + _for_update_arg: Optional[ForUpdateArg] = None + _correlate: Tuple[FromClause, ...] = () + _auto_correlate: bool = True + _from_obj: Tuple[FromClause, ...] = () + _setup_joins: Tuple[_SetupJoinsElement, ...] = () + + _label_style: SelectLabelStyle = SelectLabelStyle.LABEL_STYLE_LEGACY_ORM + + _memoized_select_entities = () + + _compile_options: Union[Type[CacheableOptions], CacheableOptions] = ( + ORMCompileState.default_compile_options + ) + + _with_options: Tuple[ExecutableOption, ...] + load_options = QueryContext.default_load_options + { + "_legacy_uniquing": True + } + + _params: util.immutabledict[str, Any] = util.EMPTY_DICT + + # local Query builder state, not needed for + # compilation or execution + _enable_assertions = True + + _statement: Optional[ExecutableReturnsRows] = None + + session: Session + + dispatch: dispatcher[Query[_T]] + + # mirrors that of ClauseElement, used to propagate the "orm" + # plugin as well as the "subject" of the plugin, e.g. the mapper + # we are querying against. 
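+    # Illustrative (added note): for a query against a mapped class this
+    # resembles
+    #     {"compile_state_plugin": "orm", "plugin_subject": <Mapper>}
+    # as assigned via _set_propagate_attrs().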
+ @util.memoized_property + def _propagate_attrs(self) -> _PropagateAttrsType: + return util.EMPTY_DICT + + def __init__( + self, + entities: Union[ + _ColumnsClauseArgument[Any], Sequence[_ColumnsClauseArgument[Any]] + ], + session: Optional[Session] = None, + ): + """Construct a :class:`_query.Query` directly. + + E.g.:: + + q = Query([User, Address], session=some_session) + + The above is equivalent to:: + + q = some_session.query(User, Address) + + :param entities: a sequence of entities and/or SQL expressions. + + :param session: a :class:`.Session` with which the + :class:`_query.Query` + will be associated. Optional; a :class:`_query.Query` + can be associated + with a :class:`.Session` generatively via the + :meth:`_query.Query.with_session` method as well. + + .. seealso:: + + :meth:`.Session.query` + + :meth:`_query.Query.with_session` + + """ + + # session is usually present. There's one case in subqueryloader + # where it stores a Query without a Session and also there are tests + # for the query(Entity).with_session(session) API which is likely in + # some old recipes, however these are legacy as select() can now be + # used. + self.session = session # type: ignore + self._set_entities(entities) + + def _set_propagate_attrs(self, values: Mapping[str, Any]) -> Self: + self._propagate_attrs = util.immutabledict(values) + return self + + def _set_entities( + self, + entities: Union[ + _ColumnsClauseArgument[Any], Iterable[_ColumnsClauseArgument[Any]] + ], + ) -> None: + self._raw_columns = [ + coercions.expect( + roles.ColumnsClauseRole, + ent, + apply_propagate_attrs=self, + post_inspect=True, + ) + for ent in util.to_list(entities) + ] + + def tuples(self: Query[_O]) -> Query[Tuple[_O]]: + """return a tuple-typed form of this :class:`.Query`. + + This method invokes the :meth:`.Query.only_return_tuples` + method with a value of ``True``, which by itself ensures that this + :class:`.Query` will always return :class:`.Row` objects, even + if the query is made against a single entity. It then also + at the typing level will return a "typed" query, if possible, + that will type result rows as ``Tuple`` objects with typed + elements. + + This method can be compared to the :meth:`.Result.tuples` method, + which returns "self", but from a typing perspective returns an object + that will yield typed ``Tuple`` objects for results. Typing + takes effect only if this :class:`.Query` object is a typed + query object already. + + .. versionadded:: 2.0 + + .. seealso:: + + :meth:`.Result.tuples` - v2 equivalent method. + + """ + return self.only_return_tuples(True) # type: ignore + + def _entity_from_pre_ent_zero(self) -> Optional[_InternalEntityType[Any]]: + if not self._raw_columns: + return None + + ent = self._raw_columns[0] + + if "parententity" in ent._annotations: + return ent._annotations["parententity"] # type: ignore + elif "bundle" in ent._annotations: + return ent._annotations["bundle"] # type: ignore + else: + # label, other SQL expression + for element in visitors.iterate(ent): + if "parententity" in element._annotations: + return element._annotations["parententity"] # type: ignore # noqa: E501 + else: + return None + + def _only_full_mapper_zero(self, methname: str) -> Mapper[Any]: + if ( + len(self._raw_columns) != 1 + or "parententity" not in self._raw_columns[0]._annotations + or not self._raw_columns[0].is_selectable + ): + raise sa_exc.InvalidRequestError( + "%s() can only be used against " + "a single mapped class." 
% methname + ) + + return self._raw_columns[0]._annotations["parententity"] # type: ignore # noqa: E501 + + def _set_select_from( + self, obj: Iterable[_FromClauseArgument], set_base_alias: bool + ) -> None: + fa = [ + coercions.expect( + roles.StrictFromClauseRole, + elem, + allow_select=True, + apply_propagate_attrs=self, + ) + for elem in obj + ] + + self._compile_options += {"_set_base_alias": set_base_alias} + self._from_obj = tuple(fa) + + @_generative + def _set_lazyload_from(self, state: InstanceState[Any]) -> Self: + self.load_options += {"_lazy_loaded_from": state} + return self + + def _get_condition(self) -> None: + """used by legacy BakedQuery""" + self._no_criterion_condition("get", order_by=False, distinct=False) + + def _get_existing_condition(self) -> None: + self._no_criterion_assertion("get", order_by=False, distinct=False) + + def _no_criterion_assertion( + self, meth: str, order_by: bool = True, distinct: bool = True + ) -> None: + if not self._enable_assertions: + return + if ( + self._where_criteria + or self._statement is not None + or self._from_obj + or self._setup_joins + or self._limit_clause is not None + or self._offset_clause is not None + or self._group_by_clauses + or (order_by and self._order_by_clauses) + or (distinct and self._distinct) + ): + raise sa_exc.InvalidRequestError( + "Query.%s() being called on a " + "Query with existing criterion. " % meth + ) + + def _no_criterion_condition( + self, meth: str, order_by: bool = True, distinct: bool = True + ) -> None: + self._no_criterion_assertion(meth, order_by, distinct) + + self._from_obj = self._setup_joins = () + if self._statement is not None: + self._compile_options += {"_statement": None} + self._where_criteria = () + self._distinct = False + + self._order_by_clauses = self._group_by_clauses = () + + def _no_clauseelement_condition(self, meth: str) -> None: + if not self._enable_assertions: + return + if self._order_by_clauses: + raise sa_exc.InvalidRequestError( + "Query.%s() being called on a " + "Query with existing criterion. " % meth + ) + self._no_criterion_condition(meth) + + def _no_statement_condition(self, meth: str) -> None: + if not self._enable_assertions: + return + if self._statement is not None: + raise sa_exc.InvalidRequestError( + ( + "Query.%s() being called on a Query with an existing full " + "statement - can't apply criterion." + ) + % meth + ) + + def _no_limit_offset(self, meth: str) -> None: + if not self._enable_assertions: + return + if self._limit_clause is not None or self._offset_clause is not None: + raise sa_exc.InvalidRequestError( + "Query.%s() being called on a Query which already has LIMIT " + "or OFFSET applied. Call %s() before limit() or offset() " + "are applied." 
% (meth, meth) + ) + + @property + def _has_row_limiting_clause(self) -> bool: + return ( + self._limit_clause is not None or self._offset_clause is not None + ) + + def _get_options( + self, + populate_existing: Optional[bool] = None, + version_check: Optional[bool] = None, + only_load_props: Optional[Sequence[str]] = None, + refresh_state: Optional[InstanceState[Any]] = None, + identity_token: Optional[Any] = None, + ) -> Self: + load_options: Dict[str, Any] = {} + compile_options: Dict[str, Any] = {} + + if version_check: + load_options["_version_check"] = version_check + if populate_existing: + load_options["_populate_existing"] = populate_existing + if refresh_state: + load_options["_refresh_state"] = refresh_state + compile_options["_for_refresh_state"] = True + if only_load_props: + compile_options["_only_load_props"] = frozenset(only_load_props) + if identity_token: + load_options["_identity_token"] = identity_token + + if load_options: + self.load_options += load_options + if compile_options: + self._compile_options += compile_options + + return self + + def _clone(self, **kw: Any) -> Self: + return self._generate() + + def _get_select_statement_only(self) -> Select[_T]: + if self._statement is not None: + raise sa_exc.InvalidRequestError( + "Can't call this method on a Query that uses from_statement()" + ) + return cast("Select[_T]", self.statement) + + @property + def statement(self) -> Union[Select[_T], FromStatement[_T]]: + """The full SELECT statement represented by this Query. + + The statement by default will not have disambiguating labels + applied to the construct unless with_labels(True) is called + first. + + """ + + # .statement can return the direct future.Select() construct here, as + # long as we are not using subsequent adaption features that + # are made against raw entities, e.g. from_self(), with_polymorphic(), + # select_entity_from(). If these features are being used, then + # the Select() we return will not have the correct .selected_columns + # collection and will not embed in subsequent queries correctly. + # We could find a way to make this collection "correct", however + # this would not be too different from doing the full compile as + # we are doing in any case, the Select() would still not have the + # proper state for other attributes like whereclause, order_by, + # and these features are all deprecated in any case. + # + # for these reasons, Query is not a Select, it remains an ORM + # object for which __clause_element__() must be called in order for + # it to provide a real expression object. + # + # from there, it starts to look much like Query itself won't be + # passed into the execute process and won't generate its own cache + # key; this will all occur in terms of the ORM-enabled Select. + if not self._compile_options._set_base_alias: + # if we don't have legacy top level aliasing features in use + # then convert to a future select() directly + stmt = self._statement_20(for_statement=True) + else: + stmt = self._compile_state(for_statement=True).statement + + if self._params: + stmt = stmt.params(self._params) + + return stmt + + def _final_statement(self, legacy_query_style: bool = True) -> Select[Any]: + """Return the 'final' SELECT statement for this :class:`.Query`. + + This is used by the testing suite only and is fairly inefficient. + + This is the Core-only select() that will be rendered by a complete + compilation of this query, and is what .statement used to return + in 1.3. 
+ + + """ + + q = self._clone() + + return q._compile_state( + use_legacy_query_style=legacy_query_style + ).statement # type: ignore + + def _statement_20( + self, for_statement: bool = False, use_legacy_query_style: bool = True + ) -> Union[Select[_T], FromStatement[_T]]: + # TODO: this event needs to be deprecated, as it currently applies + # only to ORM query and occurs at this spot that is now more + # or less an artificial spot + if self.dispatch.before_compile: + for fn in self.dispatch.before_compile: + new_query = fn(self) + if new_query is not None and new_query is not self: + self = new_query + if not fn._bake_ok: # type: ignore + self._compile_options += {"_bake_ok": False} + + compile_options = self._compile_options + compile_options += { + "_for_statement": for_statement, + "_use_legacy_query_style": use_legacy_query_style, + } + + stmt: Union[Select[_T], FromStatement[_T]] + + if self._statement is not None: + stmt = FromStatement(self._raw_columns, self._statement) + stmt.__dict__.update( + _with_options=self._with_options, + _with_context_options=self._with_context_options, + _compile_options=compile_options, + _execution_options=self._execution_options, + _propagate_attrs=self._propagate_attrs, + ) + else: + # Query / select() internal attributes are 99% cross-compatible + stmt = Select._create_raw_select(**self.__dict__) + stmt.__dict__.update( + _label_style=self._label_style, + _compile_options=compile_options, + _propagate_attrs=self._propagate_attrs, + ) + stmt.__dict__.pop("session", None) + + # ensure the ORM context is used to compile the statement, even + # if it has no ORM entities. This is so ORM-only things like + # _legacy_joins are picked up that wouldn't be picked up by the + # Core statement context + if "compile_state_plugin" not in stmt._propagate_attrs: + stmt._propagate_attrs = stmt._propagate_attrs.union( + {"compile_state_plugin": "orm", "plugin_subject": None} + ) + + return stmt + + def subquery( + self, + name: Optional[str] = None, + with_labels: bool = False, + reduce_columns: bool = False, + ) -> Subquery: + """Return the full SELECT statement represented by + this :class:`_query.Query`, embedded within an + :class:`_expression.Alias`. + + Eager JOIN generation within the query is disabled. + + .. seealso:: + + :meth:`_sql.Select.subquery` - v2 comparable method. + + :param name: string name to be assigned as the alias; + this is passed through to :meth:`_expression.FromClause.alias`. + If ``None``, a name will be deterministically generated + at compile time. + + :param with_labels: if True, :meth:`.with_labels` will be called + on the :class:`_query.Query` first to apply table-qualified labels + to all columns. + + :param reduce_columns: if True, + :meth:`_expression.Select.reduce_columns` will + be called on the resulting :func:`_expression.select` construct, + to remove same-named columns where one also refers to the other + via foreign key or WHERE clause equivalence. + + """ + q = self.enable_eagerloads(False) + if with_labels: + q = q.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL) + + stmt = q._get_select_statement_only() + + if TYPE_CHECKING: + assert isinstance(stmt, Select) + + if reduce_columns: + stmt = stmt.reduce_columns() + return stmt.subquery(name=name) + + def cte( + self, + name: Optional[str] = None, + recursive: bool = False, + nesting: bool = False, + ) -> CTE: + r"""Return the full SELECT statement represented by this + :class:`_query.Query` represented as a common table expression (CTE). 
+
+ Parameters and usage are the same as those of the
+ :meth:`_expression.SelectBase.cte` method; see that method for
+ further details.
+
+ Here is the `PostgreSQL WITH
+ RECURSIVE example
+ <https://www.postgresql.org/docs/current/static/queries-with.html>`_.
+ Note that, in this example, the ``included_parts`` cte and the
+ ``incl_alias`` alias of it are Core selectables, which
+ means the columns are accessed via the ``.c.`` attribute. The
+ ``parts_alias`` object is an :func:`_orm.aliased` instance of the
+ ``Part`` entity, so column-mapped attributes are available
+ directly::
+
+ from sqlalchemy.orm import aliased
+
+ class Part(Base):
+ __tablename__ = 'part'
+ part = Column(String, primary_key=True)
+ sub_part = Column(String, primary_key=True)
+ quantity = Column(Integer)
+
+ included_parts = session.query(
+ Part.sub_part,
+ Part.part,
+ Part.quantity).\
+ filter(Part.part=="our part").\
+ cte(name="included_parts", recursive=True)
+
+ incl_alias = aliased(included_parts, name="pr")
+ parts_alias = aliased(Part, name="p")
+ included_parts = included_parts.union_all(
+ session.query(
+ parts_alias.sub_part,
+ parts_alias.part,
+ parts_alias.quantity).\
+ filter(parts_alias.part==incl_alias.c.sub_part)
+ )
+
+ q = session.query(
+ included_parts.c.sub_part,
+ func.sum(included_parts.c.quantity).
+ label('total_quantity')
+ ).\
+ group_by(included_parts.c.sub_part)
+
+ .. seealso::
+
+ :meth:`_sql.Select.cte` - v2 equivalent method.
+
+ """
+ return (
+ self.enable_eagerloads(False)
+ ._get_select_statement_only()
+ .cte(name=name, recursive=recursive, nesting=nesting)
+ )
+
+ def label(self, name: Optional[str]) -> Label[Any]:
+ """Return the full SELECT statement represented by this
+ :class:`_query.Query`, converted
+ to a scalar subquery with a label of the given name.
+
+ .. seealso::
+
+ :meth:`_sql.Select.label` - v2 comparable method.
+
+ """
+
+ return (
+ self.enable_eagerloads(False)
+ ._get_select_statement_only()
+ .label(name)
+ )
+
+ @overload
+ def as_scalar( # type: ignore[overload-overlap]
+ self: Query[Tuple[_MAYBE_ENTITY]],
+ ) -> ScalarSelect[_MAYBE_ENTITY]: ...
+
+ @overload
+ def as_scalar(
+ self: Query[Tuple[_NOT_ENTITY]],
+ ) -> ScalarSelect[_NOT_ENTITY]: ...
+
+ @overload
+ def as_scalar(self) -> ScalarSelect[Any]: ...
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`_query.Query.as_scalar` method is deprecated and will be "
+ "removed in a future release. Please refer to "
+ ":meth:`_query.Query.scalar_subquery`.",
+ )
+ def as_scalar(self) -> ScalarSelect[Any]:
+ """Return the full SELECT statement represented by this
+ :class:`_query.Query`, converted to a scalar subquery.
+
+ """
+ return self.scalar_subquery()
+
+ @overload
+ def scalar_subquery(
+ self: Query[Tuple[_MAYBE_ENTITY]],
+ ) -> ScalarSelect[Any]: ...
+
+ @overload
+ def scalar_subquery(
+ self: Query[Tuple[_NOT_ENTITY]],
+ ) -> ScalarSelect[_NOT_ENTITY]: ...
+
+ @overload
+ def scalar_subquery(self) -> ScalarSelect[Any]: ...
+
+ def scalar_subquery(self) -> ScalarSelect[Any]:
+ """Return the full SELECT statement represented by this
+ :class:`_query.Query`, converted to a scalar subquery.
+
+ Analogous to
+ :meth:`sqlalchemy.sql.expression.SelectBase.scalar_subquery`.
+
+ .. versionchanged:: 1.4 The :meth:`_query.Query.scalar_subquery`
+ method replaces the :meth:`_query.Query.as_scalar` method.
+
+ .. seealso::
+
+ :meth:`_sql.Select.scalar_subquery` - v2 comparable method.
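+
+ E.g., a minimal sketch (``User`` is a hypothetical mapped class and
+ ``session`` an existing :class:`.Session`; any mapped entity works
+ the same way)::
+
+ from sqlalchemy import func
+
+ # correlate the highest id as a scalar value usable in a WHERE
+ max_id = session.query(func.max(User.id)).scalar_subquery()
+ newest_user = session.query(User).filter(User.id == max_id).one()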
+ + """ + + return ( + self.enable_eagerloads(False) + ._get_select_statement_only() + .scalar_subquery() + ) + + @property + def selectable(self) -> Union[Select[_T], FromStatement[_T]]: + """Return the :class:`_expression.Select` object emitted by this + :class:`_query.Query`. + + Used for :func:`_sa.inspect` compatibility, this is equivalent to:: + + query.enable_eagerloads(False).with_labels().statement + + """ + return self.__clause_element__() + + def __clause_element__(self) -> Union[Select[_T], FromStatement[_T]]: + return ( + self._with_compile_options( + _enable_eagerloads=False, _render_for_subquery=True + ) + .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL) + .statement + ) + + @overload + def only_return_tuples( + self: Query[_O], value: Literal[True] + ) -> RowReturningQuery[Tuple[_O]]: ... + + @overload + def only_return_tuples( + self: Query[_O], value: Literal[False] + ) -> Query[_O]: ... + + @_generative + def only_return_tuples(self, value: bool) -> Query[Any]: + """When set to True, the query results will always be a + :class:`.Row` object. + + This can change a query that normally returns a single entity + as a scalar to return a :class:`.Row` result in all cases. + + .. seealso:: + + :meth:`.Query.tuples` - returns tuples, but also at the typing + level will type results as ``Tuple``. + + :meth:`_query.Query.is_single_entity` + + :meth:`_engine.Result.tuples` - v2 comparable method. + + """ + self.load_options += dict(_only_return_tuples=value) + return self + + @property + def is_single_entity(self) -> bool: + """Indicates if this :class:`_query.Query` + returns tuples or single entities. + + Returns True if this query returns a single entity for each instance + in its result list, and False if this query returns a tuple of entities + for each result. + + .. versionadded:: 1.3.11 + + .. seealso:: + + :meth:`_query.Query.only_return_tuples` + + """ + return ( + not self.load_options._only_return_tuples + and len(self._raw_columns) == 1 + and "parententity" in self._raw_columns[0]._annotations + and isinstance( + self._raw_columns[0]._annotations["parententity"], + ORMColumnsClauseRole, + ) + ) + + @_generative + def enable_eagerloads(self, value: bool) -> Self: + """Control whether or not eager joins and subqueries are + rendered. + + When set to False, the returned Query will not render + eager joins regardless of :func:`~sqlalchemy.orm.joinedload`, + :func:`~sqlalchemy.orm.subqueryload` options + or mapper-level ``lazy='joined'``/``lazy='subquery'`` + configurations. + + This is used primarily when nesting the Query's + statement into a subquery or other + selectable, or when using :meth:`_query.Query.yield_per`. + + """ + self._compile_options += {"_enable_eagerloads": value} + return self + + @_generative + def _with_compile_options(self, **opt: Any) -> Self: + self._compile_options += opt + return self + + @util.became_legacy_20( + ":meth:`_orm.Query.with_labels` and :meth:`_orm.Query.apply_labels`", + alternative="Use set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL) " + "instead.", + ) + def with_labels(self) -> Self: + return self.set_label_style( + SelectLabelStyle.LABEL_STYLE_TABLENAME_PLUS_COL + ) + + apply_labels = with_labels + + @property + def get_label_style(self) -> SelectLabelStyle: + """ + Retrieve the current label style. + + .. versionadded:: 1.4 + + .. seealso:: + + :meth:`_sql.Select.get_label_style` - v2 equivalent method. 
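+
+ For illustration, a short sketch (``query`` is assumed to be any
+ existing :class:`_query.Query` object)::
+
+ from sqlalchemy import LABEL_STYLE_TABLENAME_PLUS_COL
+
+ # apply disambiguating labels only if not already configured
+ if query.get_label_style is not LABEL_STYLE_TABLENAME_PLUS_COL:
+     query = query.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)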
+
+ """
+ return self._label_style
+
+ def set_label_style(self, style: SelectLabelStyle) -> Self:
+ """Apply column labels to the return value of Query.statement.
+
+ Indicates that this Query's `statement` accessor should return
+ a SELECT statement that applies labels to all columns in the
+ form ``<tablename>_<columnname>``; this is commonly used to
+ disambiguate columns from multiple tables which have the same
+ name.
+
+ When the `Query` actually issues SQL to load rows, it always
+ uses column labeling.
+
+ .. note:: The :meth:`_query.Query.set_label_style` method *only* applies
+ to the output of :attr:`_query.Query.statement`, and *not* to any of
+ the result-row invoking systems of :class:`_query.Query` itself,
+ e.g.
+ :meth:`_query.Query.first`, :meth:`_query.Query.all`, etc.
+ To execute
+ a query using :meth:`_query.Query.set_label_style`, invoke the
+ :attr:`_query.Query.statement` using :meth:`.Session.execute`::
+
+ result = session.execute(
+ query
+ .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
+ .statement
+ )
+
+ .. versionadded:: 1.4
+
+
+ .. seealso::
+
+ :meth:`_sql.Select.set_label_style` - v2 equivalent method.
+
+ """ # noqa
+ if self._label_style is not style:
+ self = self._generate()
+ self._label_style = style
+ return self
+
+ @_generative
+ def enable_assertions(self, value: bool) -> Self:
+ """Control whether assertions are generated.
+
+ When set to False, the returned Query will
+ not assert its state before certain operations,
+ including that LIMIT/OFFSET has not been applied
+ when filter() is called, no criterion exists
+ when get() is called, and no "from_statement()"
+ exists when filter()/order_by()/group_by() etc.
+ is called. This more permissive mode is used by
+ custom Query subclasses to specify criterion or
+ other modifiers outside of the usual usage patterns.
+
+ Care should be taken to ensure that the usage
+ pattern is even possible. A statement applied
+ by from_statement() will override any criterion
+ set by filter() or order_by(), for example.
+
+ """
+ self._enable_assertions = value
+ return self
+
+ @property
+ def whereclause(self) -> Optional[ColumnElement[bool]]:
+ """A readonly attribute which returns the current WHERE criterion for
+ this Query.
+
+ This returned value is a SQL expression construct, or ``None`` if no
+ criterion has been established.
+
+ .. seealso::
+
+ :attr:`_sql.Select.whereclause` - v2 equivalent property.
+
+ """
+ return BooleanClauseList._construct_for_whereclause(
+ self._where_criteria
+ )
+
+ @_generative
+ def _with_current_path(self, path: PathRegistry) -> Self:
+ """indicate that this query applies to objects loaded
+ within a certain path.
+
+ Used by deferred loaders (see strategies.py) which transfer
+ query options from an originating query to a newly generated
+ query intended for the deferred load.
+
+ """
+ self._compile_options += {"_current_path": path}
+ return self
+
+ @_generative
+ def yield_per(self, count: int) -> Self:
+ r"""Yield only ``count`` rows at a time.
+
+ The purpose of this method is, when fetching very large result sets
+ (> 10K rows), to batch results in sub-collections and yield them
+ out partially, so that the Python interpreter doesn't need to declare
+ very large areas of memory which is time-consuming and leads
+ to excessive memory use. The performance from fetching hundreds of
+ thousands of rows can often double when a suitable yield-per setting
+ (e.g. approximately 1000) is used, even with DBAPIs that buffer
+ rows (which are most).
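+
+ E.g., a brief sketch (``User`` is a hypothetical mapped class)::
+
+ for user in session.query(User).yield_per(1000):
+     ...  # rows are fetched in batches of 1000 as iteration proceeds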
+ + As of SQLAlchemy 1.4, the :meth:`_orm.Query.yield_per` method is + equivalent to using the ``yield_per`` execution option at the ORM + level. See the section :ref:`orm_queryguide_yield_per` for further + background on this option. + + .. seealso:: + + :ref:`orm_queryguide_yield_per` + + """ + self.load_options += {"_yield_per": count} + return self + + @util.became_legacy_20( + ":meth:`_orm.Query.get`", + alternative="The method is now available as :meth:`_orm.Session.get`", + ) + def get(self, ident: _PKIdentityArgument) -> Optional[Any]: + """Return an instance based on the given primary key identifier, + or ``None`` if not found. + + E.g.:: + + my_user = session.query(User).get(5) + + some_object = session.query(VersionedFoo).get((5, 10)) + + some_object = session.query(VersionedFoo).get( + {"id": 5, "version_id": 10}) + + :meth:`_query.Query.get` is special in that it provides direct + access to the identity map of the owning :class:`.Session`. + If the given primary key identifier is present + in the local identity map, the object is returned + directly from this collection and no SQL is emitted, + unless the object has been marked fully expired. + If not present, + a SELECT is performed in order to locate the object. + + :meth:`_query.Query.get` also will perform a check if + the object is present in the identity map and + marked as expired - a SELECT + is emitted to refresh the object as well as to + ensure that the row is still present. + If not, :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised. + + :meth:`_query.Query.get` is only used to return a single + mapped instance, not multiple instances or + individual column constructs, and strictly + on a single primary key value. The originating + :class:`_query.Query` must be constructed in this way, + i.e. against a single mapped entity, + with no additional filtering criterion. Loading + options via :meth:`_query.Query.options` may be applied + however, and will be used if the object is not + yet locally present. + + :param ident: A scalar, tuple, or dictionary representing the + primary key. For a composite (e.g. multiple column) primary key, + a tuple or dictionary should be passed. + + For a single-column primary key, the scalar calling form is typically + the most expedient. If the primary key of a row is the value "5", + the call looks like:: + + my_object = query.get(5) + + The tuple form contains primary key values typically in + the order in which they correspond to the mapped + :class:`_schema.Table` + object's primary key columns, or if the + :paramref:`_orm.Mapper.primary_key` configuration parameter were + used, in + the order used for that parameter. For example, if the primary key + of a row is represented by the integer + digits "5, 10" the call would look like:: + + my_object = query.get((5, 10)) + + The dictionary form should include as keys the mapped attribute names + corresponding to each element of the primary key. If the mapped class + has the attributes ``id``, ``version_id`` as the attributes which + store the object's primary key value, the call would look like:: + + my_object = query.get({"id": 5, "version_id": 10}) + + .. versionadded:: 1.3 the :meth:`_query.Query.get` + method now optionally + accepts a dictionary of attribute names to values in order to + indicate a primary key identifier. + + + :return: The object instance, or ``None``. 
+ + """ + self._no_criterion_assertion("get", order_by=False, distinct=False) + + # we still implement _get_impl() so that baked query can override + # it + return self._get_impl(ident, loading.load_on_pk_identity) + + def _get_impl( + self, + primary_key_identity: _PKIdentityArgument, + db_load_fn: Callable[..., Any], + identity_token: Optional[Any] = None, + ) -> Optional[Any]: + mapper = self._only_full_mapper_zero("get") + return self.session._get_impl( + mapper, + primary_key_identity, + db_load_fn, + populate_existing=self.load_options._populate_existing, + with_for_update=self._for_update_arg, + options=self._with_options, + identity_token=identity_token, + execution_options=self._execution_options, + ) + + @property + def lazy_loaded_from(self) -> Optional[InstanceState[Any]]: + """An :class:`.InstanceState` that is using this :class:`_query.Query` + for a lazy load operation. + + .. deprecated:: 1.4 This attribute should be viewed via the + :attr:`.ORMExecuteState.lazy_loaded_from` attribute, within + the context of the :meth:`.SessionEvents.do_orm_execute` + event. + + .. seealso:: + + :attr:`.ORMExecuteState.lazy_loaded_from` + + """ + return self.load_options._lazy_loaded_from # type: ignore + + @property + def _current_path(self) -> PathRegistry: + return self._compile_options._current_path # type: ignore + + @_generative + def correlate( + self, + *fromclauses: Union[Literal[None, False], _FromClauseArgument], + ) -> Self: + """Return a :class:`.Query` construct which will correlate the given + FROM clauses to that of an enclosing :class:`.Query` or + :func:`~.expression.select`. + + The method here accepts mapped classes, :func:`.aliased` constructs, + and :class:`_orm.Mapper` constructs as arguments, which are resolved + into expression constructs, in addition to appropriate expression + constructs. + + The correlation arguments are ultimately passed to + :meth:`_expression.Select.correlate` + after coercion to expression constructs. + + The correlation arguments take effect in such cases + as when :meth:`_query.Query.from_self` is used, or when + a subquery as returned by :meth:`_query.Query.subquery` is + embedded in another :func:`_expression.select` construct. + + .. seealso:: + + :meth:`_sql.Select.correlate` - v2 equivalent method. + + """ + + self._auto_correlate = False + if fromclauses and fromclauses[0] in {None, False}: + self._correlate = () + else: + self._correlate = self._correlate + tuple( + coercions.expect(roles.FromClauseRole, f) for f in fromclauses + ) + return self + + @_generative + def autoflush(self, setting: bool) -> Self: + """Return a Query with a specific 'autoflush' setting. + + As of SQLAlchemy 1.4, the :meth:`_orm.Query.autoflush` method + is equivalent to using the ``autoflush`` execution option at the + ORM level. See the section :ref:`orm_queryguide_autoflush` for + further background on this option. + + """ + self.load_options += {"_autoflush": setting} + return self + + @_generative + def populate_existing(self) -> Self: + """Return a :class:`_query.Query` + that will expire and refresh all instances + as they are loaded, or reused from the current :class:`.Session`. + + As of SQLAlchemy 1.4, the :meth:`_orm.Query.populate_existing` method + is equivalent to using the ``populate_existing`` execution option at + the ORM level. See the section :ref:`orm_queryguide_populate_existing` + for further background on this option. 
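+
+ E.g., a minimal sketch (``User`` is a hypothetical mapped class)::
+
+ # any User objects already present in the identity map are
+ # refreshed from the row data selected by this query
+ users = session.query(User).populate_existing().all()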
+ + """ + self.load_options += {"_populate_existing": True} + return self + + @_generative + def _with_invoke_all_eagers(self, value: bool) -> Self: + """Set the 'invoke all eagers' flag which causes joined- and + subquery loaders to traverse into already-loaded related objects + and collections. + + Default is that of :attr:`_query.Query._invoke_all_eagers`. + + """ + self.load_options += {"_invoke_all_eagers": value} + return self + + @util.became_legacy_20( + ":meth:`_orm.Query.with_parent`", + alternative="Use the :func:`_orm.with_parent` standalone construct.", + ) + @util.preload_module("sqlalchemy.orm.relationships") + def with_parent( + self, + instance: object, + property: Optional[ # noqa: A002 + attributes.QueryableAttribute[Any] + ] = None, + from_entity: Optional[_ExternalEntityType[Any]] = None, + ) -> Self: + """Add filtering criterion that relates the given instance + to a child object or collection, using its attribute state + as well as an established :func:`_orm.relationship()` + configuration. + + The method uses the :func:`.with_parent` function to generate + the clause, the result of which is passed to + :meth:`_query.Query.filter`. + + Parameters are the same as :func:`.with_parent`, with the exception + that the given property can be None, in which case a search is + performed against this :class:`_query.Query` object's target mapper. + + :param instance: + An instance which has some :func:`_orm.relationship`. + + :param property: + Class bound attribute which indicates + what relationship from the instance should be used to reconcile the + parent/child relationship. + + :param from_entity: + Entity in which to consider as the left side. This defaults to the + "zero" entity of the :class:`_query.Query` itself. + + """ + relationships = util.preloaded.orm_relationships + + if from_entity: + entity_zero = inspect(from_entity) + else: + entity_zero = _legacy_filter_by_entity_zero(self) + if property is None: + # TODO: deprecate, property has to be supplied + mapper = object_mapper(instance) + + for prop in mapper.iterate_properties: + if ( + isinstance(prop, relationships.RelationshipProperty) + and prop.mapper is entity_zero.mapper # type: ignore + ): + property = prop # type: ignore # noqa: A001 + break + else: + raise sa_exc.InvalidRequestError( + "Could not locate a property which relates instances " + "of class '%s' to instances of class '%s'" + % ( + entity_zero.mapper.class_.__name__, # type: ignore + instance.__class__.__name__, + ) + ) + + return self.filter( + with_parent( + instance, + property, # type: ignore + entity_zero.entity, # type: ignore + ) + ) + + @_generative + def add_entity( + self, + entity: _EntityType[Any], + alias: Optional[Union[Alias, Subquery]] = None, + ) -> Query[Any]: + """add a mapped entity to the list of result columns + to be returned. + + .. seealso:: + + :meth:`_sql.Select.add_columns` - v2 comparable method. + """ + + if alias is not None: + # TODO: deprecate + entity = AliasedClass(entity, alias) + + self._raw_columns = list(self._raw_columns) + + self._raw_columns.append( + coercions.expect( + roles.ColumnsClauseRole, entity, apply_propagate_attrs=self + ) + ) + return self + + @_generative + def with_session(self, session: Session) -> Self: + """Return a :class:`_query.Query` that will use the given + :class:`.Session`. 
+ + While the :class:`_query.Query` + object is normally instantiated using the + :meth:`.Session.query` method, it is legal to build the + :class:`_query.Query` + directly without necessarily using a :class:`.Session`. Such a + :class:`_query.Query` object, or any :class:`_query.Query` + already associated + with a different :class:`.Session`, can produce a new + :class:`_query.Query` + object associated with a target session using this method:: + + from sqlalchemy.orm import Query + + query = Query([MyClass]).filter(MyClass.id == 5) + + result = query.with_session(my_session).one() + + """ + + self.session = session + return self + + def _legacy_from_self( + self, *entities: _ColumnsClauseArgument[Any] + ) -> Self: + # used for query.count() as well as for the same + # function in BakedQuery, as well as some old tests in test_baked.py. + + fromclause = ( + self.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL) + .correlate(None) + .subquery() + ._anonymous_fromclause() + ) + + q = self._from_selectable(fromclause) + + if entities: + q._set_entities(entities) + return q + + @_generative + def _set_enable_single_crit(self, val: bool) -> Self: + self._compile_options += {"_enable_single_crit": val} + return self + + @_generative + def _from_selectable( + self, fromclause: FromClause, set_entity_from: bool = True + ) -> Self: + for attr in ( + "_where_criteria", + "_order_by_clauses", + "_group_by_clauses", + "_limit_clause", + "_offset_clause", + "_last_joined_entity", + "_setup_joins", + "_memoized_select_entities", + "_distinct", + "_distinct_on", + "_having_criteria", + "_prefixes", + "_suffixes", + ): + self.__dict__.pop(attr, None) + self._set_select_from([fromclause], set_entity_from) + self._compile_options += { + "_enable_single_crit": False, + } + + return self + + @util.deprecated( + "1.4", + ":meth:`_query.Query.values` " + "is deprecated and will be removed in a " + "future release. Please use :meth:`_query.Query.with_entities`", + ) + def values(self, *columns: _ColumnsClauseArgument[Any]) -> Iterable[Any]: + """Return an iterator yielding result tuples corresponding + to the given list of columns + + """ + return self._values_no_warn(*columns) + + _values = values + + def _values_no_warn( + self, *columns: _ColumnsClauseArgument[Any] + ) -> Iterable[Any]: + if not columns: + return iter(()) + q = self._clone().enable_eagerloads(False) + q._set_entities(columns) + if not q.load_options._yield_per: + q.load_options += {"_yield_per": 10} + return iter(q) + + @util.deprecated( + "1.4", + ":meth:`_query.Query.value` " + "is deprecated and will be removed in a " + "future release. Please use :meth:`_query.Query.with_entities` " + "in combination with :meth:`_query.Query.scalar`", + ) + def value(self, column: _ColumnExpressionArgument[Any]) -> Any: + """Return a scalar result corresponding to the given + column expression. + + """ + try: + return next(self._values_no_warn(column))[0] # type: ignore + except StopIteration: + return None + + @overload + def with_entities(self, _entity: _EntityType[_O]) -> Query[_O]: ... + + @overload + def with_entities( + self, + _colexpr: roles.TypedColumnsClauseRole[_T], + ) -> RowReturningQuery[Tuple[_T]]: ... + + # START OVERLOADED FUNCTIONS self.with_entities RowReturningQuery 2-8 + + # code within this block is **programmatically, + # statically generated** by tools/generate_tuple_map_overloads.py + + @overload + def with_entities( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] + ) -> RowReturningQuery[Tuple[_T0, _T1]]: ... 
+ + @overload + def with_entities( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2]]: ... + + @overload + def with_entities( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3]]: ... + + @overload + def with_entities( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... + + @overload + def with_entities( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... + + @overload + def with_entities( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... + + @overload + def with_entities( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + __ent7: _TCCA[_T7], + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ... + + # END OVERLOADED FUNCTIONS self.with_entities + + @overload + def with_entities( + self, *entities: _ColumnsClauseArgument[Any] + ) -> Query[Any]: ... + + @_generative + def with_entities( + self, *entities: _ColumnsClauseArgument[Any], **__kw: Any + ) -> Query[Any]: + r"""Return a new :class:`_query.Query` + replacing the SELECT list with the + given entities. + + e.g.:: + + # Users, filtered on some arbitrary criterion + # and then ordered by related email address + q = session.query(User).\ + join(User.address).\ + filter(User.name.like('%ed%')).\ + order_by(Address.email) + + # given *only* User.id==5, Address.email, and 'q', what + # would the *next* User in the result be ? + subq = q.with_entities(Address.email).\ + order_by(None).\ + filter(User.id==5).\ + subquery() + q = q.join((subq, subq.c.email < Address.email)).\ + limit(1) + + .. seealso:: + + :meth:`_sql.Select.with_only_columns` - v2 comparable method. + """ + if __kw: + raise _no_kw() + + # Query has all the same fields as Select for this operation + # this could in theory be based on a protocol but not sure if it's + # worth it + _MemoizedSelectEntities._generate_for_statement(self) # type: ignore + self._set_entities(entities) + return self + + @_generative + def add_columns( + self, *column: _ColumnExpressionArgument[Any] + ) -> Query[Any]: + """Add one or more column expressions to the list + of result columns to be returned. + + .. seealso:: + + :meth:`_sql.Select.add_columns` - v2 comparable method. + """ + + self._raw_columns = list(self._raw_columns) + + self._raw_columns.extend( + coercions.expect( + roles.ColumnsClauseRole, + c, + apply_propagate_attrs=self, + post_inspect=True, + ) + for c in column + ) + return self + + @util.deprecated( + "1.4", + ":meth:`_query.Query.add_column` " + "is deprecated and will be removed in a " + "future release. Please use :meth:`_query.Query.add_columns`", + ) + def add_column(self, column: _ColumnExpressionArgument[Any]) -> Query[Any]: + """Add a column expression to the list of result columns to be + returned. 
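+
+ As a brief sketch of the replacement call (``User`` is a
+ hypothetical mapped class)::
+
+ # add_columns() supersedes this method and accepts any number
+ # of column expressions at once
+ q = session.query(User).add_columns(User.name)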
+
+ """
+ return self.add_columns(column)
+
+ @_generative
+ def options(self, *args: ExecutableOption) -> Self:
+ """Return a new :class:`_query.Query` object,
+ applying the given list of
+ mapper options.
+
+ Most supplied options regard changing how column- and
+ relationship-mapped attributes are loaded.
+
+ .. seealso::
+
+ :ref:`loading_columns`
+
+ :ref:`relationship_loader_options`
+
+ """
+
+ opts = tuple(util.flatten_iterator(args))
+ if self._compile_options._current_path:
+ # opting for lower method overhead for the checks
+ for opt in opts:
+ if not opt._is_core and opt._is_legacy_option: # type: ignore
+ opt.process_query_conditionally(self) # type: ignore
+ else:
+ for opt in opts:
+ if not opt._is_core and opt._is_legacy_option: # type: ignore
+ opt.process_query(self) # type: ignore
+
+ self._with_options += opts
+ return self
+
+ def with_transformation(
+ self, fn: Callable[[Query[Any]], Query[Any]]
+ ) -> Query[Any]:
+ """Return a new :class:`_query.Query` object transformed by
+ the given function.
+
+ E.g.::
+
+ def filter_something(criterion):
+ def transform(q):
+ return q.filter(criterion)
+ return transform
+
+ q = q.with_transformation(filter_something(x==5))
+
+ This allows ad-hoc recipes to be created for :class:`_query.Query`
+ objects.
+
+ """
+ return fn(self)
+
+ def get_execution_options(self) -> _ImmutableExecuteOptions:
+ """Get the non-SQL options which will take effect during execution.
+
+ .. versionadded:: 1.3
+
+ .. seealso::
+
+ :meth:`_query.Query.execution_options`
+
+ :meth:`_sql.Select.get_execution_options` - v2 comparable method.
+
+ """
+ return self._execution_options
+
+ @overload
+ def execution_options(
+ self,
+ *,
+ compiled_cache: Optional[CompiledCacheType] = ...,
+ logging_token: str = ...,
+ isolation_level: IsolationLevel = ...,
+ no_parameters: bool = False,
+ stream_results: bool = False,
+ max_row_buffer: int = ...,
+ yield_per: int = ...,
+ insertmanyvalues_page_size: int = ...,
+ schema_translate_map: Optional[SchemaTranslateMapType] = ...,
+ populate_existing: bool = False,
+ autoflush: bool = False,
+ preserve_rowcount: bool = False,
+ **opt: Any,
+ ) -> Self: ...
+
+ @overload
+ def execution_options(self, **opt: Any) -> Self: ...
+
+ @_generative
+ def execution_options(self, **kwargs: Any) -> Self:
+ """Set non-SQL options which take effect during execution.
+
+ Options allowed here include all of those accepted by
+ :meth:`_engine.Connection.execution_options`, as well as a series
+ of ORM specific options:
+
+ ``populate_existing=True`` - equivalent to using
+ :meth:`_orm.Query.populate_existing`
+
+ ``autoflush=True|False`` - equivalent to using
+ :meth:`_orm.Query.autoflush`
+
+ ``yield_per=<value>`` - equivalent to using
+ :meth:`_orm.Query.yield_per`
+
+ Note that the ``stream_results`` execution option is enabled
+ automatically if the :meth:`~sqlalchemy.orm.query.Query.yield_per()`
+ method or execution option is used.
+
+ .. versionadded:: 1.4 - added ORM options to
+ :meth:`_orm.Query.execution_options`
+
+ The execution options may also be specified on a per execution basis
+ when using :term:`2.0 style` queries via the
+ :paramref:`_orm.Session.execution_options` parameter.
+
+ .. warning:: The
+ :paramref:`_engine.Connection.execution_options.schema_translate_map`
+ parameter should not be used at the level of individual ORM
+ statement executions, as the :class:`_orm.Session` will not track
+ objects from different schema translate maps within a single
+ session.
For multiple schema translate maps within the scope of a + single :class:`_orm.Session`, see :ref:`examples_sharding`. + + + .. seealso:: + + :ref:`engine_stream_results` + + :meth:`_query.Query.get_execution_options` + + :meth:`_sql.Select.execution_options` - v2 equivalent method. + + """ + self._execution_options = self._execution_options.union(kwargs) + return self + + @_generative + def with_for_update( + self, + *, + nowait: bool = False, + read: bool = False, + of: Optional[_ForUpdateOfArgument] = None, + skip_locked: bool = False, + key_share: bool = False, + ) -> Self: + """return a new :class:`_query.Query` + with the specified options for the + ``FOR UPDATE`` clause. + + The behavior of this method is identical to that of + :meth:`_expression.GenerativeSelect.with_for_update`. + When called with no arguments, + the resulting ``SELECT`` statement will have a ``FOR UPDATE`` clause + appended. When additional arguments are specified, backend-specific + options such as ``FOR UPDATE NOWAIT`` or ``LOCK IN SHARE MODE`` + can take effect. + + E.g.:: + + q = sess.query(User).populate_existing().with_for_update(nowait=True, of=User) + + The above query on a PostgreSQL backend will render like:: + + SELECT users.id AS users_id FROM users FOR UPDATE OF users NOWAIT + + .. warning:: + + Using ``with_for_update`` in the context of eager loading + relationships is not officially supported or recommended by + SQLAlchemy and may not work with certain queries on various + database backends. When ``with_for_update`` is successfully used + with a query that involves :func:`_orm.joinedload`, SQLAlchemy will + attempt to emit SQL that locks all involved tables. + + .. note:: It is generally a good idea to combine the use of the + :meth:`_orm.Query.populate_existing` method when using the + :meth:`_orm.Query.with_for_update` method. The purpose of + :meth:`_orm.Query.populate_existing` is to force all the data read + from the SELECT to be populated into the ORM objects returned, + even if these objects are already in the :term:`identity map`. + + .. seealso:: + + :meth:`_expression.GenerativeSelect.with_for_update` + - Core level method with + full argument and behavioral description. + + :meth:`_orm.Query.populate_existing` - overwrites attributes of + objects already loaded in the identity map. + + """ # noqa: E501 + + self._for_update_arg = ForUpdateArg( + read=read, + nowait=nowait, + of=of, + skip_locked=skip_locked, + key_share=key_share, + ) + return self + + @_generative + def params( + self, __params: Optional[Dict[str, Any]] = None, **kw: Any + ) -> Self: + r"""Add values for bind parameters which may have been + specified in filter(). + + Parameters may be specified using \**kwargs, or optionally a single + dictionary as the first positional argument. The reason for both is + that \**kwargs is convenient, however some parameter dictionaries + contain unicode keys in which case \**kwargs cannot be used. + + """ + if __params: + kw.update(__params) + self._params = self._params.union(kw) + return self + + def where(self, *criterion: _ColumnExpressionArgument[bool]) -> Self: + """A synonym for :meth:`.Query.filter`. + + .. versionadded:: 1.4 + + .. seealso:: + + :meth:`_sql.Select.where` - v2 equivalent method. 
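+
+ E.g., a minimal sketch (``User`` is a hypothetical mapped class)::
+
+ users = session.query(User).where(User.name == "ed").all()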
+
+ """
+ return self.filter(*criterion)
+
+ @_generative
+ @_assertions(_no_statement_condition, _no_limit_offset)
+ def filter(self, *criterion: _ColumnExpressionArgument[bool]) -> Self:
+ r"""Apply the given filtering criterion to a copy
+ of this :class:`_query.Query`, using SQL expressions.
+
+ e.g.::
+
+ session.query(MyClass).filter(MyClass.name == 'some name')
+
+ Multiple criteria may be specified as comma separated; the effect
+ is that they will be joined together using the :func:`.and_`
+ function::
+
+ session.query(MyClass).\
+ filter(MyClass.name == 'some name', MyClass.id > 5)
+
+ The criterion is any SQL expression object applicable to the
+ WHERE clause of a select. String expressions are coerced
+ into SQL expression constructs via the :func:`_expression.text`
+ construct.
+
+ .. seealso::
+
+ :meth:`_query.Query.filter_by` - filter on keyword expressions.
+
+ :meth:`_sql.Select.where` - v2 equivalent method.
+
+ """
+ for crit in list(criterion):
+ crit = coercions.expect(
+ roles.WhereHavingRole, crit, apply_propagate_attrs=self
+ )
+
+ self._where_criteria += (crit,)
+ return self
+
+ @util.memoized_property
+ def _last_joined_entity(
+ self,
+ ) -> Optional[Union[_InternalEntityType[Any], _JoinTargetElement]]:
+ if self._setup_joins:
+ return _determine_last_joined_entity(
+ self._setup_joins,
+ )
+ else:
+ return None
+
+ def _filter_by_zero(self) -> Any:
+ """for the filter_by() method, return the target entity from which
+ we will attempt to derive an expression based on string name.
+
+ """
+
+ if self._setup_joins:
+ _last_joined_entity = self._last_joined_entity
+ if _last_joined_entity is not None:
+ return _last_joined_entity
+
+ # discussion related to #7239
+ # special check determines if we should try to derive attributes
+ # for filter_by() from the "from object", i.e., if the user
+ # called query.select_from(some selectable).filter_by(some_attr=value).
+ # We don't want to do that in the case that methods like
+ # from_self(), select_entity_from(), or a set op like union() were
+ # called; while these methods also place a
+ # selectable in the _from_obj collection, they also set up
+ # the _set_base_alias boolean which turns on the whole "adapt the
+ # entity to this selectable" thing, meaning the query still continues
+ # to construct itself in terms of the lead entity that was passed
+ # to query(), e.g. query(User).from_self() is still in terms of User,
+ # and not the subquery that from_self() created. This feature of
+ # "implicitly adapt all occurrences of entity X to some arbitrary
+ # subquery" is the main thing I am trying to do away with in 2.0 as
+ # users should now use aliased() for that, but I can't entirely get
+ # rid of it due to query.union() and other set ops relying upon it.
+ #
+ # compare this to the base Select()._filter_by_zero() which can
+ # just return self._from_obj[0] if present, because there is no
+ # "_set_base_alias" feature.
+ #
+ # IOW, this conditional essentially detects if
+ # "select_from(some_selectable)" has been called, as opposed to
+ # "select_entity_from()", "from_self()"
+ # or "union() / some_set_op()".
+ if self._from_obj and not self._compile_options._set_base_alias:
+ return self._from_obj[0]
+
+ return self._raw_columns[0]
+
+ def filter_by(self, **kwargs: Any) -> Self:
+ r"""Apply the given filtering criterion to a copy
+ of this :class:`_query.Query`, using keyword expressions.
+ + e.g.:: + + session.query(MyClass).filter_by(name = 'some name') + + Multiple criteria may be specified as comma separated; the effect + is that they will be joined together using the :func:`.and_` + function:: + + session.query(MyClass).\ + filter_by(name = 'some name', id = 5) + + The keyword expressions are extracted from the primary + entity of the query, or the last entity that was the + target of a call to :meth:`_query.Query.join`. + + .. seealso:: + + :meth:`_query.Query.filter` - filter on SQL expressions. + + :meth:`_sql.Select.filter_by` - v2 comparable method. + + """ + from_entity = self._filter_by_zero() + + clauses = [ + _entity_namespace_key(from_entity, key) == value + for key, value in kwargs.items() + ] + return self.filter(*clauses) + + @_generative + def order_by( + self, + __first: Union[ + Literal[None, False, _NoArg.NO_ARG], + _ColumnExpressionOrStrLabelArgument[Any], + ] = _NoArg.NO_ARG, + *clauses: _ColumnExpressionOrStrLabelArgument[Any], + ) -> Self: + """Apply one or more ORDER BY criteria to the query and return + the newly resulting :class:`_query.Query`. + + e.g.:: + + q = session.query(Entity).order_by(Entity.id, Entity.name) + + Calling this method multiple times is equivalent to calling it once + with all the clauses concatenated. All existing ORDER BY criteria may + be cancelled by passing ``None`` by itself. New ORDER BY criteria may + then be added by invoking :meth:`_orm.Query.order_by` again, e.g.:: + + # will erase all ORDER BY and ORDER BY new_col alone + q = q.order_by(None).order_by(new_col) + + .. seealso:: + + These sections describe ORDER BY in terms of :term:`2.0 style` + invocation but apply to :class:`_orm.Query` as well: + + :ref:`tutorial_order_by` - in the :ref:`unified_tutorial` + + :ref:`tutorial_order_by_label` - in the :ref:`unified_tutorial` + + :meth:`_sql.Select.order_by` - v2 equivalent method. + + """ + + for assertion in (self._no_statement_condition, self._no_limit_offset): + assertion("order_by") + + if not clauses and (__first is None or __first is False): + self._order_by_clauses = () + elif __first is not _NoArg.NO_ARG: + criterion = tuple( + coercions.expect(roles.OrderByRole, clause) + for clause in (__first,) + clauses + ) + self._order_by_clauses += criterion + + return self + + @_generative + def group_by( + self, + __first: Union[ + Literal[None, False, _NoArg.NO_ARG], + _ColumnExpressionOrStrLabelArgument[Any], + ] = _NoArg.NO_ARG, + *clauses: _ColumnExpressionOrStrLabelArgument[Any], + ) -> Self: + """Apply one or more GROUP BY criterion to the query and return + the newly resulting :class:`_query.Query`. + + All existing GROUP BY settings can be suppressed by + passing ``None`` - this will suppress any GROUP BY configured + on mappers as well. + + .. seealso:: + + These sections describe GROUP BY in terms of :term:`2.0 style` + invocation but apply to :class:`_orm.Query` as well: + + :ref:`tutorial_group_by_w_aggregates` - in the + :ref:`unified_tutorial` + + :ref:`tutorial_order_by_label` - in the :ref:`unified_tutorial` + + :meth:`_sql.Select.group_by` - v2 equivalent method. 
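+
+ E.g., an illustrative sketch (``User`` and ``Address`` are
+ hypothetical mapped classes with a ``User.addresses`` relationship)::
+
+ from sqlalchemy import func
+
+ # count addresses per user id
+ q = (
+     session.query(User.id, func.count(Address.id))
+     .join(User.addresses)
+     .group_by(User.id)
+ )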
+
+ """
+
+ for assertion in (self._no_statement_condition, self._no_limit_offset):
+ assertion("group_by")
+
+ if not clauses and (__first is None or __first is False):
+ self._group_by_clauses = ()
+ elif __first is not _NoArg.NO_ARG:
+ criterion = tuple(
+ coercions.expect(roles.GroupByRole, clause)
+ for clause in (__first,) + clauses
+ )
+ self._group_by_clauses += criterion
+ return self
+
+ @_generative
+ @_assertions(_no_statement_condition, _no_limit_offset)
+ def having(self, *having: _ColumnExpressionArgument[bool]) -> Self:
+ r"""Apply a HAVING criterion to the query and return the
+ newly resulting :class:`_query.Query`.
+
+ :meth:`_query.Query.having` is used in conjunction with
+ :meth:`_query.Query.group_by`.
+
+ HAVING criterion makes it possible to use filters on aggregate
+ functions like COUNT, SUM, AVG, MAX, and MIN, e.g.::
+
+ q = session.query(User.id).\
+ join(User.addresses).\
+ group_by(User.id).\
+ having(func.count(Address.id) > 2)
+
+ .. seealso::
+
+ :meth:`_sql.Select.having` - v2 equivalent method.
+
+ """
+
+ for criterion in having:
+ having_criteria = coercions.expect(
+ roles.WhereHavingRole, criterion
+ )
+ self._having_criteria += (having_criteria,)
+ return self
+
+ def _set_op(self, expr_fn: Any, *q: Query[Any]) -> Self:
+ list_of_queries = (self,) + q
+ return self._from_selectable(expr_fn(*(list_of_queries)).subquery())
+
+ def union(self, *q: Query[Any]) -> Self:
+ """Produce a UNION of this Query against one or more queries.
+
+ e.g.::
+
+ q1 = sess.query(SomeClass).filter(SomeClass.foo=='bar')
+ q2 = sess.query(SomeClass).filter(SomeClass.bar=='foo')
+
+ q3 = q1.union(q2)
+
+ The method accepts multiple Query objects so as to control
+ the level of nesting. A series of ``union()`` calls such as::
+
+ x.union(y).union(z).all()
+
+ will nest on each ``union()``, and produces::
+
+ SELECT * FROM (SELECT * FROM (SELECT * FROM X UNION
+ SELECT * FROM y) UNION SELECT * FROM Z)
+
+ Whereas::
+
+ x.union(y, z).all()
+
+ produces::
+
+ SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y UNION
+ SELECT * FROM Z)
+
+ Note that many database backends do not allow ORDER BY to
+ be rendered on a query called within UNION, EXCEPT, etc.
+ To disable all ORDER BY clauses including those configured
+ on mappers, issue ``query.order_by(None)`` - the resulting
+ :class:`_query.Query` object will not render ORDER BY within
+ its SELECT statement.
+
+ .. seealso::
+
+ :meth:`_sql.Select.union` - v2 equivalent method.
+
+ """
+ return self._set_op(expression.union, *q)
+
+ def union_all(self, *q: Query[Any]) -> Self:
+ """Produce a UNION ALL of this Query against one or more queries.
+
+ Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
+ that method for usage examples.
+
+ .. seealso::
+
+ :meth:`_sql.Select.union_all` - v2 equivalent method.
+
+ """
+ return self._set_op(expression.union_all, *q)
+
+ def intersect(self, *q: Query[Any]) -> Self:
+ """Produce an INTERSECT of this Query against one or more queries.
+
+ Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
+ that method for usage examples.
+
+ .. seealso::
+
+ :meth:`_sql.Select.intersect` - v2 equivalent method.
+
+ """
+ return self._set_op(expression.intersect, *q)
+
+ def intersect_all(self, *q: Query[Any]) -> Self:
+ """Produce an INTERSECT ALL of this Query against one or more queries.
+
+ Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
+ that method for usage examples.
+
+ ..
seealso:: + + :meth:`_sql.Select.intersect_all` - v2 equivalent method. + + """ + return self._set_op(expression.intersect_all, *q) + + def except_(self, *q: Query[Any]) -> Self: + """Produce an EXCEPT of this Query against one or more queries. + + Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See + that method for usage examples. + + .. seealso:: + + :meth:`_sql.Select.except_` - v2 equivalent method. + + """ + return self._set_op(expression.except_, *q) + + def except_all(self, *q: Query[Any]) -> Self: + """Produce an EXCEPT ALL of this Query against one or more queries. + + Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See + that method for usage examples. + + .. seealso:: + + :meth:`_sql.Select.except_all` - v2 equivalent method. + + """ + return self._set_op(expression.except_all, *q) + + @_generative + @_assertions(_no_statement_condition, _no_limit_offset) + def join( + self, + target: _JoinTargetArgument, + onclause: Optional[_OnClauseArgument] = None, + *, + isouter: bool = False, + full: bool = False, + ) -> Self: + r"""Create a SQL JOIN against this :class:`_query.Query` + object's criterion + and apply generatively, returning the newly resulting + :class:`_query.Query`. + + **Simple Relationship Joins** + + Consider a mapping between two classes ``User`` and ``Address``, + with a relationship ``User.addresses`` representing a collection + of ``Address`` objects associated with each ``User``. The most + common usage of :meth:`_query.Query.join` + is to create a JOIN along this + relationship, using the ``User.addresses`` attribute as an indicator + for how this should occur:: + + q = session.query(User).join(User.addresses) + + Where above, the call to :meth:`_query.Query.join` along + ``User.addresses`` will result in SQL approximately equivalent to:: + + SELECT user.id, user.name + FROM user JOIN address ON user.id = address.user_id + + In the above example we refer to ``User.addresses`` as passed to + :meth:`_query.Query.join` as the "on clause", that is, it indicates + how the "ON" portion of the JOIN should be constructed. + + To construct a chain of joins, multiple :meth:`_query.Query.join` + calls may be used. The relationship-bound attribute implies both + the left and right side of the join at once:: + + q = session.query(User).\ + join(User.orders).\ + join(Order.items).\ + join(Item.keywords) + + .. note:: as seen in the above example, **the order in which each + call to the join() method occurs is important**. Query would not, + for example, know how to join correctly if we were to specify + ``User``, then ``Item``, then ``Order``, in our chain of joins; in + such a case, depending on the arguments passed, it may raise an + error that it doesn't know how to join, or it may produce invalid + SQL in which case the database will raise an error. In correct + practice, the + :meth:`_query.Query.join` method is invoked in such a way that lines + up with how we would want the JOIN clauses in SQL to be + rendered, and each call should represent a clear link from what + precedes it. + + **Joins to a Target Entity or Selectable** + + A second form of :meth:`_query.Query.join` allows any mapped entity or + core selectable construct as a target. 
In this usage, + :meth:`_query.Query.join` will attempt to create a JOIN along the + natural foreign key relationship between two entities:: + + q = session.query(User).join(Address) + + In the above calling form, :meth:`_query.Query.join` is called upon to + create the "on clause" automatically for us. This calling form will + ultimately raise an error if either there are no foreign keys between + the two entities, or if there are multiple foreign key linkages between + the target entity and the entity or entities already present on the + left side such that creating a join requires more information. Note + that when indicating a join to a target without any ON clause, ORM + configured relationships are not taken into account. + + **Joins to a Target with an ON Clause** + + The third calling form allows both the target entity as well + as the ON clause to be passed explicitly. An example that includes + a SQL expression as the ON clause is as follows:: + + q = session.query(User).join(Address, User.id==Address.user_id) + + The above form may also use a relationship-bound attribute as the + ON clause:: + + q = session.query(User).join(Address, User.addresses) + + The above syntax can be useful for the case where we wish + to join to an alias of a particular target entity. If we wanted + to join to ``Address`` twice, it could be achieved using two + aliases set up using the :func:`~sqlalchemy.orm.aliased` function:: + + a1 = aliased(Address) + a2 = aliased(Address) + + q = session.query(User).\ + join(a1, User.addresses).\ + join(a2, User.addresses).\ + filter(a1.email_address=='ed@foo.com').\ + filter(a2.email_address=='ed@bar.com') + + The relationship-bound calling form can also specify a target entity + using the :meth:`_orm.PropComparator.of_type` method; a query + equivalent to the one above would be:: + + a1 = aliased(Address) + a2 = aliased(Address) + + q = session.query(User).\ + join(User.addresses.of_type(a1)).\ + join(User.addresses.of_type(a2)).\ + filter(a1.email_address == 'ed@foo.com').\ + filter(a2.email_address == 'ed@bar.com') + + **Augmenting Built-in ON Clauses** + + As a substitute for providing a full custom ON condition for an + existing relationship, the :meth:`_orm.PropComparator.and_` function + may be applied to a relationship attribute to augment additional + criteria into the ON clause; the additional criteria will be combined + with the default criteria using AND:: + + q = session.query(User).join( + User.addresses.and_(Address.email_address != 'foo@bar.com') + ) + + .. versionadded:: 1.4 + + **Joining to Tables and Subqueries** + + + The target of a join may also be any table or SELECT statement, + which may be related to a target entity or not. 
Use the + appropriate ``.subquery()`` method in order to make a subquery + out of a query:: + + subq = session.query(Address).\ + filter(Address.email_address == 'ed@foo.com').\ + subquery() + + + q = session.query(User).join( + subq, User.id == subq.c.user_id + ) + + Joining to a subquery in terms of a specific relationship and/or + target entity may be achieved by linking the subquery to the + entity using :func:`_orm.aliased`:: + + subq = session.query(Address).\ + filter(Address.email_address == 'ed@foo.com').\ + subquery() + + address_subq = aliased(Address, subq) + + q = session.query(User).join( + User.addresses.of_type(address_subq) + ) + + + **Controlling what to Join From** + + In cases where the left side of the current state of + :class:`_query.Query` is not in line with what we want to join from, + the :meth:`_query.Query.select_from` method may be used:: + + q = session.query(Address).select_from(User).\ + join(User.addresses).\ + filter(User.name == 'ed') + + Which will produce SQL similar to:: + + SELECT address.* FROM user + JOIN address ON user.id=address.user_id + WHERE user.name = :name_1 + + .. seealso:: + + :meth:`_sql.Select.join` - v2 equivalent method. + + :param \*props: Incoming arguments for :meth:`_query.Query.join`, + the props collection in modern use should be considered to be a one + or two argument form, either as a single "target" entity or ORM + attribute-bound relationship, or as a target entity plus an "on + clause" which may be a SQL expression or ORM attribute-bound + relationship. + + :param isouter=False: If True, the join used will be a left outer join, + just as if the :meth:`_query.Query.outerjoin` method were called. + + :param full=False: render FULL OUTER JOIN; implies ``isouter``. + + """ + + join_target = coercions.expect( + roles.JoinTargetRole, + target, + apply_propagate_attrs=self, + legacy=True, + ) + if onclause is not None: + onclause_element = coercions.expect( + roles.OnClauseRole, onclause, legacy=True + ) + else: + onclause_element = None + + self._setup_joins += ( + ( + join_target, + onclause_element, + None, + { + "isouter": isouter, + "full": full, + }, + ), + ) + + self.__dict__.pop("_last_joined_entity", None) + return self + + def outerjoin( + self, + target: _JoinTargetArgument, + onclause: Optional[_OnClauseArgument] = None, + *, + full: bool = False, + ) -> Self: + """Create a left outer join against this ``Query`` object's criterion + and apply generatively, returning the newly resulting ``Query``. + + Usage is the same as the ``join()`` method. + + .. seealso:: + + :meth:`_sql.Select.outerjoin` - v2 equivalent method. + + """ + return self.join(target, onclause=onclause, isouter=True, full=full) + + @_generative + @_assertions(_no_statement_condition) + def reset_joinpoint(self) -> Self: + """Return a new :class:`.Query`, where the "join point" has + been reset back to the base FROM entities of the query. + + This method is usually used in conjunction with the + ``aliased=True`` feature of the :meth:`~.Query.join` + method. See the example in :meth:`~.Query.join` for how + this is used. + + """ + self._last_joined_entity = None + + return self + + @_generative + @_assertions(_no_clauseelement_condition) + def select_from(self, *from_obj: _FromClauseArgument) -> Self: + r"""Set the FROM clause of this :class:`.Query` explicitly. + + :meth:`.Query.select_from` is often used in conjunction with + :meth:`.Query.join` in order to control which entity is selected + from on the "left" side of the join. 
+ + The entity or selectable object here effectively replaces the + "left edge" of any calls to :meth:`~.Query.join`, when no + joinpoint is otherwise established - usually, the default "join + point" is the leftmost entity in the :class:`~.Query` object's + list of entities to be selected. + + A typical example:: + + q = session.query(Address).select_from(User).\ + join(User.addresses).\ + filter(User.name == 'ed') + + Which produces SQL equivalent to:: + + SELECT address.* FROM user + JOIN address ON user.id=address.user_id + WHERE user.name = :name_1 + + :param \*from_obj: collection of one or more entities to apply + to the FROM clause. Entities can be mapped classes, + :class:`.AliasedClass` objects, :class:`.Mapper` objects + as well as core :class:`.FromClause` elements like subqueries. + + .. seealso:: + + :meth:`~.Query.join` + + :meth:`.Query.select_entity_from` + + :meth:`_sql.Select.select_from` - v2 equivalent method. + + """ + + self._set_select_from(from_obj, False) + return self + + def __getitem__(self, item: Any) -> Any: + return orm_util._getitem( + self, + item, + ) + + @_generative + @_assertions(_no_statement_condition) + def slice( + self, + start: int, + stop: int, + ) -> Self: + """Computes the "slice" of the :class:`_query.Query` represented by + the given indices and returns the resulting :class:`_query.Query`. + + The start and stop indices behave like the argument to Python's + built-in :func:`range` function. This method provides an + alternative to using ``LIMIT``/``OFFSET`` to get a slice of the + query. + + For example, :: + + session.query(User).order_by(User.id).slice(1, 3) + + renders as + + .. sourcecode:: sql + + SELECT users.id AS users_id, + users.name AS users_name + FROM users ORDER BY users.id + LIMIT ? OFFSET ? + (2, 1) + + .. seealso:: + + :meth:`_query.Query.limit` + + :meth:`_query.Query.offset` + + :meth:`_sql.Select.slice` - v2 equivalent method. + + """ + + self._limit_clause, self._offset_clause = sql_util._make_slice( + self._limit_clause, self._offset_clause, start, stop + ) + return self + + @_generative + @_assertions(_no_statement_condition) + def limit(self, limit: _LimitOffsetType) -> Self: + """Apply a ``LIMIT`` to the query and return the newly resulting + ``Query``. + + .. seealso:: + + :meth:`_sql.Select.limit` - v2 equivalent method. + + """ + self._limit_clause = sql_util._offset_or_limit_clause(limit) + return self + + @_generative + @_assertions(_no_statement_condition) + def offset(self, offset: _LimitOffsetType) -> Self: + """Apply an ``OFFSET`` to the query and return the newly resulting + ``Query``. + + .. seealso:: + + :meth:`_sql.Select.offset` - v2 equivalent method. + """ + self._offset_clause = sql_util._offset_or_limit_clause(offset) + return self + + @_generative + @_assertions(_no_statement_condition) + def distinct(self, *expr: _ColumnExpressionArgument[Any]) -> Self: + r"""Apply a ``DISTINCT`` to the query and return the newly resulting + ``Query``. + + + .. note:: + + The ORM-level :meth:`.distinct` call includes logic that will + automatically add columns from the ORDER BY of the query to the + columns clause of the SELECT statement, to satisfy the common need + of the database backend that ORDER BY columns be part of the SELECT + list when DISTINCT is used. These columns *are not* added to the + list of columns actually fetched by the :class:`_query.Query`, + however, + so would not affect results. The columns are passed through when + using the :attr:`_query.Query.statement` accessor, however. + + .. 
deprecated:: 2.0 This logic is deprecated and will be removed + in SQLAlchemy 2.0. See :ref:`migration_20_query_distinct` + for a description of this use case in 2.0. + + .. seealso:: + + :meth:`_sql.Select.distinct` - v2 equivalent method. + + :param \*expr: optional column expressions. When present, + the PostgreSQL dialect will render a ``DISTINCT ON (<expressions>)`` + construct. + + .. deprecated:: 1.4 Using \*expr in other dialects is deprecated + and will raise :class:`_exc.CompileError` in a future version. + + """ + if expr: + self._distinct = True + self._distinct_on = self._distinct_on + tuple( + coercions.expect(roles.ByOfRole, e) for e in expr + ) + else: + self._distinct = True + return self + + def all(self) -> List[_T]: + """Return the results represented by this :class:`_query.Query` + as a list. + + This results in an execution of the underlying SQL statement. + + .. warning:: The :class:`_query.Query` object, + when asked to return either + a sequence or iterator that consists of full ORM-mapped entities, + will **deduplicate entries based on primary key**. See the FAQ for + more details. + + .. seealso:: + + :ref:`faq_query_deduplicating` + + .. seealso:: + + :meth:`_engine.Result.all` - v2 comparable method. + + :meth:`_engine.Result.scalars` - v2 comparable method. + """ + return self._iter().all() # type: ignore + + @_generative + @_assertions(_no_clauseelement_condition) + def from_statement(self, statement: ExecutableReturnsRows) -> Self: + """Execute the given SELECT statement and return results. + + This method bypasses all internal statement compilation, and the + statement is executed without modification. + + The statement is typically either a :func:`_expression.text` + or :func:`_expression.select` construct, and should return the set + of columns + appropriate to the entity class represented by this + :class:`_query.Query`. + + .. seealso:: + + :meth:`_sql.Select.from_statement` - v2 comparable method. + + """ + statement = coercions.expect( + roles.SelectStatementRole, statement, apply_propagate_attrs=self + ) + self._statement = statement + return self + + def first(self) -> Optional[_T]: + """Return the first result of this ``Query`` or + None if the result doesn't contain any row. + + first() applies a limit of one within the generated SQL, so that + only one primary entity row is generated on the server side + (note this may consist of multiple result rows if join-loaded + collections are present). + + Calling :meth:`_query.Query.first` + results in an execution of the underlying + query. + + .. seealso:: + + :meth:`_query.Query.one` + + :meth:`_query.Query.one_or_none` + + :meth:`_engine.Result.first` - v2 comparable method. + + :meth:`_engine.Result.scalars` - v2 comparable method. + + """ + # replicates limit(1) behavior + if self._statement is not None: + return self._iter().first() # type: ignore + else: + return self.limit(1)._iter().first() # type: ignore + + def one_or_none(self) -> Optional[_T]: + """Return at most one result or raise an exception. + + Returns ``None`` if the query selects + no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` + if multiple object identities are returned, or if multiple + rows are returned for a query that returns only scalar values + as opposed to full identity-mapped entities. + + Calling :meth:`_query.Query.one_or_none` + results in an execution of the + underlying query. + + .. seealso:: + + :meth:`_query.Query.first` + + :meth:`_query.Query.one` + + :meth:`_engine.Result.one_or_none` - v2 comparable method. 
+ + :meth:`_engine.Result.scalar_one_or_none` - v2 comparable method. + + """ + return self._iter().one_or_none() # type: ignore + + def one(self) -> _T: + """Return exactly one result or raise an exception. + + Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects + no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` + if multiple object identities are returned, or if multiple + rows are returned for a query that returns only scalar values + as opposed to full identity-mapped entities. + + Calling :meth:`.one` results in an execution of the underlying query. + + .. seealso:: + + :meth:`_query.Query.first` + + :meth:`_query.Query.one_or_none` + + :meth:`_engine.Result.one` - v2 comparable method. + + :meth:`_engine.Result.scalar_one` - v2 comparable method. + + """ + return self._iter().one() # type: ignore + + def scalar(self) -> Any: + """Return the first element of the first result or None + if no rows present. If multiple rows are returned, + raises MultipleResultsFound. + + >>> session.query(Item).scalar() + <Item> + >>> session.query(Item.id).scalar() + 1 + >>> session.query(Item.id).filter(Item.id < 0).scalar() + None + >>> session.query(Item.id, Item.name).scalar() + 1 + >>> session.query(func.count(Parent.id)).scalar() + 20 + + This results in an execution of the underlying query. + + .. seealso:: + + :meth:`_engine.Result.scalar` - v2 comparable method. + + """ + # TODO: not sure why we can't use result.scalar() here + try: + ret = self.one() + if not isinstance(ret, collections_abc.Sequence): + return ret + return ret[0] + except sa_exc.NoResultFound: + return None + + def __iter__(self) -> Iterator[_T]: + result = self._iter() + try: + yield from result # type: ignore + except GeneratorExit: + # issue #8710 - direct iteration is not re-usable after + # an iterable block is broken, so close the result + result._soft_close() + raise + + def _iter(self) -> Union[ScalarResult[_T], Result[_T]]: + # new style execution. + params = self._params + + statement = self._statement_20() + result: Union[ScalarResult[_T], Result[_T]] = self.session.execute( + statement, + params, + execution_options={"_sa_orm_load_options": self.load_options}, + ) + + # legacy: automatically set scalars, unique + if result._attributes.get("is_single_entity", False): + result = cast("Result[_T]", result).scalars() + + if ( + result._attributes.get("filtered", False) + and not self.load_options._yield_per + ): + result = result.unique() + + return result + + def __str__(self) -> str: + statement = self._statement_20() + + try: + bind = ( + self._get_bind_args(statement, self.session.get_bind) + if self.session + else None + ) + except sa_exc.UnboundExecutionError: + bind = None + + return str(statement.compile(bind)) + + def _get_bind_args(self, statement: Any, fn: Any, **kw: Any) -> Any: + return fn(clause=statement, **kw) + + @property + def column_descriptions(self) -> List[ORMColumnDescription]: + """Return metadata about the columns which would be + returned by this :class:`_query.Query`. + + Format is a list of dictionaries:: + + user_alias = aliased(User, name='user2') + q = sess.query(User, User.id, user_alias) + + # this expression: + q.column_descriptions + + # would return: + [ + { + 'name':'User', + 'type':User, + 'aliased':False, + 'expr':User, + 'entity': User + }, + { + 'name':'id', + 'type':Integer(), + 'aliased':False, + 'expr':User.id, + 'entity': User + }, + { + 'name':'user2', + 'type':User, + 'aliased':True, + 'expr':user_alias, + 'entity': user_alias + } + ] + + .. 
seealso:: + + This API is available using :term:`2.0 style` queries as well, + documented at: + + * :ref:`queryguide_inspection` + + * :attr:`.Select.column_descriptions` + + """ + + return _column_descriptions(self, legacy=True) + + @util.deprecated( + "2.0", + "The :meth:`_orm.Query.instances` method is deprecated and will " + "be removed in a future release. " + "Use the Select.from_statement() method or aliased() construct in " + "conjunction with Session.execute() instead.", + ) + def instances( + self, + result_proxy: CursorResult[Any], + context: Optional[QueryContext] = None, + ) -> Any: + """Return an ORM result given a :class:`_engine.CursorResult` and + :class:`.QueryContext`. + + """ + if context is None: + util.warn_deprecated( + "Using the Query.instances() method without a context " + "is deprecated and will be disallowed in a future release. " + "Please make use of :meth:`_query.Query.from_statement` " + "for linking ORM results to arbitrary select constructs.", + version="1.4", + ) + compile_state = self._compile_state(for_statement=False) + + context = QueryContext( + compile_state, + compile_state.statement, + compile_state.statement, + self._params, + self.session, + self.load_options, + ) + + result = loading.instances(result_proxy, context) + + # legacy: automatically set scalars, unique + if result._attributes.get("is_single_entity", False): + result = result.scalars() # type: ignore + + if result._attributes.get("filtered", False): + result = result.unique() + + # TODO: isn't this supposed to be a list? + return result + + @util.became_legacy_20( + ":meth:`_orm.Query.merge_result`", + alternative="The method is superseded by the " + ":func:`_orm.merge_frozen_result` function.", + enable_warnings=False, # warnings occur via loading.merge_result + ) + def merge_result( + self, + iterator: Union[ + FrozenResult[Any], Iterable[Sequence[Any]], Iterable[object] + ], + load: bool = True, + ) -> Union[FrozenResult[Any], Iterable[Any]]: + """Merge a result into this :class:`_query.Query` object's Session. + + Given an iterator returned by a :class:`_query.Query` + of the same structure + as this one, return an identical iterator of results, with all mapped + instances merged into the session using :meth:`.Session.merge`. This + is an optimized method which will merge all mapped instances, + preserving the structure of the result rows and unmapped columns with + less method overhead than that of calling :meth:`.Session.merge` + explicitly for each value. + + The structure of the results is determined based on the column list of + this :class:`_query.Query` - if these do not correspond, + unchecked errors + will occur. + + The 'load' argument is the same as that of :meth:`.Session.merge`. + + For an example of how :meth:`_query.Query.merge_result` is used, see + the source code for the example :ref:`examples_caching`, where + :meth:`_query.Query.merge_result` is used to efficiently restore state + from a cache back into a target :class:`.Session`. + + """ + + return loading.merge_result(self, iterator, load) + + def exists(self) -> Exists: + """A convenience method that turns a query into an EXISTS subquery + of the form EXISTS (SELECT 1 FROM ... WHERE ...). 
+ + e.g.:: + + q = session.query(User).filter(User.name == 'fred') + session.query(q.exists()) + + Producing SQL similar to:: + + SELECT EXISTS ( + SELECT 1 FROM users WHERE users.name = :name_1 + ) AS anon_1 + + The EXISTS construct is usually used in the WHERE clause:: + + session.query(User.id).filter(q.exists()).scalar() + + Note that some databases such as SQL Server don't allow an + EXISTS expression to be present in the columns clause of a + SELECT. To select a simple boolean value based on the exists + as a WHERE, use :func:`.literal`:: + + from sqlalchemy import literal + + session.query(literal(True)).filter(q.exists()).scalar() + + .. seealso:: + + :meth:`_sql.Select.exists` - v2 comparable method. + + """ + + # .add_columns() for the case that we are a query().select_from(X), + # so that ".statement" can be produced (#2995) but also without + # omitting the FROM clause from a query(X) (#2818); + # .with_only_columns() after we have a core select() so that + # we get just "SELECT 1" without any entities. + + inner = ( + self.enable_eagerloads(False) + .add_columns(sql.literal_column("1")) + .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL) + ._get_select_statement_only() + .with_only_columns(1) + ) + + ezero = self._entity_from_pre_ent_zero() + if ezero is not None: + inner = inner.select_from(ezero) + + return sql.exists(inner) + + def count(self) -> int: + r"""Return a count of rows that the SQL formed by this :class:`Query` + would return. + + This generates the SQL for this Query as follows:: + + SELECT count(1) AS count_1 FROM ( + SELECT <rest of query follows...> + ) AS anon_1 + + The above SQL returns a single row, which is the aggregate value + of the count function; the :meth:`_query.Query.count` + method then returns + that single integer value. + + .. warning:: + + It is important to note that the value returned by + count() is **not the same as the number of ORM objects that this + Query would return from a method such as the .all() method**. + The :class:`_query.Query` object, + when asked to return full entities, + will **deduplicate entries based on primary key**, meaning if the + same primary key value would appear in the results more than once, + only one object of that primary key would be present. This does + not apply to a query that is against individual columns. + + .. seealso:: + + :ref:`faq_query_deduplicating` + + For fine grained control over specific columns to count, to skip the + usage of a subquery or otherwise control of the FROM clause, or to use + other aggregate functions, use :attr:`~sqlalchemy.sql.expression.func` + expressions in conjunction with :meth:`~.Session.query`, i.e.:: + + from sqlalchemy import func + + # count User records, without + # using a subquery. + session.query(func.count(User.id)) + + # return count of user "id" grouped + # by "name" + session.query(func.count(User.id)).\ + group_by(User.name) + + from sqlalchemy import distinct + + # count distinct "name" values + session.query(func.count(distinct(User.name))) + + .. seealso:: + + :ref:`migration_20_query_usage` + + """ + col = sql.func.count(sql.literal_column("*")) + return ( # type: ignore + self._legacy_from_self(col).enable_eagerloads(False).scalar() + ) + + def delete( + self, synchronize_session: SynchronizeSessionArgument = "auto" + ) -> int: + r"""Perform a DELETE with an arbitrary WHERE clause. + + Deletes rows matched by this query from the database. 
+ + E.g.:: + + sess.query(User).filter(User.age == 25).\ + delete(synchronize_session=False) + + sess.query(User).filter(User.age == 25).\ + delete(synchronize_session='evaluate') + + .. warning:: + + See the section :ref:`orm_expression_update_delete` for important + caveats and warnings, including limitations when using bulk UPDATE + and DELETE with mapper inheritance configurations. + + :param synchronize_session: chooses the strategy to update the + attributes on objects in the session. See the section + :ref:`orm_expression_update_delete` for a discussion of these + strategies. + + :return: the count of rows matched as returned by the database's + "row count" feature. + + .. seealso:: + + :ref:`orm_expression_update_delete` + + """ + + bulk_del = BulkDelete(self) + if self.dispatch.before_compile_delete: + for fn in self.dispatch.before_compile_delete: + new_query = fn(bulk_del.query, bulk_del) + if new_query is not None: + bulk_del.query = new_query + + self = bulk_del.query + + delete_ = sql.delete(*self._raw_columns) # type: ignore + delete_._where_criteria = self._where_criteria + result: CursorResult[Any] = self.session.execute( + delete_, + self._params, + execution_options=self._execution_options.union( + {"synchronize_session": synchronize_session} + ), + ) + bulk_del.result = result # type: ignore + self.session.dispatch.after_bulk_delete(bulk_del) + result.close() + + return result.rowcount + + def update( + self, + values: Dict[_DMLColumnArgument, Any], + synchronize_session: SynchronizeSessionArgument = "auto", + update_args: Optional[Dict[Any, Any]] = None, + ) -> int: + r"""Perform an UPDATE with an arbitrary WHERE clause. + + Updates rows matched by this query in the database. + + E.g.:: + + sess.query(User).filter(User.age == 25).\ + update({User.age: User.age - 10}, synchronize_session=False) + + sess.query(User).filter(User.age == 25).\ + update({"age": User.age - 10}, synchronize_session='evaluate') + + .. warning:: + + See the section :ref:`orm_expression_update_delete` for important + caveats and warnings, including limitations when using arbitrary + UPDATE and DELETE with mapper inheritance configurations. + + :param values: a dictionary with attributes names, or alternatively + mapped attributes or SQL expressions, as keys, and literal + values or sql expressions as values. If :ref:`parameter-ordered + mode <tutorial_parameter_ordered_updates>` is desired, the values can + be passed as a list of 2-tuples; this requires that the + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` + flag is passed to the :paramref:`.Query.update.update_args` dictionary + as well. + + :param synchronize_session: chooses the strategy to update the + attributes on objects in the session. See the section + :ref:`orm_expression_update_delete` for a discussion of these + strategies. + + :param update_args: Optional dictionary, if present will be passed + to the underlying :func:`_expression.update` + construct as the ``**kw`` for + the object. May be used to pass dialect-specific arguments such + as ``mysql_limit``, as well as other special arguments such as + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`. + + :return: the count of rows matched as returned by the database's + "row count" feature. + + + .. 
seealso:: + + :ref:`orm_expression_update_delete` + + """ + + update_args = update_args or {} + + bulk_ud = BulkUpdate(self, values, update_args) + + if self.dispatch.before_compile_update: + for fn in self.dispatch.before_compile_update: + new_query = fn(bulk_ud.query, bulk_ud) + if new_query is not None: + bulk_ud.query = new_query + self = bulk_ud.query + + upd = sql.update(*self._raw_columns) # type: ignore + + ppo = update_args.pop("preserve_parameter_order", False) + if ppo: + upd = upd.ordered_values(*values) # type: ignore + else: + upd = upd.values(values) + if update_args: + upd = upd.with_dialect_options(**update_args) + + upd._where_criteria = self._where_criteria + result: CursorResult[Any] = self.session.execute( + upd, + self._params, + execution_options=self._execution_options.union( + {"synchronize_session": synchronize_session} + ), + ) + bulk_ud.result = result # type: ignore + self.session.dispatch.after_bulk_update(bulk_ud) + result.close() + return result.rowcount + + def _compile_state( + self, for_statement: bool = False, **kw: Any + ) -> ORMCompileState: + """Create an out-of-compiler ORMCompileState object. + + The ORMCompileState object is normally created directly as a result + of the SQLCompiler.process() method being handed a Select() + or FromStatement() object that uses the "orm" plugin. This method + provides a means of creating this ORMCompileState object directly + without using the compiler. + + This method is used only for deprecated cases, which include + the .from_self() method for a Query that has multiple levels + of .from_self() in use, as well as the instances() method. It is + also used within the test suite to generate ORMCompileState objects + for test purposes. + + """ + + stmt = self._statement_20(for_statement=for_statement, **kw) + assert for_statement == stmt._compile_options._for_statement + + # this chooses between ORMFromStatementCompileState and + # ORMSelectCompileState. We could also base this on + # query._statement is not None as we have the ORM Query here + # however this is the more general path. + compile_state_cls = cast( + ORMCompileState, + ORMCompileState._get_plugin_class_for_plugin(stmt, "orm"), + ) + + return compile_state_cls.create_for_statement(stmt, None) + + def _compile_context(self, for_statement: bool = False) -> QueryContext: + compile_state = self._compile_state(for_statement=for_statement) + context = QueryContext( + compile_state, + compile_state.statement, + compile_state.statement, + self._params, + self.session, + self.load_options, + ) + + return context + + +class AliasOption(interfaces.LoaderOption): + inherit_cache = False + + @util.deprecated( + "1.4", + "The :class:`.AliasOption` object is not necessary " + "for entities to be matched up to a query that is established " + "via :meth:`.Query.from_statement` and now does nothing.", + ) + def __init__(self, alias: Union[Alias, Subquery]): + r"""Return a :class:`.MapperOption` that will indicate to the + :class:`_query.Query` + that the main table has been aliased. + + """ + + def process_compile_state(self, compile_state: ORMCompileState) -> None: + pass + + +class BulkUD: + """State used for the orm.Query version of update() / delete(). + + This object is now specific to Query only. 
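+ + A rough usage sketch (``User`` is an assumed mapped class; the legacy + methods below construct ``BulkDelete`` / ``BulkUpdate`` instances + internally):: + + sess.query(User).filter(User.age == 25).delete() + sess.query(User).filter(User.age == 25).update({User.age: 26})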
+ + """ + + def __init__(self, query: Query[Any]): + self.query = query.enable_eagerloads(False) + self._validate_query_state() + self.mapper = self.query._entity_from_pre_ent_zero() + + def _validate_query_state(self) -> None: + for attr, methname, notset, op in ( + ("_limit_clause", "limit()", None, operator.is_), + ("_offset_clause", "offset()", None, operator.is_), + ("_order_by_clauses", "order_by()", (), operator.eq), + ("_group_by_clauses", "group_by()", (), operator.eq), + ("_distinct", "distinct()", False, operator.is_), + ( + "_from_obj", + "join(), outerjoin(), select_from(), or from_self()", + (), + operator.eq, + ), + ( + "_setup_joins", + "join(), outerjoin(), select_from(), or from_self()", + (), + operator.eq, + ), + ): + if not op(getattr(self.query, attr), notset): + raise sa_exc.InvalidRequestError( + "Can't call Query.update() or Query.delete() " + "when %s has been called" % (methname,) + ) + + @property + def session(self) -> Session: + return self.query.session + + +class BulkUpdate(BulkUD): + """BulkUD which handles UPDATEs.""" + + def __init__( + self, + query: Query[Any], + values: Dict[_DMLColumnArgument, Any], + update_kwargs: Optional[Dict[Any, Any]], + ): + super().__init__(query) + self.values = values + self.update_kwargs = update_kwargs + + +class BulkDelete(BulkUD): + """BulkUD which handles DELETEs.""" + + +class RowReturningQuery(Query[Row[_TP]]): + if TYPE_CHECKING: + + def tuples(self) -> Query[_TP]: # type: ignore + ... diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/relationships.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/relationships.py new file mode 100644 index 00000000..b5e33ffd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/relationships.py @@ -0,0 +1,3500 @@ +# orm/relationships.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Heuristics related to join conditions as used in +:func:`_orm.relationship`. + +Provides the :class:`.JoinCondition` object, which encapsulates +SQL annotation and aliasing behavior focused on the `primaryjoin` +and `secondaryjoin` aspects of :func:`_orm.relationship`. + +""" +from __future__ import annotations + +import collections +from collections import abc +import dataclasses +import inspect as _py_inspect +import itertools +import re +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import Collection +from typing import Dict +from typing import FrozenSet +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import List +from typing import NamedTuple +from typing import NoReturn +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TypeVar +from typing import Union +import weakref + +from . import attributes +from . 
import strategy_options +from ._typing import insp_is_aliased_class +from ._typing import is_has_collection_adapter +from .base import _DeclarativeMapped +from .base import _is_mapped_class +from .base import class_mapper +from .base import DynamicMapped +from .base import LoaderCallableStatus +from .base import PassiveFlag +from .base import state_str +from .base import WriteOnlyMapped +from .interfaces import _AttributeOptions +from .interfaces import _IntrospectsAnnotations +from .interfaces import MANYTOMANY +from .interfaces import MANYTOONE +from .interfaces import ONETOMANY +from .interfaces import PropComparator +from .interfaces import RelationshipDirection +from .interfaces import StrategizedProperty +from .util import _orm_annotate +from .util import _orm_deannotate +from .util import CascadeOptions +from .. import exc as sa_exc +from .. import Exists +from .. import log +from .. import schema +from .. import sql +from .. import util +from ..inspection import inspect +from ..sql import coercions +from ..sql import expression +from ..sql import operators +from ..sql import roles +from ..sql import visitors +from ..sql._typing import _ColumnExpressionArgument +from ..sql._typing import _HasClauseElement +from ..sql.annotation import _safe_annotate +from ..sql.elements import ColumnClause +from ..sql.elements import ColumnElement +from ..sql.util import _deep_annotate +from ..sql.util import _deep_deannotate +from ..sql.util import _shallow_annotate +from ..sql.util import adapt_criterion_to_null +from ..sql.util import ClauseAdapter +from ..sql.util import join_condition +from ..sql.util import selectables_overlap +from ..sql.util import visit_binary_product +from ..util.typing import de_optionalize_union_types +from ..util.typing import Literal +from ..util.typing import resolve_name_to_real_class_name + +if typing.TYPE_CHECKING: + from ._typing import _EntityType + from ._typing import _ExternalEntityType + from ._typing import _IdentityKeyType + from ._typing import _InstanceDict + from ._typing import _InternalEntityType + from ._typing import _O + from ._typing import _RegistryType + from .base import Mapped + from .clsregistry import _class_resolver + from .clsregistry import _ModNS + from .decl_base import _ClassScanMapperConfig + from .dependency import DependencyProcessor + from .mapper import Mapper + from .query import Query + from .session import Session + from .state import InstanceState + from .strategies import LazyLoader + from .util import AliasedClass + from .util import AliasedInsp + from ..sql._typing import _CoreAdapterProto + from ..sql._typing import _EquivalentColumnMap + from ..sql._typing import _InfoType + from ..sql.annotation import _AnnotationDict + from ..sql.annotation import SupportsAnnotations + from ..sql.elements import BinaryExpression + from ..sql.elements import BindParameter + from ..sql.elements import ClauseElement + from ..sql.schema import Table + from ..sql.selectable import FromClause + from ..util.typing import _AnnotationScanType + from ..util.typing import RODescriptorReference + +_T = TypeVar("_T", bound=Any) +_T1 = TypeVar("_T1", bound=Any) +_T2 = TypeVar("_T2", bound=Any) + +_PT = TypeVar("_PT", bound=Any) + +_PT2 = TypeVar("_PT2", bound=Any) + + +_RelationshipArgumentType = Union[ + str, + Type[_T], + Callable[[], Type[_T]], + "Mapper[_T]", + "AliasedClass[_T]", + Callable[[], "Mapper[_T]"], + Callable[[], "AliasedClass[_T]"], +] + +_LazyLoadArgumentType = Literal[ + "select", + "joined", + "selectin", + "subquery", + "raise", + 
"raise_on_sql", + "noload", + "immediate", + "write_only", + "dynamic", + True, + False, + None, +] + + +_RelationshipJoinConditionArgument = Union[ + str, _ColumnExpressionArgument[bool] +] +_RelationshipSecondaryArgument = Union[ + "FromClause", str, Callable[[], "FromClause"] +] +_ORMOrderByArgument = Union[ + Literal[False], + str, + _ColumnExpressionArgument[Any], + Callable[[], _ColumnExpressionArgument[Any]], + Callable[[], Iterable[_ColumnExpressionArgument[Any]]], + Iterable[Union[str, _ColumnExpressionArgument[Any]]], +] +ORMBackrefArgument = Union[str, Tuple[str, Dict[str, Any]]] + +_ORMColCollectionElement = Union[ + ColumnClause[Any], + _HasClauseElement[Any], + roles.DMLColumnRole, + "Mapped[Any]", +] +_ORMColCollectionArgument = Union[ + str, + Sequence[_ORMColCollectionElement], + Callable[[], Sequence[_ORMColCollectionElement]], + Callable[[], _ORMColCollectionElement], + _ORMColCollectionElement, +] + + +_CEA = TypeVar("_CEA", bound=_ColumnExpressionArgument[Any]) + +_CE = TypeVar("_CE", bound="ColumnElement[Any]") + + +_ColumnPairIterable = Iterable[Tuple[ColumnElement[Any], ColumnElement[Any]]] + +_ColumnPairs = Sequence[Tuple[ColumnElement[Any], ColumnElement[Any]]] + +_MutableColumnPairs = List[Tuple[ColumnElement[Any], ColumnElement[Any]]] + + +def remote(expr: _CEA) -> _CEA: + """Annotate a portion of a primaryjoin expression + with a 'remote' annotation. + + See the section :ref:`relationship_custom_foreign` for a + description of use. + + .. seealso:: + + :ref:`relationship_custom_foreign` + + :func:`.foreign` + + """ + return _annotate_columns( # type: ignore + coercions.expect(roles.ColumnArgumentRole, expr), {"remote": True} + ) + + +def foreign(expr: _CEA) -> _CEA: + """Annotate a portion of a primaryjoin expression + with a 'foreign' annotation. + + See the section :ref:`relationship_custom_foreign` for a + description of use. + + .. seealso:: + + :ref:`relationship_custom_foreign` + + :func:`.remote` + + """ + + return _annotate_columns( # type: ignore + coercions.expect(roles.ColumnArgumentRole, expr), {"foreign": True} + ) + + +@dataclasses.dataclass +class _RelationshipArg(Generic[_T1, _T2]): + """stores a user-defined parameter value that must be resolved and + parsed later at mapper configuration time. + + """ + + __slots__ = "name", "argument", "resolved" + name: str + argument: _T1 + resolved: Optional[_T2] + + def _is_populated(self) -> bool: + return self.argument is not None + + def _resolve_against_registry( + self, clsregistry_resolver: Callable[[str, bool], _class_resolver] + ) -> None: + attr_value = self.argument + + if isinstance(attr_value, str): + self.resolved = clsregistry_resolver( + attr_value, self.name == "secondary" + )() + elif callable(attr_value) and not _is_mapped_class(attr_value): + self.resolved = attr_value() + else: + self.resolved = attr_value + + +_RelationshipOrderByArg = Union[Literal[False], Tuple[ColumnElement[Any], ...]] + + +class _RelationshipArgs(NamedTuple): + """stores user-passed parameters that are resolved at mapper configuration + time. 
+ + """ + + secondary: _RelationshipArg[ + Optional[_RelationshipSecondaryArgument], + Optional[FromClause], + ] + primaryjoin: _RelationshipArg[ + Optional[_RelationshipJoinConditionArgument], + Optional[ColumnElement[Any]], + ] + secondaryjoin: _RelationshipArg[ + Optional[_RelationshipJoinConditionArgument], + Optional[ColumnElement[Any]], + ] + order_by: _RelationshipArg[_ORMOrderByArgument, _RelationshipOrderByArg] + foreign_keys: _RelationshipArg[ + Optional[_ORMColCollectionArgument], Set[ColumnElement[Any]] + ] + remote_side: _RelationshipArg[ + Optional[_ORMColCollectionArgument], Set[ColumnElement[Any]] + ] + + +@log.class_logger +class RelationshipProperty( + _IntrospectsAnnotations, StrategizedProperty[_T], log.Identified +): + """Describes an object property that holds a single item or list + of items that correspond to a related database table. + + Public constructor is the :func:`_orm.relationship` function. + + .. seealso:: + + :ref:`relationship_config_toplevel` + + """ + + strategy_wildcard_key = strategy_options._RELATIONSHIP_TOKEN + inherit_cache = True + """:meta private:""" + + _links_to_entity = True + _is_relationship = True + + _overlaps: Sequence[str] + + _lazy_strategy: LazyLoader + + _persistence_only = dict( + passive_deletes=False, + passive_updates=True, + enable_typechecks=True, + active_history=False, + cascade_backrefs=False, + ) + + _dependency_processor: Optional[DependencyProcessor] = None + + primaryjoin: ColumnElement[bool] + secondaryjoin: Optional[ColumnElement[bool]] + secondary: Optional[FromClause] + _join_condition: JoinCondition + order_by: _RelationshipOrderByArg + + _user_defined_foreign_keys: Set[ColumnElement[Any]] + _calculated_foreign_keys: Set[ColumnElement[Any]] + + remote_side: Set[ColumnElement[Any]] + local_columns: Set[ColumnElement[Any]] + + synchronize_pairs: _ColumnPairs + secondary_synchronize_pairs: Optional[_ColumnPairs] + + local_remote_pairs: Optional[_ColumnPairs] + + direction: RelationshipDirection + + _init_args: _RelationshipArgs + + def __init__( + self, + argument: Optional[_RelationshipArgumentType[_T]] = None, + secondary: Optional[_RelationshipSecondaryArgument] = None, + *, + uselist: Optional[bool] = None, + collection_class: Optional[ + Union[Type[Collection[Any]], Callable[[], Collection[Any]]] + ] = None, + primaryjoin: Optional[_RelationshipJoinConditionArgument] = None, + secondaryjoin: Optional[_RelationshipJoinConditionArgument] = None, + back_populates: Optional[str] = None, + order_by: _ORMOrderByArgument = False, + backref: Optional[ORMBackrefArgument] = None, + overlaps: Optional[str] = None, + post_update: bool = False, + cascade: str = "save-update, merge", + viewonly: bool = False, + attribute_options: Optional[_AttributeOptions] = None, + lazy: _LazyLoadArgumentType = "select", + passive_deletes: Union[Literal["all"], bool] = False, + passive_updates: bool = True, + active_history: bool = False, + enable_typechecks: bool = True, + foreign_keys: Optional[_ORMColCollectionArgument] = None, + remote_side: Optional[_ORMColCollectionArgument] = None, + join_depth: Optional[int] = None, + comparator_factory: Optional[ + Type[RelationshipProperty.Comparator[Any]] + ] = None, + single_parent: bool = False, + innerjoin: bool = False, + distinct_target_key: Optional[bool] = None, + load_on_pending: bool = False, + query_class: Optional[Type[Query[Any]]] = None, + info: Optional[_InfoType] = None, + omit_join: Literal[None, False] = None, + sync_backref: Optional[bool] = None, + doc: Optional[str] = None, + 
bake_queries: Literal[True] = True, + cascade_backrefs: Literal[False] = False, + _local_remote_pairs: Optional[_ColumnPairs] = None, + _legacy_inactive_history_style: bool = False, + ): + super().__init__(attribute_options=attribute_options) + + self.uselist = uselist + self.argument = argument + + self._init_args = _RelationshipArgs( + _RelationshipArg("secondary", secondary, None), + _RelationshipArg("primaryjoin", primaryjoin, None), + _RelationshipArg("secondaryjoin", secondaryjoin, None), + _RelationshipArg("order_by", order_by, None), + _RelationshipArg("foreign_keys", foreign_keys, None), + _RelationshipArg("remote_side", remote_side, None), + ) + + self.post_update = post_update + self.viewonly = viewonly + if viewonly: + self._warn_for_persistence_only_flags( + passive_deletes=passive_deletes, + passive_updates=passive_updates, + enable_typechecks=enable_typechecks, + active_history=active_history, + cascade_backrefs=cascade_backrefs, + ) + if viewonly and sync_backref: + raise sa_exc.ArgumentError( + "sync_backref and viewonly cannot both be True" + ) + self.sync_backref = sync_backref + self.lazy = lazy + self.single_parent = single_parent + self.collection_class = collection_class + self.passive_deletes = passive_deletes + + if cascade_backrefs: + raise sa_exc.ArgumentError( + "The 'cascade_backrefs' parameter passed to " + "relationship() may only be set to False." + ) + + self.passive_updates = passive_updates + self.enable_typechecks = enable_typechecks + self.query_class = query_class + self.innerjoin = innerjoin + self.distinct_target_key = distinct_target_key + self.doc = doc + self.active_history = active_history + self._legacy_inactive_history_style = _legacy_inactive_history_style + + self.join_depth = join_depth + if omit_join: + util.warn( + "setting omit_join to True is not supported; selectin " + "loading of this relationship may not work correctly if this " + "flag is set explicitly. omit_join optimization is " + "automatically detected for conditions under which it is " + "supported." + ) + + self.omit_join = omit_join + self.local_remote_pairs = _local_remote_pairs + self.load_on_pending = load_on_pending + self.comparator_factory = ( + comparator_factory or RelationshipProperty.Comparator + ) + util.set_creation_order(self) + + if info is not None: + self.info.update(info) + + self.strategy_key = (("lazy", self.lazy),) + + self._reverse_property: Set[RelationshipProperty[Any]] = set() + + if overlaps: + self._overlaps = set(re.split(r"\s*,\s*", overlaps)) # type: ignore # noqa: E501 + else: + self._overlaps = () + + # mypy ignoring the @property setter + self.cascade = cascade # type: ignore + + self.back_populates = back_populates + + if self.back_populates: + if backref: + raise sa_exc.ArgumentError( + "backref and back_populates keyword arguments " + "are mutually exclusive" + ) + self.backref = None + else: + self.backref = backref + + def _warn_for_persistence_only_flags(self, **kw: Any) -> None: + for k, v in kw.items(): + if v != self._persistence_only[k]: + # we are warning here rather than warn deprecated as this is a + # configuration mistake, and Python shows regular warnings more + # aggressively than deprecation warnings by default. Unlike the + # case of setting viewonly with cascade, the settings being + # warned about here are not actively doing the wrong thing + # against viewonly=True, so it is not as urgent to have these + # raise an error. 
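+ # For example (an illustrative case, not from the original source): + # relationship("Child", viewonly=True, passive_deletes=True) + # reaches this warning, since passive_deletes=True differs from + # its persistence-only default of False.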
+ util.warn( + "Setting %s on relationship() while also " + "setting viewonly=True does not make sense, as a " + "viewonly=True relationship does not perform persistence " + "operations. This configuration may raise an error " + "in a future release." % (k,) + ) + + def instrument_class(self, mapper: Mapper[Any]) -> None: + attributes.register_descriptor( + mapper.class_, + self.key, + comparator=self.comparator_factory(self, mapper), + parententity=mapper, + doc=self.doc, + ) + + class Comparator(util.MemoizedSlots, PropComparator[_PT]): + """Produce boolean, comparison, and other operators for + :class:`.RelationshipProperty` attributes. + + See the documentation for :class:`.PropComparator` for a brief + overview of ORM level operator definition. + + .. seealso:: + + :class:`.PropComparator` + + :class:`.ColumnProperty.Comparator` + + :class:`.ColumnOperators` + + :ref:`types_operators` + + :attr:`.TypeEngine.comparator_factory` + + """ + + __slots__ = ( + "entity", + "mapper", + "property", + "_of_type", + "_extra_criteria", + ) + + prop: RODescriptorReference[RelationshipProperty[_PT]] + _of_type: Optional[_EntityType[_PT]] + + def __init__( + self, + prop: RelationshipProperty[_PT], + parentmapper: _InternalEntityType[Any], + adapt_to_entity: Optional[AliasedInsp[Any]] = None, + of_type: Optional[_EntityType[_PT]] = None, + extra_criteria: Tuple[ColumnElement[bool], ...] = (), + ): + """Construction of :class:`.RelationshipProperty.Comparator` + is internal to the ORM's attribute mechanics. + + """ + self.prop = prop + self._parententity = parentmapper + self._adapt_to_entity = adapt_to_entity + if of_type: + self._of_type = of_type + else: + self._of_type = None + self._extra_criteria = extra_criteria + + def adapt_to_entity( + self, adapt_to_entity: AliasedInsp[Any] + ) -> RelationshipProperty.Comparator[Any]: + return self.__class__( + self.prop, + self._parententity, + adapt_to_entity=adapt_to_entity, + of_type=self._of_type, + ) + + entity: _InternalEntityType[_PT] + """The target entity referred to by this + :class:`.RelationshipProperty.Comparator`. + + This is either a :class:`_orm.Mapper` or :class:`.AliasedInsp` + object. + + This is the "target" or "remote" side of the + :func:`_orm.relationship`. + + """ + + mapper: Mapper[_PT] + """The target :class:`_orm.Mapper` referred to by this + :class:`.RelationshipProperty.Comparator`. + + This is the "target" or "remote" side of the + :func:`_orm.relationship`. + + """ + + def _memoized_attr_entity(self) -> _InternalEntityType[_PT]: + if self._of_type: + return inspect(self._of_type) # type: ignore + else: + return self.prop.entity + + def _memoized_attr_mapper(self) -> Mapper[_PT]: + return self.entity.mapper + + def _source_selectable(self) -> FromClause: + if self._adapt_to_entity: + return self._adapt_to_entity.selectable + else: + return self.property.parent._with_polymorphic_selectable + + def __clause_element__(self) -> ColumnElement[bool]: + adapt_from = self._source_selectable() + if self._of_type: + of_type_entity = inspect(self._of_type) + else: + of_type_entity = None + + ( + pj, + sj, + source, + dest, + secondary, + target_adapter, + ) = self.prop._create_joins( + source_selectable=adapt_from, + source_polymorphic=True, + of_type_entity=of_type_entity, + alias_secondary=True, + extra_criteria=self._extra_criteria, + ) + if sj is not None: + return pj & sj + else: + return pj + + def of_type(self, class_: _EntityType[Any]) -> PropComparator[_PT]: + r"""Redefine this object in terms of a polymorphic subclass. 
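+ + A minimal sketch, assuming the ``User``/``Address`` mapping used in + examples elsewhere in this package:: + + a1 = aliased(Address) + q = session.query(User).join(User.addresses.of_type(a1))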
+ + See :meth:`.PropComparator.of_type` for another example. + + + """ + return RelationshipProperty.Comparator( + self.prop, + self._parententity, + adapt_to_entity=self._adapt_to_entity, + of_type=class_, + extra_criteria=self._extra_criteria, + ) + + def and_( + self, *criteria: _ColumnExpressionArgument[bool] + ) -> PropComparator[Any]: + """Add AND criteria. + + See :meth:`.PropComparator.and_` for an example. + + .. versionadded:: 1.4 + + """ + exprs = tuple( + coercions.expect(roles.WhereHavingRole, clause) + for clause in util.coerce_generator_arg(criteria) + ) + + return RelationshipProperty.Comparator( + self.prop, + self._parententity, + adapt_to_entity=self._adapt_to_entity, + of_type=self._of_type, + extra_criteria=self._extra_criteria + exprs, + ) + + def in_(self, other: Any) -> NoReturn: + """Produce an IN clause - this is not implemented + for :func:`_orm.relationship`-based attributes at this time. + + """ + raise NotImplementedError( + "in_() not yet supported for " + "relationships. For a simple " + "many-to-one, use in_() against " + "the set of foreign key values." + ) + + # https://github.com/python/mypy/issues/4266 + __hash__ = None # type: ignore + + def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 + """Implement the ``==`` operator. + + In a many-to-one context, such as:: + + MyClass.some_prop == <some object> + + this will typically produce a + clause such as:: + + mytable.related_id == <some id> + + Where ``<some id>`` is the primary key of the given + object. + + The ``==`` operator provides partial functionality for non- + many-to-one comparisons: + + * Comparisons against collections are not supported. + Use :meth:`~.Relationship.Comparator.contains`. + * Compared to a scalar one-to-many, will produce a + clause that compares the target columns in the parent to + the given target. + * Compared to a scalar many-to-many, an alias + of the association table will be rendered as + well, forming a natural join that is part of the + main body of the query. This will not work for + queries that go beyond simple AND conjunctions of + comparisons, such as those which use OR. Use + explicit joins, outerjoins, or + :meth:`~.Relationship.Comparator.has` for + more comprehensive non-many-to-one scalar + membership tests. + * Comparisons against ``None`` given in a one-to-many + or many-to-many context produce a NOT EXISTS clause. + + """ + if other is None or isinstance(other, expression.Null): + if self.property.direction in [ONETOMANY, MANYTOMANY]: + return ~self._criterion_exists() + else: + return _orm_annotate( + self.property._optimized_compare( + None, adapt_source=self.adapter + ) + ) + elif self.property.uselist: + raise sa_exc.InvalidRequestError( + "Can't compare a collection to an object or collection; " + "use contains() to test for membership." 
+ ) + else: + return _orm_annotate( + self.property._optimized_compare( + other, adapt_source=self.adapter + ) + ) + + def _criterion_exists( + self, + criterion: Optional[_ColumnExpressionArgument[bool]] = None, + **kwargs: Any, + ) -> Exists: + where_criteria = ( + coercions.expect(roles.WhereHavingRole, criterion) + if criterion is not None + else None + ) + + if getattr(self, "_of_type", None): + info: Optional[_InternalEntityType[Any]] = inspect( + self._of_type + ) + assert info is not None + target_mapper, to_selectable, is_aliased_class = ( + info.mapper, + info.selectable, + info.is_aliased_class, + ) + if self.property._is_self_referential and not is_aliased_class: + to_selectable = to_selectable._anonymous_fromclause() + + single_crit = target_mapper._single_table_criterion + if single_crit is not None: + if where_criteria is not None: + where_criteria = single_crit & where_criteria + else: + where_criteria = single_crit + else: + is_aliased_class = False + to_selectable = None + + if self.adapter: + source_selectable = self._source_selectable() + else: + source_selectable = None + + ( + pj, + sj, + source, + dest, + secondary, + target_adapter, + ) = self.property._create_joins( + dest_selectable=to_selectable, + source_selectable=source_selectable, + ) + + for k in kwargs: + crit = getattr(self.property.mapper.class_, k) == kwargs[k] + if where_criteria is None: + where_criteria = crit + else: + where_criteria = where_criteria & crit + + # annotate the *local* side of the join condition, in the case + # of pj + sj this is the full primaryjoin, in the case of just + # pj its the local side of the primaryjoin. + if sj is not None: + j = _orm_annotate(pj) & sj + else: + j = _orm_annotate(pj, exclude=self.property.remote_side) + + if ( + where_criteria is not None + and target_adapter + and not is_aliased_class + ): + # limit this adapter to annotated only? + where_criteria = target_adapter.traverse(where_criteria) + + # only have the "joined left side" of what we + # return be subject to Query adaption. The right + # side of it is used for an exists() subquery and + # should not correlate or otherwise reach out + # to anything in the enclosing query. + if where_criteria is not None: + where_criteria = where_criteria._annotate( + {"no_replacement_traverse": True} + ) + + crit = j & sql.True_._ifnone(where_criteria) + + if secondary is not None: + ex = ( + sql.exists(1) + .where(crit) + .select_from(dest, secondary) + .correlate_except(dest, secondary) + ) + else: + ex = ( + sql.exists(1) + .where(crit) + .select_from(dest) + .correlate_except(dest) + ) + return ex + + def any( + self, + criterion: Optional[_ColumnExpressionArgument[bool]] = None, + **kwargs: Any, + ) -> ColumnElement[bool]: + """Produce an expression that tests a collection against + particular criterion, using EXISTS. + + An expression like:: + + session.query(MyClass).filter( + MyClass.somereference.any(SomeRelated.x==2) + ) + + + Will produce a query like:: + + SELECT * FROM my_table WHERE + EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id + AND related.x=2) + + Because :meth:`~.Relationship.Comparator.any` uses + a correlated subquery, its performance is not nearly as + good when compared against large target tables as that of + using a join. 
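+ + A join-oriented rewrite of the first example above might read as + follows (a sketch under the same assumed ``MyClass`` / ``SomeRelated`` + mapping; ``.distinct()`` is added because the join may return + duplicate parent rows):: + + session.query(MyClass).\ + join(MyClass.somereference).\ + filter(SomeRelated.x == 2).\ + distinct()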
+ + :meth:`~.Relationship.Comparator.any` is particularly + useful for testing for empty collections:: + + session.query(MyClass).filter( + ~MyClass.somereference.any() + ) + + will produce:: + + SELECT * FROM my_table WHERE + NOT (EXISTS (SELECT 1 FROM related WHERE + related.my_id=my_table.id)) + + :meth:`~.Relationship.Comparator.any` is only + valid for collections, i.e. a :func:`_orm.relationship` + that has ``uselist=True``. For scalar references, + use :meth:`~.Relationship.Comparator.has`. + + """ + if not self.property.uselist: + raise sa_exc.InvalidRequestError( + "'any()' not implemented for scalar " + "attributes. Use has()." + ) + + return self._criterion_exists(criterion, **kwargs) + + def has( + self, + criterion: Optional[_ColumnExpressionArgument[bool]] = None, + **kwargs: Any, + ) -> ColumnElement[bool]: + """Produce an expression that tests a scalar reference against + particular criterion, using EXISTS. + + An expression like:: + + session.query(MyClass).filter( + MyClass.somereference.has(SomeRelated.x==2) + ) + + + Will produce a query like:: + + SELECT * FROM my_table WHERE + EXISTS (SELECT 1 FROM related WHERE + related.id==my_table.related_id AND related.x=2) + + Because :meth:`~.Relationship.Comparator.has` uses + a correlated subquery, its performance is not nearly as + good when compared against large target tables as that of + using a join. + + :meth:`~.Relationship.Comparator.has` is only + valid for scalar references, i.e. a :func:`_orm.relationship` + that has ``uselist=False``. For collection references, + use :meth:`~.Relationship.Comparator.any`. + + """ + if self.property.uselist: + raise sa_exc.InvalidRequestError( + "'has()' not implemented for collections. Use any()." + ) + return self._criterion_exists(criterion, **kwargs) + + def contains( + self, other: _ColumnExpressionArgument[Any], **kwargs: Any + ) -> ColumnElement[bool]: + """Return a simple expression that tests a collection for + containment of a particular item. + + :meth:`~.Relationship.Comparator.contains` is + only valid for a collection, i.e. a + :func:`_orm.relationship` that implements + one-to-many or many-to-many with ``uselist=True``. + + When used in a simple one-to-many context, an + expression like:: + + MyClass.contains(other) + + Produces a clause like:: + + mytable.id == <some id> + + Where ``<some id>`` is the value of the foreign key + attribute on ``other`` which refers to the primary + key of its parent object. From this it follows that + :meth:`~.Relationship.Comparator.contains` is + very useful when used with simple one-to-many + operations. + + For many-to-many operations, the behavior of + :meth:`~.Relationship.Comparator.contains` + has more caveats. The association table will be + rendered in the statement, producing an "implicit" + join, that is, includes multiple tables in the FROM + clause which are equated in the WHERE clause:: + + query(MyClass).filter(MyClass.contains(other)) + + Produces a query like:: + + SELECT * FROM my_table, my_association_table AS + my_association_table_1 WHERE + my_table.id = my_association_table_1.parent_id + AND my_association_table_1.child_id = <some id> + + Where ``<some id>`` would be the primary key of + ``other``. From the above, it is clear that + :meth:`~.Relationship.Comparator.contains` + will **not** work with many-to-many collections when + used in queries that move beyond simple AND + conjunctions, such as multiple + :meth:`~.Relationship.Comparator.contains` + expressions joined by OR. 
In such cases subqueries or
+            explicit "outer joins" will need to be used instead.
+            See :meth:`~.Relationship.Comparator.any` for
+            a less-performant alternative using EXISTS, or refer
+            to :meth:`_query.Query.outerjoin`
+            as well as :ref:`orm_queryguide_joins`
+            for more details on constructing outer joins.
+
+            kwargs may be ignored by this operator but are required for API
+            conformance.
+            """
+            if not self.prop.uselist:
+                raise sa_exc.InvalidRequestError(
+                    "'contains' not implemented for scalar "
+                    "attributes. Use =="
+                )
+
+            clause = self.prop._optimized_compare(
+                other, adapt_source=self.adapter
+            )
+
+            if self.prop.secondaryjoin is not None:
+                clause.negation_clause = self.__negated_contains_or_equals(
+                    other
+                )
+
+            return clause
+
+        def __negated_contains_or_equals(
+            self, other: Any
+        ) -> ColumnElement[bool]:
+            if self.prop.direction == MANYTOONE:
+                state = attributes.instance_state(other)
+
+                def state_bindparam(
+                    local_col: ColumnElement[Any],
+                    state: InstanceState[Any],
+                    remote_col: ColumnElement[Any],
+                ) -> BindParameter[Any]:
+                    dict_ = state.dict
+                    return sql.bindparam(
+                        local_col.key,
+                        type_=local_col.type,
+                        unique=True,
+                        callable_=self.prop._get_attr_w_warn_on_none(
+                            self.prop.mapper, state, dict_, remote_col
+                        ),
+                    )
+
+                def adapt(col: _CE) -> _CE:
+                    if self.adapter:
+                        return self.adapter(col)
+                    else:
+                        return col
+
+                if self.property._use_get:
+                    return sql.and_(
+                        *[
+                            sql.or_(
+                                adapt(x)
+                                != state_bindparam(adapt(x), state, y),
+                                adapt(x) == None,
+                            )
+                            for (x, y) in self.property.local_remote_pairs
+                        ]
+                    )
+
+            criterion = sql.and_(
+                *[
+                    x == y
+                    for (x, y) in zip(
+                        self.property.mapper.primary_key,
+                        self.property.mapper.primary_key_from_instance(other),
+                    )
+                ]
+            )
+
+            return ~self._criterion_exists(criterion)
+
+        def __ne__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
+            """Implement the ``!=`` operator.
+
+            In a many-to-one context, such as::
+
+                MyClass.some_prop != <some object>
+
+            This will typically produce a clause such as::
+
+                mytable.related_id != <some id>
+
+            Where ``<some id>`` is the primary key of the
+            given object.
+
+            The ``!=`` operator provides partial functionality for non-
+            many-to-one comparisons:
+
+            * Comparisons against collections are not supported.
+              Use
+              :meth:`~.Relationship.Comparator.contains`
+              in conjunction with :func:`_expression.not_`.
+            * Compared to a scalar one-to-many, will produce a
+              clause that compares the target columns in the parent to
+              the given target.
+            * Compared to a scalar many-to-many, an alias
+              of the association table will be rendered as
+              well, forming a natural join that is part of the
+              main body of the query. This will not work for
+              queries that go beyond simple AND conjunctions of
+              comparisons, such as those which use OR. Use
+              explicit joins, outerjoins, or
+              :meth:`~.Relationship.Comparator.has` in
+              conjunction with :func:`_expression.not_` for
+              more comprehensive non-many-to-one scalar
+              membership tests.
+            * Comparisons against ``None`` given in a one-to-many
+              or many-to-many context produce an EXISTS clause.
+
+            """
+            if other is None or isinstance(other, expression.Null):
+                if self.property.direction == MANYTOONE:
+                    return _orm_annotate(
+                        ~self.property._optimized_compare(
+                            None, adapt_source=self.adapter
+                        )
+                    )
+
+                else:
+                    return self._criterion_exists()
+            elif self.property.uselist:
+                raise sa_exc.InvalidRequestError(
+                    "Can't compare a collection"
+                    " to an object or collection; use "
+                    "contains() to test for membership."
+ ) + else: + return _orm_annotate(self.__negated_contains_or_equals(other)) + + def _memoized_attr_property(self) -> RelationshipProperty[_PT]: + self.prop.parent._check_configure() + return self.prop + + def _with_parent( + self, + instance: object, + alias_secondary: bool = True, + from_entity: Optional[_EntityType[Any]] = None, + ) -> ColumnElement[bool]: + assert instance is not None + adapt_source: Optional[_CoreAdapterProto] = None + if from_entity is not None: + insp: Optional[_InternalEntityType[Any]] = inspect(from_entity) + assert insp is not None + if insp_is_aliased_class(insp): + adapt_source = insp._adapter.adapt_clause + return self._optimized_compare( + instance, + value_is_parent=True, + adapt_source=adapt_source, + alias_secondary=alias_secondary, + ) + + def _optimized_compare( + self, + state: Any, + value_is_parent: bool = False, + adapt_source: Optional[_CoreAdapterProto] = None, + alias_secondary: bool = True, + ) -> ColumnElement[bool]: + if state is not None: + try: + state = inspect(state) + except sa_exc.NoInspectionAvailable: + state = None + + if state is None or not getattr(state, "is_instance", False): + raise sa_exc.ArgumentError( + "Mapped instance expected for relationship " + "comparison to object. Classes, queries and other " + "SQL elements are not accepted in this context; for " + "comparison with a subquery, " + "use %s.has(**criteria)." % self + ) + reverse_direction = not value_is_parent + + if state is None: + return self._lazy_none_clause( + reverse_direction, adapt_source=adapt_source + ) + + if not reverse_direction: + criterion, bind_to_col = ( + self._lazy_strategy._lazywhere, + self._lazy_strategy._bind_to_col, + ) + else: + criterion, bind_to_col = ( + self._lazy_strategy._rev_lazywhere, + self._lazy_strategy._rev_bind_to_col, + ) + + if reverse_direction: + mapper = self.mapper + else: + mapper = self.parent + + dict_ = attributes.instance_dict(state.obj()) + + def visit_bindparam(bindparam: BindParameter[Any]) -> None: + if bindparam._identifying_key in bind_to_col: + bindparam.callable = self._get_attr_w_warn_on_none( + mapper, + state, + dict_, + bind_to_col[bindparam._identifying_key], + ) + + if self.secondary is not None and alias_secondary: + criterion = ClauseAdapter( + self.secondary._anonymous_fromclause() + ).traverse(criterion) + + criterion = visitors.cloned_traverse( + criterion, {}, {"bindparam": visit_bindparam} + ) + + if adapt_source: + criterion = adapt_source(criterion) + return criterion + + def _get_attr_w_warn_on_none( + self, + mapper: Mapper[Any], + state: InstanceState[Any], + dict_: _InstanceDict, + column: ColumnElement[Any], + ) -> Callable[[], Any]: + """Create the callable that is used in a many-to-one expression. + + E.g.:: + + u1 = s.query(User).get(5) + + expr = Address.user == u1 + + Above, the SQL should be "address.user_id = 5". The callable + returned by this method produces the value "5" based on the identity + of ``u1``. 
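+
+        If the value resolves to ``None``, the callable produced here
+        warns (see ``_go()`` below), since the resulting bound comparison
+        will not render as an IS NULL comparison; e.g., continuing the
+        sketch above, a hypothetical ``u1`` whose primary key is still
+        ``None`` would warn at statement execution time rather than
+        producing "address.user_id IS NULL".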
+ + """ + + # in this callable, we're trying to thread the needle through + # a wide variety of scenarios, including: + # + # * the object hasn't been flushed yet and there's no value for + # the attribute as of yet + # + # * the object hasn't been flushed yet but it has a user-defined + # value + # + # * the object has a value but it's expired and not locally present + # + # * the object has a value but it's expired and not locally present, + # and the object is also detached + # + # * The object hadn't been flushed yet, there was no value, but + # later, the object has been expired and detached, and *now* + # they're trying to evaluate it + # + # * the object had a value, but it was changed to a new value, and + # then expired + # + # * the object had a value, but it was changed to a new value, and + # then expired, then the object was detached + # + # * the object has a user-set value, but it's None and we don't do + # the comparison correctly for that so warn + # + + prop = mapper.get_property_by_column(column) + + # by invoking this method, InstanceState will track the last known + # value for this key each time the attribute is to be expired. + # this feature was added explicitly for use in this method. + state._track_last_known_value(prop.key) + + lkv_fixed = state._last_known_values + + def _go() -> Any: + assert lkv_fixed is not None + last_known = to_return = lkv_fixed[prop.key] + existing_is_available = ( + last_known is not LoaderCallableStatus.NO_VALUE + ) + + # we support that the value may have changed. so here we + # try to get the most recent value including re-fetching. + # only if we can't get a value now due to detachment do we return + # the last known value + current_value = mapper._get_state_attr_by_column( + state, + dict_, + column, + passive=( + PassiveFlag.PASSIVE_OFF + if state.persistent + else PassiveFlag.PASSIVE_NO_FETCH ^ PassiveFlag.INIT_OK + ), + ) + + if current_value is LoaderCallableStatus.NEVER_SET: + if not existing_is_available: + raise sa_exc.InvalidRequestError( + "Can't resolve value for column %s on object " + "%s; no value has been set for this column" + % (column, state_str(state)) + ) + elif current_value is LoaderCallableStatus.PASSIVE_NO_RESULT: + if not existing_is_available: + raise sa_exc.InvalidRequestError( + "Can't resolve value for column %s on object " + "%s; the object is detached and the value was " + "expired" % (column, state_str(state)) + ) + else: + to_return = current_value + if to_return is None: + util.warn( + "Got None for value of column %s; this is unsupported " + "for a relationship comparison and will not " + "currently produce an IS comparison " + "(but may in a future release)" % column + ) + return to_return + + return _go + + def _lazy_none_clause( + self, + reverse_direction: bool = False, + adapt_source: Optional[_CoreAdapterProto] = None, + ) -> ColumnElement[bool]: + if not reverse_direction: + criterion, bind_to_col = ( + self._lazy_strategy._lazywhere, + self._lazy_strategy._bind_to_col, + ) + else: + criterion, bind_to_col = ( + self._lazy_strategy._rev_lazywhere, + self._lazy_strategy._rev_bind_to_col, + ) + + criterion = adapt_criterion_to_null(criterion, bind_to_col) + + if adapt_source: + criterion = adapt_source(criterion) + return criterion + + def __str__(self) -> str: + return str(self.parent.class_.__name__) + "." 
+ self.key + + def merge( + self, + session: Session, + source_state: InstanceState[Any], + source_dict: _InstanceDict, + dest_state: InstanceState[Any], + dest_dict: _InstanceDict, + load: bool, + _recursive: Dict[Any, object], + _resolve_conflict_map: Dict[_IdentityKeyType[Any], object], + ) -> None: + if load: + for r in self._reverse_property: + if (source_state, r) in _recursive: + return + + if "merge" not in self._cascade: + return + + if self.key not in source_dict: + return + + if self.uselist: + impl = source_state.get_impl(self.key) + + assert is_has_collection_adapter(impl) + instances_iterable = impl.get_collection(source_state, source_dict) + + # if this is a CollectionAttributeImpl, then empty should + # be False, otherwise "self.key in source_dict" should not be + # True + assert not instances_iterable.empty if impl.collection else True + + if load: + # for a full merge, pre-load the destination collection, + # so that individual _merge of each item pulls from identity + # map for those already present. + # also assumes CollectionAttributeImpl behavior of loading + # "old" list in any case + dest_state.get_impl(self.key).get( + dest_state, dest_dict, passive=PassiveFlag.PASSIVE_MERGE + ) + + dest_list = [] + for current in instances_iterable: + current_state = attributes.instance_state(current) + current_dict = attributes.instance_dict(current) + _recursive[(current_state, self)] = True + obj = session._merge( + current_state, + current_dict, + load=load, + _recursive=_recursive, + _resolve_conflict_map=_resolve_conflict_map, + ) + if obj is not None: + dest_list.append(obj) + + if not load: + coll = attributes.init_state_collection( + dest_state, dest_dict, self.key + ) + for c in dest_list: + coll.append_without_event(c) + else: + dest_impl = dest_state.get_impl(self.key) + assert is_has_collection_adapter(dest_impl) + dest_impl.set( + dest_state, + dest_dict, + dest_list, + _adapt=False, + passive=PassiveFlag.PASSIVE_MERGE, + ) + else: + current = source_dict[self.key] + if current is not None: + current_state = attributes.instance_state(current) + current_dict = attributes.instance_dict(current) + _recursive[(current_state, self)] = True + obj = session._merge( + current_state, + current_dict, + load=load, + _recursive=_recursive, + _resolve_conflict_map=_resolve_conflict_map, + ) + else: + obj = None + + if not load: + dest_dict[self.key] = obj + else: + dest_state.get_impl(self.key).set( + dest_state, dest_dict, obj, None + ) + + def _value_as_iterable( + self, + state: InstanceState[_O], + dict_: _InstanceDict, + key: str, + passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, + ) -> Sequence[Tuple[InstanceState[_O], _O]]: + """Return a list of tuples (state, obj) for the given + key. 
+ + returns an empty list if the value is None/empty/PASSIVE_NO_RESULT + """ + + impl = state.manager[key].impl + x = impl.get(state, dict_, passive=passive) + if x is LoaderCallableStatus.PASSIVE_NO_RESULT or x is None: + return [] + elif is_has_collection_adapter(impl): + return [ + (attributes.instance_state(o), o) + for o in impl.get_collection(state, dict_, x, passive=passive) + ] + else: + return [(attributes.instance_state(x), x)] + + def cascade_iterator( + self, + type_: str, + state: InstanceState[Any], + dict_: _InstanceDict, + visited_states: Set[InstanceState[Any]], + halt_on: Optional[Callable[[InstanceState[Any]], bool]] = None, + ) -> Iterator[Tuple[Any, Mapper[Any], InstanceState[Any], _InstanceDict]]: + # assert type_ in self._cascade + + # only actively lazy load on the 'delete' cascade + if type_ != "delete" or self.passive_deletes: + passive = PassiveFlag.PASSIVE_NO_INITIALIZE + else: + passive = PassiveFlag.PASSIVE_OFF | PassiveFlag.NO_RAISE + + if type_ == "save-update": + tuples = state.manager[self.key].impl.get_all_pending(state, dict_) + else: + tuples = self._value_as_iterable( + state, dict_, self.key, passive=passive + ) + + skip_pending = ( + type_ == "refresh-expire" and "delete-orphan" not in self._cascade + ) + + for instance_state, c in tuples: + if instance_state in visited_states: + continue + + if c is None: + # would like to emit a warning here, but + # would not be consistent with collection.append(None) + # current behavior of silently skipping. + # see [ticket:2229] + continue + + assert instance_state is not None + instance_dict = attributes.instance_dict(c) + + if halt_on and halt_on(instance_state): + continue + + if skip_pending and not instance_state.key: + continue + + instance_mapper = instance_state.manager.mapper + + if not instance_mapper.isa(self.mapper.class_manager.mapper): + raise AssertionError( + "Attribute '%s' on class '%s' " + "doesn't handle objects " + "of type '%s'" + % (self.key, self.parent.class_, c.__class__) + ) + + visited_states.add(instance_state) + + yield c, instance_mapper, instance_state, instance_dict + + @property + def _effective_sync_backref(self) -> bool: + if self.viewonly: + return False + else: + return self.sync_backref is not False + + @staticmethod + def _check_sync_backref( + rel_a: RelationshipProperty[Any], rel_b: RelationshipProperty[Any] + ) -> None: + if rel_a.viewonly and rel_b.sync_backref: + raise sa_exc.InvalidRequestError( + "Relationship %s cannot specify sync_backref=True since %s " + "includes viewonly=True." % (rel_b, rel_a) + ) + if ( + rel_a.viewonly + and not rel_b.viewonly + and rel_b.sync_backref is not False + ): + rel_b.sync_backref = False + + def _add_reverse_property(self, key: str) -> None: + other = self.mapper.get_property(key, _configure_mappers=False) + if not isinstance(other, RelationshipProperty): + raise sa_exc.InvalidRequestError( + "back_populates on relationship '%s' refers to attribute '%s' " + "that is not a relationship. The back_populates parameter " + "should refer to the name of a relationship on the target " + "class." % (self, other) + ) + # viewonly and sync_backref cases + # 1. self.viewonly==True and other.sync_backref==True -> error + # 2. self.viewonly==True and other.viewonly==False and + # other.sync_backref==None -> warn sync_backref=False, set to False + self._check_sync_backref(self, other) + # 3. other.viewonly==True and self.sync_backref==True -> error + # 4. 
other.viewonly==True and self.viewonly==False and
+        #    self.sync_backref==None -> warn sync_backref=False, set to False
+        self._check_sync_backref(other, self)
+
+        self._reverse_property.add(other)
+        other._reverse_property.add(self)
+
+        other._setup_entity()
+
+        if not other.mapper.common_parent(self.parent):
+            raise sa_exc.ArgumentError(
+                "reverse_property %r on "
+                "relationship %s references relationship %s, which "
+                "does not reference mapper %s"
+                % (key, self, other, self.parent)
+            )
+
+        if (
+            other._configure_started
+            and self.direction in (ONETOMANY, MANYTOONE)
+            and self.direction == other.direction
+        ):
+            raise sa_exc.ArgumentError(
+                "%s and back-reference %s are "
+                "both of the same direction %r.  Did you mean to "
+                "set remote_side on the many-to-one side?"
+                % (other, self, self.direction)
+            )
+
+    @util.memoized_property
+    def entity(self) -> _InternalEntityType[_T]:
+        """Return the target mapped entity, which is an inspect() of the
+        class or aliased class that is referenced by this
+        :class:`.RelationshipProperty`.
+
+        """
+        self.parent._check_configure()
+        # _check_configure() triggers mapper configuration, which invokes
+        # _setup_entity(); that assigns self.entity in __dict__, shadowing
+        # this memoized property, so the recursive-looking access below
+        # returns the configured value.
+        return self.entity
+
+    @util.memoized_property
+    def mapper(self) -> Mapper[_T]:
+        """Return the targeted :class:`_orm.Mapper` for this
+        :class:`.RelationshipProperty`.
+
+        """
+        return self.entity.mapper
+
+    def do_init(self) -> None:
+        self._check_conflicts()
+        self._process_dependent_arguments()
+        self._setup_entity()
+        self._setup_registry_dependencies()
+        self._setup_join_conditions()
+        self._check_cascade_settings(self._cascade)
+        self._post_init()
+        self._generate_backref()
+        self._join_condition._warn_for_conflicting_sync_targets()
+        super().do_init()
+        self._lazy_strategy = cast(
+            "LazyLoader", self._get_strategy((("lazy", "select"),))
+        )
+
+    def _setup_registry_dependencies(self) -> None:
+        self.parent.mapper.registry._set_depends_on(
+            self.entity.mapper.registry
+        )
+
+    def _process_dependent_arguments(self) -> None:
+        """Convert incoming configuration arguments to their
+        proper form.
+
+        Callables are resolved, ORM annotations removed.
+
+        """
+
+        # accept callables for other attributes which may require
+        # deferred initialization.  This technique is used
+        # by declarative "string configs" and some recipes.
+        init_args = self._init_args
+
+        for attr in (
+            "order_by",
+            "primaryjoin",
+            "secondaryjoin",
+            "secondary",
+            "foreign_keys",
+            "remote_side",
+        ):
+            rel_arg = getattr(init_args, attr)
+
+            rel_arg._resolve_against_registry(self._clsregistry_resolvers[1])
+
+        # remove "annotations" which are present if mapped class
+        # descriptors are used to create the join expression.
+        for attr in "primaryjoin", "secondaryjoin":
+            rel_arg = getattr(init_args, attr)
+            val = rel_arg.resolved
+            if val is not None:
+                rel_arg.resolved = _orm_deannotate(
+                    coercions.expect(
+                        roles.ColumnArgumentRole, val, argname=attr
+                    )
+                )
+
+        secondary = init_args.secondary.resolved
+        if secondary is not None and _is_mapped_class(secondary):
+            raise sa_exc.ArgumentError(
+                "secondary argument %s passed to relationship() %s must "
+                "be a Table object or other FROM clause; can't send a mapped "
+                "class directly as rows in 'secondary' are persisted "
+                "independently of a class that is mapped "
+                "to that same table." % (secondary, self)
+            )
+
+        # ensure expressions in self.order_by, foreign_keys,
+        # remote_side are all columns, not strings.
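+        # (illustrative sketch, not part of the library: a declarative
+        # "string config" such as
+        #
+        #     children = relationship("Child", order_by="Child.id")
+        #
+        # arrives here with order_by resolved against the class registry
+        # above; the coercions below then guarantee column expressions
+        # rather than strings.)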
+ if ( + init_args.order_by.resolved is not False + and init_args.order_by.resolved is not None + ): + self.order_by = tuple( + coercions.expect( + roles.ColumnArgumentRole, x, argname="order_by" + ) + for x in util.to_list(init_args.order_by.resolved) + ) + else: + self.order_by = False + + self._user_defined_foreign_keys = util.column_set( + coercions.expect( + roles.ColumnArgumentRole, x, argname="foreign_keys" + ) + for x in util.to_column_set(init_args.foreign_keys.resolved) + ) + + self.remote_side = util.column_set( + coercions.expect( + roles.ColumnArgumentRole, x, argname="remote_side" + ) + for x in util.to_column_set(init_args.remote_side.resolved) + ) + + def declarative_scan( + self, + decl_scan: _ClassScanMapperConfig, + registry: _RegistryType, + cls: Type[Any], + originating_module: Optional[str], + key: str, + mapped_container: Optional[Type[Mapped[Any]]], + annotation: Optional[_AnnotationScanType], + extracted_mapped_annotation: Optional[_AnnotationScanType], + is_dataclass_field: bool, + ) -> None: + argument = extracted_mapped_annotation + + if extracted_mapped_annotation is None: + if self.argument is None: + self._raise_for_required(key, cls) + else: + return + + argument = extracted_mapped_annotation + assert originating_module is not None + + if mapped_container is not None: + is_write_only = issubclass(mapped_container, WriteOnlyMapped) + is_dynamic = issubclass(mapped_container, DynamicMapped) + if is_write_only: + self.lazy = "write_only" + self.strategy_key = (("lazy", self.lazy),) + elif is_dynamic: + self.lazy = "dynamic" + self.strategy_key = (("lazy", self.lazy),) + else: + is_write_only = is_dynamic = False + + argument = de_optionalize_union_types(argument) + + if hasattr(argument, "__origin__"): + arg_origin = argument.__origin__ + if isinstance(arg_origin, type) and issubclass( + arg_origin, abc.Collection + ): + if self.collection_class is None: + if _py_inspect.isabstract(arg_origin): + raise sa_exc.ArgumentError( + f"Collection annotation type {arg_origin} cannot " + "be instantiated; please provide an explicit " + "'collection_class' parameter " + "(e.g. list, set, etc.) to the " + "relationship() function to accompany this " + "annotation" + ) + + self.collection_class = arg_origin + + elif not is_write_only and not is_dynamic: + self.uselist = False + + if argument.__args__: # type: ignore + if isinstance(arg_origin, type) and issubclass( + arg_origin, typing.Mapping + ): + type_arg = argument.__args__[-1] # type: ignore + else: + type_arg = argument.__args__[0] # type: ignore + if hasattr(type_arg, "__forward_arg__"): + str_argument = type_arg.__forward_arg__ + + argument = resolve_name_to_real_class_name( + str_argument, originating_module + ) + else: + argument = type_arg + else: + raise sa_exc.ArgumentError( + f"Generic alias {argument} requires an argument" + ) + elif hasattr(argument, "__forward_arg__"): + argument = argument.__forward_arg__ + + argument = resolve_name_to_real_class_name( + argument, originating_module + ) + + if ( + self.collection_class is None + and not is_write_only + and not is_dynamic + ): + self.uselist = False + + # ticket #8759 + # if a lead argument was given to relationship(), like + # `relationship("B")`, use that, don't replace it with class we + # found in the annotation. The declarative_scan() method call here is + # still useful, as we continue to derive collection type and do + # checking of the annotation in any case. 
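+        # (illustrative sketch, not part of the library: given
+        #
+        #     class A(Base):
+        #         bs: Mapped[List["B"]] = relationship("B")
+        #
+        # the lead argument "B" is retained below, while the logic above
+        # has already derived collection_class=list from the List["B"]
+        # annotation.)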
+ if self.argument is None: + self.argument = cast("_RelationshipArgumentType[_T]", argument) + + @util.preload_module("sqlalchemy.orm.mapper") + def _setup_entity(self, __argument: Any = None) -> None: + if "entity" in self.__dict__: + return + + mapperlib = util.preloaded.orm_mapper + + if __argument: + argument = __argument + else: + argument = self.argument + + resolved_argument: _ExternalEntityType[Any] + + if isinstance(argument, str): + # we might want to cleanup clsregistry API to make this + # more straightforward + resolved_argument = cast( + "_ExternalEntityType[Any]", + self._clsregistry_resolve_name(argument)(), + ) + elif callable(argument) and not isinstance( + argument, (type, mapperlib.Mapper) + ): + resolved_argument = argument() + else: + resolved_argument = argument + + entity: _InternalEntityType[Any] + + if isinstance(resolved_argument, type): + entity = class_mapper(resolved_argument, configure=False) + else: + try: + entity = inspect(resolved_argument) + except sa_exc.NoInspectionAvailable: + entity = None # type: ignore + + if not hasattr(entity, "mapper"): + raise sa_exc.ArgumentError( + "relationship '%s' expects " + "a class or a mapper argument (received: %s)" + % (self.key, type(resolved_argument)) + ) + + self.entity = entity + self.target = self.entity.persist_selectable + + def _setup_join_conditions(self) -> None: + self._join_condition = jc = JoinCondition( + parent_persist_selectable=self.parent.persist_selectable, + child_persist_selectable=self.entity.persist_selectable, + parent_local_selectable=self.parent.local_table, + child_local_selectable=self.entity.local_table, + primaryjoin=self._init_args.primaryjoin.resolved, + secondary=self._init_args.secondary.resolved, + secondaryjoin=self._init_args.secondaryjoin.resolved, + parent_equivalents=self.parent._equivalent_columns, + child_equivalents=self.mapper._equivalent_columns, + consider_as_foreign_keys=self._user_defined_foreign_keys, + local_remote_pairs=self.local_remote_pairs, + remote_side=self.remote_side, + self_referential=self._is_self_referential, + prop=self, + support_sync=not self.viewonly, + can_be_synced_fn=self._columns_are_mapped, + ) + self.primaryjoin = jc.primaryjoin + self.secondaryjoin = jc.secondaryjoin + self.secondary = jc.secondary + self.direction = jc.direction + self.local_remote_pairs = jc.local_remote_pairs + self.remote_side = jc.remote_columns + self.local_columns = jc.local_columns + self.synchronize_pairs = jc.synchronize_pairs + self._calculated_foreign_keys = jc.foreign_key_columns + self.secondary_synchronize_pairs = jc.secondary_synchronize_pairs + + @property + def _clsregistry_resolve_arg( + self, + ) -> Callable[[str, bool], _class_resolver]: + return self._clsregistry_resolvers[1] + + @property + def _clsregistry_resolve_name( + self, + ) -> Callable[[str], Callable[[], Union[Type[Any], Table, _ModNS]]]: + return self._clsregistry_resolvers[0] + + @util.memoized_property + @util.preload_module("sqlalchemy.orm.clsregistry") + def _clsregistry_resolvers( + self, + ) -> Tuple[ + Callable[[str], Callable[[], Union[Type[Any], Table, _ModNS]]], + Callable[[str, bool], _class_resolver], + ]: + _resolver = util.preloaded.orm_clsregistry._resolver + + return _resolver(self.parent.class_, self) + + def _check_conflicts(self) -> None: + """Test that this relationship is legal, warn about + inheritance conflicts.""" + if self.parent.non_primary and not class_mapper( + self.parent.class_, configure=False + ).has_property(self.key): + raise sa_exc.ArgumentError( + 
"Attempting to assign a new " + "relationship '%s' to a non-primary mapper on " + "class '%s'. New relationships can only be added " + "to the primary mapper, i.e. the very first mapper " + "created for class '%s' " + % ( + self.key, + self.parent.class_.__name__, + self.parent.class_.__name__, + ) + ) + + @property + def cascade(self) -> CascadeOptions: + """Return the current cascade setting for this + :class:`.RelationshipProperty`. + """ + return self._cascade + + @cascade.setter + def cascade(self, cascade: Union[str, CascadeOptions]) -> None: + self._set_cascade(cascade) + + def _set_cascade(self, cascade_arg: Union[str, CascadeOptions]) -> None: + cascade = CascadeOptions(cascade_arg) + + if self.viewonly: + cascade = CascadeOptions( + cascade.intersection(CascadeOptions._viewonly_cascades) + ) + + if "mapper" in self.__dict__: + self._check_cascade_settings(cascade) + self._cascade = cascade + + if self._dependency_processor: + self._dependency_processor.cascade = cascade + + def _check_cascade_settings(self, cascade: CascadeOptions) -> None: + if ( + cascade.delete_orphan + and not self.single_parent + and (self.direction is MANYTOMANY or self.direction is MANYTOONE) + ): + raise sa_exc.ArgumentError( + "For %(direction)s relationship %(rel)s, delete-orphan " + "cascade is normally " + 'configured only on the "one" side of a one-to-many ' + "relationship, " + 'and not on the "many" side of a many-to-one or many-to-many ' + "relationship. " + "To force this relationship to allow a particular " + '"%(relatedcls)s" object to be referenced by only ' + 'a single "%(clsname)s" object at a time via the ' + "%(rel)s relationship, which " + "would allow " + "delete-orphan cascade to take place in this direction, set " + "the single_parent=True flag." + % { + "rel": self, + "direction": ( + "many-to-one" + if self.direction is MANYTOONE + else "many-to-many" + ), + "clsname": self.parent.class_.__name__, + "relatedcls": self.mapper.class_.__name__, + }, + code="bbf0", + ) + + if self.passive_deletes == "all" and ( + "delete" in cascade or "delete-orphan" in cascade + ): + raise sa_exc.ArgumentError( + "On %s, can't set passive_deletes='all' in conjunction " + "with 'delete' or 'delete-orphan' cascade" % self + ) + + if cascade.delete_orphan: + self.mapper.primary_mapper()._delete_orphans.append( + (self.key, self.parent.class_) + ) + + def _persists_for(self, mapper: Mapper[Any]) -> bool: + """Return True if this property will persist values on behalf + of the given mapper. + + """ + + return ( + self.key in mapper.relationships + and mapper.relationships[self.key] is self + ) + + def _columns_are_mapped(self, *cols: ColumnElement[Any]) -> bool: + """Return True if all columns in the given collection are + mapped by the tables referenced by this :class:`.RelationshipProperty`. 
+ + """ + + secondary = self._init_args.secondary.resolved + for c in cols: + if secondary is not None and secondary.c.contains_column(c): + continue + if not self.parent.persist_selectable.c.contains_column( + c + ) and not self.target.c.contains_column(c): + return False + return True + + def _generate_backref(self) -> None: + """Interpret the 'backref' instruction to create a + :func:`_orm.relationship` complementary to this one.""" + + if self.parent.non_primary: + return + if self.backref is not None and not self.back_populates: + kwargs: Dict[str, Any] + if isinstance(self.backref, str): + backref_key, kwargs = self.backref, {} + else: + backref_key, kwargs = self.backref + mapper = self.mapper.primary_mapper() + + if not mapper.concrete: + check = set(mapper.iterate_to_root()).union( + mapper.self_and_descendants + ) + for m in check: + if m.has_property(backref_key) and not m.concrete: + raise sa_exc.ArgumentError( + "Error creating backref " + "'%s' on relationship '%s': property of that " + "name exists on mapper '%s'" + % (backref_key, self, m) + ) + + # determine primaryjoin/secondaryjoin for the + # backref. Use the one we had, so that + # a custom join doesn't have to be specified in + # both directions. + if self.secondary is not None: + # for many to many, just switch primaryjoin/ + # secondaryjoin. use the annotated + # pj/sj on the _join_condition. + pj = kwargs.pop( + "primaryjoin", + self._join_condition.secondaryjoin_minus_local, + ) + sj = kwargs.pop( + "secondaryjoin", + self._join_condition.primaryjoin_minus_local, + ) + else: + pj = kwargs.pop( + "primaryjoin", + self._join_condition.primaryjoin_reverse_remote, + ) + sj = kwargs.pop("secondaryjoin", None) + if sj: + raise sa_exc.InvalidRequestError( + "Can't assign 'secondaryjoin' on a backref " + "against a non-secondary relationship." + ) + + foreign_keys = kwargs.pop( + "foreign_keys", self._user_defined_foreign_keys + ) + parent = self.parent.primary_mapper() + kwargs.setdefault("viewonly", self.viewonly) + kwargs.setdefault("post_update", self.post_update) + kwargs.setdefault("passive_updates", self.passive_updates) + kwargs.setdefault("sync_backref", self.sync_backref) + self.back_populates = backref_key + relationship = RelationshipProperty( + parent, + self.secondary, + primaryjoin=pj, + secondaryjoin=sj, + foreign_keys=foreign_keys, + back_populates=self.key, + **kwargs, + ) + mapper._configure_property( + backref_key, relationship, warn_for_existing=True + ) + + if self.back_populates: + self._add_reverse_property(self.back_populates) + + @util.preload_module("sqlalchemy.orm.dependency") + def _post_init(self) -> None: + dependency = util.preloaded.orm_dependency + + if self.uselist is None: + self.uselist = self.direction is not MANYTOONE + if not self.viewonly: + self._dependency_processor = ( # type: ignore + dependency.DependencyProcessor.from_relationship + )(self) + + @util.memoized_property + def _use_get(self) -> bool: + """memoize the 'use_get' attribute of this RelationshipLoader's + lazyloader.""" + + strategy = self._lazy_strategy + return strategy.use_get + + @util.memoized_property + def _is_self_referential(self) -> bool: + return self.mapper.common_parent(self.parent) + + def _create_joins( + self, + source_polymorphic: bool = False, + source_selectable: Optional[FromClause] = None, + dest_selectable: Optional[FromClause] = None, + of_type_entity: Optional[_InternalEntityType[Any]] = None, + alias_secondary: bool = False, + extra_criteria: Tuple[ColumnElement[bool], ...] 
= (), + ) -> Tuple[ + ColumnElement[bool], + Optional[ColumnElement[bool]], + FromClause, + FromClause, + Optional[FromClause], + Optional[ClauseAdapter], + ]: + aliased = False + + if alias_secondary and self.secondary is not None: + aliased = True + + if source_selectable is None: + if source_polymorphic and self.parent.with_polymorphic: + source_selectable = self.parent._with_polymorphic_selectable + + if of_type_entity: + dest_mapper = of_type_entity.mapper + if dest_selectable is None: + dest_selectable = of_type_entity.selectable + aliased = True + else: + dest_mapper = self.mapper + + if dest_selectable is None: + dest_selectable = self.entity.selectable + if self.mapper.with_polymorphic: + aliased = True + + if self._is_self_referential and source_selectable is None: + dest_selectable = dest_selectable._anonymous_fromclause() + aliased = True + elif ( + dest_selectable is not self.mapper._with_polymorphic_selectable + or self.mapper.with_polymorphic + ): + aliased = True + + single_crit = dest_mapper._single_table_criterion + aliased = aliased or ( + source_selectable is not None + and ( + source_selectable + is not self.parent._with_polymorphic_selectable + or source_selectable._is_subquery + ) + ) + + ( + primaryjoin, + secondaryjoin, + secondary, + target_adapter, + dest_selectable, + ) = self._join_condition.join_targets( + source_selectable, + dest_selectable, + aliased, + single_crit, + extra_criteria, + ) + if source_selectable is None: + source_selectable = self.parent.local_table + if dest_selectable is None: + dest_selectable = self.entity.local_table + return ( + primaryjoin, + secondaryjoin, + source_selectable, + dest_selectable, + secondary, + target_adapter, + ) + + +def _annotate_columns(element: _CE, annotations: _AnnotationDict) -> _CE: + def clone(elem: _CE) -> _CE: + if isinstance(elem, expression.ColumnClause): + elem = elem._annotate(annotations.copy()) # type: ignore + elem._copy_internals(clone=clone) + return elem + + if element is not None: + element = clone(element) + clone = None # type: ignore # remove gc cycles + return element + + +class JoinCondition: + primaryjoin_initial: Optional[ColumnElement[bool]] + primaryjoin: ColumnElement[bool] + secondaryjoin: Optional[ColumnElement[bool]] + secondary: Optional[FromClause] + prop: RelationshipProperty[Any] + + synchronize_pairs: _ColumnPairs + secondary_synchronize_pairs: _ColumnPairs + direction: RelationshipDirection + + parent_persist_selectable: FromClause + child_persist_selectable: FromClause + parent_local_selectable: FromClause + child_local_selectable: FromClause + + _local_remote_pairs: Optional[_ColumnPairs] + + def __init__( + self, + parent_persist_selectable: FromClause, + child_persist_selectable: FromClause, + parent_local_selectable: FromClause, + child_local_selectable: FromClause, + *, + primaryjoin: Optional[ColumnElement[bool]] = None, + secondary: Optional[FromClause] = None, + secondaryjoin: Optional[ColumnElement[bool]] = None, + parent_equivalents: Optional[_EquivalentColumnMap] = None, + child_equivalents: Optional[_EquivalentColumnMap] = None, + consider_as_foreign_keys: Any = None, + local_remote_pairs: Optional[_ColumnPairs] = None, + remote_side: Any = None, + self_referential: Any = False, + prop: RelationshipProperty[Any], + support_sync: bool = True, + can_be_synced_fn: Callable[..., bool] = lambda *c: True, + ): + self.parent_persist_selectable = parent_persist_selectable + self.parent_local_selectable = parent_local_selectable + self.child_persist_selectable = 
child_persist_selectable + self.child_local_selectable = child_local_selectable + self.parent_equivalents = parent_equivalents + self.child_equivalents = child_equivalents + self.primaryjoin_initial = primaryjoin + self.secondaryjoin = secondaryjoin + self.secondary = secondary + self.consider_as_foreign_keys = consider_as_foreign_keys + self._local_remote_pairs = local_remote_pairs + self._remote_side = remote_side + self.prop = prop + self.self_referential = self_referential + self.support_sync = support_sync + self.can_be_synced_fn = can_be_synced_fn + + self._determine_joins() + assert self.primaryjoin is not None + + self._sanitize_joins() + self._annotate_fks() + self._annotate_remote() + self._annotate_local() + self._annotate_parentmapper() + self._setup_pairs() + self._check_foreign_cols(self.primaryjoin, True) + if self.secondaryjoin is not None: + self._check_foreign_cols(self.secondaryjoin, False) + self._determine_direction() + self._check_remote_side() + self._log_joins() + + def _log_joins(self) -> None: + log = self.prop.logger + log.info("%s setup primary join %s", self.prop, self.primaryjoin) + log.info("%s setup secondary join %s", self.prop, self.secondaryjoin) + log.info( + "%s synchronize pairs [%s]", + self.prop, + ",".join( + "(%s => %s)" % (l, r) for (l, r) in self.synchronize_pairs + ), + ) + log.info( + "%s secondary synchronize pairs [%s]", + self.prop, + ",".join( + "(%s => %s)" % (l, r) + for (l, r) in self.secondary_synchronize_pairs or [] + ), + ) + log.info( + "%s local/remote pairs [%s]", + self.prop, + ",".join( + "(%s / %s)" % (l, r) for (l, r) in self.local_remote_pairs + ), + ) + log.info( + "%s remote columns [%s]", + self.prop, + ",".join("%s" % col for col in self.remote_columns), + ) + log.info( + "%s local columns [%s]", + self.prop, + ",".join("%s" % col for col in self.local_columns), + ) + log.info("%s relationship direction %s", self.prop, self.direction) + + def _sanitize_joins(self) -> None: + """remove the parententity annotation from our join conditions which + can leak in here based on some declarative patterns and maybe others. + + "parentmapper" is relied upon both by the ORM evaluator as well as + the use case in _join_fixture_inh_selfref_w_entity + that relies upon it being present, see :ticket:`3364`. + + """ + + self.primaryjoin = _deep_deannotate( + self.primaryjoin, values=("parententity", "proxy_key") + ) + if self.secondaryjoin is not None: + self.secondaryjoin = _deep_deannotate( + self.secondaryjoin, values=("parententity", "proxy_key") + ) + + def _determine_joins(self) -> None: + """Determine the 'primaryjoin' and 'secondaryjoin' attributes, + if not passed to the constructor already. + + This is based on analysis of the foreign key relationships + between the parent and target mapped selectables. + + """ + if self.secondaryjoin is not None and self.secondary is None: + raise sa_exc.ArgumentError( + "Property %s specified with secondary " + "join condition but " + "no secondary argument" % self.prop + ) + + # find a join between the given mapper's mapped table and + # the given table. will try the mapper's local table first + # for more specificity, then if not found will try the more + # general mapped table, which in the case of inheritance is + # a join. 
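+        # (illustrative sketch, not part of the library: if a
+        # hypothetical Child table carried two foreign keys to Parent,
+        # say parent_id and alt_parent_id, the search below raises
+        # AmbiguousForeignKeysError, which a user typically resolves
+        # with e.g. relationship("Child", foreign_keys=[Child.parent_id]).)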
+ try: + consider_as_foreign_keys = self.consider_as_foreign_keys or None + if self.secondary is not None: + if self.secondaryjoin is None: + self.secondaryjoin = join_condition( + self.child_persist_selectable, + self.secondary, + a_subset=self.child_local_selectable, + consider_as_foreign_keys=consider_as_foreign_keys, + ) + if self.primaryjoin_initial is None: + self.primaryjoin = join_condition( + self.parent_persist_selectable, + self.secondary, + a_subset=self.parent_local_selectable, + consider_as_foreign_keys=consider_as_foreign_keys, + ) + else: + self.primaryjoin = self.primaryjoin_initial + else: + if self.primaryjoin_initial is None: + self.primaryjoin = join_condition( + self.parent_persist_selectable, + self.child_persist_selectable, + a_subset=self.parent_local_selectable, + consider_as_foreign_keys=consider_as_foreign_keys, + ) + else: + self.primaryjoin = self.primaryjoin_initial + except sa_exc.NoForeignKeysError as nfe: + if self.secondary is not None: + raise sa_exc.NoForeignKeysError( + "Could not determine join " + "condition between parent/child tables on " + "relationship %s - there are no foreign keys " + "linking these tables via secondary table '%s'. " + "Ensure that referencing columns are associated " + "with a ForeignKey or ForeignKeyConstraint, or " + "specify 'primaryjoin' and 'secondaryjoin' " + "expressions." % (self.prop, self.secondary) + ) from nfe + else: + raise sa_exc.NoForeignKeysError( + "Could not determine join " + "condition between parent/child tables on " + "relationship %s - there are no foreign keys " + "linking these tables. " + "Ensure that referencing columns are associated " + "with a ForeignKey or ForeignKeyConstraint, or " + "specify a 'primaryjoin' expression." % self.prop + ) from nfe + except sa_exc.AmbiguousForeignKeysError as afe: + if self.secondary is not None: + raise sa_exc.AmbiguousForeignKeysError( + "Could not determine join " + "condition between parent/child tables on " + "relationship %s - there are multiple foreign key " + "paths linking the tables via secondary table '%s'. " + "Specify the 'foreign_keys' " + "argument, providing a list of those columns which " + "should be counted as containing a foreign key " + "reference from the secondary table to each of the " + "parent and child tables." % (self.prop, self.secondary) + ) from afe + else: + raise sa_exc.AmbiguousForeignKeysError( + "Could not determine join " + "condition between parent/child tables on " + "relationship %s - there are multiple foreign key " + "paths linking the tables. Specify the " + "'foreign_keys' argument, providing a list of those " + "columns which should be counted as containing a " + "foreign key reference to the parent table." % self.prop + ) from afe + + @property + def primaryjoin_minus_local(self) -> ColumnElement[bool]: + return _deep_deannotate(self.primaryjoin, values=("local", "remote")) + + @property + def secondaryjoin_minus_local(self) -> ColumnElement[bool]: + assert self.secondaryjoin is not None + return _deep_deannotate(self.secondaryjoin, values=("local", "remote")) + + @util.memoized_property + def primaryjoin_reverse_remote(self) -> ColumnElement[bool]: + """Return the primaryjoin condition suitable for the + "reverse" direction. + + If the primaryjoin was delivered here with pre-existing + "remote" annotations, the local/remote annotations + are reversed. Otherwise, the local/remote annotations + are removed. 
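+
+        (This reversed form is what ``_generate_backref()`` consumes as
+        the default ``primaryjoin`` for the complementary relationship.)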
+ + """ + if self._has_remote_annotations: + + def replace(element: _CE, **kw: Any) -> Optional[_CE]: + if "remote" in element._annotations: + v = dict(element._annotations) + del v["remote"] + v["local"] = True + return element._with_annotations(v) + elif "local" in element._annotations: + v = dict(element._annotations) + del v["local"] + v["remote"] = True + return element._with_annotations(v) + + return None + + return visitors.replacement_traverse(self.primaryjoin, {}, replace) + else: + if self._has_foreign_annotations: + # TODO: coverage + return _deep_deannotate( + self.primaryjoin, values=("local", "remote") + ) + else: + return _deep_deannotate(self.primaryjoin) + + def _has_annotation(self, clause: ClauseElement, annotation: str) -> bool: + for col in visitors.iterate(clause, {}): + if annotation in col._annotations: + return True + else: + return False + + @util.memoized_property + def _has_foreign_annotations(self) -> bool: + return self._has_annotation(self.primaryjoin, "foreign") + + @util.memoized_property + def _has_remote_annotations(self) -> bool: + return self._has_annotation(self.primaryjoin, "remote") + + def _annotate_fks(self) -> None: + """Annotate the primaryjoin and secondaryjoin + structures with 'foreign' annotations marking columns + considered as foreign. + + """ + if self._has_foreign_annotations: + return + + if self.consider_as_foreign_keys: + self._annotate_from_fk_list() + else: + self._annotate_present_fks() + + def _annotate_from_fk_list(self) -> None: + def check_fk(element: _CE, **kw: Any) -> Optional[_CE]: + if element in self.consider_as_foreign_keys: + return element._annotate({"foreign": True}) + return None + + self.primaryjoin = visitors.replacement_traverse( + self.primaryjoin, {}, check_fk + ) + if self.secondaryjoin is not None: + self.secondaryjoin = visitors.replacement_traverse( + self.secondaryjoin, {}, check_fk + ) + + def _annotate_present_fks(self) -> None: + if self.secondary is not None: + secondarycols = util.column_set(self.secondary.c) + else: + secondarycols = set() + + def is_foreign( + a: ColumnElement[Any], b: ColumnElement[Any] + ) -> Optional[ColumnElement[Any]]: + if isinstance(a, schema.Column) and isinstance(b, schema.Column): + if a.references(b): + return a + elif b.references(a): + return b + + if secondarycols: + if a in secondarycols and b not in secondarycols: + return a + elif b in secondarycols and a not in secondarycols: + return b + + return None + + def visit_binary(binary: BinaryExpression[Any]) -> None: + if not isinstance( + binary.left, sql.ColumnElement + ) or not isinstance(binary.right, sql.ColumnElement): + return + + if ( + "foreign" not in binary.left._annotations + and "foreign" not in binary.right._annotations + ): + col = is_foreign(binary.left, binary.right) + if col is not None: + if col.compare(binary.left): + binary.left = binary.left._annotate({"foreign": True}) + elif col.compare(binary.right): + binary.right = binary.right._annotate( + {"foreign": True} + ) + + self.primaryjoin = visitors.cloned_traverse( + self.primaryjoin, {}, {"binary": visit_binary} + ) + if self.secondaryjoin is not None: + self.secondaryjoin = visitors.cloned_traverse( + self.secondaryjoin, {}, {"binary": visit_binary} + ) + + def _refers_to_parent_table(self) -> bool: + """Return True if the join condition contains column + comparisons where both columns are in both tables. 
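+
+        (e.g. a self-referential condition against a hypothetical
+        ``nodes`` table, such as ``nodes.c.parent_id == nodes.c.id``,
+        where both columns belong to the same table.)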
+ + """ + pt = self.parent_persist_selectable + mt = self.child_persist_selectable + result = False + + def visit_binary(binary: BinaryExpression[Any]) -> None: + nonlocal result + c, f = binary.left, binary.right + if ( + isinstance(c, expression.ColumnClause) + and isinstance(f, expression.ColumnClause) + and pt.is_derived_from(c.table) + and pt.is_derived_from(f.table) + and mt.is_derived_from(c.table) + and mt.is_derived_from(f.table) + ): + result = True + + visitors.traverse(self.primaryjoin, {}, {"binary": visit_binary}) + return result + + def _tables_overlap(self) -> bool: + """Return True if parent/child tables have some overlap.""" + + return selectables_overlap( + self.parent_persist_selectable, self.child_persist_selectable + ) + + def _annotate_remote(self) -> None: + """Annotate the primaryjoin and secondaryjoin + structures with 'remote' annotations marking columns + considered as part of the 'remote' side. + + """ + if self._has_remote_annotations: + return + + if self.secondary is not None: + self._annotate_remote_secondary() + elif self._local_remote_pairs or self._remote_side: + self._annotate_remote_from_args() + elif self._refers_to_parent_table(): + self._annotate_selfref( + lambda col: "foreign" in col._annotations, False + ) + elif self._tables_overlap(): + self._annotate_remote_with_overlap() + else: + self._annotate_remote_distinct_selectables() + + def _annotate_remote_secondary(self) -> None: + """annotate 'remote' in primaryjoin, secondaryjoin + when 'secondary' is present. + + """ + + assert self.secondary is not None + fixed_secondary = self.secondary + + def repl(element: _CE, **kw: Any) -> Optional[_CE]: + if fixed_secondary.c.contains_column(element): + return element._annotate({"remote": True}) + return None + + self.primaryjoin = visitors.replacement_traverse( + self.primaryjoin, {}, repl + ) + + assert self.secondaryjoin is not None + self.secondaryjoin = visitors.replacement_traverse( + self.secondaryjoin, {}, repl + ) + + def _annotate_selfref( + self, fn: Callable[[ColumnElement[Any]], bool], remote_side_given: bool + ) -> None: + """annotate 'remote' in primaryjoin, secondaryjoin + when the relationship is detected as self-referential. + + """ + + def visit_binary(binary: BinaryExpression[Any]) -> None: + equated = binary.left.compare(binary.right) + if isinstance(binary.left, expression.ColumnClause) and isinstance( + binary.right, expression.ColumnClause + ): + # assume one to many - FKs are "remote" + if fn(binary.left): + binary.left = binary.left._annotate({"remote": True}) + if fn(binary.right) and not equated: + binary.right = binary.right._annotate({"remote": True}) + elif not remote_side_given: + self._warn_non_column_elements() + + self.primaryjoin = visitors.cloned_traverse( + self.primaryjoin, {}, {"binary": visit_binary} + ) + + def _annotate_remote_from_args(self) -> None: + """annotate 'remote' in primaryjoin, secondaryjoin + when the 'remote_side' or '_local_remote_pairs' + arguments are used. + + """ + if self._local_remote_pairs: + if self._remote_side: + raise sa_exc.ArgumentError( + "remote_side argument is redundant " + "against more detailed _local_remote_side " + "argument." 
+                )
+
+            remote_side = [r for (l, r) in self._local_remote_pairs]
+        else:
+            remote_side = self._remote_side
+
+        if self._refers_to_parent_table():
+            self._annotate_selfref(lambda col: col in remote_side, True)
+        else:
+
+            def repl(element: _CE, **kw: Any) -> Optional[_CE]:
+                # use set() to avoid generating ``__eq__()`` expressions
+                # against each element
+                if element in set(remote_side):
+                    return element._annotate({"remote": True})
+                return None
+
+            self.primaryjoin = visitors.replacement_traverse(
+                self.primaryjoin, {}, repl
+            )
+
+    def _annotate_remote_with_overlap(self) -> None:
+        """annotate 'remote' in primaryjoin, secondaryjoin
+        when the parent/child tables have some set of
+        tables in common, though the relationship is not fully
+        self-referential.
+
+        """
+
+        def visit_binary(binary: BinaryExpression[Any]) -> None:
+            binary.left, binary.right = proc_left_right(
+                binary.left, binary.right
+            )
+            binary.right, binary.left = proc_left_right(
+                binary.right, binary.left
+            )
+
+        check_entities = (
+            self.prop is not None and self.prop.mapper is not self.prop.parent
+        )
+
+        def proc_left_right(
+            left: ColumnElement[Any], right: ColumnElement[Any]
+        ) -> Tuple[ColumnElement[Any], ColumnElement[Any]]:
+            if isinstance(left, expression.ColumnClause) and isinstance(
+                right, expression.ColumnClause
+            ):
+                if self.child_persist_selectable.c.contains_column(
+                    right
+                ) and self.parent_persist_selectable.c.contains_column(left):
+                    right = right._annotate({"remote": True})
+                elif (
+                    check_entities
+                    and right._annotations.get("parentmapper")
+                    is self.prop.mapper
+                ):
+                    right = right._annotate({"remote": True})
+                elif (
+                    check_entities
+                    and left._annotations.get("parentmapper")
+                    is self.prop.mapper
+                ):
+                    left = left._annotate({"remote": True})
+            else:
+                self._warn_non_column_elements()
+
+            return left, right
+
+        self.primaryjoin = visitors.cloned_traverse(
+            self.primaryjoin, {}, {"binary": visit_binary}
+        )
+
+    def _annotate_remote_distinct_selectables(self) -> None:
+        """annotate 'remote' in primaryjoin, secondaryjoin
+        when the parent/child tables are entirely
+        separate.
+
+        """
+
+        def repl(element: _CE, **kw: Any) -> Optional[_CE]:
+            if self.child_persist_selectable.c.contains_column(element) and (
+                not self.parent_local_selectable.c.contains_column(element)
+                or self.child_local_selectable.c.contains_column(element)
+            ):
+                return element._annotate({"remote": True})
+            return None
+
+        self.primaryjoin = visitors.replacement_traverse(
+            self.primaryjoin, {}, repl
+        )
+
+    def _warn_non_column_elements(self) -> None:
+        util.warn(
+            "Non-simple column elements in primary "
+            "join condition for property %s - consider using "
+            "remote() annotations to mark the remote side." % self.prop
+        )
+
+    def _annotate_local(self) -> None:
+        """Annotate the primaryjoin and secondaryjoin
+        structures with 'local' annotations.
+
+        This annotates all column elements found
+        simultaneously in the parent table
+        and the join condition that don't have a
+        'remote' annotation set up from
+        _annotate_remote() or user-defined.
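+
+        (e.g. for a hypothetical self-referential
+        ``relationship("Node", remote_side=[Node.id])``, ``Node.id`` is
+        already marked "remote", and the referring foreign key column
+        on the same table is annotated "local" here.)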
+ + """ + if self._has_annotation(self.primaryjoin, "local"): + return + + if self._local_remote_pairs: + local_side = util.column_set( + [l for (l, r) in self._local_remote_pairs] + ) + else: + local_side = util.column_set(self.parent_persist_selectable.c) + + def locals_(element: _CE, **kw: Any) -> Optional[_CE]: + if "remote" not in element._annotations and element in local_side: + return element._annotate({"local": True}) + return None + + self.primaryjoin = visitors.replacement_traverse( + self.primaryjoin, {}, locals_ + ) + + def _annotate_parentmapper(self) -> None: + def parentmappers_(element: _CE, **kw: Any) -> Optional[_CE]: + if "remote" in element._annotations: + return element._annotate({"parentmapper": self.prop.mapper}) + elif "local" in element._annotations: + return element._annotate({"parentmapper": self.prop.parent}) + return None + + self.primaryjoin = visitors.replacement_traverse( + self.primaryjoin, {}, parentmappers_ + ) + + def _check_remote_side(self) -> None: + if not self.local_remote_pairs: + raise sa_exc.ArgumentError( + "Relationship %s could " + "not determine any unambiguous local/remote column " + "pairs based on join condition and remote_side " + "arguments. " + "Consider using the remote() annotation to " + "accurately mark those elements of the join " + "condition that are on the remote side of " + "the relationship." % (self.prop,) + ) + else: + not_target = util.column_set( + self.parent_persist_selectable.c + ).difference(self.child_persist_selectable.c) + + for _, rmt in self.local_remote_pairs: + if rmt in not_target: + util.warn( + "Expression %s is marked as 'remote', but these " + "column(s) are local to the local side. The " + "remote() annotation is needed only for a " + "self-referential relationship where both sides " + "of the relationship refer to the same tables." + % (rmt,) + ) + + def _check_foreign_cols( + self, join_condition: ColumnElement[bool], primary: bool + ) -> None: + """Check the foreign key columns collected and emit error + messages.""" + + can_sync = False + + foreign_cols = self._gather_columns_with_annotation( + join_condition, "foreign" + ) + + has_foreign = bool(foreign_cols) + + if primary: + can_sync = bool(self.synchronize_pairs) + else: + can_sync = bool(self.secondary_synchronize_pairs) + + if ( + self.support_sync + and can_sync + or (not self.support_sync and has_foreign) + ): + return + + # from here below is just determining the best error message + # to report. Check for a join condition using any operator + # (not just ==), perhaps they need to turn on "viewonly=True". + if self.support_sync and has_foreign and not can_sync: + err = ( + "Could not locate any simple equality expressions " + "involving locally mapped foreign key columns for " + "%s join condition " + "'%s' on relationship %s." + % ( + primary and "primary" or "secondary", + join_condition, + self.prop, + ) + ) + err += ( + " Ensure that referencing columns are associated " + "with a ForeignKey or ForeignKeyConstraint, or are " + "annotated in the join condition with the foreign() " + "annotation. To allow comparison operators other than " + "'==', the relationship can be marked as viewonly=True." + ) + + raise sa_exc.ArgumentError(err) + else: + err = ( + "Could not locate any relevant foreign key columns " + "for %s join condition '%s' on relationship %s." 
+ % ( + primary and "primary" or "secondary", + join_condition, + self.prop, + ) + ) + err += ( + " Ensure that referencing columns are associated " + "with a ForeignKey or ForeignKeyConstraint, or are " + "annotated in the join condition with the foreign() " + "annotation." + ) + raise sa_exc.ArgumentError(err) + + def _determine_direction(self) -> None: + """Determine if this relationship is one to many, many to one, + many to many. + + """ + if self.secondaryjoin is not None: + self.direction = MANYTOMANY + else: + parentcols = util.column_set(self.parent_persist_selectable.c) + targetcols = util.column_set(self.child_persist_selectable.c) + + # fk collection which suggests ONETOMANY. + onetomany_fk = targetcols.intersection(self.foreign_key_columns) + + # fk collection which suggests MANYTOONE. + + manytoone_fk = parentcols.intersection(self.foreign_key_columns) + + if onetomany_fk and manytoone_fk: + # fks on both sides. test for overlap of local/remote + # with foreign key. + # we will gather columns directly from their annotations + # without deannotating, so that we can distinguish on a column + # that refers to itself. + + # 1. columns that are both remote and FK suggest + # onetomany. + onetomany_local = self._gather_columns_with_annotation( + self.primaryjoin, "remote", "foreign" + ) + + # 2. columns that are FK but are not remote (e.g. local) + # suggest manytoone. + manytoone_local = { + c + for c in self._gather_columns_with_annotation( + self.primaryjoin, "foreign" + ) + if "remote" not in c._annotations + } + + # 3. if both collections are present, remove columns that + # refer to themselves. This is for the case of + # and_(Me.id == Me.remote_id, Me.version == Me.version) + if onetomany_local and manytoone_local: + self_equated = self.remote_columns.intersection( + self.local_columns + ) + onetomany_local = onetomany_local.difference(self_equated) + manytoone_local = manytoone_local.difference(self_equated) + + # at this point, if only one or the other collection is + # present, we know the direction, otherwise it's still + # ambiguous. + + if onetomany_local and not manytoone_local: + self.direction = ONETOMANY + elif manytoone_local and not onetomany_local: + self.direction = MANYTOONE + else: + raise sa_exc.ArgumentError( + "Can't determine relationship" + " direction for relationship '%s' - foreign " + "key columns within the join condition are present " + "in both the parent and the child's mapped tables. " + "Ensure that only those columns referring " + "to a parent column are marked as foreign, " + "either via the foreign() annotation or " + "via the foreign_keys argument." % self.prop + ) + elif onetomany_fk: + self.direction = ONETOMANY + elif manytoone_fk: + self.direction = MANYTOONE + else: + raise sa_exc.ArgumentError( + "Can't determine relationship " + "direction for relationship '%s' - foreign " + "key columns are present in neither the parent " + "nor the child's mapped tables" % self.prop + ) + + def _deannotate_pairs( + self, collection: _ColumnPairIterable + ) -> _MutableColumnPairs: + """provide deannotation for the various lists of + pairs, so that using them in hashes doesn't incur + high-overhead __eq__() comparisons against + original columns mapped. 
+ + """ + return [(x._deannotate(), y._deannotate()) for x, y in collection] + + def _setup_pairs(self) -> None: + sync_pairs: _MutableColumnPairs = [] + lrp: util.OrderedSet[Tuple[ColumnElement[Any], ColumnElement[Any]]] = ( + util.OrderedSet([]) + ) + secondary_sync_pairs: _MutableColumnPairs = [] + + def go( + joincond: ColumnElement[bool], + collection: _MutableColumnPairs, + ) -> None: + def visit_binary( + binary: BinaryExpression[Any], + left: ColumnElement[Any], + right: ColumnElement[Any], + ) -> None: + if ( + "remote" in right._annotations + and "remote" not in left._annotations + and self.can_be_synced_fn(left) + ): + lrp.add((left, right)) + elif ( + "remote" in left._annotations + and "remote" not in right._annotations + and self.can_be_synced_fn(right) + ): + lrp.add((right, left)) + if binary.operator is operators.eq and self.can_be_synced_fn( + left, right + ): + if "foreign" in right._annotations: + collection.append((left, right)) + elif "foreign" in left._annotations: + collection.append((right, left)) + + visit_binary_product(visit_binary, joincond) + + for joincond, collection in [ + (self.primaryjoin, sync_pairs), + (self.secondaryjoin, secondary_sync_pairs), + ]: + if joincond is None: + continue + go(joincond, collection) + + self.local_remote_pairs = self._deannotate_pairs(lrp) + self.synchronize_pairs = self._deannotate_pairs(sync_pairs) + self.secondary_synchronize_pairs = self._deannotate_pairs( + secondary_sync_pairs + ) + + _track_overlapping_sync_targets: weakref.WeakKeyDictionary[ + ColumnElement[Any], + weakref.WeakKeyDictionary[ + RelationshipProperty[Any], ColumnElement[Any] + ], + ] = weakref.WeakKeyDictionary() + + def _warn_for_conflicting_sync_targets(self) -> None: + if not self.support_sync: + return + + # we would like to detect if we are synchronizing any column + # pairs in conflict with another relationship that wishes to sync + # an entirely different column to the same target. This is a + # very rare edge case so we will try to minimize the memory/overhead + # impact of this check + for from_, to_ in [ + (from_, to_) for (from_, to_) in self.synchronize_pairs + ] + [ + (from_, to_) for (from_, to_) in self.secondary_synchronize_pairs + ]: + # save ourselves a ton of memory and overhead by only + # considering columns that are subject to a overlapping + # FK constraints at the core level. This condition can arise + # if multiple relationships overlap foreign() directly, but + # we're going to assume it's typically a ForeignKeyConstraint- + # level configuration that benefits from this warning. + + if to_ not in self._track_overlapping_sync_targets: + self._track_overlapping_sync_targets[to_] = ( + weakref.WeakKeyDictionary({self.prop: from_}) + ) + else: + other_props = [] + prop_to_from = self._track_overlapping_sync_targets[to_] + + for pr, fr_ in prop_to_from.items(): + if ( + not pr.mapper._dispose_called + and pr not in self.prop._reverse_property + and pr.key not in self.prop._overlaps + and self.prop.key not in pr._overlaps + # note: the "__*" symbol is used internally by + # SQLAlchemy as a general means of suppressing the + # overlaps warning for some extension cases, however + # this is not currently + # a publicly supported symbol and may change at + # any time. 
+ and "__*" not in self.prop._overlaps + and "__*" not in pr._overlaps + and not self.prop.parent.is_sibling(pr.parent) + and not self.prop.mapper.is_sibling(pr.mapper) + and not self.prop.parent.is_sibling(pr.mapper) + and not self.prop.mapper.is_sibling(pr.parent) + and ( + self.prop.key != pr.key + or not self.prop.parent.common_parent(pr.parent) + ) + ): + other_props.append((pr, fr_)) + + if other_props: + util.warn( + "relationship '%s' will copy column %s to column %s, " + "which conflicts with relationship(s): %s. " + "If this is not the intention, consider if these " + "relationships should be linked with " + "back_populates, or if viewonly=True should be " + "applied to one or more if they are read-only. " + "For the less common case that foreign key " + "constraints are partially overlapping, the " + "orm.foreign() " + "annotation can be used to isolate the columns that " + "should be written towards. To silence this " + "warning, add the parameter 'overlaps=\"%s\"' to the " + "'%s' relationship." + % ( + self.prop, + from_, + to_, + ", ".join( + sorted( + "'%s' (copies %s to %s)" % (pr, fr_, to_) + for (pr, fr_) in other_props + ) + ), + ",".join(sorted(pr.key for pr, fr in other_props)), + self.prop, + ), + code="qzyx", + ) + self._track_overlapping_sync_targets[to_][self.prop] = from_ + + @util.memoized_property + def remote_columns(self) -> Set[ColumnElement[Any]]: + return self._gather_join_annotations("remote") + + @util.memoized_property + def local_columns(self) -> Set[ColumnElement[Any]]: + return self._gather_join_annotations("local") + + @util.memoized_property + def foreign_key_columns(self) -> Set[ColumnElement[Any]]: + return self._gather_join_annotations("foreign") + + def _gather_join_annotations( + self, annotation: str + ) -> Set[ColumnElement[Any]]: + s = set( + self._gather_columns_with_annotation(self.primaryjoin, annotation) + ) + if self.secondaryjoin is not None: + s.update( + self._gather_columns_with_annotation( + self.secondaryjoin, annotation + ) + ) + return {x._deannotate() for x in s} + + def _gather_columns_with_annotation( + self, clause: ColumnElement[Any], *annotation: Iterable[str] + ) -> Set[ColumnElement[Any]]: + annotation_set = set(annotation) + return { + cast(ColumnElement[Any], col) + for col in visitors.iterate(clause, {}) + if annotation_set.issubset(col._annotations) + } + + @util.memoized_property + def _secondary_lineage_set(self) -> FrozenSet[ColumnElement[Any]]: + if self.secondary is not None: + return frozenset( + itertools.chain(*[c.proxy_set for c in self.secondary.c]) + ) + else: + return util.EMPTY_SET + + def join_targets( + self, + source_selectable: Optional[FromClause], + dest_selectable: FromClause, + aliased: bool, + single_crit: Optional[ColumnElement[bool]] = None, + extra_criteria: Tuple[ColumnElement[bool], ...] = (), + ) -> Tuple[ + ColumnElement[bool], + Optional[ColumnElement[bool]], + Optional[FromClause], + Optional[ClauseAdapter], + FromClause, + ]: + """Given a source and destination selectable, create a + join between them. + + This takes into account aliasing the join clause + to reference the appropriate corresponding columns + in the target objects, as well as the extra child + criterion, equivalent column sets, etc. + + """ + # place a barrier on the destination such that + # replacement traversals won't ever dig into it. + # its internal structure remains fixed + # regardless of context. 
+ dest_selectable = _shallow_annotate( + dest_selectable, {"no_replacement_traverse": True} + ) + + primaryjoin, secondaryjoin, secondary = ( + self.primaryjoin, + self.secondaryjoin, + self.secondary, + ) + + # adjust the join condition for single table inheritance, + # in the case that the join is to a subclass + # this is analogous to the + # "_adjust_for_single_table_inheritance()" method in Query. + + if single_crit is not None: + if secondaryjoin is not None: + secondaryjoin = secondaryjoin & single_crit + else: + primaryjoin = primaryjoin & single_crit + + if extra_criteria: + + def mark_exclude_cols( + elem: SupportsAnnotations, annotations: _AnnotationDict + ) -> SupportsAnnotations: + """note unrelated columns in the "extra criteria" as either + should be adapted or not adapted, even though they are not + part of our "local" or "remote" side. + + see #9779 for this case, as well as #11010 for a follow up + + """ + + parentmapper_for_element = elem._annotations.get( + "parentmapper", None + ) + + if ( + parentmapper_for_element is not self.prop.parent + and parentmapper_for_element is not self.prop.mapper + and elem not in self._secondary_lineage_set + ): + return _safe_annotate(elem, annotations) + else: + return elem + + extra_criteria = tuple( + _deep_annotate( + elem, + {"should_not_adapt": True}, + annotate_callable=mark_exclude_cols, + ) + for elem in extra_criteria + ) + + if secondaryjoin is not None: + secondaryjoin = secondaryjoin & sql.and_(*extra_criteria) + else: + primaryjoin = primaryjoin & sql.and_(*extra_criteria) + + if aliased: + if secondary is not None: + secondary = secondary._anonymous_fromclause(flat=True) + primary_aliasizer = ClauseAdapter( + secondary, + exclude_fn=_local_col_exclude, + ) + secondary_aliasizer = ClauseAdapter( + dest_selectable, equivalents=self.child_equivalents + ).chain(primary_aliasizer) + if source_selectable is not None: + primary_aliasizer = ClauseAdapter( + secondary, + exclude_fn=_local_col_exclude, + ).chain( + ClauseAdapter( + source_selectable, + equivalents=self.parent_equivalents, + ) + ) + + secondaryjoin = secondary_aliasizer.traverse(secondaryjoin) + else: + primary_aliasizer = ClauseAdapter( + dest_selectable, + exclude_fn=_local_col_exclude, + equivalents=self.child_equivalents, + ) + if source_selectable is not None: + primary_aliasizer.chain( + ClauseAdapter( + source_selectable, + exclude_fn=_remote_col_exclude, + equivalents=self.parent_equivalents, + ) + ) + secondary_aliasizer = None + + primaryjoin = primary_aliasizer.traverse(primaryjoin) + target_adapter = secondary_aliasizer or primary_aliasizer + target_adapter.exclude_fn = None + else: + target_adapter = None + return ( + primaryjoin, + secondaryjoin, + secondary, + target_adapter, + dest_selectable, + ) + + def create_lazy_clause(self, reverse_direction: bool = False) -> Tuple[ + ColumnElement[bool], + Dict[str, ColumnElement[Any]], + Dict[ColumnElement[Any], ColumnElement[Any]], + ]: + binds: Dict[ColumnElement[Any], BindParameter[Any]] = {} + equated_columns: Dict[ColumnElement[Any], ColumnElement[Any]] = {} + + has_secondary = self.secondaryjoin is not None + + if has_secondary: + lookup = collections.defaultdict(list) + for l, r in self.local_remote_pairs: + lookup[l].append((l, r)) + equated_columns[r] = l + elif not reverse_direction: + for l, r in self.local_remote_pairs: + equated_columns[r] = l + else: + for l, r in self.local_remote_pairs: + equated_columns[l] = r + + def col_to_bind( + element: ColumnElement[Any], **kw: Any + ) -> 
Optional[BindParameter[Any]]: + if ( + (not reverse_direction and "local" in element._annotations) + or reverse_direction + and ( + (has_secondary and element in lookup) + or (not has_secondary and "remote" in element._annotations) + ) + ): + if element not in binds: + binds[element] = sql.bindparam( + None, None, type_=element.type, unique=True + ) + return binds[element] + return None + + lazywhere = self.primaryjoin + if self.secondaryjoin is None or not reverse_direction: + lazywhere = visitors.replacement_traverse( + lazywhere, {}, col_to_bind + ) + + if self.secondaryjoin is not None: + secondaryjoin = self.secondaryjoin + if reverse_direction: + secondaryjoin = visitors.replacement_traverse( + secondaryjoin, {}, col_to_bind + ) + lazywhere = sql.and_(lazywhere, secondaryjoin) + + bind_to_col = {binds[col].key: col for col in binds} + + return lazywhere, bind_to_col, equated_columns + + +class _ColInAnnotations: + """Serializable object that tests for names in c._annotations. + + TODO: does this need to be serializable anymore? can we find what the + use case was for that? + + """ + + __slots__ = ("names",) + + def __init__(self, *names: str): + self.names = frozenset(names) + + def __call__(self, c: ClauseElement) -> bool: + return bool(self.names.intersection(c._annotations)) + + +_local_col_exclude = _ColInAnnotations("local", "should_not_adapt") +_remote_col_exclude = _ColInAnnotations("remote", "should_not_adapt") + + +class Relationship( + RelationshipProperty[_T], + _DeclarativeMapped[_T], +): + """Describes an object property that holds a single item or list + of items that correspond to a related database table. + + Public constructor is the :func:`_orm.relationship` function. + + .. seealso:: + + :ref:`relationship_config_toplevel` + + .. versionchanged:: 2.0 Added :class:`_orm.Relationship` as a Declarative + compatible subclass for :class:`_orm.RelationshipProperty`. + + """ + + inherit_cache = True + """:meta private:""" + + +class _RelationshipDeclared( # type: ignore[misc] + Relationship[_T], + WriteOnlyMapped[_T], # not compatible with Mapped[_T] + DynamicMapped[_T], # not compatible with Mapped[_T] +): + """Relationship subclass used implicitly for declarative mapping.""" + + inherit_cache = True + """:meta private:""" + + @classmethod + def _mapper_property_name(cls) -> str: + return "Relationship" diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/scoping.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/scoping.py new file mode 100644 index 00000000..283f4a02 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/scoping.py @@ -0,0 +1,2165 @@ +# orm/scoping.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +from typing import Any +from typing import Callable +from typing import Dict +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from .session import _S +from .session import Session +from .. import exc as sa_exc +from .. 
import util +from ..util import create_proxy_methods +from ..util import ScopedRegistry +from ..util import ThreadLocalRegistry +from ..util import warn +from ..util import warn_deprecated +from ..util.typing import Protocol + +if TYPE_CHECKING: + from ._typing import _EntityType + from ._typing import _IdentityKeyType + from ._typing import OrmExecuteOptionsParameter + from .identity import IdentityMap + from .interfaces import ORMOption + from .mapper import Mapper + from .query import Query + from .query import RowReturningQuery + from .session import _BindArguments + from .session import _EntityBindKey + from .session import _PKIdentityArgument + from .session import _SessionBind + from .session import sessionmaker + from .session import SessionTransaction + from ..engine import Connection + from ..engine import CursorResult + from ..engine import Engine + from ..engine import Result + from ..engine import Row + from ..engine import RowMapping + from ..engine.interfaces import _CoreAnyExecuteParams + from ..engine.interfaces import _CoreSingleExecuteParams + from ..engine.interfaces import CoreExecuteOptionsParameter + from ..engine.result import ScalarResult + from ..sql._typing import _ColumnsClauseArgument + from ..sql._typing import _T0 + from ..sql._typing import _T1 + from ..sql._typing import _T2 + from ..sql._typing import _T3 + from ..sql._typing import _T4 + from ..sql._typing import _T5 + from ..sql._typing import _T6 + from ..sql._typing import _T7 + from ..sql._typing import _TypedColumnClauseArgument as _TCCA + from ..sql.base import Executable + from ..sql.dml import UpdateBase + from ..sql.elements import ClauseElement + from ..sql.roles import TypedColumnsClauseRole + from ..sql.selectable import ForUpdateParameter + from ..sql.selectable import TypedReturnsRows + +_T = TypeVar("_T", bound=Any) + + +class QueryPropertyDescriptor(Protocol): + """Describes the type applied to a class-level + :meth:`_orm.scoped_session.query_property` attribute. + + .. versionadded:: 2.0.5 + + """ + + def __get__(self, instance: Any, owner: Type[_T]) -> Query[_T]: ... + + +_O = TypeVar("_O", bound=object) + +__all__ = ["scoped_session"] + + +@create_proxy_methods( + Session, + ":class:`_orm.Session`", + ":class:`_orm.scoping.scoped_session`", + classmethods=["close_all", "object_session", "identity_key"], + methods=[ + "__contains__", + "__iter__", + "add", + "add_all", + "begin", + "begin_nested", + "close", + "reset", + "commit", + "connection", + "delete", + "execute", + "expire", + "expire_all", + "expunge", + "expunge_all", + "flush", + "get", + "get_one", + "get_bind", + "is_modified", + "bulk_save_objects", + "bulk_insert_mappings", + "bulk_update_mappings", + "merge", + "query", + "refresh", + "rollback", + "scalar", + "scalars", + ], + attributes=[ + "bind", + "dirty", + "deleted", + "new", + "identity_map", + "is_active", + "autoflush", + "no_autoflush", + "info", + ], +) +class scoped_session(Generic[_S]): + """Provides scoped management of :class:`.Session` objects. + + See :ref:`unitofwork_contextual` for a tutorial. + + .. note:: + + When using :ref:`asyncio_toplevel`, the async-compatible + :class:`_asyncio.async_scoped_session` class should be + used in place of :class:`.scoped_session`. + + """ + + _support_async: bool = False + + session_factory: sessionmaker[_S] + """The `session_factory` provided to `__init__` is stored in this + attribute and may be accessed at a later time. 
This can be useful when + a new non-scoped :class:`.Session` is needed.""" + + registry: ScopedRegistry[_S] + + def __init__( + self, + session_factory: sessionmaker[_S], + scopefunc: Optional[Callable[[], Any]] = None, + ): + """Construct a new :class:`.scoped_session`. + + :param session_factory: a factory to create new :class:`.Session` + instances. This is usually, but not necessarily, an instance + of :class:`.sessionmaker`. + :param scopefunc: optional function which defines + the current scope. If not passed, the :class:`.scoped_session` + object assumes "thread-local" scope, and will use + a Python ``threading.local()`` in order to maintain the current + :class:`.Session`. If passed, the function should return + a hashable token; this token will be used as the key in a + dictionary in order to store and retrieve the current + :class:`.Session`. + + """ + self.session_factory = session_factory + + if scopefunc: + self.registry = ScopedRegistry(session_factory, scopefunc) + else: + self.registry = ThreadLocalRegistry(session_factory) + + @property + def _proxied(self) -> _S: + return self.registry() + + def __call__(self, **kw: Any) -> _S: + r"""Return the current :class:`.Session`, creating it + using the :attr:`.scoped_session.session_factory` if not present. + + :param \**kw: Keyword arguments will be passed to the + :attr:`.scoped_session.session_factory` callable, if an existing + :class:`.Session` is not present. If the :class:`.Session` is present + and keyword arguments have been passed, + :exc:`~sqlalchemy.exc.InvalidRequestError` is raised. + + """ + if kw: + if self.registry.has(): + raise sa_exc.InvalidRequestError( + "Scoped session is already present; " + "no new arguments may be specified." + ) + else: + sess = self.session_factory(**kw) + self.registry.set(sess) + else: + sess = self.registry() + if not self._support_async and sess._is_asyncio: + warn_deprecated( + "Using `scoped_session` with asyncio is deprecated and " + "will raise an error in a future version. " + "Please use `async_scoped_session` instead.", + "1.4.23", + ) + return sess + + def configure(self, **kwargs: Any) -> None: + """reconfigure the :class:`.sessionmaker` used by this + :class:`.scoped_session`. + + See :meth:`.sessionmaker.configure`. + + """ + + if self.registry.has(): + warn( + "At least one scoped session is already present. " + " configure() can not affect sessions that have " + "already been created." + ) + + self.session_factory.configure(**kwargs) + + def remove(self) -> None: + """Dispose of the current :class:`.Session`, if present. + + This will first call :meth:`.Session.close` method + on the current :class:`.Session`, which releases any existing + transactional/connection resources still being held; transactions + specifically are rolled back. The :class:`.Session` is then + discarded. Upon next usage within the same scope, + the :class:`.scoped_session` will produce a new + :class:`.Session` object. + + """ + + if self.registry.has(): + self.registry().close() + self.registry.clear() + + def query_property( + self, query_cls: Optional[Type[Query[_T]]] = None + ) -> QueryPropertyDescriptor: + """return a class property which produces a legacy + :class:`_query.Query` object against the class and the current + :class:`.Session` when called. + + .. legacy:: The :meth:`_orm.scoped_session.query_property` accessor + is specific to the legacy :class:`.Query` object and is not + considered to be part of :term:`2.0-style` ORM use. 
+ + e.g.:: + + from sqlalchemy.orm import QueryPropertyDescriptor + from sqlalchemy.orm import scoped_session + from sqlalchemy.orm import sessionmaker + + Session = scoped_session(sessionmaker()) + + class MyClass: + query: QueryPropertyDescriptor = Session.query_property() + + # after mappers are defined + result = MyClass.query.filter(MyClass.name=='foo').all() + + Produces instances of the session's configured query class by + default. To override and use a custom implementation, provide + a ``query_cls`` callable. The callable will be invoked with + the class's mapper as a positional argument and a session + keyword argument. + + There is no limit to the number of query properties placed on + a class. + + """ + + class query: + def __get__(s, instance: Any, owner: Type[_O]) -> Query[_O]: + if query_cls: + # custom query class + return query_cls(owner, session=self.registry()) # type: ignore # noqa: E501 + else: + # session's configured query class + return self.registry().query(owner) + + return query() + + # START PROXY METHODS scoped_session + + # code within this block is **programmatically, + # statically generated** by tools/generate_proxy_methods.py + + def __contains__(self, instance: object) -> bool: + r"""Return True if the instance is associated with this session. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + The instance may be pending or persistent within the Session for a + result of True. + + + """ # noqa: E501 + + return self._proxied.__contains__(instance) + + def __iter__(self) -> Iterator[object]: + r"""Iterate over all pending or persistent instances within this + Session. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + + """ # noqa: E501 + + return self._proxied.__iter__() + + def add(self, instance: object, _warn: bool = True) -> None: + r"""Place an object into this :class:`_orm.Session`. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + Objects that are in the :term:`transient` state when passed to the + :meth:`_orm.Session.add` method will move to the + :term:`pending` state, until the next flush, at which point they + will move to the :term:`persistent` state. + + Objects that are in the :term:`detached` state when passed to the + :meth:`_orm.Session.add` method will move to the :term:`persistent` + state directly. + + If the transaction used by the :class:`_orm.Session` is rolled back, + objects which were transient when they were passed to + :meth:`_orm.Session.add` will be moved back to the + :term:`transient` state, and will no longer be present within this + :class:`_orm.Session`. + + .. seealso:: + + :meth:`_orm.Session.add_all` + + :ref:`session_adding` - at :ref:`session_basics` + + + """ # noqa: E501 + + return self._proxied.add(instance, _warn=_warn) + + def add_all(self, instances: Iterable[object]) -> None: + r"""Add the given collection of instances to this :class:`_orm.Session`. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + See the documentation for :meth:`_orm.Session.add` for a general + behavioral description. + + .. 
seealso:: + + :meth:`_orm.Session.add` + + :ref:`session_adding` - at :ref:`session_basics` + + + """ # noqa: E501 + + return self._proxied.add_all(instances) + + def begin(self, nested: bool = False) -> SessionTransaction: + r"""Begin a transaction, or nested transaction, + on this :class:`.Session`, if one is not already begun. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + The :class:`_orm.Session` object features **autobegin** behavior, + so that normally it is not necessary to call the + :meth:`_orm.Session.begin` + method explicitly. However, it may be used in order to control + the scope of when the transactional state is begun. + + When used to begin the outermost transaction, an error is raised + if this :class:`.Session` is already inside of a transaction. + + :param nested: if True, begins a SAVEPOINT transaction and is + equivalent to calling :meth:`~.Session.begin_nested`. For + documentation on SAVEPOINT transactions, please see + :ref:`session_begin_nested`. + + :return: the :class:`.SessionTransaction` object. Note that + :class:`.SessionTransaction` + acts as a Python context manager, allowing :meth:`.Session.begin` + to be used in a "with" block. See :ref:`session_explicit_begin` for + an example. + + .. seealso:: + + :ref:`session_autobegin` + + :ref:`unitofwork_transaction` + + :meth:`.Session.begin_nested` + + + + """ # noqa: E501 + + return self._proxied.begin(nested=nested) + + def begin_nested(self) -> SessionTransaction: + r"""Begin a "nested" transaction on this Session, e.g. SAVEPOINT. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + The target database(s) and associated drivers must support SQL + SAVEPOINT for this method to function correctly. + + For documentation on SAVEPOINT + transactions, please see :ref:`session_begin_nested`. + + :return: the :class:`.SessionTransaction` object. Note that + :class:`.SessionTransaction` acts as a context manager, allowing + :meth:`.Session.begin_nested` to be used in a "with" block. + See :ref:`session_begin_nested` for a usage example. + + .. seealso:: + + :ref:`session_begin_nested` + + :ref:`pysqlite_serializable` - special workarounds required + with the SQLite driver in order for SAVEPOINT to work + correctly. For asyncio use cases, see the section + :ref:`aiosqlite_serializable`. + + + """ # noqa: E501 + + return self._proxied.begin_nested() + + def close(self) -> None: + r"""Close out the transactional resources and ORM objects used by this + :class:`_orm.Session`. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + This expunges all ORM objects associated with this + :class:`_orm.Session`, ends any transaction in progress and + :term:`releases` any :class:`_engine.Connection` objects which this + :class:`_orm.Session` itself has checked out from associated + :class:`_engine.Engine` objects. The operation then leaves the + :class:`_orm.Session` in a state which it may be used again. + + .. tip:: + + In the default running mode the :meth:`_orm.Session.close` + method **does not prevent the Session from being used again**. + The :class:`_orm.Session` itself does not actually have a + distinct "closed" state; it merely means + the :class:`_orm.Session` will release all database connections + and ORM objects. 
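+
+            For example, a minimal sketch of the default close-and-reuse
+            behavior (``SomeClass`` is an assumed mapped class)::
+
+                session.close()
+                # the next operation "autobegins" a new transaction on
+                # the same Session
+                obj = session.get(SomeClass, 1)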
+ + Setting the parameter :paramref:`_orm.Session.close_resets_only` + to ``False`` will instead make the ``close`` final, meaning that + any further action on the session will be forbidden. + + .. versionchanged:: 1.4 The :meth:`.Session.close` method does not + immediately create a new :class:`.SessionTransaction` object; + instead, the new :class:`.SessionTransaction` is created only if + the :class:`.Session` is used again for a database operation. + + .. seealso:: + + :ref:`session_closing` - detail on the semantics of + :meth:`_orm.Session.close` and :meth:`_orm.Session.reset`. + + :meth:`_orm.Session.reset` - a similar method that behaves like + ``close()`` with the parameter + :paramref:`_orm.Session.close_resets_only` set to ``True``. + + + """ # noqa: E501 + + return self._proxied.close() + + def reset(self) -> None: + r"""Close out the transactional resources and ORM objects used by this + :class:`_orm.Session`, resetting the session to its initial state. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + This method provides for same "reset-only" behavior that the + :meth:`_orm.Session.close` method has provided historically, where the + state of the :class:`_orm.Session` is reset as though the object were + brand new, and ready to be used again. + This method may then be useful for :class:`_orm.Session` objects + which set :paramref:`_orm.Session.close_resets_only` to ``False``, + so that "reset only" behavior is still available. + + .. versionadded:: 2.0.22 + + .. seealso:: + + :ref:`session_closing` - detail on the semantics of + :meth:`_orm.Session.close` and :meth:`_orm.Session.reset`. + + :meth:`_orm.Session.close` - a similar method will additionally + prevent re-use of the Session when the parameter + :paramref:`_orm.Session.close_resets_only` is set to ``False``. + + """ # noqa: E501 + + return self._proxied.reset() + + def commit(self) -> None: + r"""Flush pending changes and commit the current transaction. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + When the COMMIT operation is complete, all objects are fully + :term:`expired`, erasing their internal contents, which will be + automatically re-loaded when the objects are next accessed. In the + interim, these objects are in an expired state and will not function if + they are :term:`detached` from the :class:`.Session`. Additionally, + this re-load operation is not supported when using asyncio-oriented + APIs. The :paramref:`.Session.expire_on_commit` parameter may be used + to disable this behavior. + + When there is no transaction in place for the :class:`.Session`, + indicating that no operations were invoked on this :class:`.Session` + since the previous call to :meth:`.Session.commit`, the method will + begin and commit an internal-only "logical" transaction, that does not + normally affect the database unless pending flush changes were + detected, but will still invoke event handlers and object expiration + rules. + + The outermost database transaction is committed unconditionally, + automatically releasing any SAVEPOINTs in effect. + + .. 
seealso:: + + :ref:`session_committing` + + :ref:`unitofwork_transaction` + + :ref:`asyncio_orm_avoid_lazyloads` + + + """ # noqa: E501 + + return self._proxied.commit() + + def connection( + self, + bind_arguments: Optional[_BindArguments] = None, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> Connection: + r"""Return a :class:`_engine.Connection` object corresponding to this + :class:`.Session` object's transactional state. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + Either the :class:`_engine.Connection` corresponding to the current + transaction is returned, or if no transaction is in progress, a new + one is begun and the :class:`_engine.Connection` + returned (note that no + transactional state is established with the DBAPI until the first + SQL statement is emitted). + + Ambiguity in multi-bind or unbound :class:`.Session` objects can be + resolved through any of the optional keyword arguments. This + ultimately makes usage of the :meth:`.get_bind` method for resolution. + + :param bind_arguments: dictionary of bind arguments. May include + "mapper", "bind", "clause", other custom arguments that are passed + to :meth:`.Session.get_bind`. + + :param execution_options: a dictionary of execution options that will + be passed to :meth:`_engine.Connection.execution_options`, **when the + connection is first procured only**. If the connection is already + present within the :class:`.Session`, a warning is emitted and + the arguments are ignored. + + .. seealso:: + + :ref:`session_transaction_isolation` + + + """ # noqa: E501 + + return self._proxied.connection( + bind_arguments=bind_arguments, execution_options=execution_options + ) + + def delete(self, instance: object) -> None: + r"""Mark an instance as deleted. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + The object is assumed to be either :term:`persistent` or + :term:`detached` when passed; after the method is called, the + object will remain in the :term:`persistent` state until the next + flush proceeds. During this time, the object will also be a member + of the :attr:`_orm.Session.deleted` collection. + + When the next flush proceeds, the object will move to the + :term:`deleted` state, indicating a ``DELETE`` statement was emitted + for its row within the current transaction. When the transaction + is successfully committed, + the deleted object is moved to the :term:`detached` state and is + no longer present within this :class:`_orm.Session`. + + .. seealso:: + + :ref:`session_deleting` - at :ref:`session_basics` + + + """ # noqa: E501 + + return self._proxied.delete(instance) + + @overload + def execute( + self, + statement: TypedReturnsRows[_T], + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> Result[_T]: ... + + @overload + def execute( + self, + statement: UpdateBase, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> CursorResult[Any]: ... 
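+    # the overloads above and below narrow the return type by statement
+    # kind: a typed SELECT yields Result[_T], a DML construct such as
+    # insert()/update()/delete() yields CursorResult[Any], and any other
+    # Executable falls back to Result[Any]; the implementation that
+    # follows simply forwards to Session.execute() on the current Session.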
+ + @overload + def execute( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> Result[Any]: ... + + def execute( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> Result[Any]: + r"""Execute a SQL expression construct. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + Returns a :class:`_engine.Result` object representing + results of the statement execution. + + E.g.:: + + from sqlalchemy import select + result = session.execute( + select(User).where(User.id == 5) + ) + + The API contract of :meth:`_orm.Session.execute` is similar to that + of :meth:`_engine.Connection.execute`, the :term:`2.0 style` version + of :class:`_engine.Connection`. + + .. versionchanged:: 1.4 the :meth:`_orm.Session.execute` method is + now the primary point of ORM statement execution when using + :term:`2.0 style` ORM usage. + + :param statement: + An executable statement (i.e. an :class:`.Executable` expression + such as :func:`_expression.select`). + + :param params: + Optional dictionary, or list of dictionaries, containing + bound parameter values. If a single dictionary, single-row + execution occurs; if a list of dictionaries, an + "executemany" will be invoked. The keys in each dictionary + must correspond to parameter names present in the statement. + + :param execution_options: optional dictionary of execution options, + which will be associated with the statement execution. This + dictionary can provide a subset of the options that are accepted + by :meth:`_engine.Connection.execution_options`, and may also + provide additional options understood only in an ORM context. + + .. seealso:: + + :ref:`orm_queryguide_execution_options` - ORM-specific execution + options + + :param bind_arguments: dictionary of additional arguments to determine + the bind. May include "mapper", "bind", or other custom arguments. + Contents of this dictionary are passed to the + :meth:`.Session.get_bind` method. + + :return: a :class:`_engine.Result` object. + + + + """ # noqa: E501 + + return self._proxied.execute( + statement, + params=params, + execution_options=execution_options, + bind_arguments=bind_arguments, + _parent_execute_state=_parent_execute_state, + _add_event=_add_event, + ) + + def expire( + self, instance: object, attribute_names: Optional[Iterable[str]] = None + ) -> None: + r"""Expire the attributes on an instance. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + Marks the attributes of an instance as out of date. When an expired + attribute is next accessed, a query will be issued to the + :class:`.Session` object's current transactional context in order to + load all expired attributes for the given instance. Note that + a highly isolated transaction will return the same values as were + previously read in that same transaction, regardless of changes + in database state outside of that transaction. 
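+
+        For example, a minimal sketch (``obj`` is an assumed persistent
+        instance)::
+
+            session.expire(obj, ["name"])
+            obj.name  # accessing the expired attribute emits a SELECT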
+ + To expire all objects in the :class:`.Session` simultaneously, + use :meth:`Session.expire_all`. + + The :class:`.Session` object's default behavior is to + expire all state whenever the :meth:`Session.rollback` + or :meth:`Session.commit` methods are called, so that new + state can be loaded for the new transaction. For this reason, + calling :meth:`Session.expire` only makes sense for the specific + case that a non-ORM SQL statement was emitted in the current + transaction. + + :param instance: The instance to be refreshed. + :param attribute_names: optional list of string attribute names + indicating a subset of attributes to be expired. + + .. seealso:: + + :ref:`session_expire` - introductory material + + :meth:`.Session.expire` + + :meth:`.Session.refresh` + + :meth:`_orm.Query.populate_existing` + + + """ # noqa: E501 + + return self._proxied.expire(instance, attribute_names=attribute_names) + + def expire_all(self) -> None: + r"""Expire all persistent instances within this Session. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + When any attribute on a persistent instance is next accessed, + a query will be issued using the + :class:`.Session` object's current transactional context in order to + load all expired attributes for the given instance. Note that + a highly isolated transaction will return the same values as were + previously read in that same transaction, regardless of changes + in database state outside of that transaction. + + To expire individual objects and individual attributes + on those objects, use :meth:`Session.expire`. + + The :class:`.Session` object's default behavior is to + expire all state whenever the :meth:`Session.rollback` + or :meth:`Session.commit` methods are called, so that new + state can be loaded for the new transaction. For this reason, + calling :meth:`Session.expire_all` is not usually needed, + assuming the transaction is isolated. + + .. seealso:: + + :ref:`session_expire` - introductory material + + :meth:`.Session.expire` + + :meth:`.Session.refresh` + + :meth:`_orm.Query.populate_existing` + + + """ # noqa: E501 + + return self._proxied.expire_all() + + def expunge(self, instance: object) -> None: + r"""Remove the `instance` from this ``Session``. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + This will free all internal references to the instance. Cascading + will be applied according to the *expunge* cascade rule. + + + """ # noqa: E501 + + return self._proxied.expunge(instance) + + def expunge_all(self) -> None: + r"""Remove all object instances from this ``Session``. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + This is equivalent to calling ``expunge(obj)`` on all objects in this + ``Session``. + + + """ # noqa: E501 + + return self._proxied.expunge_all() + + def flush(self, objects: Optional[Sequence[Any]] = None) -> None: + r"""Flush all the object changes to the database. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + Writes out all pending object creations, deletions and modifications + to the database as INSERTs, DELETEs, UPDATEs, etc. Operations are + automatically ordered by the Session's unit of work dependency + solver.
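+
+        For example, a minimal sketch (``User`` is an assumed mapped
+        class)::
+
+            user = User(name="someuser")
+            session.add(user)
+            session.flush()  # emits INSERT within the current transaction
+            session.commit()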
+ + Database operations will be issued in the current transactional + context and do not affect the state of the transaction, unless an + error occurs, in which case the entire transaction is rolled back. + You may flush() as often as you like within a transaction to move + changes from Python to the database's transaction buffer. + + :param objects: Optional; restricts the flush operation to operate + only on elements that are in the given collection. + + This feature is for an extremely narrow set of use cases where + particular objects may need to be operated upon before the + full flush() occurs. It is not intended for general use. + + + """ # noqa: E501 + + return self._proxied.flush(objects=objects) + + def get( + self, + entity: _EntityBindKey[_O], + ident: _PKIdentityArgument, + *, + options: Optional[Sequence[ORMOption]] = None, + populate_existing: bool = False, + with_for_update: ForUpdateParameter = None, + identity_token: Optional[Any] = None, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + ) -> Optional[_O]: + r"""Return an instance based on the given primary key identifier, + or ``None`` if not found. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + E.g.:: + + my_user = session.get(User, 5) + + some_object = session.get(VersionedFoo, (5, 10)) + + some_object = session.get( + VersionedFoo, + {"id": 5, "version_id": 10} + ) + + .. versionadded:: 1.4 Added :meth:`_orm.Session.get`, which is moved + from the now legacy :meth:`_orm.Query.get` method. + + :meth:`_orm.Session.get` is special in that it provides direct + access to the identity map of the :class:`.Session`. + If the given primary key identifier is present + in the local identity map, the object is returned + directly from this collection and no SQL is emitted, + unless the object has been marked fully expired. + If not present, + a SELECT is performed in order to locate the object. + + :meth:`_orm.Session.get` also will perform a check if + the object is present in the identity map and + marked as expired - a SELECT + is emitted to refresh the object as well as to + ensure that the row is still present. + If not, :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised. + + :param entity: a mapped class or :class:`.Mapper` indicating the + type of entity to be loaded. + + :param ident: A scalar, tuple, or dictionary representing the + primary key. For a composite (e.g. multiple column) primary key, + a tuple or dictionary should be passed. + + For a single-column primary key, the scalar calling form is typically + the most expedient. If the primary key of a row is the value "5", + the call looks like:: + + my_object = session.get(SomeClass, 5) + + The tuple form contains primary key values typically in + the order in which they correspond to the mapped + :class:`_schema.Table` + object's primary key columns, or if the + :paramref:`_orm.Mapper.primary_key` configuration parameter were + used, in + the order used for that parameter. For example, if the primary key + of a row is represented by the integer + digits "5, 10" the call would look like:: + + my_object = session.get(SomeClass, (5, 10)) + + The dictionary form should include as keys the mapped attribute names + corresponding to each element of the primary key. 
If the mapped class + has the attributes ``id``, ``version_id`` as the attributes which + store the object's primary key value, the call would look like:: + + my_object = session.get(SomeClass, {"id": 5, "version_id": 10}) + + :param options: optional sequence of loader options which will be + applied to the query, if one is emitted. + + :param populate_existing: causes the method to unconditionally emit + a SQL query and refresh the object with the newly loaded data, + regardless of whether or not the object is already present. + + :param with_for_update: optional boolean ``True`` indicating FOR UPDATE + should be used, or may be a dictionary containing flags to + indicate a more specific set of FOR UPDATE flags for the SELECT; + flags should match the parameters of + :meth:`_query.Query.with_for_update`. + Supersedes the :paramref:`.Session.refresh.lockmode` parameter. + + :param execution_options: optional dictionary of execution options, + which will be associated with the query execution if one is emitted. + This dictionary can provide a subset of the options that are + accepted by :meth:`_engine.Connection.execution_options`, and may + also provide additional options understood only in an ORM context. + + .. versionadded:: 1.4.29 + + .. seealso:: + + :ref:`orm_queryguide_execution_options` - ORM-specific execution + options + + :param bind_arguments: dictionary of additional arguments to determine + the bind. May include "mapper", "bind", or other custom arguments. + Contents of this dictionary are passed to the + :meth:`.Session.get_bind` method. + + .. versionadded: 2.0.0rc1 + + :return: The object instance, or ``None``. + + + """ # noqa: E501 + + return self._proxied.get( + entity, + ident, + options=options, + populate_existing=populate_existing, + with_for_update=with_for_update, + identity_token=identity_token, + execution_options=execution_options, + bind_arguments=bind_arguments, + ) + + def get_one( + self, + entity: _EntityBindKey[_O], + ident: _PKIdentityArgument, + *, + options: Optional[Sequence[ORMOption]] = None, + populate_existing: bool = False, + with_for_update: ForUpdateParameter = None, + identity_token: Optional[Any] = None, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + ) -> _O: + r"""Return exactly one instance based on the given primary key + identifier, or raise an exception if not found. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query + selects no rows. + + For a detailed documentation of the arguments see the + method :meth:`.Session.get`. + + .. versionadded:: 2.0.22 + + :return: The object instance. + + .. 
seealso:: + + :meth:`.Session.get` - equivalent method that instead + returns ``None`` if no row was found with the provided primary + key + + + """ # noqa: E501 + + return self._proxied.get_one( + entity, + ident, + options=options, + populate_existing=populate_existing, + with_for_update=with_for_update, + identity_token=identity_token, + execution_options=execution_options, + bind_arguments=bind_arguments, + ) + + def get_bind( + self, + mapper: Optional[_EntityBindKey[_O]] = None, + *, + clause: Optional[ClauseElement] = None, + bind: Optional[_SessionBind] = None, + _sa_skip_events: Optional[bool] = None, + _sa_skip_for_implicit_returning: bool = False, + **kw: Any, + ) -> Union[Engine, Connection]: + r"""Return a "bind" to which this :class:`.Session` is bound. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + The "bind" is usually an instance of :class:`_engine.Engine`, + except in the case where the :class:`.Session` has been + explicitly bound directly to a :class:`_engine.Connection`. + + For a multiply-bound or unbound :class:`.Session`, the + ``mapper`` or ``clause`` arguments are used to determine the + appropriate bind to return. + + Note that the "mapper" argument is usually present + when :meth:`.Session.get_bind` is called via an ORM + operation such as a :meth:`.Session.query`, each + individual INSERT/UPDATE/DELETE operation within a + :meth:`.Session.flush`, call, etc. + + The order of resolution is: + + 1. if mapper given and :paramref:`.Session.binds` is present, + locate a bind based first on the mapper in use, then + on the mapped class in use, then on any base classes that are + present in the ``__mro__`` of the mapped class, from more specific + superclasses to more general. + 2. if clause given and ``Session.binds`` is present, + locate a bind based on :class:`_schema.Table` objects + found in the given clause present in ``Session.binds``. + 3. if ``Session.binds`` is present, return that. + 4. if clause given, attempt to return a bind + linked to the :class:`_schema.MetaData` ultimately + associated with the clause. + 5. if mapper given, attempt to return a bind + linked to the :class:`_schema.MetaData` ultimately + associated with the :class:`_schema.Table` or other + selectable to which the mapper is mapped. + 6. No bind can be found, :exc:`~sqlalchemy.exc.UnboundExecutionError` + is raised. + + Note that the :meth:`.Session.get_bind` method can be overridden on + a user-defined subclass of :class:`.Session` to provide any kind + of bind resolution scheme. See the example at + :ref:`session_custom_partitioning`. + + :param mapper: + Optional mapped class or corresponding :class:`_orm.Mapper` instance. + The bind can be derived from a :class:`_orm.Mapper` first by + consulting the "binds" map associated with this :class:`.Session`, + and secondly by consulting the :class:`_schema.MetaData` associated + with the :class:`_schema.Table` to which the :class:`_orm.Mapper` is + mapped for a bind. + + :param clause: + A :class:`_expression.ClauseElement` (i.e. + :func:`_expression.select`, + :func:`_expression.text`, + etc.). If the ``mapper`` argument is not present or could not + produce a bind, the given expression construct will be searched + for a bound element, typically a :class:`_schema.Table` + associated with + bound :class:`_schema.MetaData`. + + .. 
seealso:: + + :ref:`session_partitioning` + + :paramref:`.Session.binds` + + :meth:`.Session.bind_mapper` + + :meth:`.Session.bind_table` + + + """ # noqa: E501 + + return self._proxied.get_bind( + mapper=mapper, + clause=clause, + bind=bind, + _sa_skip_events=_sa_skip_events, + _sa_skip_for_implicit_returning=_sa_skip_for_implicit_returning, + **kw, + ) + + def is_modified( + self, instance: object, include_collections: bool = True + ) -> bool: + r"""Return ``True`` if the given instance has locally + modified attributes. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + This method retrieves the history for each instrumented + attribute on the instance and performs a comparison of the current + value to its previously flushed or committed value, if any. + + It is in effect a more expensive and accurate + version of checking for the given instance in the + :attr:`.Session.dirty` collection; a full test for + each attribute's net "dirty" status is performed. + + E.g.:: + + return session.is_modified(someobject) + + A few caveats to this method apply: + + * Instances present in the :attr:`.Session.dirty` collection may + report ``False`` when tested with this method. This is because + the object may have received change events via attribute mutation, + thus placing it in :attr:`.Session.dirty`, but ultimately the state + is the same as that loaded from the database, resulting in no net + change here. + * Scalar attributes may not have recorded the previously set + value when a new value was applied, if the attribute was not loaded, + or was expired, at the time the new value was received - in these + cases, the attribute is assumed to have a change, even if there is + ultimately no net change against its database value. SQLAlchemy in + most cases does not need the "old" value when a set event occurs, so + it skips the expense of a SQL call if the old value isn't present, + based on the assumption that an UPDATE of the scalar value is + usually needed, and in those few cases where it isn't, is less + expensive on average than issuing a defensive SELECT. + + The "old" value is fetched unconditionally upon set only if the + attribute container has the ``active_history`` flag set to ``True``. + This flag is set typically for primary key attributes and scalar + object references that are not a simple many-to-one. To set this + flag for any arbitrary mapped column, use the ``active_history`` + argument with :func:`.column_property`. + + :param instance: mapped instance to be tested for pending changes. + :param include_collections: Indicates if multivalued collections + should be included in the operation. Setting this to ``False`` is a + way to detect only local-column based properties (i.e. scalar columns + or many-to-one foreign keys) that would result in an UPDATE for this + instance upon flush. + + + """ # noqa: E501 + + return self._proxied.is_modified( + instance, include_collections=include_collections + ) + + def bulk_save_objects( + self, + objects: Iterable[object], + return_defaults: bool = False, + update_changed_only: bool = True, + preserve_order: bool = True, + ) -> None: + r"""Perform a bulk save of the given list of objects. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + .. legacy:: + + This method is a legacy feature as of the 2.0 series of + SQLAlchemy. 
For modern bulk INSERT and UPDATE, see + the sections :ref:`orm_queryguide_bulk_insert` and + :ref:`orm_queryguide_bulk_update`. + + For general INSERT and UPDATE of existing ORM mapped objects, + prefer standard :term:`unit of work` data management patterns, + introduced in the :ref:`unified_tutorial` at + :ref:`tutorial_orm_data_manipulation`. SQLAlchemy 2.0 + now uses :ref:`engine_insertmanyvalues` with modern dialects + which solves previous issues of bulk INSERT slowness. + + :param objects: a sequence of mapped object instances. The mapped + objects are persisted as is, and are **not** associated with the + :class:`.Session` afterwards. + + For each object, whether the object is sent as an INSERT or an + UPDATE is dependent on the same rules used by the :class:`.Session` + in traditional operation; if the object has the + :attr:`.InstanceState.key` + attribute set, then the object is assumed to be "detached" and + will result in an UPDATE. Otherwise, an INSERT is used. + + In the case of an UPDATE, statements are grouped based on which + attributes have changed, and are thus to be the subject of each + SET clause. If ``update_changed_only`` is False, then all + attributes present within each object are applied to the UPDATE + statement, which may help in allowing the statements to be grouped + together into a larger executemany(), and will also reduce the + overhead of checking history on attributes. + + :param return_defaults: when True, rows that are missing values which + generate defaults, namely integer primary key defaults and sequences, + will be inserted **one at a time**, so that the primary key value + is available. In particular this will allow joined-inheritance + and other multi-table mappings to insert correctly without the need + to provide primary key values ahead of time; however, + :paramref:`.Session.bulk_save_objects.return_defaults` **greatly + reduces the performance gains** of the method overall. It is strongly + advised to please use the standard :meth:`_orm.Session.add_all` + approach. + + :param update_changed_only: when True, UPDATE statements are rendered + based on those attributes in each state that have logged changes. + When False, all attributes present are rendered into the SET clause + with the exception of primary key attributes. + + :param preserve_order: when True, the order of inserts and updates + matches exactly the order in which the objects are given. When + False, common types of objects are grouped into inserts + and updates, to allow for more batching opportunities. + + .. seealso:: + + :doc:`queryguide/dml` + + :meth:`.Session.bulk_insert_mappings` + + :meth:`.Session.bulk_update_mappings` + + + """ # noqa: E501 + + return self._proxied.bulk_save_objects( + objects, + return_defaults=return_defaults, + update_changed_only=update_changed_only, + preserve_order=preserve_order, + ) + + def bulk_insert_mappings( + self, + mapper: Mapper[Any], + mappings: Iterable[Dict[str, Any]], + return_defaults: bool = False, + render_nulls: bool = False, + ) -> None: + r"""Perform a bulk insert of the given list of mapping dictionaries. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + .. legacy:: + + This method is a legacy feature as of the 2.0 series of + SQLAlchemy. For modern bulk INSERT and UPDATE, see + the sections :ref:`orm_queryguide_bulk_insert` and + :ref:`orm_queryguide_bulk_update`. 
The 2.0 API shares + implementation details with this method and adds new features + as well. + + :param mapper: a mapped class, or the actual :class:`_orm.Mapper` + object, + representing the single kind of object represented within the mapping + list. + + :param mappings: a sequence of dictionaries, each one containing the + state of the mapped row to be inserted, in terms of the attribute + names on the mapped class. If the mapping refers to multiple tables, + such as a joined-inheritance mapping, each dictionary must contain all + keys to be populated into all tables. + + :param return_defaults: when True, the INSERT process will be altered + to ensure that newly generated primary key values will be fetched. + The rationale for this parameter is typically to enable + :ref:`Joined Table Inheritance ` mappings to + be bulk inserted. + + .. note:: for backends that don't support RETURNING, the + :paramref:`_orm.Session.bulk_insert_mappings.return_defaults` + parameter can significantly decrease performance as INSERT + statements can no longer be batched. See + :ref:`engine_insertmanyvalues` + for background on which backends are affected. + + :param render_nulls: When True, a value of ``None`` will result + in a NULL value being included in the INSERT statement, rather + than the column being omitted from the INSERT. This allows all + the rows being INSERTed to have the identical set of columns which + allows the full set of rows to be batched to the DBAPI. Normally, + each column-set that contains a different combination of NULL values + than the previous row must omit a different series of columns from + the rendered INSERT statement, which means it must be emitted as a + separate statement. By passing this flag, the full set of rows + are guaranteed to be batchable into one batch; the cost however is + that server-side defaults which are invoked by an omitted column will + be skipped, so care must be taken to ensure that these are not + necessary. + + .. warning:: + + When this flag is set, **server side default SQL values will + not be invoked** for those columns that are inserted as NULL; + the NULL value will be sent explicitly. Care must be taken + to ensure that no server-side default functions need to be + invoked for the operation as a whole. + + .. seealso:: + + :doc:`queryguide/dml` + + :meth:`.Session.bulk_save_objects` + + :meth:`.Session.bulk_update_mappings` + + + """ # noqa: E501 + + return self._proxied.bulk_insert_mappings( + mapper, + mappings, + return_defaults=return_defaults, + render_nulls=render_nulls, + ) + + def bulk_update_mappings( + self, mapper: Mapper[Any], mappings: Iterable[Dict[str, Any]] + ) -> None: + r"""Perform a bulk update of the given list of mapping dictionaries. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + .. legacy:: + + This method is a legacy feature as of the 2.0 series of + SQLAlchemy. For modern bulk INSERT and UPDATE, see + the sections :ref:`orm_queryguide_bulk_insert` and + :ref:`orm_queryguide_bulk_update`. The 2.0 API shares + implementation details with this method and adds new features + as well. + + :param mapper: a mapped class, or the actual :class:`_orm.Mapper` + object, + representing the single kind of object represented within the mapping + list. + + :param mappings: a sequence of dictionaries, each one containing the + state of the mapped row to be updated, in terms of the attribute names + on the mapped class. 
If the mapping refers to multiple tables, such + as a joined-inheritance mapping, each dictionary may contain keys + corresponding to all tables. All those keys which are present and + are not part of the primary key are applied to the SET clause of the + UPDATE statement; the primary key values, which are required, are + applied to the WHERE clause. + + + .. seealso:: + + :doc:`queryguide/dml` + + :meth:`.Session.bulk_insert_mappings` + + :meth:`.Session.bulk_save_objects` + + + """ # noqa: E501 + + return self._proxied.bulk_update_mappings(mapper, mappings) + + def merge( + self, + instance: _O, + *, + load: bool = True, + options: Optional[Sequence[ORMOption]] = None, + ) -> _O: + r"""Copy the state of a given instance into a corresponding instance + within this :class:`.Session`. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + :meth:`.Session.merge` examines the primary key attributes of the + source instance, and attempts to reconcile it with an instance of the + same primary key in the session. If not found locally, it attempts + to load the object from the database based on primary key, and if + none can be located, creates a new instance. The state of each + attribute on the source instance is then copied to the target + instance. The resulting target instance is then returned by the + method; the original source instance is left unmodified, and + un-associated with the :class:`.Session` if not already. + + This operation cascades to associated instances if the association is + mapped with ``cascade="merge"``. + + See :ref:`unitofwork_merging` for a detailed discussion of merging. + + :param instance: Instance to be merged. + :param load: Boolean, when False, :meth:`.merge` switches into + a "high performance" mode which causes it to forego emitting history + events as well as all database access. This flag is used for + cases such as transferring graphs of objects into a :class:`.Session` + from a second level cache, or to transfer just-loaded objects + into the :class:`.Session` owned by a worker thread or process + without re-querying the database. + + The ``load=False`` use case adds the caveat that the given + object has to be in a "clean" state, that is, has no pending changes + to be flushed - even if the incoming object is detached from any + :class:`.Session`. This is so that when + the merge operation populates local attributes and + cascades to related objects and + collections, the values can be "stamped" onto the + target object as is, without generating any history or attribute + events, and without the need to reconcile the incoming data with + any existing related objects or collections that might not + be loaded. The resulting objects from ``load=False`` are always + produced as "clean", so it is only appropriate that the given objects + should be "clean" as well, else this suggests a mis-use of the + method. + :param options: optional sequence of loader options which will be + applied to the :meth:`_orm.Session.get` method when the merge + operation loads the existing version of the object from the database. + + .. versionadded:: 1.4.24 + + + .. seealso:: + + :func:`.make_transient_to_detached` - provides for an alternative + means of "merging" a single object into the :class:`.Session` + + + """ # noqa: E501 + + return self._proxied.merge(instance, load=load, options=options) + + @overload + def query(self, _entity: _EntityType[_O]) -> Query[_O]: ... 
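+
+    # A minimal sketch of what the typed ``query()`` overloads below express
+    # for a static type checker, assuming a hypothetical mapped class
+    # ``User`` with ``id: Mapped[int]`` and ``name: Mapped[str]``:
+    #
+    #   session.query(User)               # -> Query[User]
+    #   session.query(User.id)            # -> RowReturningQuery[Tuple[int]]
+    #   session.query(User.id, User.name) # -> RowReturningQuery[Tuple[int, str]]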
+ + @overload + def query( + self, _colexpr: TypedColumnsClauseRole[_T] + ) -> RowReturningQuery[Tuple[_T]]: ... + + # START OVERLOADED FUNCTIONS self.query RowReturningQuery 2-8 + + # code within this block is **programmatically, + # statically generated** by tools/generate_tuple_map_overloads.py + + @overload + def query( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] + ) -> RowReturningQuery[Tuple[_T0, _T1]]: ... + + @overload + def query( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2]]: ... + + @overload + def query( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3]]: ... + + @overload + def query( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... + + @overload + def query( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... + + @overload + def query( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... + + @overload + def query( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + __ent7: _TCCA[_T7], + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ... + + # END OVERLOADED FUNCTIONS self.query + + @overload + def query( + self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any + ) -> Query[Any]: ... + + def query( + self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any + ) -> Query[Any]: + r"""Return a new :class:`_query.Query` object corresponding to this + :class:`_orm.Session`. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + Note that the :class:`_query.Query` object is legacy as of + SQLAlchemy 2.0; the :func:`_sql.select` construct is now used + to construct ORM queries. + + .. seealso:: + + :ref:`unified_tutorial` + + :ref:`queryguide_toplevel` + + :ref:`query_api_toplevel` - legacy API doc + + + """ # noqa: E501 + + return self._proxied.query(*entities, **kwargs) + + def refresh( + self, + instance: object, + attribute_names: Optional[Iterable[str]] = None, + with_for_update: ForUpdateParameter = None, + ) -> None: + r"""Expire and refresh attributes on the given instance. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + The selected attributes will first be expired as they would when using + :meth:`_orm.Session.expire`; then a SELECT statement will be issued to + the database to refresh column-oriented attributes with the current + value available in the current transaction. + + :func:`_orm.relationship` oriented attributes will also be immediately + loaded if they were already eagerly loaded on the object, using the + same eager loading strategy that they were loaded with originally. + + .. versionadded:: 1.4 - the :meth:`_orm.Session.refresh` method + can also refresh eagerly loaded attributes. 
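+
+        E.g., a minimal sketch, assuming a hypothetical mapped class
+        ``User``::
+
+            user = session.get(User, 5)
+
+            # re-load all column attributes from the current transaction
+            session.refresh(user)
+
+            # expire and re-load only the "name" attribute
+            session.refresh(user, ["name"])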
+ + :func:`_orm.relationship` oriented attributes that would normally + load using the ``select`` (or "lazy") loader strategy will also + load **if they are named explicitly in the attribute_names + collection**, emitting a SELECT statement for the attribute using the + ``immediate`` loader strategy. If lazy-loaded relationships are not + named in :paramref:`_orm.Session.refresh.attribute_names`, then + they remain as "lazy loaded" attributes and are not implicitly + refreshed. + + .. versionchanged:: 2.0.4 The :meth:`_orm.Session.refresh` method + will now refresh lazy-loaded :func:`_orm.relationship` oriented + attributes for those which are named explicitly in the + :paramref:`_orm.Session.refresh.attribute_names` collection. + + .. tip:: + + While the :meth:`_orm.Session.refresh` method is capable of + refreshing both column and relationship oriented attributes, its + primary focus is on refreshing of local column-oriented attributes + on a single instance. For more open ended "refresh" functionality, + including the ability to refresh the attributes on many objects at + once while having explicit control over relationship loader + strategies, use the + :ref:`populate existing ` feature + instead. + + Note that a highly isolated transaction will return the same values as + were previously read in that same transaction, regardless of changes + in database state outside of that transaction. Refreshing + attributes usually only makes sense at the start of a transaction + where database rows have not yet been accessed. + + :param attribute_names: optional. An iterable collection of + string attribute names indicating a subset of attributes to + be refreshed. + + :param with_for_update: optional boolean ``True`` indicating FOR UPDATE + should be used, or may be a dictionary containing flags to + indicate a more specific set of FOR UPDATE flags for the SELECT; + flags should match the parameters of + :meth:`_query.Query.with_for_update`. + Supersedes the :paramref:`.Session.refresh.lockmode` parameter. + + .. seealso:: + + :ref:`session_expire` - introductory material + + :meth:`.Session.expire` + + :meth:`.Session.expire_all` + + :ref:`orm_queryguide_populate_existing` - allows any ORM query + to refresh objects as they would be loaded normally. + + + """ # noqa: E501 + + return self._proxied.refresh( + instance, + attribute_names=attribute_names, + with_for_update=with_for_update, + ) + + def rollback(self) -> None: + r"""Rollback the current transaction in progress. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + If no transaction is in progress, this method is a pass-through. + + The method always rolls back + the topmost database transaction, discarding any nested + transactions that may be in progress. + + .. seealso:: + + :ref:`session_rollback` + + :ref:`unitofwork_transaction` + + + """ # noqa: E501 + + return self._proxied.rollback() + + @overload + def scalar( + self, + statement: TypedReturnsRows[Tuple[_T]], + params: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Optional[_T]: ... + + @overload + def scalar( + self, + statement: Executable, + params: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Any: ... 
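+
+    # A usage sketch for the typed ``scalar()`` overload above: when the
+    # statement's single selected column carries a Python type, a checker
+    # can narrow the result, e.g.
+    #
+    #   count = session.scalar(select(func.count(User.id)))  # Optional[int]
+    #
+    # where ``User`` is a hypothetical mapped class used for illustration.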
+ + def scalar( + self, + statement: Executable, + params: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Any: + r"""Execute a statement and return a scalar result. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + Usage and parameters are the same as that of + :meth:`_orm.Session.execute`; the return result is a scalar Python + value. + + + """ # noqa: E501 + + return self._proxied.scalar( + statement, + params=params, + execution_options=execution_options, + bind_arguments=bind_arguments, + **kw, + ) + + @overload + def scalars( + self, + statement: TypedReturnsRows[Tuple[_T]], + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> ScalarResult[_T]: ... + + @overload + def scalars( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> ScalarResult[Any]: ... + + def scalars( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> ScalarResult[Any]: + r"""Execute a statement and return the results as scalars. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + Usage and parameters are the same as that of + :meth:`_orm.Session.execute`; the return result is a + :class:`_result.ScalarResult` filtering object which + will return single elements rather than :class:`_row.Row` objects. + + :return: a :class:`_result.ScalarResult` object + + .. versionadded:: 1.4.24 Added :meth:`_orm.Session.scalars` + + .. versionadded:: 1.4.26 Added :meth:`_orm.scoped_session.scalars` + + .. seealso:: + + :ref:`orm_queryguide_select_orm_entities` - contrasts the behavior + of :meth:`_orm.Session.execute` to :meth:`_orm.Session.scalars` + + + """ # noqa: E501 + + return self._proxied.scalars( + statement, + params=params, + execution_options=execution_options, + bind_arguments=bind_arguments, + **kw, + ) + + @property + def bind(self) -> Optional[Union[Engine, Connection]]: + r"""Proxy for the :attr:`_orm.Session.bind` attribute + on behalf of the :class:`_orm.scoping.scoped_session` class. + + """ # noqa: E501 + + return self._proxied.bind + + @bind.setter + def bind(self, attr: Optional[Union[Engine, Connection]]) -> None: + self._proxied.bind = attr + + @property + def dirty(self) -> Any: + r"""The set of all persistent instances considered dirty. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_orm.scoping.scoped_session` class. + + E.g.:: + + some_mapped_object in session.dirty + + Instances are considered dirty when they were modified but not + deleted. + + Note that this 'dirty' calculation is 'optimistic'; most + attribute-setting or collection modification operations will + mark an instance as 'dirty' and place it in this set, even if + there is no net change to the attribute's value. 
At flush + time, the value of each attribute is compared to its + previously saved value, and if there's no net change, no SQL + operation will occur (this is a more expensive operation so + it's only done at flush time). + + To check if an instance has actionable net changes to its + attributes, use the :meth:`.Session.is_modified` method. + + + """ # noqa: E501 + + return self._proxied.dirty + + @property + def deleted(self) -> Any: + r"""The set of all instances marked as 'deleted' within this ``Session`` + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_orm.scoping.scoped_session` class. + + """ # noqa: E501 + + return self._proxied.deleted + + @property + def new(self) -> Any: + r"""The set of all instances marked as 'new' within this ``Session``. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_orm.scoping.scoped_session` class. + + """ # noqa: E501 + + return self._proxied.new + + @property + def identity_map(self) -> IdentityMap: + r"""Proxy for the :attr:`_orm.Session.identity_map` attribute + on behalf of the :class:`_orm.scoping.scoped_session` class. + + """ # noqa: E501 + + return self._proxied.identity_map + + @identity_map.setter + def identity_map(self, attr: IdentityMap) -> None: + self._proxied.identity_map = attr + + @property + def is_active(self) -> Any: + r"""True if this :class:`.Session` not in "partial rollback" state. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_orm.scoping.scoped_session` class. + + .. versionchanged:: 1.4 The :class:`_orm.Session` no longer begins + a new transaction immediately, so this attribute will be False + when the :class:`_orm.Session` is first instantiated. + + "partial rollback" state typically indicates that the flush process + of the :class:`_orm.Session` has failed, and that the + :meth:`_orm.Session.rollback` method must be emitted in order to + fully roll back the transaction. + + If this :class:`_orm.Session` is not in a transaction at all, the + :class:`_orm.Session` will autobegin when it is first used, so in this + case :attr:`_orm.Session.is_active` will return True. + + Otherwise, if this :class:`_orm.Session` is within a transaction, + and that transaction has not been rolled back internally, the + :attr:`_orm.Session.is_active` will also return True. + + .. seealso:: + + :ref:`faq_session_rollback` + + :meth:`_orm.Session.in_transaction` + + + """ # noqa: E501 + + return self._proxied.is_active + + @property + def autoflush(self) -> bool: + r"""Proxy for the :attr:`_orm.Session.autoflush` attribute + on behalf of the :class:`_orm.scoping.scoped_session` class. + + """ # noqa: E501 + + return self._proxied.autoflush + + @autoflush.setter + def autoflush(self, attr: bool) -> None: + self._proxied.autoflush = attr + + @property + def no_autoflush(self) -> Any: + r"""Return a context manager that disables autoflush. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_orm.scoping.scoped_session` class. + + e.g.:: + + with session.no_autoflush: + + some_object = SomeClass() + session.add(some_object) + # won't autoflush + some_object.related_thing = session.query(SomeRelated).first() + + Operations that proceed within the ``with:`` block + will not be subject to flushes occurring upon query + access. 
This is useful when initializing a series + of objects which involve existing database queries, + where the uncompleted object should not yet be flushed. + + + """ # noqa: E501 + + return self._proxied.no_autoflush + + @property + def info(self) -> Any: + r"""A user-modifiable dictionary. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class + on behalf of the :class:`_orm.scoping.scoped_session` class. + + The initial value of this dictionary can be populated using the + ``info`` argument to the :class:`.Session` constructor or + :class:`.sessionmaker` constructor or factory methods. The dictionary + here is always local to this :class:`.Session` and can be modified + independently of all other :class:`.Session` objects. + + + """ # noqa: E501 + + return self._proxied.info + + @classmethod + def close_all(cls) -> None: + r"""Close *all* sessions in memory. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + .. deprecated:: 1.3 The :meth:`.Session.close_all` method is deprecated and will be removed in a future release. Please refer to :func:`.session.close_all_sessions`. + + """ # noqa: E501 + + return Session.close_all() + + @classmethod + def object_session(cls, instance: object) -> Optional[Session]: + r"""Return the :class:`.Session` to which an object belongs. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + This is an alias of :func:`.object_session`. + + + """ # noqa: E501 + + return Session.object_session(instance) + + @classmethod + def identity_key( + cls, + class_: Optional[Type[Any]] = None, + ident: Union[Any, Tuple[Any, ...]] = None, + *, + instance: Optional[Any] = None, + row: Optional[Union[Row[Any], RowMapping]] = None, + identity_token: Optional[Any] = None, + ) -> _IdentityKeyType[Any]: + r"""Return an identity key. + + .. container:: class_bases + + Proxied for the :class:`_orm.Session` class on + behalf of the :class:`_orm.scoping.scoped_session` class. + + This is an alias of :func:`.util.identity_key`. 
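+
+        E.g., a minimal sketch, assuming a hypothetical mapped class
+        ``User`` and an existing :class:`.Session` instance ``session``::
+
+            key = Session.identity_key(User, 5)
+
+            # the key locates a persistent instance in an identity map
+            obj = session.identity_map.get(key)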
+ + + """ # noqa: E501 + + return Session.identity_key( + class_=class_, + ident=ident, + instance=instance, + row=row, + identity_token=identity_token, + ) + + # END PROXY METHODS scoped_session + + +ScopedSession = scoped_session +"""Old name for backwards compatibility.""" diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/session.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/session.py new file mode 100644 index 00000000..eb81f16e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/session.py @@ -0,0 +1,5301 @@ +# orm/session.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Provides the Session class and related utilities.""" + +from __future__ import annotations + +import contextlib +from enum import Enum +import itertools +import sys +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import List +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import weakref + +from . import attributes +from . import bulk_persistence +from . import context +from . import descriptor_props +from . import exc +from . import identity +from . import loading +from . import query +from . import state as statelib +from ._typing import _O +from ._typing import insp_is_mapper +from ._typing import is_composite_class +from ._typing import is_orm_option +from ._typing import is_user_defined_option +from .base import _class_to_mapper +from .base import _none_set +from .base import _state_mapper +from .base import instance_str +from .base import LoaderCallableStatus +from .base import object_mapper +from .base import object_state +from .base import PassiveFlag +from .base import state_str +from .context import FromStatement +from .context import ORMCompileState +from .identity import IdentityMap +from .query import Query +from .state import InstanceState +from .state_changes import _StateChange +from .state_changes import _StateChangeState +from .state_changes import _StateChangeStates +from .unitofwork import UOWTransaction +from .. import engine +from .. import exc as sa_exc +from .. import sql +from .. 
import util +from ..engine import Connection +from ..engine import Engine +from ..engine.util import TransactionalContext +from ..event import dispatcher +from ..event import EventTarget +from ..inspection import inspect +from ..inspection import Inspectable +from ..sql import coercions +from ..sql import dml +from ..sql import roles +from ..sql import Select +from ..sql import TableClause +from ..sql import visitors +from ..sql.base import _NoArg +from ..sql.base import CompileState +from ..sql.schema import Table +from ..sql.selectable import ForUpdateArg +from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL +from ..util import IdentitySet +from ..util.typing import Literal +from ..util.typing import Protocol + +if typing.TYPE_CHECKING: + from ._typing import _EntityType + from ._typing import _IdentityKeyType + from ._typing import _InstanceDict + from ._typing import OrmExecuteOptionsParameter + from .interfaces import ORMOption + from .interfaces import UserDefinedOption + from .mapper import Mapper + from .path_registry import PathRegistry + from .query import RowReturningQuery + from ..engine import CursorResult + from ..engine import Result + from ..engine import Row + from ..engine import RowMapping + from ..engine.base import Transaction + from ..engine.base import TwoPhaseTransaction + from ..engine.interfaces import _CoreAnyExecuteParams + from ..engine.interfaces import _CoreSingleExecuteParams + from ..engine.interfaces import _ExecuteOptions + from ..engine.interfaces import CoreExecuteOptionsParameter + from ..engine.result import ScalarResult + from ..event import _InstanceLevelDispatch + from ..sql._typing import _ColumnsClauseArgument + from ..sql._typing import _InfoType + from ..sql._typing import _T0 + from ..sql._typing import _T1 + from ..sql._typing import _T2 + from ..sql._typing import _T3 + from ..sql._typing import _T4 + from ..sql._typing import _T5 + from ..sql._typing import _T6 + from ..sql._typing import _T7 + from ..sql._typing import _TypedColumnClauseArgument as _TCCA + from ..sql.base import Executable + from ..sql.base import ExecutableOption + from ..sql.dml import UpdateBase + from ..sql.elements import ClauseElement + from ..sql.roles import TypedColumnsClauseRole + from ..sql.selectable import ForUpdateParameter + from ..sql.selectable import TypedReturnsRows + +_T = TypeVar("_T", bound=Any) + +__all__ = [ + "Session", + "SessionTransaction", + "sessionmaker", + "ORMExecuteState", + "close_all_sessions", + "make_transient", + "make_transient_to_detached", + "object_session", +] + +_sessions: weakref.WeakValueDictionary[int, Session] = ( + weakref.WeakValueDictionary() +) +"""Weak-referencing dictionary of :class:`.Session` objects. +""" + +statelib._sessions = _sessions + +_PKIdentityArgument = Union[Any, Tuple[Any, ...]] + +_BindArguments = Dict[str, Any] + +_EntityBindKey = Union[Type[_O], "Mapper[_O]"] +_SessionBindKey = Union[Type[Any], "Mapper[Any]", "TableClause", str] +_SessionBind = Union["Engine", "Connection"] + +JoinTransactionMode = Literal[ + "conditional_savepoint", + "rollback_only", + "control_fully", + "create_savepoint", +] + + +class _ConnectionCallableProto(Protocol): + """a callable that returns a :class:`.Connection` given an instance. + + This callable, when present on a :class:`.Session`, is called only from the + ORM's persistence mechanism (i.e. the unit of work flush process) to allow + for connection-per-instance schemes (i.e. horizontal sharding) to be used + as persistence time. 
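+
+    E.g., a sketch of a conforming callable; ``engines`` and
+    ``shard_key_for_instance`` are hypothetical names used only for
+    illustration::
+
+        def connection_for_instance(mapper=None, instance=None, **kw):
+            # choose a shard-specific Engine for this instance and
+            # return a Connection from it
+            return engines[shard_key_for_instance(instance)].connect()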
+ + This callable is not present on a plain :class:`.Session`, however + is established when using the horizontal sharding extension. + + """ + + def __call__( + self, + mapper: Optional[Mapper[Any]] = None, + instance: Optional[object] = None, + **kw: Any, + ) -> Connection: ... + + +def _state_session(state: InstanceState[Any]) -> Optional[Session]: + """Given an :class:`.InstanceState`, return the :class:`.Session` + associated, if any. + """ + return state.session + + +class _SessionClassMethods: + """Class-level methods for :class:`.Session`, :class:`.sessionmaker`.""" + + @classmethod + @util.deprecated( + "1.3", + "The :meth:`.Session.close_all` method is deprecated and will be " + "removed in a future release. Please refer to " + ":func:`.session.close_all_sessions`.", + ) + def close_all(cls) -> None: + """Close *all* sessions in memory.""" + + close_all_sessions() + + @classmethod + @util.preload_module("sqlalchemy.orm.util") + def identity_key( + cls, + class_: Optional[Type[Any]] = None, + ident: Union[Any, Tuple[Any, ...]] = None, + *, + instance: Optional[Any] = None, + row: Optional[Union[Row[Any], RowMapping]] = None, + identity_token: Optional[Any] = None, + ) -> _IdentityKeyType[Any]: + """Return an identity key. + + This is an alias of :func:`.util.identity_key`. + + """ + return util.preloaded.orm_util.identity_key( + class_, + ident, + instance=instance, + row=row, + identity_token=identity_token, + ) + + @classmethod + def object_session(cls, instance: object) -> Optional[Session]: + """Return the :class:`.Session` to which an object belongs. + + This is an alias of :func:`.object_session`. + + """ + + return object_session(instance) + + +class SessionTransactionState(_StateChangeState): + ACTIVE = 1 + PREPARED = 2 + COMMITTED = 3 + DEACTIVE = 4 + CLOSED = 5 + PROVISIONING_CONNECTION = 6 + + +# backwards compatibility +ACTIVE, PREPARED, COMMITTED, DEACTIVE, CLOSED, PROVISIONING_CONNECTION = tuple( + SessionTransactionState +) + + +class ORMExecuteState(util.MemoizedSlots): + """Represents a call to the :meth:`_orm.Session.execute` method, as passed + to the :meth:`.SessionEvents.do_orm_execute` event hook. + + .. versionadded:: 1.4 + + .. seealso:: + + :ref:`session_execute_events` - top level documentation on how + to use :meth:`_orm.SessionEvents.do_orm_execute` + + """ + + __slots__ = ( + "session", + "statement", + "parameters", + "execution_options", + "local_execution_options", + "bind_arguments", + "identity_token", + "_compile_state_cls", + "_starting_event_idx", + "_events_todo", + "_update_execution_options", + ) + + session: Session + """The :class:`_orm.Session` in use.""" + + statement: Executable + """The SQL statement being invoked. + + For an ORM selection as would + be retrieved from :class:`_orm.Query`, this is an instance of + :class:`_sql.select` that was generated from the ORM query. + """ + + parameters: Optional[_CoreAnyExecuteParams] + """Dictionary of parameters that was passed to + :meth:`_orm.Session.execute`.""" + + execution_options: _ExecuteOptions + """The complete dictionary of current execution options. + + This is a merge of the statement level options with the + locally passed execution options. + + .. seealso:: + + :attr:`_orm.ORMExecuteState.local_execution_options` + + :meth:`_sql.Executable.execution_options` + + :ref:`orm_queryguide_execution_options` + + """ + + local_execution_options: _ExecuteOptions + """Dictionary view of the execution options passed to the + :meth:`.Session.execute` method. 
+ + This does not include options that may be associated with the statement + being invoked. + + .. seealso:: + + :attr:`_orm.ORMExecuteState.execution_options` + + """ + + bind_arguments: _BindArguments + """The dictionary passed as the + :paramref:`_orm.Session.execute.bind_arguments` dictionary. + + This dictionary may be used by extensions to :class:`_orm.Session` to pass + arguments that will assist in determining amongst a set of database + connections which one should be used to invoke this statement. + + """ + + _compile_state_cls: Optional[Type[ORMCompileState]] + _starting_event_idx: int + _events_todo: List[Any] + _update_execution_options: Optional[_ExecuteOptions] + + def __init__( + self, + session: Session, + statement: Executable, + parameters: Optional[_CoreAnyExecuteParams], + execution_options: _ExecuteOptions, + bind_arguments: _BindArguments, + compile_state_cls: Optional[Type[ORMCompileState]], + events_todo: List[_InstanceLevelDispatch[Session]], + ): + """Construct a new :class:`_orm.ORMExecuteState`. + + this object is constructed internally. + + """ + self.session = session + self.statement = statement + self.parameters = parameters + self.local_execution_options = execution_options + self.execution_options = statement._execution_options.union( + execution_options + ) + self.bind_arguments = bind_arguments + self._compile_state_cls = compile_state_cls + self._events_todo = list(events_todo) + + def _remaining_events(self) -> List[_InstanceLevelDispatch[Session]]: + return self._events_todo[self._starting_event_idx + 1 :] + + def invoke_statement( + self, + statement: Optional[Executable] = None, + params: Optional[_CoreAnyExecuteParams] = None, + execution_options: Optional[OrmExecuteOptionsParameter] = None, + bind_arguments: Optional[_BindArguments] = None, + ) -> Result[Any]: + """Execute the statement represented by this + :class:`.ORMExecuteState`, without re-invoking events that have + already proceeded. + + This method essentially performs a re-entrant execution of the current + statement for which the :meth:`.SessionEvents.do_orm_execute` event is + being currently invoked. The use case for this is for event handlers + that want to override how the ultimate + :class:`_engine.Result` object is returned, such as for schemes that + retrieve results from an offline cache or which concatenate results + from multiple executions. + + When the :class:`_engine.Result` object is returned by the actual + handler function within :meth:`_orm.SessionEvents.do_orm_execute` and + is propagated to the calling + :meth:`_orm.Session.execute` method, the remainder of the + :meth:`_orm.Session.execute` method is preempted and the + :class:`_engine.Result` object is returned to the caller of + :meth:`_orm.Session.execute` immediately. + + :param statement: optional statement to be invoked, in place of the + statement currently represented by :attr:`.ORMExecuteState.statement`. + + :param params: optional dictionary of parameters or list of parameters + which will be merged into the existing + :attr:`.ORMExecuteState.parameters` of this :class:`.ORMExecuteState`. + + .. versionchanged:: 2.0 a list of parameter dictionaries is accepted + for executemany executions. + + :param execution_options: optional dictionary of execution options + will be merged into the existing + :attr:`.ORMExecuteState.execution_options` of this + :class:`.ORMExecuteState`. 
+ + :param bind_arguments: optional dictionary of bind_arguments + which will be merged amongst the current + :attr:`.ORMExecuteState.bind_arguments` + of this :class:`.ORMExecuteState`. + + :return: a :class:`_engine.Result` object with ORM-level results. + + .. seealso:: + + :ref:`do_orm_execute_re_executing` - background and examples on the + appropriate usage of :meth:`_orm.ORMExecuteState.invoke_statement`. + + + """ + + if statement is None: + statement = self.statement + + _bind_arguments = dict(self.bind_arguments) + if bind_arguments: + _bind_arguments.update(bind_arguments) + _bind_arguments["_sa_skip_events"] = True + + _params: Optional[_CoreAnyExecuteParams] + if params: + if self.is_executemany: + _params = [] + exec_many_parameters = cast( + "List[Dict[str, Any]]", self.parameters + ) + for _existing_params, _new_params in itertools.zip_longest( + exec_many_parameters, + cast("List[Dict[str, Any]]", params), + ): + if _existing_params is None or _new_params is None: + raise sa_exc.InvalidRequestError( + f"Can't apply executemany parameters to " + f"statement; number of parameter sets passed to " + f"Session.execute() ({len(exec_many_parameters)}) " + f"does not match number of parameter sets given " + f"to ORMExecuteState.invoke_statement() " + f"({len(params)})" + ) + _existing_params = dict(_existing_params) + _existing_params.update(_new_params) + _params.append(_existing_params) + else: + _params = dict(cast("Dict[str, Any]", self.parameters)) + _params.update(cast("Dict[str, Any]", params)) + else: + _params = self.parameters + + _execution_options = self.local_execution_options + if execution_options: + _execution_options = _execution_options.union(execution_options) + + return self.session._execute_internal( + statement, + _params, + execution_options=_execution_options, + bind_arguments=_bind_arguments, + _parent_execute_state=self, + ) + + @property + def bind_mapper(self) -> Optional[Mapper[Any]]: + """Return the :class:`_orm.Mapper` that is the primary "bind" mapper. + + For an :class:`_orm.ORMExecuteState` object invoking an ORM + statement, that is, the :attr:`_orm.ORMExecuteState.is_orm_statement` + attribute is ``True``, this attribute will return the + :class:`_orm.Mapper` that is considered to be the "primary" mapper + of the statement. The term "bind mapper" refers to the fact that + a :class:`_orm.Session` object may be "bound" to multiple + :class:`_engine.Engine` objects keyed to mapped classes, and the + "bind mapper" determines which of those :class:`_engine.Engine` objects + would be selected. + + For a statement that is invoked against a single mapped class, + :attr:`_orm.ORMExecuteState.bind_mapper` is intended to be a reliable + way of getting this mapper. + + .. versionadded:: 1.4.0b2 + + .. seealso:: + + :attr:`_orm.ORMExecuteState.all_mappers` + + + """ + mp: Optional[Mapper[Any]] = self.bind_arguments.get("mapper", None) + return mp + + @property + def all_mappers(self) -> Sequence[Mapper[Any]]: + """Return a sequence of all :class:`_orm.Mapper` objects that are + involved at the top level of this statement. + + By "top level" we mean those :class:`_orm.Mapper` objects that would + be represented in the result set rows for a :func:`_sql.select` + query, or for a :func:`_dml.update` or :func:`_dml.delete` query, + the mapper that is the main subject of the UPDATE or DELETE. + + .. versionadded:: 1.4.0b2 + + .. 
seealso:: + + :attr:`_orm.ORMExecuteState.bind_mapper` + + + + """ + if not self.is_orm_statement: + return [] + elif isinstance(self.statement, (Select, FromStatement)): + result = [] + seen = set() + for d in self.statement.column_descriptions: + ent = d["entity"] + if ent: + insp = inspect(ent, raiseerr=False) + if insp and insp.mapper and insp.mapper not in seen: + seen.add(insp.mapper) + result.append(insp.mapper) + return result + elif self.statement.is_dml and self.bind_mapper: + return [self.bind_mapper] + else: + return [] + + @property + def is_orm_statement(self) -> bool: + """return True if the operation is an ORM statement. + + This indicates that the select(), insert(), update(), or delete() + being invoked contains ORM entities as subjects. For a statement + that does not have ORM entities and instead refers only to + :class:`.Table` metadata, it is invoked as a Core SQL statement + and no ORM-level automation takes place. + + """ + return self._compile_state_cls is not None + + @property + def is_executemany(self) -> bool: + """return True if the parameters are a multi-element list of + dictionaries with more than one dictionary. + + .. versionadded:: 2.0 + + """ + return isinstance(self.parameters, list) + + @property + def is_select(self) -> bool: + """return True if this is a SELECT operation. + + .. versionchanged:: 2.0.30 - the attribute is also True for a + :meth:`_sql.Select.from_statement` construct that is itself against + a :class:`_sql.Select` construct, such as + ``select(Entity).from_statement(select(..))`` + + """ + return self.statement.is_select + + @property + def is_from_statement(self) -> bool: + """return True if this operation is a + :meth:`_sql.Select.from_statement` operation. + + This is independent from :attr:`_orm.ORMExecuteState.is_select`, as a + ``select().from_statement()`` construct can be used with + INSERT/UPDATE/DELETE RETURNING types of statements as well. + :attr:`_orm.ORMExecuteState.is_select` will only be set if the + :meth:`_sql.Select.from_statement` is itself against a + :class:`_sql.Select` construct. + + .. versionadded:: 2.0.30 + + """ + return self.statement.is_from_statement + + @property + def is_insert(self) -> bool: + """return True if this is an INSERT operation. + + .. versionchanged:: 2.0.30 - the attribute is also True for a + :meth:`_sql.Select.from_statement` construct that is itself against + a :class:`_sql.Insert` construct, such as + ``select(Entity).from_statement(insert(..))`` + + """ + return self.statement.is_dml and self.statement.is_insert + + @property + def is_update(self) -> bool: + """return True if this is an UPDATE operation. + + .. versionchanged:: 2.0.30 - the attribute is also True for a + :meth:`_sql.Select.from_statement` construct that is itself against + a :class:`_sql.Update` construct, such as + ``select(Entity).from_statement(update(..))`` + + """ + return self.statement.is_dml and self.statement.is_update + + @property + def is_delete(self) -> bool: + """return True if this is a DELETE operation. + + .. 
versionchanged:: 2.0.30 - the attribute is also True for a + :meth:`_sql.Select.from_statement` construct that is itself against + a :class:`_sql.Delete` construct, such as + ``select(Entity).from_statement(delete(..))`` + + """ + return self.statement.is_dml and self.statement.is_delete + + @property + def _is_crud(self) -> bool: + return isinstance(self.statement, (dml.Update, dml.Delete)) + + def update_execution_options(self, **opts: Any) -> None: + """Update the local execution options with new values.""" + self.local_execution_options = self.local_execution_options.union(opts) + + def _orm_compile_options( + self, + ) -> Optional[ + Union[ + context.ORMCompileState.default_compile_options, + Type[context.ORMCompileState.default_compile_options], + ] + ]: + if not self.is_select: + return None + try: + opts = self.statement._compile_options + except AttributeError: + return None + + if opts is not None and opts.isinstance( + context.ORMCompileState.default_compile_options + ): + return opts # type: ignore + else: + return None + + @property + def lazy_loaded_from(self) -> Optional[InstanceState[Any]]: + """An :class:`.InstanceState` that is using this statement execution + for a lazy load operation. + + The primary rationale for this attribute is to support the horizontal + sharding extension, where it is available within specific query + execution time hooks created by this extension. To that end, the + attribute is only intended to be meaningful at **query execution + time**, and importantly not any time prior to that, including query + compilation time. + + """ + return self.load_options._lazy_loaded_from + + @property + def loader_strategy_path(self) -> Optional[PathRegistry]: + """Return the :class:`.PathRegistry` for the current load path. + + This object represents the "path" in a query along relationships + when a particular object or collection is being loaded. + + """ + opts = self._orm_compile_options() + if opts is not None: + return opts._current_path + else: + return None + + @property + def is_column_load(self) -> bool: + """Return True if the operation is refreshing column-oriented + attributes on an existing ORM object. + + This occurs during operations such as :meth:`_orm.Session.refresh`, + as well as when an attribute deferred by :func:`_orm.defer` is + being loaded, or an attribute that was expired either directly + by :meth:`_orm.Session.expire` or via a commit operation is being + loaded. + + Handlers will very likely not want to add any options to queries + when such an operation is occurring as the query should be a straight + primary key fetch which should not have any additional WHERE criteria, + and loader options travelling with the instance + will have already been added to the query. + + .. versionadded:: 1.4.0b2 + + .. seealso:: + + :attr:`_orm.ORMExecuteState.is_relationship_load` + + """ + opts = self._orm_compile_options() + return opts is not None and opts._for_refresh_state + + @property + def is_relationship_load(self) -> bool: + """Return True if this load is loading objects on behalf of a + relationship. + + This means, the loader in effect is either a LazyLoader, + SelectInLoader, SubqueryLoader, or similar, and the entire + SELECT statement being emitted is on behalf of a relationship + load. + + Handlers will very likely not want to add any options to queries + when such an operation is occurring, as loader options are already + capable of being propagated to relationship loaders and should + be already present. + + .. 
seealso::
+
+            :attr:`_orm.ORMExecuteState.is_column_load`
+
+        """
+        opts = self._orm_compile_options()
+        if opts is None:
+            return False
+        path = self.loader_strategy_path
+        return path is not None and not path.is_root
+
+    @property
+    def load_options(
+        self,
+    ) -> Union[
+        context.QueryContext.default_load_options,
+        Type[context.QueryContext.default_load_options],
+    ]:
+        """Return the load_options that will be used for this execution."""
+
+        if not self.is_select:
+            raise sa_exc.InvalidRequestError(
+                "This ORM execution is not against a SELECT statement "
+                "so there are no load options."
+            )
+
+        lo: Union[
+            context.QueryContext.default_load_options,
+            Type[context.QueryContext.default_load_options],
+        ] = self.execution_options.get(
+            "_sa_orm_load_options", context.QueryContext.default_load_options
+        )
+        return lo
+
+    @property
+    def update_delete_options(
+        self,
+    ) -> Union[
+        bulk_persistence.BulkUDCompileState.default_update_options,
+        Type[bulk_persistence.BulkUDCompileState.default_update_options],
+    ]:
+        """Return the update_delete_options that will be used for this
+        execution."""
+
+        if not self._is_crud:
+            raise sa_exc.InvalidRequestError(
+                "This ORM execution is not against an UPDATE or DELETE "
+                "statement so there are no update options."
+            )
+        uo: Union[
+            bulk_persistence.BulkUDCompileState.default_update_options,
+            Type[bulk_persistence.BulkUDCompileState.default_update_options],
+        ] = self.execution_options.get(
+            "_sa_orm_update_options",
+            bulk_persistence.BulkUDCompileState.default_update_options,
+        )
+        return uo
+
+    @property
+    def _non_compile_orm_options(self) -> Sequence[ORMOption]:
+        return [
+            opt
+            for opt in self.statement._with_options
+            if is_orm_option(opt) and not opt._is_compile_state
+        ]
+
+    @property
+    def user_defined_options(self) -> Sequence[UserDefinedOption]:
+        """The sequence of :class:`.UserDefinedOptions` that have been
+        associated with the statement being invoked.
+
+        """
+        return [
+            opt
+            for opt in self.statement._with_options
+            if is_user_defined_option(opt)
+        ]
+
+
+class SessionTransactionOrigin(Enum):
+    """indicates the origin of a :class:`.SessionTransaction`.
+
+    This enumeration is present on the
+    :attr:`.SessionTransaction.origin` attribute of any
+    :class:`.SessionTransaction` object.
+
+    .. versionadded:: 2.0
+
+    """
+
+    AUTOBEGIN = 0
+    """transaction was started by autobegin"""
+
+    BEGIN = 1
+    """transaction was started by calling :meth:`_orm.Session.begin`"""
+
+    BEGIN_NESTED = 2
+    """transaction was started by :meth:`_orm.Session.begin_nested`"""
+
+    SUBTRANSACTION = 3
+    """transaction is an internal "subtransaction" """
+
+
+class SessionTransaction(_StateChange, TransactionalContext):
+    """A :class:`.Session`-level transaction.
+
+    :class:`.SessionTransaction` is produced from the
+    :meth:`_orm.Session.begin`
+    and :meth:`_orm.Session.begin_nested` methods. It's largely an internal
+    object that in modern use provides a context manager for session
+    transactions.
+
+    Documentation on interacting with :class:`_orm.SessionTransaction` is
+    at: :ref:`unitofwork_transaction`.
+
+
+    .. versionchanged:: 1.4 The scoping and API methods to work with the
+        :class:`_orm.SessionTransaction` object directly have been simplified.
+
+    ..
seealso:: + + :ref:`unitofwork_transaction` + + :meth:`.Session.begin` + + :meth:`.Session.begin_nested` + + :meth:`.Session.rollback` + + :meth:`.Session.commit` + + :meth:`.Session.in_transaction` + + :meth:`.Session.in_nested_transaction` + + :meth:`.Session.get_transaction` + + :meth:`.Session.get_nested_transaction` + + + """ + + _rollback_exception: Optional[BaseException] = None + + _connections: Dict[ + Union[Engine, Connection], Tuple[Connection, Transaction, bool, bool] + ] + session: Session + _parent: Optional[SessionTransaction] + + _state: SessionTransactionState + + _new: weakref.WeakKeyDictionary[InstanceState[Any], object] + _deleted: weakref.WeakKeyDictionary[InstanceState[Any], object] + _dirty: weakref.WeakKeyDictionary[InstanceState[Any], object] + _key_switches: weakref.WeakKeyDictionary[ + InstanceState[Any], Tuple[Any, Any] + ] + + origin: SessionTransactionOrigin + """Origin of this :class:`_orm.SessionTransaction`. + + Refers to a :class:`.SessionTransactionOrigin` instance which is an + enumeration indicating the source event that led to constructing + this :class:`_orm.SessionTransaction`. + + .. versionadded:: 2.0 + + """ + + nested: bool = False + """Indicates if this is a nested, or SAVEPOINT, transaction. + + When :attr:`.SessionTransaction.nested` is True, it is expected + that :attr:`.SessionTransaction.parent` will be present as well, + linking to the enclosing :class:`.SessionTransaction`. + + .. seealso:: + + :attr:`.SessionTransaction.origin` + + """ + + def __init__( + self, + session: Session, + origin: SessionTransactionOrigin, + parent: Optional[SessionTransaction] = None, + ): + TransactionalContext._trans_ctx_check(session) + + self.session = session + self._connections = {} + self._parent = parent + self.nested = nested = origin is SessionTransactionOrigin.BEGIN_NESTED + self.origin = origin + + if session._close_state is _SessionCloseState.CLOSED: + raise sa_exc.InvalidRequestError( + "This Session has been permanently closed and is unable " + "to handle any more transaction requests." + ) + + if nested: + if not parent: + raise sa_exc.InvalidRequestError( + "Can't start a SAVEPOINT transaction when no existing " + "transaction is in progress" + ) + + self._previous_nested_transaction = session._nested_transaction + elif origin is SessionTransactionOrigin.SUBTRANSACTION: + assert parent is not None + else: + assert parent is None + + self._state = SessionTransactionState.ACTIVE + + self._take_snapshot() + + # make sure transaction is assigned before we call the + # dispatch + self.session._transaction = self + + self.session.dispatch.after_transaction_create(self.session, self) + + def _raise_for_prerequisite_state( + self, operation_name: str, state: _StateChangeState + ) -> NoReturn: + if state is SessionTransactionState.DEACTIVE: + if self._rollback_exception: + raise sa_exc.PendingRollbackError( + "This Session's transaction has been rolled back " + "due to a previous exception during flush." + " To begin a new transaction with this Session, " + "first issue Session.rollback()." + f" Original exception was: {self._rollback_exception}", + code="7s2a", + ) + else: + raise sa_exc.InvalidRequestError( + "This session is in 'inactive' state, due to the " + "SQL transaction being rolled back; no further SQL " + "can be emitted within this transaction." 
+ ) + elif state is SessionTransactionState.CLOSED: + raise sa_exc.ResourceClosedError("This transaction is closed") + elif state is SessionTransactionState.PROVISIONING_CONNECTION: + raise sa_exc.InvalidRequestError( + "This session is provisioning a new connection; concurrent " + "operations are not permitted", + code="isce", + ) + else: + raise sa_exc.InvalidRequestError( + f"This session is in '{state.name.lower()}' state; no " + "further SQL can be emitted within this transaction." + ) + + @property + def parent(self) -> Optional[SessionTransaction]: + """The parent :class:`.SessionTransaction` of this + :class:`.SessionTransaction`. + + If this attribute is ``None``, indicates this + :class:`.SessionTransaction` is at the top of the stack, and + corresponds to a real "COMMIT"/"ROLLBACK" + block. If non-``None``, then this is either a "subtransaction" + (an internal marker object used by the flush process) or a + "nested" / SAVEPOINT transaction. If the + :attr:`.SessionTransaction.nested` attribute is ``True``, then + this is a SAVEPOINT, and if ``False``, indicates this a subtransaction. + + """ + return self._parent + + @property + def is_active(self) -> bool: + return ( + self.session is not None + and self._state is SessionTransactionState.ACTIVE + ) + + @property + def _is_transaction_boundary(self) -> bool: + return self.nested or not self._parent + + @_StateChange.declare_states( + (SessionTransactionState.ACTIVE,), _StateChangeStates.NO_CHANGE + ) + def connection( + self, + bindkey: Optional[Mapper[Any]], + execution_options: Optional[_ExecuteOptions] = None, + **kwargs: Any, + ) -> Connection: + bind = self.session.get_bind(bindkey, **kwargs) + return self._connection_for_bind(bind, execution_options) + + @_StateChange.declare_states( + (SessionTransactionState.ACTIVE,), _StateChangeStates.NO_CHANGE + ) + def _begin(self, nested: bool = False) -> SessionTransaction: + return SessionTransaction( + self.session, + ( + SessionTransactionOrigin.BEGIN_NESTED + if nested + else SessionTransactionOrigin.SUBTRANSACTION + ), + self, + ) + + def _iterate_self_and_parents( + self, upto: Optional[SessionTransaction] = None + ) -> Iterable[SessionTransaction]: + current = self + result: Tuple[SessionTransaction, ...] = () + while current: + result += (current,) + if current._parent is upto: + break + elif current._parent is None: + raise sa_exc.InvalidRequestError( + "Transaction %s is not on the active transaction list" + % (upto) + ) + else: + current = current._parent + + return result + + def _take_snapshot(self) -> None: + if not self._is_transaction_boundary: + parent = self._parent + assert parent is not None + self._new = parent._new + self._deleted = parent._deleted + self._dirty = parent._dirty + self._key_switches = parent._key_switches + return + + is_begin = self.origin in ( + SessionTransactionOrigin.BEGIN, + SessionTransactionOrigin.AUTOBEGIN, + ) + if not is_begin and not self.session._flushing: + self.session.flush() + + self._new = weakref.WeakKeyDictionary() + self._deleted = weakref.WeakKeyDictionary() + self._dirty = weakref.WeakKeyDictionary() + self._key_switches = weakref.WeakKeyDictionary() + + def _restore_snapshot(self, dirty_only: bool = False) -> None: + """Restore the restoration state taken before a transaction began. + + Corresponds to a rollback. 
+ + """ + assert self._is_transaction_boundary + + to_expunge = set(self._new).union(self.session._new) + self.session._expunge_states(to_expunge, to_transient=True) + + for s, (oldkey, newkey) in self._key_switches.items(): + # we probably can do this conditionally based on + # if we expunged or not, but safe_discard does that anyway + self.session.identity_map.safe_discard(s) + + # restore the old key + s.key = oldkey + + # now restore the object, but only if we didn't expunge + if s not in to_expunge: + self.session.identity_map.replace(s) + + for s in set(self._deleted).union(self.session._deleted): + self.session._update_impl(s, revert_deletion=True) + + assert not self.session._deleted + + for s in self.session.identity_map.all_states(): + if not dirty_only or s.modified or s in self._dirty: + s._expire(s.dict, self.session.identity_map._modified) + + def _remove_snapshot(self) -> None: + """Remove the restoration state taken before a transaction began. + + Corresponds to a commit. + + """ + assert self._is_transaction_boundary + + if not self.nested and self.session.expire_on_commit: + for s in self.session.identity_map.all_states(): + s._expire(s.dict, self.session.identity_map._modified) + + statelib.InstanceState._detach_states( + list(self._deleted), self.session + ) + self._deleted.clear() + elif self.nested: + parent = self._parent + assert parent is not None + parent._new.update(self._new) + parent._dirty.update(self._dirty) + parent._deleted.update(self._deleted) + parent._key_switches.update(self._key_switches) + + @_StateChange.declare_states( + (SessionTransactionState.ACTIVE,), _StateChangeStates.NO_CHANGE + ) + def _connection_for_bind( + self, + bind: _SessionBind, + execution_options: Optional[CoreExecuteOptionsParameter], + ) -> Connection: + if bind in self._connections: + if execution_options: + util.warn( + "Connection is already established for the " + "given bind; execution_options ignored" + ) + return self._connections[bind][0] + + self._state = SessionTransactionState.PROVISIONING_CONNECTION + + local_connect = False + should_commit = True + + try: + if self._parent: + conn = self._parent._connection_for_bind( + bind, execution_options + ) + if not self.nested: + return conn + else: + if isinstance(bind, engine.Connection): + conn = bind + if conn.engine in self._connections: + raise sa_exc.InvalidRequestError( + "Session already has a Connection associated " + "for the given Connection's Engine" + ) + else: + conn = bind.connect() + local_connect = True + + try: + if execution_options: + conn = conn.execution_options(**execution_options) + + transaction: Transaction + if self.session.twophase and self._parent is None: + # TODO: shouldn't we only be here if not + # conn.in_transaction() ? + # if twophase is set and conn.in_transaction(), validate + # that it is in fact twophase. + transaction = conn.begin_twophase() + elif self.nested: + transaction = conn.begin_nested() + elif conn.in_transaction(): + join_transaction_mode = self.session.join_transaction_mode + + if join_transaction_mode == "conditional_savepoint": + if conn.in_nested_transaction(): + join_transaction_mode = "create_savepoint" + else: + join_transaction_mode = "rollback_only" + + if local_connect: + util.warn( + "The engine provided as bind produced a " + "connection that is already in a transaction. " + "This is usually caused by a core event, " + "such as 'engine_connect', that has left a " + "transaction open. 
The effective join "
+                            "transaction mode used by this session is "
+                            f"{join_transaction_mode!r}. To silence this "
+                            "warning, do not leave transactions open"
+                        )
+                    if join_transaction_mode in (
+                        "control_fully",
+                        "rollback_only",
+                    ):
+                        if conn.in_nested_transaction():
+                            transaction = (
+                                conn._get_required_nested_transaction()
+                            )
+                        else:
+                            transaction = conn._get_required_transaction()
+                        if join_transaction_mode == "rollback_only":
+                            should_commit = False
+                    elif join_transaction_mode == "create_savepoint":
+                        transaction = conn.begin_nested()
+                    else:
+                        assert False, join_transaction_mode
+                else:
+                    transaction = conn.begin()
+            except:
+                # connection will not be associated with this Session;
+                # close it immediately so that it isn't closed under GC
+                if local_connect:
+                    conn.close()
+                raise
+            else:
+                bind_is_connection = isinstance(bind, engine.Connection)
+
+                self._connections[conn] = self._connections[conn.engine] = (
+                    conn,
+                    transaction,
+                    should_commit,
+                    not bind_is_connection,
+                )
+                self.session.dispatch.after_begin(self.session, self, conn)
+                return conn
+        finally:
+            self._state = SessionTransactionState.ACTIVE
+
+    def prepare(self) -> None:
+        if self._parent is not None or not self.session.twophase:
+            raise sa_exc.InvalidRequestError(
+                "'twophase' mode not enabled, or not root transaction; "
+                "can't prepare."
+            )
+        self._prepare_impl()
+
+    @_StateChange.declare_states(
+        (SessionTransactionState.ACTIVE,), SessionTransactionState.PREPARED
+    )
+    def _prepare_impl(self) -> None:
+        if self._parent is None or self.nested:
+            self.session.dispatch.before_commit(self.session)
+
+        stx = self.session._transaction
+        assert stx is not None
+        if stx is not self:
+            for subtransaction in stx._iterate_self_and_parents(upto=self):
+                subtransaction.commit()
+
+        if not self.session._flushing:
+            for _flush_guard in range(100):
+                if self.session._is_clean():
+                    break
+                self.session.flush()
+            else:
+                raise exc.FlushError(
+                    "Over 100 subsequent flushes have occurred within "
+                    "session.commit() - is an after_flush() hook "
+                    "creating new objects?"
+ ) + + if self._parent is None and self.session.twophase: + try: + for t in set(self._connections.values()): + cast("TwoPhaseTransaction", t[1]).prepare() + except: + with util.safe_reraise(): + self.rollback() + + self._state = SessionTransactionState.PREPARED + + @_StateChange.declare_states( + (SessionTransactionState.ACTIVE, SessionTransactionState.PREPARED), + SessionTransactionState.CLOSED, + ) + def commit(self, _to_root: bool = False) -> None: + if self._state is not SessionTransactionState.PREPARED: + with self._expect_state(SessionTransactionState.PREPARED): + self._prepare_impl() + + if self._parent is None or self.nested: + for conn, trans, should_commit, autoclose in set( + self._connections.values() + ): + if should_commit: + trans.commit() + + self._state = SessionTransactionState.COMMITTED + self.session.dispatch.after_commit(self.session) + + self._remove_snapshot() + + with self._expect_state(SessionTransactionState.CLOSED): + self.close() + + if _to_root and self._parent: + self._parent.commit(_to_root=True) + + @_StateChange.declare_states( + ( + SessionTransactionState.ACTIVE, + SessionTransactionState.DEACTIVE, + SessionTransactionState.PREPARED, + ), + SessionTransactionState.CLOSED, + ) + def rollback( + self, _capture_exception: bool = False, _to_root: bool = False + ) -> None: + stx = self.session._transaction + assert stx is not None + if stx is not self: + for subtransaction in stx._iterate_self_and_parents(upto=self): + subtransaction.close() + + boundary = self + rollback_err = None + if self._state in ( + SessionTransactionState.ACTIVE, + SessionTransactionState.PREPARED, + ): + for transaction in self._iterate_self_and_parents(): + if transaction._parent is None or transaction.nested: + try: + for t in set(transaction._connections.values()): + t[1].rollback() + + transaction._state = SessionTransactionState.DEACTIVE + self.session.dispatch.after_rollback(self.session) + except: + rollback_err = sys.exc_info() + finally: + transaction._state = SessionTransactionState.DEACTIVE + transaction._restore_snapshot( + dirty_only=transaction.nested + ) + boundary = transaction + break + else: + transaction._state = SessionTransactionState.DEACTIVE + + sess = self.session + + if not rollback_err and not sess._is_clean(): + # if items were added, deleted, or mutated + # here, we need to re-restore the snapshot + util.warn( + "Session's state has been changed on " + "a non-active transaction - this state " + "will be discarded." 
+                )
+                boundary._restore_snapshot(dirty_only=boundary.nested)
+
+        with self._expect_state(SessionTransactionState.CLOSED):
+            self.close()
+
+        if self._parent and _capture_exception:
+            self._parent._rollback_exception = sys.exc_info()[1]
+
+        if rollback_err and rollback_err[1]:
+            raise rollback_err[1].with_traceback(rollback_err[2])
+
+        sess.dispatch.after_soft_rollback(sess, self)
+
+        if _to_root and self._parent:
+            self._parent.rollback(_to_root=True)
+
+    @_StateChange.declare_states(
+        _StateChangeStates.ANY, SessionTransactionState.CLOSED
+    )
+    def close(self, invalidate: bool = False) -> None:
+        if self.nested:
+            self.session._nested_transaction = (
+                self._previous_nested_transaction
+            )
+
+        self.session._transaction = self._parent
+
+        for connection, transaction, should_commit, autoclose in set(
+            self._connections.values()
+        ):
+            if invalidate and self._parent is None:
+                connection.invalidate()
+            if should_commit and transaction.is_active:
+                transaction.close()
+            if autoclose and self._parent is None:
+                connection.close()
+
+        self._state = SessionTransactionState.CLOSED
+        sess = self.session
+
+        # TODO: these two None sets were historically after the
+        # event hook below, and in 2.0 I changed it this way for some reason,
+        # and I remember there being a reason, but not what it was.
+        # Why do we need to get rid of them at all?  test_memusage::CycleTest
+        # passes with these commented out.
+        # self.session = None  # type: ignore
+        # self._connections = None  # type: ignore
+
+        sess.dispatch.after_transaction_end(sess, self)
+
+    def _get_subject(self) -> Session:
+        return self.session
+
+    def _transaction_is_active(self) -> bool:
+        return self._state is SessionTransactionState.ACTIVE
+
+    def _transaction_is_closed(self) -> bool:
+        return self._state is SessionTransactionState.CLOSED
+
+    def _rollback_can_be_called(self) -> bool:
+        return self._state not in (COMMITTED, CLOSED)
+
+
+class _SessionCloseState(Enum):
+    ACTIVE = 1
+    CLOSED = 2
+    CLOSE_IS_RESET = 3
+
+
+class Session(_SessionClassMethods, EventTarget):
+    """Manages persistence operations for ORM-mapped objects.
+
+    The :class:`_orm.Session` is **not safe for use in concurrent threads**.
+    See :ref:`session_faq_threadsafe` for background.
+
+    The Session's usage paradigm is described at :doc:`/orm/session`.
+
+
+    """
+
+    _is_asyncio = False
+
+    dispatch: dispatcher[Session]
+
+    identity_map: IdentityMap
+    """A mapping of object identities to objects themselves.
+
+    Iterating through ``Session.identity_map.values()`` provides
+    access to the full set of persistent objects (i.e., those
+    that have row identity) currently in the session.
+
+    .. seealso::
+
+        :func:`.identity_key` - helper function to produce the keys used
+        in this dictionary.
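+
+    A minimal sketch of such iteration (assuming ``session`` is an active
+    :class:`.Session` that has loaded some persistent objects)::
+
+        for obj in list(session.identity_map.values()):
+            print(obj)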
+ + """ + + _new: Dict[InstanceState[Any], Any] + _deleted: Dict[InstanceState[Any], Any] + bind: Optional[Union[Engine, Connection]] + __binds: Dict[_SessionBindKey, _SessionBind] + _flushing: bool + _warn_on_events: bool + _transaction: Optional[SessionTransaction] + _nested_transaction: Optional[SessionTransaction] + hash_key: int + autoflush: bool + expire_on_commit: bool + enable_baked_queries: bool + twophase: bool + join_transaction_mode: JoinTransactionMode + _query_cls: Type[Query[Any]] + _close_state: _SessionCloseState + + def __init__( + self, + bind: Optional[_SessionBind] = None, + *, + autoflush: bool = True, + future: Literal[True] = True, + expire_on_commit: bool = True, + autobegin: bool = True, + twophase: bool = False, + binds: Optional[Dict[_SessionBindKey, _SessionBind]] = None, + enable_baked_queries: bool = True, + info: Optional[_InfoType] = None, + query_cls: Optional[Type[Query[Any]]] = None, + autocommit: Literal[False] = False, + join_transaction_mode: JoinTransactionMode = "conditional_savepoint", + close_resets_only: Union[bool, _NoArg] = _NoArg.NO_ARG, + ): + r"""Construct a new :class:`_orm.Session`. + + See also the :class:`.sessionmaker` function which is used to + generate a :class:`.Session`-producing callable with a given + set of arguments. + + :param autoflush: When ``True``, all query operations will issue a + :meth:`~.Session.flush` call to this ``Session`` before proceeding. + This is a convenience feature so that :meth:`~.Session.flush` need + not be called repeatedly in order for database queries to retrieve + results. + + .. seealso:: + + :ref:`session_flushing` - additional background on autoflush + + :param autobegin: Automatically start transactions (i.e. equivalent to + invoking :meth:`_orm.Session.begin`) when database access is + requested by an operation. Defaults to ``True``. Set to + ``False`` to prevent a :class:`_orm.Session` from implicitly + beginning transactions after construction, as well as after any of + the :meth:`_orm.Session.rollback`, :meth:`_orm.Session.commit`, + or :meth:`_orm.Session.close` methods are called. + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`session_autobegin_disable` + + :param bind: An optional :class:`_engine.Engine` or + :class:`_engine.Connection` to + which this ``Session`` should be bound. When specified, all SQL + operations performed by this session will execute via this + connectable. + + :param binds: A dictionary which may specify any number of + :class:`_engine.Engine` or :class:`_engine.Connection` + objects as the source of + connectivity for SQL operations on a per-entity basis. The keys + of the dictionary consist of any series of mapped classes, + arbitrary Python classes that are bases for mapped classes, + :class:`_schema.Table` objects and :class:`_orm.Mapper` objects. + The + values of the dictionary are then instances of + :class:`_engine.Engine` + or less commonly :class:`_engine.Connection` objects. + Operations which + proceed relative to a particular mapped class will consult this + dictionary for the closest matching entity in order to determine + which :class:`_engine.Engine` should be used for a particular SQL + operation. The complete heuristics for resolution are + described at :meth:`.Session.get_bind`. 
Usage looks like:: + + Session = sessionmaker(binds={ + SomeMappedClass: create_engine('postgresql+psycopg2://engine1'), + SomeDeclarativeBase: create_engine('postgresql+psycopg2://engine2'), + some_mapper: create_engine('postgresql+psycopg2://engine3'), + some_table: create_engine('postgresql+psycopg2://engine4'), + }) + + .. seealso:: + + :ref:`session_partitioning` + + :meth:`.Session.bind_mapper` + + :meth:`.Session.bind_table` + + :meth:`.Session.get_bind` + + + :param \class_: Specify an alternate class other than + ``sqlalchemy.orm.session.Session`` which should be used by the + returned class. This is the only argument that is local to the + :class:`.sessionmaker` function, and is not sent directly to the + constructor for ``Session``. + + :param enable_baked_queries: legacy; defaults to ``True``. + A parameter consumed + by the :mod:`sqlalchemy.ext.baked` extension to determine if + "baked queries" should be cached, as is the normal operation + of this extension. When set to ``False``, caching as used by + this particular extension is disabled. + + .. versionchanged:: 1.4 The ``sqlalchemy.ext.baked`` extension is + legacy and is not used by any of SQLAlchemy's internals. This + flag therefore only affects applications that are making explicit + use of this extension within their own code. + + :param expire_on_commit: Defaults to ``True``. When ``True``, all + instances will be fully expired after each :meth:`~.commit`, + so that all attribute/object access subsequent to a completed + transaction will load from the most recent database state. + + .. seealso:: + + :ref:`session_committing` + + :param future: Deprecated; this flag is always True. + + .. seealso:: + + :ref:`migration_20_toplevel` + + :param info: optional dictionary of arbitrary data to be associated + with this :class:`.Session`. Is available via the + :attr:`.Session.info` attribute. Note the dictionary is copied at + construction time so that modifications to the per- + :class:`.Session` dictionary will be local to that + :class:`.Session`. + + :param query_cls: Class which should be used to create new Query + objects, as returned by the :meth:`~.Session.query` method. + Defaults to :class:`_query.Query`. + + :param twophase: When ``True``, all transactions will be started as + a "two phase" transaction, i.e. using the "two phase" semantics + of the database in use along with an XID. During a + :meth:`~.commit`, after :meth:`~.flush` has been issued for all + attached databases, the :meth:`~.TwoPhaseTransaction.prepare` + method on each database's :class:`.TwoPhaseTransaction` will be + called. This allows each database to roll back the entire + transaction, before each transaction is committed. + + :param autocommit: the "autocommit" keyword is present for backwards + compatibility but must remain at its default value of ``False``. + + :param join_transaction_mode: Describes the transactional behavior to + take when a given bind is a :class:`_engine.Connection` that + has already begun a transaction outside the scope of this + :class:`_orm.Session`; in other words the + :meth:`_engine.Connection.in_transaction()` method returns True. + + The following behaviors only take effect when the :class:`_orm.Session` + **actually makes use of the connection given**; that is, a method + such as :meth:`_orm.Session.execute`, :meth:`_orm.Session.connection`, + etc. are actually invoked: + + * ``"conditional_savepoint"`` - this is the default. 
If the given
+          :class:`_engine.Connection` is begun within a transaction but
+          does not have a SAVEPOINT, then ``"rollback_only"`` is used.
+          If the :class:`_engine.Connection` is additionally within
+          a SAVEPOINT, in other words the
+          :meth:`_engine.Connection.in_nested_transaction()` method returns
+          True, then ``"create_savepoint"`` is used.
+
+          ``"conditional_savepoint"`` behavior attempts to make use of
+          savepoints in order to keep the state of the existing transaction
+          unchanged, but only if there is already a savepoint in progress;
+          otherwise, it is not assumed that the backend in use has adequate
+          support for SAVEPOINT, as availability of this feature varies.
+          ``"conditional_savepoint"`` also seeks to establish approximate
+          backwards compatibility with previous :class:`_orm.Session`
+          behavior, for applications that are not setting a specific mode. It
+          is recommended that one of the explicit settings be used.
+
+        * ``"create_savepoint"`` - the :class:`_orm.Session` will use
+          :meth:`_engine.Connection.begin_nested()` in all cases to create
+          its own transaction.  This transaction by its nature rides
+          "on top" of any existing transaction that's opened on the given
+          :class:`_engine.Connection`; if the underlying database and
+          the driver in use has full, non-broken support for SAVEPOINT, the
+          external transaction will remain unaffected throughout the
+          lifespan of the :class:`_orm.Session`.
+
+          The ``"create_savepoint"`` mode is the most useful for integrating
+          a :class:`_orm.Session` into a test suite where an externally
+          initiated transaction should remain unaffected; however, it relies
+          on proper SAVEPOINT support from the underlying driver and
+          database.
+
+          .. tip:: When using SQLite, the SQLite driver included through
+             Python 3.11 does not handle SAVEPOINTs correctly in all cases
+             without workarounds. See the sections
+             :ref:`pysqlite_serializable` and :ref:`aiosqlite_serializable`
+             for details on current workarounds.
+
+        * ``"control_fully"`` - the :class:`_orm.Session` will take
+          control of the given transaction as its own;
+          :meth:`_orm.Session.commit` will call ``.commit()`` on the
+          transaction, :meth:`_orm.Session.rollback` will call
+          ``.rollback()`` on the transaction, :meth:`_orm.Session.close` will
+          call ``.rollback()`` on the transaction.
+
+          .. tip:: This mode of use is equivalent to how SQLAlchemy 1.4 would
+             handle a :class:`_engine.Connection` given with an existing
+             SAVEPOINT (i.e. :meth:`_engine.Connection.begin_nested`); the
+             :class:`_orm.Session` would take full control of the existing
+             SAVEPOINT.
+
+        * ``"rollback_only"`` - the :class:`_orm.Session` will take control
+          of the given transaction for ``.rollback()`` calls only;
+          ``.commit()`` calls will not be propagated to the given
+          transaction. ``.close()`` calls will have no effect on the
+          given transaction.
+
+          .. tip:: This mode of use is equivalent to how SQLAlchemy 1.4 would
+             handle a :class:`_engine.Connection` given with an existing
+             regular database transaction (i.e.
+             :meth:`_engine.Connection.begin`); the :class:`_orm.Session`
+             would propagate :meth:`_orm.Session.rollback` calls to the
+             underlying transaction, but not :meth:`_orm.Session.commit` or
+             :meth:`_orm.Session.close` calls.
+
+        .. versionadded:: 2.0.0rc1
+
+        :param close_resets_only: Defaults to ``True``. Determines if
+          the session should reset itself after calling ``.close()``
+          or should pass into a no-longer-usable state, disabling re-use.
+
+        .. versionadded:: 2.0.22 added flag ``close_resets_only``.
+ A future SQLAlchemy version may change the default value of + this flag to ``False``. + + .. seealso:: + + :ref:`session_closing` - Detail on the semantics of + :meth:`_orm.Session.close` and :meth:`_orm.Session.reset`. + + """ # noqa + + # considering allowing the "autocommit" keyword to still be accepted + # as long as it's False, so that external test suites, oslo.db etc + # continue to function as the argument appears to be passed in lots + # of cases including in our own test suite + if autocommit: + raise sa_exc.ArgumentError( + "autocommit=True is no longer supported" + ) + self.identity_map = identity.WeakInstanceDict() + + if not future: + raise sa_exc.ArgumentError( + "The 'future' parameter passed to " + "Session() may only be set to True." + ) + + self._new = {} # InstanceState->object, strong refs object + self._deleted = {} # same + self.bind = bind + self.__binds = {} + self._flushing = False + self._warn_on_events = False + self._transaction = None + self._nested_transaction = None + self.hash_key = _new_sessionid() + self.autobegin = autobegin + self.autoflush = autoflush + self.expire_on_commit = expire_on_commit + self.enable_baked_queries = enable_baked_queries + + # the idea is that at some point NO_ARG will warn that in the future + # the default will switch to close_resets_only=False. + if close_resets_only or close_resets_only is _NoArg.NO_ARG: + self._close_state = _SessionCloseState.CLOSE_IS_RESET + else: + self._close_state = _SessionCloseState.ACTIVE + if ( + join_transaction_mode + and join_transaction_mode + not in JoinTransactionMode.__args__ # type: ignore + ): + raise sa_exc.ArgumentError( + f"invalid selection for join_transaction_mode: " + f'"{join_transaction_mode}"' + ) + self.join_transaction_mode = join_transaction_mode + + self.twophase = twophase + self._query_cls = query_cls if query_cls else query.Query + if info: + self.info.update(info) + + if binds is not None: + for key, bind in binds.items(): + self._add_bind(key, bind) + + _sessions[self.hash_key] = self + + # used by sqlalchemy.engine.util.TransactionalContext + _trans_context_manager: Optional[TransactionalContext] = None + + connection_callable: Optional[_ConnectionCallableProto] = None + + def __enter__(self: _S) -> _S: + return self + + def __exit__(self, type_: Any, value: Any, traceback: Any) -> None: + self.close() + + @contextlib.contextmanager + def _maker_context_manager(self: _S) -> Iterator[_S]: + with self: + with self.begin(): + yield self + + def in_transaction(self) -> bool: + """Return True if this :class:`_orm.Session` has begun a transaction. + + .. versionadded:: 1.4 + + .. seealso:: + + :attr:`_orm.Session.is_active` + + + """ + return self._transaction is not None + + def in_nested_transaction(self) -> bool: + """Return True if this :class:`_orm.Session` has begun a nested + transaction, e.g. SAVEPOINT. + + .. versionadded:: 1.4 + + """ + return self._nested_transaction is not None + + def get_transaction(self) -> Optional[SessionTransaction]: + """Return the current root transaction in progress, if any. + + .. versionadded:: 1.4 + + """ + trans = self._transaction + while trans is not None and trans._parent is not None: + trans = trans._parent + return trans + + def get_nested_transaction(self) -> Optional[SessionTransaction]: + """Return the current nested transaction in progress, if any. + + .. versionadded:: 1.4 + + """ + + return self._nested_transaction + + @util.memoized_property + def info(self) -> _InfoType: + """A user-modifiable dictionary. 
+ + The initial value of this dictionary can be populated using the + ``info`` argument to the :class:`.Session` constructor or + :class:`.sessionmaker` constructor or factory methods. The dictionary + here is always local to this :class:`.Session` and can be modified + independently of all other :class:`.Session` objects. + + """ + return {} + + def _autobegin_t(self, begin: bool = False) -> SessionTransaction: + if self._transaction is None: + if not begin and not self.autobegin: + raise sa_exc.InvalidRequestError( + "Autobegin is disabled on this Session; please call " + "session.begin() to start a new transaction" + ) + trans = SessionTransaction( + self, + ( + SessionTransactionOrigin.BEGIN + if begin + else SessionTransactionOrigin.AUTOBEGIN + ), + ) + assert self._transaction is trans + return trans + + return self._transaction + + def begin(self, nested: bool = False) -> SessionTransaction: + """Begin a transaction, or nested transaction, + on this :class:`.Session`, if one is not already begun. + + The :class:`_orm.Session` object features **autobegin** behavior, + so that normally it is not necessary to call the + :meth:`_orm.Session.begin` + method explicitly. However, it may be used in order to control + the scope of when the transactional state is begun. + + When used to begin the outermost transaction, an error is raised + if this :class:`.Session` is already inside of a transaction. + + :param nested: if True, begins a SAVEPOINT transaction and is + equivalent to calling :meth:`~.Session.begin_nested`. For + documentation on SAVEPOINT transactions, please see + :ref:`session_begin_nested`. + + :return: the :class:`.SessionTransaction` object. Note that + :class:`.SessionTransaction` + acts as a Python context manager, allowing :meth:`.Session.begin` + to be used in a "with" block. See :ref:`session_explicit_begin` for + an example. + + .. seealso:: + + :ref:`session_autobegin` + + :ref:`unitofwork_transaction` + + :meth:`.Session.begin_nested` + + + """ + + trans = self._transaction + if trans is None: + trans = self._autobegin_t(begin=True) + + if not nested: + return trans + + assert trans is not None + + if nested: + trans = trans._begin(nested=nested) + assert self._transaction is trans + self._nested_transaction = trans + else: + raise sa_exc.InvalidRequestError( + "A transaction is already begun on this Session." + ) + + return trans # needed for __enter__/__exit__ hook + + def begin_nested(self) -> SessionTransaction: + """Begin a "nested" transaction on this Session, e.g. SAVEPOINT. + + The target database(s) and associated drivers must support SQL + SAVEPOINT for this method to function correctly. + + For documentation on SAVEPOINT + transactions, please see :ref:`session_begin_nested`. + + :return: the :class:`.SessionTransaction` object. Note that + :class:`.SessionTransaction` acts as a context manager, allowing + :meth:`.Session.begin_nested` to be used in a "with" block. + See :ref:`session_begin_nested` for a usage example. + + .. seealso:: + + :ref:`session_begin_nested` + + :ref:`pysqlite_serializable` - special workarounds required + with the SQLite driver in order for SAVEPOINT to work + correctly. For asyncio use cases, see the section + :ref:`aiosqlite_serializable`. + + """ + return self.begin(nested=True) + + def rollback(self) -> None: + """Rollback the current transaction in progress. + + If no transaction is in progress, this method is a pass-through. 
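+
+        E.g., a minimal sketch of the typical pattern, where ``obj`` is an
+        assumed pending object shown for illustration only::
+
+            try:
+                session.add(obj)
+                session.commit()
+            except Exception:
+                session.rollback()
+                raise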
+ + The method always rolls back + the topmost database transaction, discarding any nested + transactions that may be in progress. + + .. seealso:: + + :ref:`session_rollback` + + :ref:`unitofwork_transaction` + + """ + if self._transaction is None: + pass + else: + self._transaction.rollback(_to_root=True) + + def commit(self) -> None: + """Flush pending changes and commit the current transaction. + + When the COMMIT operation is complete, all objects are fully + :term:`expired`, erasing their internal contents, which will be + automatically re-loaded when the objects are next accessed. In the + interim, these objects are in an expired state and will not function if + they are :term:`detached` from the :class:`.Session`. Additionally, + this re-load operation is not supported when using asyncio-oriented + APIs. The :paramref:`.Session.expire_on_commit` parameter may be used + to disable this behavior. + + When there is no transaction in place for the :class:`.Session`, + indicating that no operations were invoked on this :class:`.Session` + since the previous call to :meth:`.Session.commit`, the method will + begin and commit an internal-only "logical" transaction, that does not + normally affect the database unless pending flush changes were + detected, but will still invoke event handlers and object expiration + rules. + + The outermost database transaction is committed unconditionally, + automatically releasing any SAVEPOINTs in effect. + + .. seealso:: + + :ref:`session_committing` + + :ref:`unitofwork_transaction` + + :ref:`asyncio_orm_avoid_lazyloads` + + """ + trans = self._transaction + if trans is None: + trans = self._autobegin_t() + + trans.commit(_to_root=True) + + def prepare(self) -> None: + """Prepare the current transaction in progress for two phase commit. + + If no transaction is in progress, this method raises an + :exc:`~sqlalchemy.exc.InvalidRequestError`. + + Only root transactions of two phase sessions can be prepared. If the + current transaction is not such, an + :exc:`~sqlalchemy.exc.InvalidRequestError` is raised. + + """ + trans = self._transaction + if trans is None: + trans = self._autobegin_t() + + trans.prepare() + + def connection( + self, + bind_arguments: Optional[_BindArguments] = None, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + ) -> Connection: + r"""Return a :class:`_engine.Connection` object corresponding to this + :class:`.Session` object's transactional state. + + Either the :class:`_engine.Connection` corresponding to the current + transaction is returned, or if no transaction is in progress, a new + one is begun and the :class:`_engine.Connection` + returned (note that no + transactional state is established with the DBAPI until the first + SQL statement is emitted). + + Ambiguity in multi-bind or unbound :class:`.Session` objects can be + resolved through any of the optional keyword arguments. This + ultimately makes usage of the :meth:`.get_bind` method for resolution. + + :param bind_arguments: dictionary of bind arguments. May include + "mapper", "bind", "clause", other custom arguments that are passed + to :meth:`.Session.get_bind`. + + :param execution_options: a dictionary of execution options that will + be passed to :meth:`_engine.Connection.execution_options`, **when the + connection is first procured only**. If the connection is already + present within the :class:`.Session`, a warning is emitted and + the arguments are ignored. + + .. 
seealso:: + + :ref:`session_transaction_isolation` + + """ + + if bind_arguments: + bind = bind_arguments.pop("bind", None) + + if bind is None: + bind = self.get_bind(**bind_arguments) + else: + bind = self.get_bind() + + return self._connection_for_bind( + bind, + execution_options=execution_options, + ) + + def _connection_for_bind( + self, + engine: _SessionBind, + execution_options: Optional[CoreExecuteOptionsParameter] = None, + **kw: Any, + ) -> Connection: + TransactionalContext._trans_ctx_check(self) + + trans = self._transaction + if trans is None: + trans = self._autobegin_t() + return trans._connection_for_bind(engine, execution_options) + + @overload + def _execute_internal( + self, + statement: Executable, + params: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + _scalar_result: Literal[True] = ..., + ) -> Any: ... + + @overload + def _execute_internal( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + _scalar_result: bool = ..., + ) -> Result[Any]: ... + + def _execute_internal( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + _scalar_result: bool = False, + ) -> Any: + statement = coercions.expect(roles.StatementRole, statement) + + if not bind_arguments: + bind_arguments = {} + else: + bind_arguments = dict(bind_arguments) + + if ( + statement._propagate_attrs.get("compile_state_plugin", None) + == "orm" + ): + compile_state_cls = CompileState._get_plugin_class_for_plugin( + statement, "orm" + ) + if TYPE_CHECKING: + assert isinstance( + compile_state_cls, context.AbstractORMCompileState + ) + else: + compile_state_cls = None + bind_arguments.setdefault("clause", statement) + + execution_options = util.coerce_to_immutabledict(execution_options) + + if _parent_execute_state: + events_todo = _parent_execute_state._remaining_events() + else: + events_todo = self.dispatch.do_orm_execute + if _add_event: + events_todo = list(events_todo) + [_add_event] + + if events_todo: + if compile_state_cls is not None: + # for event handlers, do the orm_pre_session_exec + # pass ahead of the event handlers, so that things like + # .load_options, .update_delete_options etc. are populated. + # is_pre_event=True allows the hook to hold off on things + # it doesn't want to do twice, including autoflush as well + # as "pre fetch" for DML, etc. 
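+                # (a second, authoritative orm_pre_session_exec() pass is
+                # run further below, once all event hooks have been invoked)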
+ ( + statement, + execution_options, + ) = compile_state_cls.orm_pre_session_exec( + self, + statement, + params, + execution_options, + bind_arguments, + True, + ) + + orm_exec_state = ORMExecuteState( + self, + statement, + params, + execution_options, + bind_arguments, + compile_state_cls, + events_todo, + ) + for idx, fn in enumerate(events_todo): + orm_exec_state._starting_event_idx = idx + fn_result: Optional[Result[Any]] = fn(orm_exec_state) + if fn_result: + if _scalar_result: + return fn_result.scalar() + else: + return fn_result + + statement = orm_exec_state.statement + execution_options = orm_exec_state.local_execution_options + + if compile_state_cls is not None: + # now run orm_pre_session_exec() "for real". if there were + # event hooks, this will re-run the steps that interpret + # new execution_options into load_options / update_delete_options, + # which we assume the event hook might have updated. + # autoflush will also be invoked in this step if enabled. + ( + statement, + execution_options, + ) = compile_state_cls.orm_pre_session_exec( + self, + statement, + params, + execution_options, + bind_arguments, + False, + ) + + bind = self.get_bind(**bind_arguments) + + conn = self._connection_for_bind(bind) + + if _scalar_result and not compile_state_cls: + if TYPE_CHECKING: + params = cast(_CoreSingleExecuteParams, params) + return conn.scalar( + statement, params or {}, execution_options=execution_options + ) + + if compile_state_cls: + result: Result[Any] = compile_state_cls.orm_execute_statement( + self, + statement, + params or {}, + execution_options, + bind_arguments, + conn, + ) + else: + result = conn.execute( + statement, params or {}, execution_options=execution_options + ) + + if _scalar_result: + return result.scalar() + else: + return result + + @overload + def execute( + self, + statement: TypedReturnsRows[_T], + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> Result[_T]: ... + + @overload + def execute( + self, + statement: UpdateBase, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> CursorResult[Any]: ... + + @overload + def execute( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> Result[Any]: ... + + def execute( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + _parent_execute_state: Optional[Any] = None, + _add_event: Optional[Any] = None, + ) -> Result[Any]: + r"""Execute a SQL expression construct. + + Returns a :class:`_engine.Result` object representing + results of the statement execution. 
+ + E.g.:: + + from sqlalchemy import select + result = session.execute( + select(User).where(User.id == 5) + ) + + The API contract of :meth:`_orm.Session.execute` is similar to that + of :meth:`_engine.Connection.execute`, the :term:`2.0 style` version + of :class:`_engine.Connection`. + + .. versionchanged:: 1.4 the :meth:`_orm.Session.execute` method is + now the primary point of ORM statement execution when using + :term:`2.0 style` ORM usage. + + :param statement: + An executable statement (i.e. an :class:`.Executable` expression + such as :func:`_expression.select`). + + :param params: + Optional dictionary, or list of dictionaries, containing + bound parameter values. If a single dictionary, single-row + execution occurs; if a list of dictionaries, an + "executemany" will be invoked. The keys in each dictionary + must correspond to parameter names present in the statement. + + :param execution_options: optional dictionary of execution options, + which will be associated with the statement execution. This + dictionary can provide a subset of the options that are accepted + by :meth:`_engine.Connection.execution_options`, and may also + provide additional options understood only in an ORM context. + + .. seealso:: + + :ref:`orm_queryguide_execution_options` - ORM-specific execution + options + + :param bind_arguments: dictionary of additional arguments to determine + the bind. May include "mapper", "bind", or other custom arguments. + Contents of this dictionary are passed to the + :meth:`.Session.get_bind` method. + + :return: a :class:`_engine.Result` object. + + + """ + return self._execute_internal( + statement, + params, + execution_options=execution_options, + bind_arguments=bind_arguments, + _parent_execute_state=_parent_execute_state, + _add_event=_add_event, + ) + + @overload + def scalar( + self, + statement: TypedReturnsRows[Tuple[_T]], + params: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Optional[_T]: ... + + @overload + def scalar( + self, + statement: Executable, + params: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Any: ... + + def scalar( + self, + statement: Executable, + params: Optional[_CoreSingleExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> Any: + """Execute a statement and return a scalar result. + + Usage and parameters are the same as that of + :meth:`_orm.Session.execute`; the return result is a scalar Python + value. + + """ + + return self._execute_internal( + statement, + params, + execution_options=execution_options, + bind_arguments=bind_arguments, + _scalar_result=True, + **kw, + ) + + @overload + def scalars( + self, + statement: TypedReturnsRows[Tuple[_T]], + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> ScalarResult[_T]: ... + + @overload + def scalars( + self, + statement: Executable, + params: Optional[_CoreAnyExecuteParams] = None, + *, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + **kw: Any, + ) -> ScalarResult[Any]: ... 
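+
+    # A usage sketch for ``Session.scalars()`` (illustrative only; ``User``
+    # is an assumed mapped class, not defined in this module):
+    #
+    #     from sqlalchemy import select
+    #
+    #     names = session.scalars(select(User.name).order_by(User.id)).all()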
+
+    def scalars(
+        self,
+        statement: Executable,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> ScalarResult[Any]:
+        """Execute a statement and return the results as scalars.
+
+        Usage and parameters are the same as that of
+        :meth:`_orm.Session.execute`; the return result is a
+        :class:`_result.ScalarResult` filtering object which
+        will return single elements rather than :class:`_row.Row` objects.
+
+        :return: a :class:`_result.ScalarResult` object
+
+        .. versionadded:: 1.4.24 Added :meth:`_orm.Session.scalars`
+
+        .. versionadded:: 1.4.26 Added :meth:`_orm.scoped_session.scalars`
+
+        .. seealso::
+
+            :ref:`orm_queryguide_select_orm_entities` - contrasts the behavior
+            of :meth:`_orm.Session.execute` to :meth:`_orm.Session.scalars`
+
+        """
+
+        return self._execute_internal(
+            statement,
+            params=params,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+            _scalar_result=False,  # mypy appreciates this
+            **kw,
+        ).scalars()
+
+    def close(self) -> None:
+        """Close out the transactional resources and ORM objects used by this
+        :class:`_orm.Session`.
+
+        This expunges all ORM objects associated with this
+        :class:`_orm.Session`, ends any transaction in progress and
+        :term:`releases` any :class:`_engine.Connection` objects which this
+        :class:`_orm.Session` itself has checked out from associated
+        :class:`_engine.Engine` objects. The operation then leaves the
+        :class:`_orm.Session` in a state in which it may be used again.
+
+        .. tip::
+
+            In the default running mode the :meth:`_orm.Session.close`
+            method **does not prevent the Session from being used again**.
+            The :class:`_orm.Session` itself does not actually have a
+            distinct "closed" state; it merely means
+            the :class:`_orm.Session` will release all database connections
+            and ORM objects.
+
+            Setting the parameter :paramref:`_orm.Session.close_resets_only`
+            to ``False`` will instead make the ``close`` final, meaning that
+            any further action on the session will be forbidden.
+
+        .. versionchanged:: 1.4 The :meth:`.Session.close` method does not
+           immediately create a new :class:`.SessionTransaction` object;
+           instead, the new :class:`.SessionTransaction` is created only if
+           the :class:`.Session` is used again for a database operation.
+
+        .. seealso::
+
+            :ref:`session_closing` - detail on the semantics of
+            :meth:`_orm.Session.close` and :meth:`_orm.Session.reset`.
+
+            :meth:`_orm.Session.reset` - a similar method that behaves like
+            ``close()`` with the parameter
+            :paramref:`_orm.Session.close_resets_only` set to ``True``.
+
+        """
+        self._close_impl(invalidate=False)
+
+    def reset(self) -> None:
+        """Close out the transactional resources and ORM objects used by this
+        :class:`_orm.Session`, resetting the session to its initial state.
+
+        This method provides for the same "reset-only" behavior that the
+        :meth:`_orm.Session.close` method has provided historically, where the
+        state of the :class:`_orm.Session` is reset as though the object were
+        brand new, and ready to be used again.
+        This method may then be useful for :class:`_orm.Session` objects
+        which set :paramref:`_orm.Session.close_resets_only` to ``False``,
+        so that "reset only" behavior is still available.
+
+        .. versionadded:: 2.0.22
+
+        .. seealso::
+
+            :ref:`session_closing` - detail on the semantics of
+            :meth:`_orm.Session.close` and :meth:`_orm.Session.reset`.
+
+            :meth:`_orm.Session.close` - a similar method will additionally
+            prevent re-use of the Session when the parameter
+            :paramref:`_orm.Session.close_resets_only` is set to ``False``.
+        """
+        self._close_impl(invalidate=False, is_reset=True)
+
+    def invalidate(self) -> None:
+        """Close this Session, using connection invalidation.
+
+        This is a variant of :meth:`.Session.close` that will additionally
+        ensure that the :meth:`_engine.Connection.invalidate`
+        method will be called on each :class:`_engine.Connection` object
+        that is currently in use for a transaction (typically there is only
+        one connection unless the :class:`_orm.Session` is used with
+        multiple engines).
+
+        This can be called when the database is known to be in a state where
+        the connections are no longer safe to be used.
+
+        Below illustrates a scenario when using `gevent
+        <https://www.gevent.org>`_, which can produce ``Timeout`` exceptions
+        that may mean the underlying connection should be discarded::
+
+            import gevent
+
+            try:
+                sess = Session()
+                sess.add(User())
+                sess.commit()
+            except gevent.Timeout:
+                sess.invalidate()
+                raise
+            except:
+                sess.rollback()
+                raise
+
+        The method additionally does everything that :meth:`_orm.Session.close`
+        does, including that all ORM objects are expunged.
+
+        """
+        self._close_impl(invalidate=True)
+
+    def _close_impl(self, invalidate: bool, is_reset: bool = False) -> None:
+        if not is_reset and self._close_state is _SessionCloseState.ACTIVE:
+            self._close_state = _SessionCloseState.CLOSED
+        self.expunge_all()
+        if self._transaction is not None:
+            for transaction in self._transaction._iterate_self_and_parents():
+                transaction.close(invalidate)
+
+    def expunge_all(self) -> None:
+        """Remove all object instances from this ``Session``.
+
+        This is equivalent to calling ``expunge(obj)`` on all objects in this
+        ``Session``.
+
+        """
+
+        all_states = self.identity_map.all_states() + list(self._new)
+        self.identity_map._kill()
+        self.identity_map = identity.WeakInstanceDict()
+        self._new = {}
+        self._deleted = {}
+
+        statelib.InstanceState._detach_states(all_states, self)
+
+    def _add_bind(self, key: _SessionBindKey, bind: _SessionBind) -> None:
+        try:
+            insp = inspect(key)
+        except sa_exc.NoInspectionAvailable as err:
+            if not isinstance(key, type):
+                raise sa_exc.ArgumentError(
+                    "Not an acceptable bind target: %s" % key
+                ) from err
+            else:
+                self.__binds[key] = bind
+        else:
+            if TYPE_CHECKING:
+                assert isinstance(insp, Inspectable)
+
+            if isinstance(insp, TableClause):
+                self.__binds[insp] = bind
+            elif insp_is_mapper(insp):
+                self.__binds[insp.class_] = bind
+                for _selectable in insp._all_tables:
+                    self.__binds[_selectable] = bind
+            else:
+                raise sa_exc.ArgumentError(
+                    "Not an acceptable bind target: %s" % key
+                )
+
+    def bind_mapper(
+        self, mapper: _EntityBindKey[_O], bind: _SessionBind
+    ) -> None:
+        """Associate a :class:`_orm.Mapper` or arbitrary Python class with a
+        "bind", e.g. an :class:`_engine.Engine` or
+        :class:`_engine.Connection`.
+
+        The given entity is added to a lookup used by the
+        :meth:`.Session.get_bind` method.
+
+        :param mapper: a :class:`_orm.Mapper` object,
+         or an instance of a mapped
+         class, or any Python class that is the base of a set of mapped
+         classes.
+
+        :param bind: an :class:`_engine.Engine` or :class:`_engine.Connection`
+         object.
+
+        .. 
seealso::
+
+            :ref:`session_partitioning`
+
+            :paramref:`.Session.binds`
+
+            :meth:`.Session.bind_table`
+
+
+        """
+        self._add_bind(mapper, bind)
+
+    def bind_table(self, table: TableClause, bind: _SessionBind) -> None:
+        """Associate a :class:`_schema.Table` with a "bind", e.g. an
+        :class:`_engine.Engine`
+        or :class:`_engine.Connection`.
+
+        The given :class:`_schema.Table` is added to a lookup used by the
+        :meth:`.Session.get_bind` method.
+
+        :param table: a :class:`_schema.Table` object,
+         which is typically the target
+         of an ORM mapping, or is present within a selectable that is
+         mapped.
+
+        :param bind: an :class:`_engine.Engine` or :class:`_engine.Connection`
+         object.
+
+        .. seealso::
+
+            :ref:`session_partitioning`
+
+            :paramref:`.Session.binds`
+
+            :meth:`.Session.bind_mapper`
+
+
+        """
+        self._add_bind(table, bind)
+
+    def get_bind(
+        self,
+        mapper: Optional[_EntityBindKey[_O]] = None,
+        *,
+        clause: Optional[ClauseElement] = None,
+        bind: Optional[_SessionBind] = None,
+        _sa_skip_events: Optional[bool] = None,
+        _sa_skip_for_implicit_returning: bool = False,
+        **kw: Any,
+    ) -> Union[Engine, Connection]:
+        """Return a "bind" to which this :class:`.Session` is bound.
+
+        The "bind" is usually an instance of :class:`_engine.Engine`,
+        except in the case where the :class:`.Session` has been
+        explicitly bound directly to a :class:`_engine.Connection`.
+
+        For a multiply-bound or unbound :class:`.Session`, the
+        ``mapper`` or ``clause`` arguments are used to determine the
+        appropriate bind to return.
+
+        Note that the "mapper" argument is usually present
+        when :meth:`.Session.get_bind` is called via an ORM
+        operation such as a :meth:`.Session.query`, each
+        individual INSERT/UPDATE/DELETE operation within a
+        :meth:`.Session.flush` call, etc.
+
+        The order of resolution is:
+
+        1. if mapper given and :paramref:`.Session.binds` is present,
+           locate a bind based first on the mapper in use, then
+           on the mapped class in use, then on any base classes that are
+           present in the ``__mro__`` of the mapped class, from more specific
+           superclasses to more general.
+        2. if clause given and ``Session.binds`` is present,
+           locate a bind based on :class:`_schema.Table` objects
+           found in the given clause present in ``Session.binds``.
+        3. if ``Session.bind`` is present, return that.
+        4. if clause given, attempt to return a bind
+           linked to the :class:`_schema.MetaData` ultimately
+           associated with the clause.
+        5. if mapper given, attempt to return a bind
+           linked to the :class:`_schema.MetaData` ultimately
+           associated with the :class:`_schema.Table` or other
+           selectable to which the mapper is mapped.
+        6. No bind can be found, :exc:`~sqlalchemy.exc.UnboundExecutionError`
+           is raised.
+
+        Note that the :meth:`.Session.get_bind` method can be overridden on
+        a user-defined subclass of :class:`.Session` to provide any kind
+        of bind resolution scheme. See the example at
+        :ref:`session_custom_partitioning`.
+
+        :param mapper:
+          Optional mapped class or corresponding :class:`_orm.Mapper` instance.
+          The bind can be derived from a :class:`_orm.Mapper` first by
+          consulting the "binds" map associated with this :class:`.Session`,
+          and secondly by consulting the :class:`_schema.MetaData` associated
+          with the :class:`_schema.Table` to which the :class:`_orm.Mapper` is
+          mapped for a bind.
+
+        :param clause:
+          A :class:`_expression.ClauseElement` (i.e.
+          :func:`_expression.select`,
+          :func:`_expression.text`,
+          etc.). 
If the ``mapper`` argument is not present or could not + produce a bind, the given expression construct will be searched + for a bound element, typically a :class:`_schema.Table` + associated with + bound :class:`_schema.MetaData`. + + .. seealso:: + + :ref:`session_partitioning` + + :paramref:`.Session.binds` + + :meth:`.Session.bind_mapper` + + :meth:`.Session.bind_table` + + """ + + # this function is documented as a subclassing hook, so we have + # to call this method even if the return is simple + if bind: + return bind + elif not self.__binds and self.bind: + # simplest and most common case, we have a bind and no + # per-mapper/table binds, we're done + return self.bind + + # we don't have self.bind and either have self.__binds + # or we don't have self.__binds (which is legacy). Look at the + # mapper and the clause + if mapper is None and clause is None: + if self.bind: + return self.bind + else: + raise sa_exc.UnboundExecutionError( + "This session is not bound to a single Engine or " + "Connection, and no context was provided to locate " + "a binding." + ) + + # look more closely at the mapper. + if mapper is not None: + try: + inspected_mapper = inspect(mapper) + except sa_exc.NoInspectionAvailable as err: + if isinstance(mapper, type): + raise exc.UnmappedClassError(mapper) from err + else: + raise + else: + inspected_mapper = None + + # match up the mapper or clause in the __binds + if self.__binds: + # matching mappers and selectables to entries in the + # binds dictionary; supported use case. + if inspected_mapper: + for cls in inspected_mapper.class_.__mro__: + if cls in self.__binds: + return self.__binds[cls] + if clause is None: + clause = inspected_mapper.persist_selectable + + if clause is not None: + plugin_subject = clause._propagate_attrs.get( + "plugin_subject", None + ) + + if plugin_subject is not None: + for cls in plugin_subject.mapper.class_.__mro__: + if cls in self.__binds: + return self.__binds[cls] + + for obj in visitors.iterate(clause): + if obj in self.__binds: + if TYPE_CHECKING: + assert isinstance(obj, Table) + return self.__binds[obj] + + # none of the __binds matched, but we have a fallback bind. + # return that + if self.bind: + return self.bind + + context = [] + if inspected_mapper is not None: + context.append(f"mapper {inspected_mapper}") + if clause is not None: + context.append("SQL expression") + + raise sa_exc.UnboundExecutionError( + f"Could not locate a bind configured on " + f'{", ".join(context)} or this Session.' + ) + + @overload + def query(self, _entity: _EntityType[_O]) -> Query[_O]: ... + + @overload + def query( + self, _colexpr: TypedColumnsClauseRole[_T] + ) -> RowReturningQuery[Tuple[_T]]: ... + + # START OVERLOADED FUNCTIONS self.query RowReturningQuery 2-8 + + # code within this block is **programmatically, + # statically generated** by tools/generate_tuple_map_overloads.py + + @overload + def query( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] + ) -> RowReturningQuery[Tuple[_T0, _T1]]: ... + + @overload + def query( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2]]: ... + + @overload + def query( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3]]: ... + + @overload + def query( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... 
+
+    @overload
+    def query(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ...
+
+    @overload
+    def query(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+        __ent6: _TCCA[_T6],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ...
+
+    @overload
+    def query(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+        __ent6: _TCCA[_T6],
+        __ent7: _TCCA[_T7],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ...
+
+    # END OVERLOADED FUNCTIONS self.query
+
+    @overload
+    def query(
+        self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any
+    ) -> Query[Any]: ...
+
+    def query(
+        self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any
+    ) -> Query[Any]:
+        """Return a new :class:`_query.Query` object corresponding to this
+        :class:`_orm.Session`.
+
+        Note that the :class:`_query.Query` object is legacy as of
+        SQLAlchemy 2.0; the :func:`_sql.select` construct is now used
+        to construct ORM queries.
+
+        .. seealso::
+
+            :ref:`unified_tutorial`
+
+            :ref:`queryguide_toplevel`
+
+            :ref:`query_api_toplevel` - legacy API doc
+
+        """
+
+        return self._query_cls(entities, self, **kwargs)
+
+    def _identity_lookup(
+        self,
+        mapper: Mapper[_O],
+        primary_key_identity: Union[Any, Tuple[Any, ...]],
+        identity_token: Any = None,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+        lazy_loaded_from: Optional[InstanceState[Any]] = None,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+    ) -> Union[Optional[_O], LoaderCallableStatus]:
+        """Locate an object in the identity map.
+
+        Given a primary key identity, constructs an identity key and then
+        looks in the session's identity map. If present, the object may
+        be run through unexpiration rules (e.g. load unloaded attributes,
+        check if it was deleted).
+
+        e.g.::
+
+            obj = session._identity_lookup(inspect(SomeClass), (1, ))
+
+        :param mapper: mapper in use
+        :param primary_key_identity: the primary key we are searching for, as
+         a tuple.
+        :param identity_token: identity token that should be used to create
+         the identity key. Used as is, however overriding subclasses can
+         repurpose this in order to interpret the value in a special way,
+         such as if None then look among multiple target tokens.
+        :param passive: passive load flag passed to
+         :func:`.loading.get_from_identity`, which impacts the behavior if
+         the object is found; the object may be validated and/or unexpired
+         if the flag allows for SQL to be emitted.
+        :param lazy_loaded_from: an :class:`.InstanceState` that is
+         specifically asking for this identity as a related identity. Used
+         for sharding schemes where there is a correspondence between an object
+         and a related object being lazy-loaded (or otherwise
+         relationship-loaded).
+
+        :return: None if the object is not found in the identity map, *or*
+         if the object was unexpired and found to have been deleted.
+         If passive flags disallow SQL and the object is expired, returns
+         PASSIVE_NO_RESULT. In all other cases the instance is returned.
+
+        .. 
versionchanged:: 1.4.0 - the :meth:`.Session._identity_lookup` + method was moved from :class:`_query.Query` to + :class:`.Session`, to avoid having to instantiate the + :class:`_query.Query` object. + + + """ + + key = mapper.identity_key_from_primary_key( + primary_key_identity, identity_token=identity_token + ) + + # work around: https://github.com/python/typing/discussions/1143 + return_value = loading.get_from_identity(self, mapper, key, passive) + return return_value + + @util.non_memoized_property + @contextlib.contextmanager + def no_autoflush(self) -> Iterator[Session]: + """Return a context manager that disables autoflush. + + e.g.:: + + with session.no_autoflush: + + some_object = SomeClass() + session.add(some_object) + # won't autoflush + some_object.related_thing = session.query(SomeRelated).first() + + Operations that proceed within the ``with:`` block + will not be subject to flushes occurring upon query + access. This is useful when initializing a series + of objects which involve existing database queries, + where the uncompleted object should not yet be flushed. + + """ + autoflush = self.autoflush + self.autoflush = False + try: + yield self + finally: + self.autoflush = autoflush + + @util.langhelpers.tag_method_for_warnings( + "This warning originated from the Session 'autoflush' process, " + "which was invoked automatically in response to a user-initiated " + "operation.", + sa_exc.SAWarning, + ) + def _autoflush(self) -> None: + if self.autoflush and not self._flushing: + try: + self.flush() + except sa_exc.StatementError as e: + # note we are reraising StatementError as opposed to + # raising FlushError with "chaining" to remain compatible + # with code that catches StatementError, IntegrityError, + # etc. + e.add_detail( + "raised as a result of Query-invoked autoflush; " + "consider using a session.no_autoflush block if this " + "flush is occurring prematurely" + ) + raise e.with_traceback(sys.exc_info()[2]) + + def refresh( + self, + instance: object, + attribute_names: Optional[Iterable[str]] = None, + with_for_update: ForUpdateParameter = None, + ) -> None: + """Expire and refresh attributes on the given instance. + + The selected attributes will first be expired as they would when using + :meth:`_orm.Session.expire`; then a SELECT statement will be issued to + the database to refresh column-oriented attributes with the current + value available in the current transaction. + + :func:`_orm.relationship` oriented attributes will also be immediately + loaded if they were already eagerly loaded on the object, using the + same eager loading strategy that they were loaded with originally. + + .. versionadded:: 1.4 - the :meth:`_orm.Session.refresh` method + can also refresh eagerly loaded attributes. + + :func:`_orm.relationship` oriented attributes that would normally + load using the ``select`` (or "lazy") loader strategy will also + load **if they are named explicitly in the attribute_names + collection**, emitting a SELECT statement for the attribute using the + ``immediate`` loader strategy. If lazy-loaded relationships are not + named in :paramref:`_orm.Session.refresh.attribute_names`, then + they remain as "lazy loaded" attributes and are not implicitly + refreshed. + + .. versionchanged:: 2.0.4 The :meth:`_orm.Session.refresh` method + will now refresh lazy-loaded :func:`_orm.relationship` oriented + attributes for those which are named explicitly in the + :paramref:`_orm.Session.refresh.attribute_names` collection. + + .. 
tip::
+
+            While the :meth:`_orm.Session.refresh` method is capable of
+            refreshing both column and relationship oriented attributes, its
+            primary focus is on refreshing of local column-oriented attributes
+            on a single instance. For more open ended "refresh" functionality,
+            including the ability to refresh the attributes on many objects at
+            once while having explicit control over relationship loader
+            strategies, use the
+            :ref:`populate existing <orm_queryguide_populate_existing>` feature
+            instead.
+
+        Note that a highly isolated transaction will return the same values as
+        were previously read in that same transaction, regardless of changes
+        in database state outside of that transaction. Refreshing
+        attributes usually only makes sense at the start of a transaction
+        where database rows have not yet been accessed.
+
+        :param attribute_names: optional. An iterable collection of
+          string attribute names indicating a subset of attributes to
+          be refreshed.
+
+        :param with_for_update: optional boolean ``True`` indicating FOR UPDATE
+          should be used, or may be a dictionary containing flags to
+          indicate a more specific set of FOR UPDATE flags for the SELECT;
+          flags should match the parameters of
+          :meth:`_query.Query.with_for_update`.
+          Supersedes the :paramref:`.Session.refresh.lockmode` parameter.
+
+        .. seealso::
+
+            :ref:`session_expire` - introductory material
+
+            :meth:`.Session.expire`
+
+            :meth:`.Session.expire_all`
+
+            :ref:`orm_queryguide_populate_existing` - allows any ORM query
+            to refresh objects as they would be loaded normally.
+
+        """
+        try:
+            state = attributes.instance_state(instance)
+        except exc.NO_STATE as err:
+            raise exc.UnmappedInstanceError(instance) from err
+
+        self._expire_state(state, attribute_names)
+
+        # this autoflush previously used to occur as a secondary effect
+        # of the load_on_ident below. Meaning we'd organize the SELECT
+        # based on current DB pks, then flush, then if pks changed in that
+        # flush, crash. This was unticketed but discovered as part of
+        # #8703. So here, autoflush up front, don't autoflush inside
+        # load_on_ident.
+        self._autoflush()
+
+        if with_for_update == {}:
+            raise sa_exc.ArgumentError(
+                "with_for_update should be the boolean value "
+                "True, or a dictionary with options. "
+                "A blank dictionary is ambiguous."
+            )
+
+        with_for_update = ForUpdateArg._from_argument(with_for_update)
+
+        stmt: Select[Any] = sql.select(object_mapper(instance))
+        if (
+            loading.load_on_ident(
+                self,
+                stmt,
+                state.key,
+                refresh_state=state,
+                with_for_update=with_for_update,
+                only_load_props=attribute_names,
+                require_pk_cols=True,
+                # technically unnecessary as we just did autoflush
+                # above, however removes the additional unnecessary
+                # call to _autoflush()
+                no_autoflush=True,
+                is_user_refresh=True,
+            )
+            is None
+        ):
+            raise sa_exc.InvalidRequestError(
+                "Could not refresh instance '%s'" % instance_str(instance)
+            )
+
+    def expire_all(self) -> None:
+        """Expires all persistent instances within this Session.
+
+        When any attribute on a persistent instance is next accessed,
+        a query will be issued using the
+        :class:`.Session` object's current transactional context in order to
+        load all expired attributes for the given instance. Note that
+        a highly isolated transaction will return the same values as were
+        previously read in that same transaction, regardless of changes
+        in database state outside of that transaction.
+
+        To expire individual objects and individual attributes
+        on those objects, use :meth:`Session.expire`. 
+ + The :class:`.Session` object's default behavior is to + expire all state whenever the :meth:`Session.rollback` + or :meth:`Session.commit` methods are called, so that new + state can be loaded for the new transaction. For this reason, + calling :meth:`Session.expire_all` is not usually needed, + assuming the transaction is isolated. + + .. seealso:: + + :ref:`session_expire` - introductory material + + :meth:`.Session.expire` + + :meth:`.Session.refresh` + + :meth:`_orm.Query.populate_existing` + + """ + for state in self.identity_map.all_states(): + state._expire(state.dict, self.identity_map._modified) + + def expire( + self, instance: object, attribute_names: Optional[Iterable[str]] = None + ) -> None: + """Expire the attributes on an instance. + + Marks the attributes of an instance as out of date. When an expired + attribute is next accessed, a query will be issued to the + :class:`.Session` object's current transactional context in order to + load all expired attributes for the given instance. Note that + a highly isolated transaction will return the same values as were + previously read in that same transaction, regardless of changes + in database state outside of that transaction. + + To expire all objects in the :class:`.Session` simultaneously, + use :meth:`Session.expire_all`. + + The :class:`.Session` object's default behavior is to + expire all state whenever the :meth:`Session.rollback` + or :meth:`Session.commit` methods are called, so that new + state can be loaded for the new transaction. For this reason, + calling :meth:`Session.expire` only makes sense for the specific + case that a non-ORM SQL statement was emitted in the current + transaction. + + :param instance: The instance to be refreshed. + :param attribute_names: optional list of string attribute names + indicating a subset of attributes to be expired. + + .. seealso:: + + :ref:`session_expire` - introductory material + + :meth:`.Session.expire` + + :meth:`.Session.refresh` + + :meth:`_orm.Query.populate_existing` + + """ + try: + state = attributes.instance_state(instance) + except exc.NO_STATE as err: + raise exc.UnmappedInstanceError(instance) from err + self._expire_state(state, attribute_names) + + def _expire_state( + self, + state: InstanceState[Any], + attribute_names: Optional[Iterable[str]], + ) -> None: + self._validate_persistent(state) + if attribute_names: + state._expire_attributes(state.dict, attribute_names) + else: + # pre-fetch the full cascade since the expire is going to + # remove associations + cascaded = list( + state.manager.mapper.cascade_iterator("refresh-expire", state) + ) + self._conditional_expire(state) + for o, m, st_, dct_ in cascaded: + self._conditional_expire(st_) + + def _conditional_expire( + self, state: InstanceState[Any], autoflush: Optional[bool] = None + ) -> None: + """Expire a state if persistent, else expunge if pending""" + + if state.key: + state._expire(state.dict, self.identity_map._modified) + elif state in self._new: + self._new.pop(state) + state._detach(self) + + def expunge(self, instance: object) -> None: + """Remove the `instance` from this ``Session``. + + This will free all internal references to the instance. Cascading + will be applied according to the *expunge* cascade rule. 
+ + """ + try: + state = attributes.instance_state(instance) + except exc.NO_STATE as err: + raise exc.UnmappedInstanceError(instance) from err + if state.session_id is not self.hash_key: + raise sa_exc.InvalidRequestError( + "Instance %s is not present in this Session" % state_str(state) + ) + + cascaded = list( + state.manager.mapper.cascade_iterator("expunge", state) + ) + self._expunge_states([state] + [st_ for o, m, st_, dct_ in cascaded]) + + def _expunge_states( + self, states: Iterable[InstanceState[Any]], to_transient: bool = False + ) -> None: + for state in states: + if state in self._new: + self._new.pop(state) + elif self.identity_map.contains_state(state): + self.identity_map.safe_discard(state) + self._deleted.pop(state, None) + elif self._transaction: + # state is "detached" from being deleted, but still present + # in the transaction snapshot + self._transaction._deleted.pop(state, None) + statelib.InstanceState._detach_states( + states, self, to_transient=to_transient + ) + + def _register_persistent(self, states: Set[InstanceState[Any]]) -> None: + """Register all persistent objects from a flush. + + This is used both for pending objects moving to the persistent + state as well as already persistent objects. + + """ + + pending_to_persistent = self.dispatch.pending_to_persistent or None + for state in states: + mapper = _state_mapper(state) + + # prevent against last minute dereferences of the object + obj = state.obj() + if obj is not None: + instance_key = mapper._identity_key_from_state(state) + + if ( + _none_set.intersection(instance_key[1]) + and not mapper.allow_partial_pks + or _none_set.issuperset(instance_key[1]) + ): + raise exc.FlushError( + "Instance %s has a NULL identity key. If this is an " + "auto-generated value, check that the database table " + "allows generation of new primary key values, and " + "that the mapped Column object is configured to " + "expect these generated values. Ensure also that " + "this flush() is not occurring at an inappropriate " + "time, such as within a load() event." + % state_str(state) + ) + + if state.key is None: + state.key = instance_key + elif state.key != instance_key: + # primary key switch. use safe_discard() in case another + # state has already replaced this one in the identity + # map (see test/orm/test_naturalpks.py ReversePKsTest) + self.identity_map.safe_discard(state) + trans = self._transaction + assert trans is not None + if state in trans._key_switches: + orig_key = trans._key_switches[state][0] + else: + orig_key = state.key + trans._key_switches[state] = ( + orig_key, + instance_key, + ) + state.key = instance_key + + # there can be an existing state in the identity map + # that is replaced when the primary keys of two instances + # are swapped; see test/orm/test_naturalpks.py -> test_reverse + old = self.identity_map.replace(state) + if ( + old is not None + and mapper._identity_key_from_state(old) == instance_key + and old.obj() is not None + ): + util.warn( + "Identity map already had an identity for %s, " + "replacing it with newly flushed object. Are there " + "load operations occurring inside of an event handler " + "within the flush?" 
% (instance_key,) + ) + state._orphaned_outside_of_session = False + + statelib.InstanceState._commit_all_states( + ((state, state.dict) for state in states), self.identity_map + ) + + self._register_altered(states) + + if pending_to_persistent is not None: + for state in states.intersection(self._new): + pending_to_persistent(self, state) + + # remove from new last, might be the last strong ref + for state in set(states).intersection(self._new): + self._new.pop(state) + + def _register_altered(self, states: Iterable[InstanceState[Any]]) -> None: + if self._transaction: + for state in states: + if state in self._new: + self._transaction._new[state] = True + else: + self._transaction._dirty[state] = True + + def _remove_newly_deleted( + self, states: Iterable[InstanceState[Any]] + ) -> None: + persistent_to_deleted = self.dispatch.persistent_to_deleted or None + for state in states: + if self._transaction: + self._transaction._deleted[state] = True + + if persistent_to_deleted is not None: + # get a strong reference before we pop out of + # self._deleted + obj = state.obj() # noqa + + self.identity_map.safe_discard(state) + self._deleted.pop(state, None) + state._deleted = True + # can't call state._detach() here, because this state + # is still in the transaction snapshot and needs to be + # tracked as part of that + if persistent_to_deleted is not None: + persistent_to_deleted(self, state) + + def add(self, instance: object, _warn: bool = True) -> None: + """Place an object into this :class:`_orm.Session`. + + Objects that are in the :term:`transient` state when passed to the + :meth:`_orm.Session.add` method will move to the + :term:`pending` state, until the next flush, at which point they + will move to the :term:`persistent` state. + + Objects that are in the :term:`detached` state when passed to the + :meth:`_orm.Session.add` method will move to the :term:`persistent` + state directly. + + If the transaction used by the :class:`_orm.Session` is rolled back, + objects which were transient when they were passed to + :meth:`_orm.Session.add` will be moved back to the + :term:`transient` state, and will no longer be present within this + :class:`_orm.Session`. + + .. seealso:: + + :meth:`_orm.Session.add_all` + + :ref:`session_adding` - at :ref:`session_basics` + + """ + if _warn and self._warn_on_events: + self._flush_warning("Session.add()") + + try: + state = attributes.instance_state(instance) + except exc.NO_STATE as err: + raise exc.UnmappedInstanceError(instance) from err + + self._save_or_update_state(state) + + def add_all(self, instances: Iterable[object]) -> None: + """Add the given collection of instances to this :class:`_orm.Session`. + + See the documentation for :meth:`_orm.Session.add` for a general + behavioral description. + + .. seealso:: + + :meth:`_orm.Session.add` + + :ref:`session_adding` - at :ref:`session_basics` + + """ + + if self._warn_on_events: + self._flush_warning("Session.add_all()") + + for instance in instances: + self.add(instance, _warn=False) + + def _save_or_update_state(self, state: InstanceState[Any]) -> None: + state._orphaned_outside_of_session = False + self._save_or_update_impl(state) + + mapper = _state_mapper(state) + for o, m, st_, dct_ in mapper.cascade_iterator( + "save-update", state, halt_on=self._contains_state + ): + self._save_or_update_impl(st_) + + def delete(self, instance: object) -> None: + """Mark an instance as deleted. 
+ + The object is assumed to be either :term:`persistent` or + :term:`detached` when passed; after the method is called, the + object will remain in the :term:`persistent` state until the next + flush proceeds. During this time, the object will also be a member + of the :attr:`_orm.Session.deleted` collection. + + When the next flush proceeds, the object will move to the + :term:`deleted` state, indicating a ``DELETE`` statement was emitted + for its row within the current transaction. When the transaction + is successfully committed, + the deleted object is moved to the :term:`detached` state and is + no longer present within this :class:`_orm.Session`. + + .. seealso:: + + :ref:`session_deleting` - at :ref:`session_basics` + + """ + if self._warn_on_events: + self._flush_warning("Session.delete()") + + try: + state = attributes.instance_state(instance) + except exc.NO_STATE as err: + raise exc.UnmappedInstanceError(instance) from err + + self._delete_impl(state, instance, head=True) + + def _delete_impl( + self, state: InstanceState[Any], obj: object, head: bool + ) -> None: + if state.key is None: + if head: + raise sa_exc.InvalidRequestError( + "Instance '%s' is not persisted" % state_str(state) + ) + else: + return + + to_attach = self._before_attach(state, obj) + + if state in self._deleted: + return + + self.identity_map.add(state) + + if to_attach: + self._after_attach(state, obj) + + if head: + # grab the cascades before adding the item to the deleted list + # so that autoflush does not delete the item + # the strong reference to the instance itself is significant here + cascade_states = list( + state.manager.mapper.cascade_iterator("delete", state) + ) + else: + cascade_states = None + + self._deleted[state] = obj + + if head: + if TYPE_CHECKING: + assert cascade_states is not None + for o, m, st_, dct_ in cascade_states: + self._delete_impl(st_, o, False) + + def get( + self, + entity: _EntityBindKey[_O], + ident: _PKIdentityArgument, + *, + options: Optional[Sequence[ORMOption]] = None, + populate_existing: bool = False, + with_for_update: ForUpdateParameter = None, + identity_token: Optional[Any] = None, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + ) -> Optional[_O]: + """Return an instance based on the given primary key identifier, + or ``None`` if not found. + + E.g.:: + + my_user = session.get(User, 5) + + some_object = session.get(VersionedFoo, (5, 10)) + + some_object = session.get( + VersionedFoo, + {"id": 5, "version_id": 10} + ) + + .. versionadded:: 1.4 Added :meth:`_orm.Session.get`, which is moved + from the now legacy :meth:`_orm.Query.get` method. + + :meth:`_orm.Session.get` is special in that it provides direct + access to the identity map of the :class:`.Session`. + If the given primary key identifier is present + in the local identity map, the object is returned + directly from this collection and no SQL is emitted, + unless the object has been marked fully expired. + If not present, + a SELECT is performed in order to locate the object. + + :meth:`_orm.Session.get` also will perform a check if + the object is present in the identity map and + marked as expired - a SELECT + is emitted to refresh the object as well as to + ensure that the row is still present. + If not, :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised. + + :param entity: a mapped class or :class:`.Mapper` indicating the + type of entity to be loaded. 
+
+        :param ident: A scalar, tuple, or dictionary representing the
+         primary key. For a composite (e.g. multiple column) primary key,
+         a tuple or dictionary should be passed.
+
+         For a single-column primary key, the scalar calling form is typically
+         the most expedient. If the primary key of a row is the value "5",
+         the call looks like::
+
+            my_object = session.get(SomeClass, 5)
+
+         The tuple form contains primary key values typically in
+         the order in which they correspond to the mapped
+         :class:`_schema.Table`
+         object's primary key columns, or if the
+         :paramref:`_orm.Mapper.primary_key` configuration parameter were
+         used, in
+         the order used for that parameter. For example, if the primary key
+         of a row is represented by the integer
+         digits "5, 10" the call would look like::
+
+             my_object = session.get(SomeClass, (5, 10))
+
+         The dictionary form should include as keys the mapped attribute names
+         corresponding to each element of the primary key. If the mapped class
+         has the attributes ``id``, ``version_id`` as the attributes which
+         store the object's primary key value, the call would look like::
+
+             my_object = session.get(SomeClass, {"id": 5, "version_id": 10})
+
+        :param options: optional sequence of loader options which will be
+         applied to the query, if one is emitted.
+
+        :param populate_existing: causes the method to unconditionally emit
+         a SQL query and refresh the object with the newly loaded data,
+         regardless of whether or not the object is already present.
+
+        :param with_for_update: optional boolean ``True`` indicating FOR UPDATE
+         should be used, or may be a dictionary containing flags to
+         indicate a more specific set of FOR UPDATE flags for the SELECT;
+         flags should match the parameters of
+         :meth:`_query.Query.with_for_update`.
+         Supersedes the :paramref:`.Session.refresh.lockmode` parameter.
+
+        :param execution_options: optional dictionary of execution options,
+         which will be associated with the query execution if one is emitted.
+         This dictionary can provide a subset of the options that are
+         accepted by :meth:`_engine.Connection.execution_options`, and may
+         also provide additional options understood only in an ORM context.
+
+         .. versionadded:: 1.4.29
+
+         .. seealso::
+
+            :ref:`orm_queryguide_execution_options` - ORM-specific execution
+            options
+
+        :param bind_arguments: dictionary of additional arguments to determine
+         the bind. May include "mapper", "bind", or other custom arguments.
+         Contents of this dictionary are passed to the
+         :meth:`.Session.get_bind` method.
+
+         .. versionadded:: 2.0.0rc1
+
+        :return: The object instance, or ``None``.
+
+        """
+        return self._get_impl(
+            entity,
+            ident,
+            loading.load_on_pk_identity,
+            options=options,
+            populate_existing=populate_existing,
+            with_for_update=with_for_update,
+            identity_token=identity_token,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+        )
+
+    def get_one(
+        self,
+        entity: _EntityBindKey[_O],
+        ident: _PKIdentityArgument,
+        *,
+        options: Optional[Sequence[ORMOption]] = None,
+        populate_existing: bool = False,
+        with_for_update: ForUpdateParameter = None,
+        identity_token: Optional[Any] = None,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+    ) -> _O:
+        """Return exactly one instance based on the given primary key
+        identifier, or raise an exception if not found.
+
+        Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query
+        selects no rows. 
+ + For a detailed documentation of the arguments see the + method :meth:`.Session.get`. + + .. versionadded:: 2.0.22 + + :return: The object instance. + + .. seealso:: + + :meth:`.Session.get` - equivalent method that instead + returns ``None`` if no row was found with the provided primary + key + + """ + + instance = self.get( + entity, + ident, + options=options, + populate_existing=populate_existing, + with_for_update=with_for_update, + identity_token=identity_token, + execution_options=execution_options, + bind_arguments=bind_arguments, + ) + + if instance is None: + raise sa_exc.NoResultFound( + "No row was found when one was required" + ) + + return instance + + def _get_impl( + self, + entity: _EntityBindKey[_O], + primary_key_identity: _PKIdentityArgument, + db_load_fn: Callable[..., _O], + *, + options: Optional[Sequence[ExecutableOption]] = None, + populate_existing: bool = False, + with_for_update: ForUpdateParameter = None, + identity_token: Optional[Any] = None, + execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, + bind_arguments: Optional[_BindArguments] = None, + ) -> Optional[_O]: + # convert composite types to individual args + if ( + is_composite_class(primary_key_identity) + and type(primary_key_identity) + in descriptor_props._composite_getters + ): + getter = descriptor_props._composite_getters[ + type(primary_key_identity) + ] + primary_key_identity = getter(primary_key_identity) + + mapper: Optional[Mapper[_O]] = inspect(entity) + + if mapper is None or not mapper.is_mapper: + raise sa_exc.ArgumentError( + "Expected mapped class or mapper, got: %r" % entity + ) + + is_dict = isinstance(primary_key_identity, dict) + if not is_dict: + primary_key_identity = util.to_list( + primary_key_identity, default=[None] + ) + + if len(primary_key_identity) != len(mapper.primary_key): + raise sa_exc.InvalidRequestError( + "Incorrect number of values in identifier to formulate " + "primary key for session.get(); primary key columns " + "are %s" % ",".join("'%s'" % c for c in mapper.primary_key) + ) + + if is_dict: + pk_synonyms = mapper._pk_synonyms + + if pk_synonyms: + correct_keys = set(pk_synonyms).intersection( + primary_key_identity + ) + + if correct_keys: + primary_key_identity = dict(primary_key_identity) + for k in correct_keys: + primary_key_identity[pk_synonyms[k]] = ( + primary_key_identity[k] + ) + + try: + primary_key_identity = list( + primary_key_identity[prop.key] + for prop in mapper._identity_key_props + ) + + except KeyError as err: + raise sa_exc.InvalidRequestError( + "Incorrect names of values in identifier to formulate " + "primary key for session.get(); primary key attribute " + "names are %s (synonym names are also accepted)" + % ",".join( + "'%s'" % prop.key + for prop in mapper._identity_key_props + ) + ) from err + + if ( + not populate_existing + and not mapper.always_refresh + and with_for_update is None + ): + instance = self._identity_lookup( + mapper, + primary_key_identity, + identity_token=identity_token, + execution_options=execution_options, + bind_arguments=bind_arguments, + ) + + if instance is not None: + # reject calls for id in identity map but class + # mismatch. 
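+                # editor's note: an illustrative sketch (not part of
+                # SQLAlchemy; assumes a hypothetical ``User`` model) of the
+                # identity-map fast path guarded here::
+                #
+                #     u1 = session.get(User, 5)  # first call emits a SELECT
+                #     u2 = session.get(User, 5)  # identity-map hit, no SQL
+                #     assert u1 is u2
+                #
+                # when the mapped identity exists but is of an incompatible
+                # class, the isinstance() check below returns None instead
+                # of returning an object of the wrong type.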
+ if not isinstance(instance, mapper.class_): + return None + return instance + + # TODO: this was being tested before, but this is not possible + assert instance is not LoaderCallableStatus.PASSIVE_CLASS_MISMATCH + + # set_label_style() not strictly necessary, however this will ensure + # that tablename_colname style is used which at the moment is + # asserted in a lot of unit tests :) + + load_options = context.QueryContext.default_load_options + + if populate_existing: + load_options += {"_populate_existing": populate_existing} + statement = sql.select(mapper).set_label_style( + LABEL_STYLE_TABLENAME_PLUS_COL + ) + if with_for_update is not None: + statement._for_update_arg = ForUpdateArg._from_argument( + with_for_update + ) + + if options: + statement = statement.options(*options) + return db_load_fn( + self, + statement, + primary_key_identity, + load_options=load_options, + identity_token=identity_token, + execution_options=execution_options, + bind_arguments=bind_arguments, + ) + + def merge( + self, + instance: _O, + *, + load: bool = True, + options: Optional[Sequence[ORMOption]] = None, + ) -> _O: + """Copy the state of a given instance into a corresponding instance + within this :class:`.Session`. + + :meth:`.Session.merge` examines the primary key attributes of the + source instance, and attempts to reconcile it with an instance of the + same primary key in the session. If not found locally, it attempts + to load the object from the database based on primary key, and if + none can be located, creates a new instance. The state of each + attribute on the source instance is then copied to the target + instance. The resulting target instance is then returned by the + method; the original source instance is left unmodified, and + un-associated with the :class:`.Session` if not already. + + This operation cascades to associated instances if the association is + mapped with ``cascade="merge"``. + + See :ref:`unitofwork_merging` for a detailed discussion of merging. + + :param instance: Instance to be merged. + :param load: Boolean, when False, :meth:`.merge` switches into + a "high performance" mode which causes it to forego emitting history + events as well as all database access. This flag is used for + cases such as transferring graphs of objects into a :class:`.Session` + from a second level cache, or to transfer just-loaded objects + into the :class:`.Session` owned by a worker thread or process + without re-querying the database. + + The ``load=False`` use case adds the caveat that the given + object has to be in a "clean" state, that is, has no pending changes + to be flushed - even if the incoming object is detached from any + :class:`.Session`. This is so that when + the merge operation populates local attributes and + cascades to related objects and + collections, the values can be "stamped" onto the + target object as is, without generating any history or attribute + events, and without the need to reconcile the incoming data with + any existing related objects or collections that might not + be loaded. The resulting objects from ``load=False`` are always + produced as "clean", so it is only appropriate that the given objects + should be "clean" as well, else this suggests a mis-use of the + method. + :param options: optional sequence of loader options which will be + applied to the :meth:`_orm.Session.get` method when the merge + operation loads the existing version of the object from the database. + + .. versionadded:: 1.4.24 + + + .. 
seealso::
+
+            :func:`.make_transient_to_detached` - provides for an alternative
+            means of "merging" a single object into the :class:`.Session`
+
+        """
+
+        if self._warn_on_events:
+            self._flush_warning("Session.merge()")
+
+        _recursive: Dict[InstanceState[Any], object] = {}
+        _resolve_conflict_map: Dict[_IdentityKeyType[Any], object] = {}
+
+        if load:
+            # flush current contents if we expect to load data
+            self._autoflush()
+
+        object_mapper(instance)  # verify mapped
+        autoflush = self.autoflush
+        try:
+            self.autoflush = False
+            return self._merge(
+                attributes.instance_state(instance),
+                attributes.instance_dict(instance),
+                load=load,
+                options=options,
+                _recursive=_recursive,
+                _resolve_conflict_map=_resolve_conflict_map,
+            )
+        finally:
+            self.autoflush = autoflush
+
+    def _merge(
+        self,
+        state: InstanceState[_O],
+        state_dict: _InstanceDict,
+        *,
+        options: Optional[Sequence[ORMOption]] = None,
+        load: bool,
+        _recursive: Dict[Any, object],
+        _resolve_conflict_map: Dict[_IdentityKeyType[Any], object],
+    ) -> _O:
+        mapper: Mapper[_O] = _state_mapper(state)
+        if state in _recursive:
+            return cast(_O, _recursive[state])
+
+        new_instance = False
+        key = state.key
+
+        merged: Optional[_O]
+
+        if key is None:
+            if state in self._new:
+                util.warn(
+                    "Instance %s is already pending in this Session yet is "
+                    "being merged again; this is probably not what you want "
+                    "to do" % state_str(state)
+                )
+
+            if not load:
+                raise sa_exc.InvalidRequestError(
+                    "merge() with load=False option does not support "
+                    "transient (i.e. unpersisted) objects. flush() "
+                    "all changes on mapped instances before merging with "
+                    "load=False."
+                )
+            key = mapper._identity_key_from_state(state)
+            key_is_persistent = LoaderCallableStatus.NEVER_SET not in key[
+                1
+            ] and (
+                not _none_set.intersection(key[1])
+                or (
+                    mapper.allow_partial_pks
+                    and not _none_set.issuperset(key[1])
+                )
+            )
+        else:
+            key_is_persistent = True
+
+        if key in self.identity_map:
+            try:
+                merged = self.identity_map[key]
+            except KeyError:
+                # object was GC'ed right as we checked for it
+                merged = None
+        else:
+            merged = None
+
+        if merged is None:
+            if key_is_persistent and key in _resolve_conflict_map:
+                merged = cast(_O, _resolve_conflict_map[key])
+
+            elif not load:
+                if state.modified:
+                    raise sa_exc.InvalidRequestError(
+                        "merge() with load=False option does not support "
+                        "objects marked as 'dirty'. flush() all changes on "
+                        "mapped instances before merging with load=False."
+                    )
+                merged = mapper.class_manager.new_instance()
+                merged_state = attributes.instance_state(merged)
+                merged_state.key = key
+                self._update_impl(merged_state)
+                new_instance = True
+
+            elif key_is_persistent:
+                merged = self.get(
+                    mapper.class_,
+                    key[1],
+                    identity_token=key[2],
+                    options=options,
+                )
+
+        if merged is None:
+            merged = mapper.class_manager.new_instance()
+            merged_state = attributes.instance_state(merged)
+            merged_dict = attributes.instance_dict(merged)
+            new_instance = True
+            self._save_or_update_state(merged_state)
+        else:
+            merged_state = attributes.instance_state(merged)
+            merged_dict = attributes.instance_dict(merged)
+
+        _recursive[state] = merged
+        _resolve_conflict_map[key] = merged
+
+        # check that we didn't just pull the exact same
+        # state out. 
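+        # editor's note: an illustrative sketch (not part of SQLAlchemy;
+        # assumes a hypothetical ``User`` model) of the copy-onto-target
+        # behavior that the identity check below guards::
+        #
+        #     detached = User(id=5, name="new name")  # not in this Session
+        #     merged = session.merge(detached)
+        #     assert merged is not detached    # distinct session-local target
+        #     assert merged.name == "new name"  # state was copied over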
+ if state is not merged_state: + # version check if applicable + if mapper.version_id_col is not None: + existing_version = mapper._get_state_attr_by_column( + state, + state_dict, + mapper.version_id_col, + passive=PassiveFlag.PASSIVE_NO_INITIALIZE, + ) + + merged_version = mapper._get_state_attr_by_column( + merged_state, + merged_dict, + mapper.version_id_col, + passive=PassiveFlag.PASSIVE_NO_INITIALIZE, + ) + + if ( + existing_version + is not LoaderCallableStatus.PASSIVE_NO_RESULT + and merged_version + is not LoaderCallableStatus.PASSIVE_NO_RESULT + and existing_version != merged_version + ): + raise exc.StaleDataError( + "Version id '%s' on merged state %s " + "does not match existing version '%s'. " + "Leave the version attribute unset when " + "merging to update the most recent version." + % ( + existing_version, + state_str(merged_state), + merged_version, + ) + ) + + merged_state.load_path = state.load_path + merged_state.load_options = state.load_options + + # since we are copying load_options, we need to copy + # the callables_ that would have been generated by those + # load_options. + # assumes that the callables we put in state.callables_ + # are not instance-specific (which they should not be) + merged_state._copy_callables(state) + + for prop in mapper.iterate_properties: + prop.merge( + self, + state, + state_dict, + merged_state, + merged_dict, + load, + _recursive, + _resolve_conflict_map, + ) + + if not load: + # remove any history + merged_state._commit_all(merged_dict, self.identity_map) + merged_state.manager.dispatch._sa_event_merge_wo_load( + merged_state, None + ) + + if new_instance: + merged_state.manager.dispatch.load(merged_state, None) + + return merged + + def _validate_persistent(self, state: InstanceState[Any]) -> None: + if not self.identity_map.contains_state(state): + raise sa_exc.InvalidRequestError( + "Instance '%s' is not persistent within this Session" + % state_str(state) + ) + + def _save_impl(self, state: InstanceState[Any]) -> None: + if state.key is not None: + raise sa_exc.InvalidRequestError( + "Object '%s' already has an identity - " + "it can't be registered as pending" % state_str(state) + ) + + obj = state.obj() + to_attach = self._before_attach(state, obj) + if state not in self._new: + self._new[state] = obj + state.insert_order = len(self._new) + if to_attach: + self._after_attach(state, obj) + + def _update_impl( + self, state: InstanceState[Any], revert_deletion: bool = False + ) -> None: + if state.key is None: + raise sa_exc.InvalidRequestError( + "Instance '%s' is not persisted" % state_str(state) + ) + + if state._deleted: + if revert_deletion: + if not state._attached: + return + del state._deleted + else: + raise sa_exc.InvalidRequestError( + "Instance '%s' has been deleted. " + "Use the make_transient() " + "function to send this object back " + "to the transient state." 
% state_str(state) + ) + + obj = state.obj() + + # check for late gc + if obj is None: + return + + to_attach = self._before_attach(state, obj) + + self._deleted.pop(state, None) + if revert_deletion: + self.identity_map.replace(state) + else: + self.identity_map.add(state) + + if to_attach: + self._after_attach(state, obj) + elif revert_deletion: + self.dispatch.deleted_to_persistent(self, state) + + def _save_or_update_impl(self, state: InstanceState[Any]) -> None: + if state.key is None: + self._save_impl(state) + else: + self._update_impl(state) + + def enable_relationship_loading(self, obj: object) -> None: + """Associate an object with this :class:`.Session` for related + object loading. + + .. warning:: + + :meth:`.enable_relationship_loading` exists to serve special + use cases and is not recommended for general use. + + Accesses of attributes mapped with :func:`_orm.relationship` + will attempt to load a value from the database using this + :class:`.Session` as the source of connectivity. The values + will be loaded based on foreign key and primary key values + present on this object - if not present, then those relationships + will be unavailable. + + The object will be attached to this session, but will + **not** participate in any persistence operations; its state + for almost all purposes will remain either "transient" or + "detached", except for the case of relationship loading. + + Also note that backrefs will often not work as expected. + Altering a relationship-bound attribute on the target object + may not fire off a backref event, if the effective value + is what was already loaded from a foreign-key-holding value. + + The :meth:`.Session.enable_relationship_loading` method is + similar to the ``load_on_pending`` flag on :func:`_orm.relationship`. + Unlike that flag, :meth:`.Session.enable_relationship_loading` allows + an object to remain transient while still being able to load + related items. + + To make a transient object associated with a :class:`.Session` + via :meth:`.Session.enable_relationship_loading` pending, add + it to the :class:`.Session` using :meth:`.Session.add` normally. + If the object instead represents an existing identity in the database, + it should be merged using :meth:`.Session.merge`. + + :meth:`.Session.enable_relationship_loading` does not improve + behavior when the ORM is used normally - object references should be + constructed at the object level, not at the foreign key level, so + that they are present in an ordinary way before flush() + proceeds. This method is not intended for general use. + + .. seealso:: + + :paramref:`_orm.relationship.load_on_pending` - this flag + allows per-relationship loading of many-to-ones on items that + are pending. + + :func:`.make_transient_to_detached` - allows for an object to + be added to a :class:`.Session` without SQL emitted, which then + will unexpire attributes on access. 
+ + """ + try: + state = attributes.instance_state(obj) + except exc.NO_STATE as err: + raise exc.UnmappedInstanceError(obj) from err + + to_attach = self._before_attach(state, obj) + state._load_pending = True + if to_attach: + self._after_attach(state, obj) + + def _before_attach(self, state: InstanceState[Any], obj: object) -> bool: + self._autobegin_t() + + if state.session_id == self.hash_key: + return False + + if state.session_id and state.session_id in _sessions: + raise sa_exc.InvalidRequestError( + "Object '%s' is already attached to session '%s' " + "(this is '%s')" + % (state_str(state), state.session_id, self.hash_key) + ) + + self.dispatch.before_attach(self, state) + + return True + + def _after_attach(self, state: InstanceState[Any], obj: object) -> None: + state.session_id = self.hash_key + if state.modified and state._strong_obj is None: + state._strong_obj = obj + self.dispatch.after_attach(self, state) + + if state.key: + self.dispatch.detached_to_persistent(self, state) + else: + self.dispatch.transient_to_pending(self, state) + + def __contains__(self, instance: object) -> bool: + """Return True if the instance is associated with this session. + + The instance may be pending or persistent within the Session for a + result of True. + + """ + try: + state = attributes.instance_state(instance) + except exc.NO_STATE as err: + raise exc.UnmappedInstanceError(instance) from err + return self._contains_state(state) + + def __iter__(self) -> Iterator[object]: + """Iterate over all pending or persistent instances within this + Session. + + """ + return iter( + list(self._new.values()) + list(self.identity_map.values()) + ) + + def _contains_state(self, state: InstanceState[Any]) -> bool: + return state in self._new or self.identity_map.contains_state(state) + + def flush(self, objects: Optional[Sequence[Any]] = None) -> None: + """Flush all the object changes to the database. + + Writes out all pending object creations, deletions and modifications + to the database as INSERTs, DELETEs, UPDATEs, etc. Operations are + automatically ordered by the Session's unit of work dependency + solver. + + Database operations will be issued in the current transactional + context and do not affect the state of the transaction, unless an + error occurs, in which case the entire transaction is rolled back. + You may flush() as often as you like within a transaction to move + changes from Python to the database's transaction buffer. + + :param objects: Optional; restricts the flush operation to operate + only on elements that are in the given collection. + + This feature is for an extremely narrow set of use cases where + particular objects may need to be operated upon before the + full flush() occurs. It is not intended for general use. + + """ + + if self._flushing: + raise sa_exc.InvalidRequestError("Session is already flushing") + + if self._is_clean(): + return + try: + self._flushing = True + self._flush(objects) + finally: + self._flushing = False + + def _flush_warning(self, method: Any) -> None: + util.warn( + "Usage of the '%s' operation is not currently supported " + "within the execution stage of the flush process. " + "Results may not be consistent. Consider using alternative " + "event listeners or connection-level operations instead." 
% method + ) + + def _is_clean(self) -> bool: + return ( + not self.identity_map.check_modified() + and not self._deleted + and not self._new + ) + + def _flush(self, objects: Optional[Sequence[object]] = None) -> None: + dirty = self._dirty_states + if not dirty and not self._deleted and not self._new: + self.identity_map._modified.clear() + return + + flush_context = UOWTransaction(self) + + if self.dispatch.before_flush: + self.dispatch.before_flush(self, flush_context, objects) + # re-establish "dirty states" in case the listeners + # added + dirty = self._dirty_states + + deleted = set(self._deleted) + new = set(self._new) + + dirty = set(dirty).difference(deleted) + + # create the set of all objects we want to operate upon + if objects: + # specific list passed in + objset = set() + for o in objects: + try: + state = attributes.instance_state(o) + + except exc.NO_STATE as err: + raise exc.UnmappedInstanceError(o) from err + objset.add(state) + else: + objset = None + + # store objects whose fate has been decided + processed = set() + + # put all saves/updates into the flush context. detect top-level + # orphans and throw them into deleted. + if objset: + proc = new.union(dirty).intersection(objset).difference(deleted) + else: + proc = new.union(dirty).difference(deleted) + + for state in proc: + is_orphan = _state_mapper(state)._is_orphan(state) + + is_persistent_orphan = is_orphan and state.has_identity + + if ( + is_orphan + and not is_persistent_orphan + and state._orphaned_outside_of_session + ): + self._expunge_states([state]) + else: + _reg = flush_context.register_object( + state, isdelete=is_persistent_orphan + ) + assert _reg, "Failed to add object to the flush context!" + processed.add(state) + + # put all remaining deletes into the flush context. + if objset: + proc = deleted.intersection(objset).difference(processed) + else: + proc = deleted.difference(processed) + for state in proc: + _reg = flush_context.register_object(state, isdelete=True) + assert _reg, "Failed to add object to the flush context!" + + if not flush_context.has_work: + return + + flush_context.transaction = transaction = self._autobegin_t()._begin() + try: + self._warn_on_events = True + try: + flush_context.execute() + finally: + self._warn_on_events = False + + self.dispatch.after_flush(self, flush_context) + + flush_context.finalize_flush_changes() + + if not objects and self.identity_map._modified: + len_ = len(self.identity_map._modified) + + statelib.InstanceState._commit_all_states( + [ + (state, state.dict) + for state in self.identity_map._modified + ], + instance_dict=self.identity_map, + ) + util.warn( + "Attribute history events accumulated on %d " + "previously clean instances " + "within inner-flush event handlers have been " + "reset, and will not result in database updates. " + "Consider using set_committed_value() within " + "inner-flush event handlers to avoid this warning." % len_ + ) + + # useful assertions: + # if not objects: + # assert not self.identity_map._modified + # else: + # assert self.identity_map._modified == \ + # self.identity_map._modified.difference(objects) + + self.dispatch.after_flush_postexec(self, flush_context) + + transaction.commit() + + except: + with util.safe_reraise(): + transaction.rollback(_capture_exception=True) + + def bulk_save_objects( + self, + objects: Iterable[object], + return_defaults: bool = False, + update_changed_only: bool = True, + preserve_order: bool = True, + ) -> None: + """Perform a bulk save of the given list of objects. + + .. 
legacy::
+
+            This method is a legacy feature as of the 2.0 series of
+            SQLAlchemy. For modern bulk INSERT and UPDATE, see
+            the sections :ref:`orm_queryguide_bulk_insert` and
+            :ref:`orm_queryguide_bulk_update`.
+
+            For general INSERT and UPDATE of existing ORM mapped objects,
+            prefer standard :term:`unit of work` data management patterns,
+            introduced in the :ref:`unified_tutorial` at
+            :ref:`tutorial_orm_data_manipulation`. SQLAlchemy 2.0
+            now uses :ref:`engine_insertmanyvalues` with modern dialects
+            which solves previous issues of bulk INSERT slowness.
+
+        :param objects: a sequence of mapped object instances. The mapped
+         objects are persisted as is, and are **not** associated with the
+         :class:`.Session` afterwards.
+
+         For each object, whether the object is sent as an INSERT or an
+         UPDATE is dependent on the same rules used by the :class:`.Session`
+         in traditional operation; if the object has the
+         :attr:`.InstanceState.key`
+         attribute set, then the object is assumed to be "detached" and
+         will result in an UPDATE. Otherwise, an INSERT is used.
+
+         In the case of an UPDATE, statements are grouped based on which
+         attributes have changed, and are thus to be the subject of each
+         SET clause. If ``update_changed_only`` is False, then all
+         attributes present within each object are applied to the UPDATE
+         statement, which may help in allowing the statements to be grouped
+         together into a larger executemany(), and will also reduce the
+         overhead of checking history on attributes.
+
+        :param return_defaults: when True, rows that are missing values which
+         generate defaults, namely integer primary key defaults and sequences,
+         will be inserted **one at a time**, so that the primary key value
+         is available. In particular this will allow joined-inheritance
+         and other multi-table mappings to insert correctly without the need
+         to provide primary key values ahead of time; however,
+         :paramref:`.Session.bulk_save_objects.return_defaults` **greatly
+         reduces the performance gains** of the method overall. It is strongly
+         advised to use the standard :meth:`_orm.Session.add_all` approach.
+
+        :param update_changed_only: when True, UPDATE statements are rendered
+         based on those attributes in each state that have logged changes.
+         When False, all attributes present are rendered into the SET clause
+         with the exception of primary key attributes.
+
+        :param preserve_order: when True, the order of inserts and updates
+         matches exactly the order in which the objects are given. When
+         False, common types of objects are grouped into inserts
+         and updates, to allow for more batching opportunities.
+
+        .. seealso::
+
+            :doc:`queryguide/dml`
+
+            :meth:`.Session.bulk_insert_mappings`
+
+            :meth:`.Session.bulk_update_mappings`
+
+        """
+
+        obj_states: Iterable[InstanceState[Any]]
+
+        obj_states = (attributes.instance_state(obj) for obj in objects)
+
+        if not preserve_order:
+            # the purpose of this sort is just so that common mappers
+            # and persistence states are grouped together, so that groupby
+            # will return a single group for a particular type of mapper.
+            # it's not trying to be deterministic beyond that. 
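+            # editor's note: an illustrative sketch (not part of SQLAlchemy)
+            # of why the sort matters: given states for mappers A and B such
+            # as [A-insert, B-insert, A-insert, A-update], sorting brings
+            # equal (mapper, isupdate) keys together so that the
+            # itertools.groupby below yields one group per key - e.g. both
+            # A-inserts land in a single batch rather than producing a new
+            # group at every key change.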
+ obj_states = sorted( + obj_states, + key=lambda state: (id(state.mapper), state.key is not None), + ) + + def grouping_key( + state: InstanceState[_O], + ) -> Tuple[Mapper[_O], bool]: + return (state.mapper, state.key is not None) + + for (mapper, isupdate), states in itertools.groupby( + obj_states, grouping_key + ): + self._bulk_save_mappings( + mapper, + states, + isupdate=isupdate, + isstates=True, + return_defaults=return_defaults, + update_changed_only=update_changed_only, + render_nulls=False, + ) + + def bulk_insert_mappings( + self, + mapper: Mapper[Any], + mappings: Iterable[Dict[str, Any]], + return_defaults: bool = False, + render_nulls: bool = False, + ) -> None: + """Perform a bulk insert of the given list of mapping dictionaries. + + .. legacy:: + + This method is a legacy feature as of the 2.0 series of + SQLAlchemy. For modern bulk INSERT and UPDATE, see + the sections :ref:`orm_queryguide_bulk_insert` and + :ref:`orm_queryguide_bulk_update`. The 2.0 API shares + implementation details with this method and adds new features + as well. + + :param mapper: a mapped class, or the actual :class:`_orm.Mapper` + object, + representing the single kind of object represented within the mapping + list. + + :param mappings: a sequence of dictionaries, each one containing the + state of the mapped row to be inserted, in terms of the attribute + names on the mapped class. If the mapping refers to multiple tables, + such as a joined-inheritance mapping, each dictionary must contain all + keys to be populated into all tables. + + :param return_defaults: when True, the INSERT process will be altered + to ensure that newly generated primary key values will be fetched. + The rationale for this parameter is typically to enable + :ref:`Joined Table Inheritance ` mappings to + be bulk inserted. + + .. note:: for backends that don't support RETURNING, the + :paramref:`_orm.Session.bulk_insert_mappings.return_defaults` + parameter can significantly decrease performance as INSERT + statements can no longer be batched. See + :ref:`engine_insertmanyvalues` + for background on which backends are affected. + + :param render_nulls: When True, a value of ``None`` will result + in a NULL value being included in the INSERT statement, rather + than the column being omitted from the INSERT. This allows all + the rows being INSERTed to have the identical set of columns which + allows the full set of rows to be batched to the DBAPI. Normally, + each column-set that contains a different combination of NULL values + than the previous row must omit a different series of columns from + the rendered INSERT statement, which means it must be emitted as a + separate statement. By passing this flag, the full set of rows + are guaranteed to be batchable into one batch; the cost however is + that server-side defaults which are invoked by an omitted column will + be skipped, so care must be taken to ensure that these are not + necessary. + + .. warning:: + + When this flag is set, **server side default SQL values will + not be invoked** for those columns that are inserted as NULL; + the NULL value will be sent explicitly. Care must be taken + to ensure that no server-side default functions need to be + invoked for the operation as a whole. + + .. 
seealso:: + + :doc:`queryguide/dml` + + :meth:`.Session.bulk_save_objects` + + :meth:`.Session.bulk_update_mappings` + + """ + self._bulk_save_mappings( + mapper, + mappings, + isupdate=False, + isstates=False, + return_defaults=return_defaults, + update_changed_only=False, + render_nulls=render_nulls, + ) + + def bulk_update_mappings( + self, mapper: Mapper[Any], mappings: Iterable[Dict[str, Any]] + ) -> None: + """Perform a bulk update of the given list of mapping dictionaries. + + .. legacy:: + + This method is a legacy feature as of the 2.0 series of + SQLAlchemy. For modern bulk INSERT and UPDATE, see + the sections :ref:`orm_queryguide_bulk_insert` and + :ref:`orm_queryguide_bulk_update`. The 2.0 API shares + implementation details with this method and adds new features + as well. + + :param mapper: a mapped class, or the actual :class:`_orm.Mapper` + object, + representing the single kind of object represented within the mapping + list. + + :param mappings: a sequence of dictionaries, each one containing the + state of the mapped row to be updated, in terms of the attribute names + on the mapped class. If the mapping refers to multiple tables, such + as a joined-inheritance mapping, each dictionary may contain keys + corresponding to all tables. All those keys which are present and + are not part of the primary key are applied to the SET clause of the + UPDATE statement; the primary key values, which are required, are + applied to the WHERE clause. + + + .. seealso:: + + :doc:`queryguide/dml` + + :meth:`.Session.bulk_insert_mappings` + + :meth:`.Session.bulk_save_objects` + + """ + self._bulk_save_mappings( + mapper, + mappings, + isupdate=True, + isstates=False, + return_defaults=False, + update_changed_only=False, + render_nulls=False, + ) + + def _bulk_save_mappings( + self, + mapper: Mapper[_O], + mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], + *, + isupdate: bool, + isstates: bool, + return_defaults: bool, + update_changed_only: bool, + render_nulls: bool, + ) -> None: + mapper = _class_to_mapper(mapper) + self._flushing = True + + transaction = self._autobegin_t()._begin() + try: + if isupdate: + bulk_persistence._bulk_update( + mapper, + mappings, + transaction, + isstates=isstates, + update_changed_only=update_changed_only, + ) + else: + bulk_persistence._bulk_insert( + mapper, + mappings, + transaction, + isstates=isstates, + return_defaults=return_defaults, + render_nulls=render_nulls, + ) + transaction.commit() + + except: + with util.safe_reraise(): + transaction.rollback(_capture_exception=True) + finally: + self._flushing = False + + def is_modified( + self, instance: object, include_collections: bool = True + ) -> bool: + r"""Return ``True`` if the given instance has locally + modified attributes. + + This method retrieves the history for each instrumented + attribute on the instance and performs a comparison of the current + value to its previously flushed or committed value, if any. + + It is in effect a more expensive and accurate + version of checking for the given instance in the + :attr:`.Session.dirty` collection; a full test for + each attribute's net "dirty" status is performed. + + E.g.:: + + return session.is_modified(someobject) + + A few caveats to this method apply: + + * Instances present in the :attr:`.Session.dirty` collection may + report ``False`` when tested with this method. 
This is because
+          the object may have received change events via attribute mutation,
+          thus placing it in :attr:`.Session.dirty`, but ultimately the state
+          is the same as that loaded from the database, resulting in no net
+          change here.
+        * Scalar attributes may not have recorded the previously set
+          value when a new value was applied, if the attribute was not loaded,
+          or was expired, at the time the new value was received - in these
+          cases, the attribute is assumed to have a change, even if there is
+          ultimately no net change against its database value. SQLAlchemy in
+          most cases does not need the "old" value when a set event occurs, so
+          it skips the expense of a SQL call if the old value isn't present,
+          based on the assumption that an UPDATE of the scalar value is
+          usually needed, and in those few cases where it isn't, is less
+          expensive on average than issuing a defensive SELECT.
+
+          The "old" value is fetched unconditionally upon set only if the
+          attribute container has the ``active_history`` flag set to ``True``.
+          This flag is set typically for primary key attributes and scalar
+          object references that are not a simple many-to-one. To set this
+          flag for any arbitrary mapped column, use the ``active_history``
+          argument with :func:`.column_property`.
+
+        :param instance: mapped instance to be tested for pending changes.
+        :param include_collections: Indicates if multivalued collections
+         should be included in the operation. Setting this to ``False`` is a
+         way to detect only local-column based properties (i.e. scalar columns
+         or many-to-one foreign keys) that would result in an UPDATE for this
+         instance upon flush.
+
+        """
+        state = object_state(instance)
+
+        if not state.modified:
+            return False
+
+        dict_ = state.dict
+
+        for attr in state.manager.attributes:
+            if (
+                not include_collections
+                and hasattr(attr.impl, "get_collection")
+            ) or not hasattr(attr.impl, "get_history"):
+                continue
+
+            (added, unchanged, deleted) = attr.impl.get_history(
+                state, dict_, passive=PassiveFlag.NO_CHANGE
+            )
+
+            if added or deleted:
+                return True
+        else:
+            return False
+
+    @property
+    def is_active(self) -> bool:
+        """True if this :class:`.Session` is not in "partial rollback" state.
+
+        .. versionchanged:: 1.4 The :class:`_orm.Session` no longer begins
+           a new transaction immediately, so this attribute will be False
+           when the :class:`_orm.Session` is first instantiated.
+
+        "partial rollback" state typically indicates that the flush process
+        of the :class:`_orm.Session` has failed, and that the
+        :meth:`_orm.Session.rollback` method must be emitted in order to
+        fully roll back the transaction.
+
+        If this :class:`_orm.Session` is not in a transaction at all, the
+        :class:`_orm.Session` will autobegin when it is first used, so in this
+        case :attr:`_orm.Session.is_active` will return True.
+
+        Otherwise, if this :class:`_orm.Session` is within a transaction,
+        and that transaction has not been rolled back internally, the
+        :attr:`_orm.Session.is_active` will also return True.
+
+        .. seealso::
+
+            :ref:`faq_session_rollback`
+
+            :meth:`_orm.Session.in_transaction`
+
+        """
+        return self._transaction is None or self._transaction.is_active
+
+    @property
+    def _dirty_states(self) -> Iterable[InstanceState[Any]]:
+        """The set of all persistent states considered dirty.
+
+        This method returns all states that were modified including
+        those that were possibly deleted. 
+ + """ + return self.identity_map._dirty_states() + + @property + def dirty(self) -> IdentitySet: + """The set of all persistent instances considered dirty. + + E.g.:: + + some_mapped_object in session.dirty + + Instances are considered dirty when they were modified but not + deleted. + + Note that this 'dirty' calculation is 'optimistic'; most + attribute-setting or collection modification operations will + mark an instance as 'dirty' and place it in this set, even if + there is no net change to the attribute's value. At flush + time, the value of each attribute is compared to its + previously saved value, and if there's no net change, no SQL + operation will occur (this is a more expensive operation so + it's only done at flush time). + + To check if an instance has actionable net changes to its + attributes, use the :meth:`.Session.is_modified` method. + + """ + return IdentitySet( + [ + state.obj() + for state in self._dirty_states + if state not in self._deleted + ] + ) + + @property + def deleted(self) -> IdentitySet: + "The set of all instances marked as 'deleted' within this ``Session``" + + return util.IdentitySet(list(self._deleted.values())) + + @property + def new(self) -> IdentitySet: + "The set of all instances marked as 'new' within this ``Session``." + + return util.IdentitySet(list(self._new.values())) + + +_S = TypeVar("_S", bound="Session") + + +class sessionmaker(_SessionClassMethods, Generic[_S]): + """A configurable :class:`.Session` factory. + + The :class:`.sessionmaker` factory generates new + :class:`.Session` objects when called, creating them given + the configurational arguments established here. + + e.g.:: + + from sqlalchemy import create_engine + from sqlalchemy.orm import sessionmaker + + # an Engine, which the Session will use for connection + # resources + engine = create_engine('postgresql+psycopg2://scott:tiger@localhost/') + + Session = sessionmaker(engine) + + with Session() as session: + session.add(some_object) + session.add(some_other_object) + session.commit() + + Context manager use is optional; otherwise, the returned + :class:`_orm.Session` object may be closed explicitly via the + :meth:`_orm.Session.close` method. Using a + ``try:/finally:`` block is optional, however will ensure that the close + takes place even if there are database errors:: + + session = Session() + try: + session.add(some_object) + session.add(some_other_object) + session.commit() + finally: + session.close() + + :class:`.sessionmaker` acts as a factory for :class:`_orm.Session` + objects in the same way as an :class:`_engine.Engine` acts as a factory + for :class:`_engine.Connection` objects. In this way it also includes + a :meth:`_orm.sessionmaker.begin` method, that provides a context + manager which both begins and commits a transaction, as well as closes + out the :class:`_orm.Session` when complete, rolling back the transaction + if any errors occur:: + + Session = sessionmaker(engine) + + with Session.begin() as session: + session.add(some_object) + session.add(some_other_object) + # commits transaction, closes session + + .. versionadded:: 1.4 + + When calling upon :class:`_orm.sessionmaker` to construct a + :class:`_orm.Session`, keyword arguments may also be passed to the + method; these arguments will override that of the globally configured + parameters. 
Below we use a :class:`_orm.sessionmaker` bound to a certain + :class:`_engine.Engine` to produce a :class:`_orm.Session` that is instead + bound to a specific :class:`_engine.Connection` procured from that engine:: + + Session = sessionmaker(engine) + + # bind an individual session to a connection + + with engine.connect() as connection: + with Session(bind=connection) as session: + # work with session + + The class also includes a method :meth:`_orm.sessionmaker.configure`, which + can be used to specify additional keyword arguments to the factory, which + will take effect for subsequent :class:`.Session` objects generated. This + is usually used to associate one or more :class:`_engine.Engine` objects + with an existing + :class:`.sessionmaker` factory before it is first used:: + + # application starts, sessionmaker does not have + # an engine bound yet + Session = sessionmaker() + + # ... later, when an engine URL is read from a configuration + # file or other events allow the engine to be created + engine = create_engine('sqlite:///foo.db') + Session.configure(bind=engine) + + sess = Session() + # work with session + + .. seealso:: + + :ref:`session_getting` - introductory text on creating + sessions using :class:`.sessionmaker`. + + """ + + class_: Type[_S] + + @overload + def __init__( + self, + bind: Optional[_SessionBind] = ..., + *, + class_: Type[_S], + autoflush: bool = ..., + expire_on_commit: bool = ..., + info: Optional[_InfoType] = ..., + **kw: Any, + ): ... + + @overload + def __init__( + self: "sessionmaker[Session]", + bind: Optional[_SessionBind] = ..., + *, + autoflush: bool = ..., + expire_on_commit: bool = ..., + info: Optional[_InfoType] = ..., + **kw: Any, + ): ... + + def __init__( + self, + bind: Optional[_SessionBind] = None, + *, + class_: Type[_S] = Session, # type: ignore + autoflush: bool = True, + expire_on_commit: bool = True, + info: Optional[_InfoType] = None, + **kw: Any, + ): + r"""Construct a new :class:`.sessionmaker`. + + All arguments here except for ``class_`` correspond to arguments + accepted by :class:`.Session` directly. See the + :meth:`.Session.__init__` docstring for more details on parameters. + + :param bind: a :class:`_engine.Engine` or other :class:`.Connectable` + with + which newly created :class:`.Session` objects will be associated. + :param class\_: class to use in order to create new :class:`.Session` + objects. Defaults to :class:`.Session`. + :param autoflush: The autoflush setting to use with newly created + :class:`.Session` objects. + + .. seealso:: + + :ref:`session_flushing` - additional background on autoflush + + :param expire_on_commit=True: the + :paramref:`_orm.Session.expire_on_commit` setting to use + with newly created :class:`.Session` objects. + + :param info: optional dictionary of information that will be available + via :attr:`.Session.info`. Note this dictionary is *updated*, not + replaced, when the ``info`` parameter is specified to the specific + :class:`.Session` construction operation. + + :param \**kw: all other keyword arguments are passed to the + constructor of newly created :class:`.Session` objects. + + """ + kw["bind"] = bind + kw["autoflush"] = autoflush + kw["expire_on_commit"] = expire_on_commit + if info is not None: + kw["info"] = info + self.kw = kw + # make our own subclass of the given class, so that + # events can be associated with it specifically. 
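+        # (each call to sessionmaker() creates its own anonymous
+        # subclass, so event listeners attached against one factory are
+        # presumed isolated from Sessions produced by other factories)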
+ self.class_ = type(class_.__name__, (class_,), {}) + + def begin(self) -> contextlib.AbstractContextManager[_S]: + """Produce a context manager that both provides a new + :class:`_orm.Session` as well as a transaction that commits. + + + e.g.:: + + Session = sessionmaker(some_engine) + + with Session.begin() as session: + session.add(some_object) + + # commits transaction, closes session + + .. versionadded:: 1.4 + + + """ + + session = self() + return session._maker_context_manager() + + def __call__(self, **local_kw: Any) -> _S: + """Produce a new :class:`.Session` object using the configuration + established in this :class:`.sessionmaker`. + + In Python, the ``__call__`` method is invoked on an object when + it is "called" in the same way as a function:: + + Session = sessionmaker(some_engine) + session = Session() # invokes sessionmaker.__call__() + + """ + for k, v in self.kw.items(): + if k == "info" and "info" in local_kw: + d = v.copy() + d.update(local_kw["info"]) + local_kw["info"] = d + else: + local_kw.setdefault(k, v) + return self.class_(**local_kw) + + def configure(self, **new_kw: Any) -> None: + """(Re)configure the arguments for this sessionmaker. + + e.g.:: + + Session = sessionmaker() + + Session.configure(bind=create_engine('sqlite://')) + """ + self.kw.update(new_kw) + + def __repr__(self) -> str: + return "%s(class_=%r, %s)" % ( + self.__class__.__name__, + self.class_.__name__, + ", ".join("%s=%r" % (k, v) for k, v in self.kw.items()), + ) + + +def close_all_sessions() -> None: + """Close all sessions in memory. + + This function consults a global registry of all :class:`.Session` objects + and calls :meth:`.Session.close` on them, which resets them to a clean + state. + + This function is not for general use but may be useful for test suites + within the teardown scheme. + + .. versionadded:: 1.3 + + """ + + for sess in _sessions.values(): + sess.close() + + +def make_transient(instance: object) -> None: + """Alter the state of the given instance so that it is :term:`transient`. + + .. note:: + + :func:`.make_transient` is a special-case function for + advanced use cases only. + + The given mapped instance is assumed to be in the :term:`persistent` or + :term:`detached` state. The function will remove its association with any + :class:`.Session` as well as its :attr:`.InstanceState.identity`. The + effect is that the object will behave as though it were newly constructed, + except retaining any attribute / collection values that were loaded at the + time of the call. The :attr:`.InstanceState.deleted` flag is also reset + if this object had been deleted as a result of using + :meth:`.Session.delete`. + + .. warning:: + + :func:`.make_transient` does **not** "unexpire" or otherwise eagerly + load ORM-mapped attributes that are not currently loaded at the time + the function is called. This includes attributes which: + + * were expired via :meth:`.Session.expire` + + * were expired as the natural effect of committing a session + transaction, e.g. :meth:`.Session.commit` + + * are normally :term:`lazy loaded` but are not currently loaded + + * are "deferred" (see :ref:`orm_queryguide_column_deferral`) and are + not yet loaded + + * were not present in the query which loaded this object, such as that + which is common in joined table inheritance and other scenarios. 
+ + After :func:`.make_transient` is called, unloaded attributes such + as those above will normally resolve to the value ``None`` when + accessed, or an empty collection for a collection-oriented attribute. + As the object is transient and un-associated with any database + identity, it will no longer retrieve these values. + + .. seealso:: + + :func:`.make_transient_to_detached` + + """ + state = attributes.instance_state(instance) + s = _state_session(state) + if s: + s._expunge_states([state]) + + # remove expired state + state.expired_attributes.clear() + + # remove deferred callables + if state.callables: + del state.callables + + if state.key: + del state.key + if state._deleted: + del state._deleted + + +def make_transient_to_detached(instance: object) -> None: + """Make the given transient instance :term:`detached`. + + .. note:: + + :func:`.make_transient_to_detached` is a special-case function for + advanced use cases only. + + All attribute history on the given instance + will be reset as though the instance were freshly loaded + from a query. Missing attributes will be marked as expired. + The primary key attributes of the object, which are required, will be made + into the "key" of the instance. + + The object can then be added to a session, or merged + possibly with the load=False flag, at which point it will look + as if it were loaded that way, without emitting SQL. + + This is a special use case function that differs from a normal + call to :meth:`.Session.merge` in that a given persistent state + can be manufactured without any SQL calls. + + .. seealso:: + + :func:`.make_transient` + + :meth:`.Session.enable_relationship_loading` + + """ + state = attributes.instance_state(instance) + if state.session_id or state.key: + raise sa_exc.InvalidRequestError("Given object must be transient") + state.key = state.mapper._identity_key_from_state(state) + if state._deleted: + del state._deleted + state._commit_all(state.dict) + state._expire_attributes(state.dict, state.unloaded) + + +def object_session(instance: object) -> Optional[Session]: + """Return the :class:`.Session` to which the given instance belongs. + + This is essentially the same as the :attr:`.InstanceState.session` + accessor. See that attribute for details. + + """ + + try: + state = attributes.instance_state(instance) + except exc.NO_STATE as err: + raise exc.UnmappedInstanceError(instance) from err + else: + return _state_session(state) + + +_new_sessionid = util.counter() diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/state.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/state.py new file mode 100644 index 00000000..9dfd7f64 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/state.py @@ -0,0 +1,1143 @@ +# orm/state.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Defines instrumentation of instances. + +This module is usually not directly visible to user applications, but +defines a large part of the ORM's interactivity. + +""" + +from __future__ import annotations + +from typing import Any +from typing import Callable +from typing import Dict +from typing import Generic +from typing import Iterable +from typing import Optional +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union +import weakref + +from . import base +from . 
import exc as orm_exc +from . import interfaces +from ._typing import _O +from ._typing import is_collection_impl +from .base import ATTR_WAS_SET +from .base import INIT_OK +from .base import LoaderCallableStatus +from .base import NEVER_SET +from .base import NO_VALUE +from .base import PASSIVE_NO_INITIALIZE +from .base import PASSIVE_NO_RESULT +from .base import PASSIVE_OFF +from .base import SQL_OK +from .path_registry import PathRegistry +from .. import exc as sa_exc +from .. import inspection +from .. import util +from ..util.typing import Literal +from ..util.typing import Protocol + +if TYPE_CHECKING: + from ._typing import _IdentityKeyType + from ._typing import _InstanceDict + from ._typing import _LoaderCallable + from .attributes import AttributeImpl + from .attributes import History + from .base import PassiveFlag + from .collections import _AdaptedCollectionProtocol + from .identity import IdentityMap + from .instrumentation import ClassManager + from .interfaces import ORMOption + from .mapper import Mapper + from .session import Session + from ..engine import Row + from ..ext.asyncio.session import async_session as _async_provider + from ..ext.asyncio.session import AsyncSession + +if TYPE_CHECKING: + _sessions: weakref.WeakValueDictionary[int, Session] +else: + # late-populated by session.py + _sessions = None + + +if not TYPE_CHECKING: + # optionally late-provided by sqlalchemy.ext.asyncio.session + + _async_provider = None # noqa + + +class _InstanceDictProto(Protocol): + def __call__(self) -> Optional[IdentityMap]: ... + + +class _InstallLoaderCallableProto(Protocol[_O]): + """used at result loading time to install a _LoaderCallable callable + upon a specific InstanceState, which will be used to populate an + attribute when that attribute is accessed. + + Concrete examples are per-instance deferred column loaders and + relationship lazy loaders. + + """ + + def __call__( + self, state: InstanceState[_O], dict_: _InstanceDict, row: Row[Any] + ) -> None: ... + + +@inspection._self_inspects +class InstanceState(interfaces.InspectionAttrInfo, Generic[_O]): + """Tracks state information at the instance level. + + The :class:`.InstanceState` is a key object used by the + SQLAlchemy ORM in order to track the state of an object; + it is created the moment an object is instantiated, typically + as a result of :term:`instrumentation` which SQLAlchemy applies + to the ``__init__()`` method of the class. + + :class:`.InstanceState` is also a semi-public object, + available for runtime inspection as to the state of a + mapped instance, including information such as its current + status within a particular :class:`.Session` and details + about data on individual attributes. The public API + in order to acquire a :class:`.InstanceState` object + is to use the :func:`_sa.inspect` system:: + + >>> from sqlalchemy import inspect + >>> insp = inspect(some_mapped_object) + >>> insp.attrs.nickname.history + History(added=['new nickname'], unchanged=(), deleted=['nickname']) + + .. seealso:: + + :ref:`orm_mapper_inspection_instancestate` + + """ + + __slots__ = ( + "__dict__", + "__weakref__", + "class_", + "manager", + "obj", + "committed_state", + "expired_attributes", + ) + + manager: ClassManager[_O] + session_id: Optional[int] = None + key: Optional[_IdentityKeyType[_O]] = None + runid: Optional[int] = None + load_options: Tuple[ORMOption, ...] 
= () + load_path: PathRegistry = PathRegistry.root + insert_order: Optional[int] = None + _strong_obj: Optional[object] = None + obj: weakref.ref[_O] + + committed_state: Dict[str, Any] + + modified: bool = False + """When ``True`` the object was modified.""" + expired: bool = False + """When ``True`` the object is :term:`expired`. + + .. seealso:: + + :ref:`session_expire` + """ + _deleted: bool = False + _load_pending: bool = False + _orphaned_outside_of_session: bool = False + is_instance: bool = True + identity_token: object = None + _last_known_values: Optional[Dict[str, Any]] = None + + _instance_dict: _InstanceDictProto + """A weak reference, or in the default case a plain callable, that + returns a reference to the current :class:`.IdentityMap`, if any. + + """ + if not TYPE_CHECKING: + + def _instance_dict(self): + """default 'weak reference' for _instance_dict""" + return None + + expired_attributes: Set[str] + """The set of keys which are 'expired' to be loaded by + the manager's deferred scalar loader, assuming no pending + changes. + + See also the ``unmodified`` collection which is intersected + against this set when a refresh operation occurs. + """ + + callables: Dict[str, Callable[[InstanceState[_O], PassiveFlag], Any]] + """A namespace where a per-state loader callable can be associated. + + In SQLAlchemy 1.0, this is only used for lazy loaders / deferred + loaders that were set up via query option. + + Previously, callables was used also to indicate expired attributes + by storing a link to the InstanceState itself in this dictionary. + This role is now handled by the expired_attributes set. + + """ + + if not TYPE_CHECKING: + callables = util.EMPTY_DICT + + def __init__(self, obj: _O, manager: ClassManager[_O]): + self.class_ = obj.__class__ + self.manager = manager + self.obj = weakref.ref(obj, self._cleanup) + self.committed_state = {} + self.expired_attributes = set() + + @util.memoized_property + def attrs(self) -> util.ReadOnlyProperties[AttributeState]: + """Return a namespace representing each attribute on + the mapped object, including its current value + and history. + + The returned object is an instance of :class:`.AttributeState`. + This object allows inspection of the current data + within an attribute as well as attribute history + since the last flush. + + """ + return util.ReadOnlyProperties( + {key: AttributeState(self, key) for key in self.manager} + ) + + @property + def transient(self) -> bool: + """Return ``True`` if the object is :term:`transient`. + + .. seealso:: + + :ref:`session_object_states` + + """ + return self.key is None and not self._attached + + @property + def pending(self) -> bool: + """Return ``True`` if the object is :term:`pending`. + + .. seealso:: + + :ref:`session_object_states` + + """ + return self.key is None and self._attached + + @property + def deleted(self) -> bool: + """Return ``True`` if the object is :term:`deleted`. + + An object that is in the deleted state is guaranteed to + not be within the :attr:`.Session.identity_map` of its parent + :class:`.Session`; however if the session's transaction is rolled + back, the object will be restored to the persistent state and + the identity map. + + .. 
note:: + + The :attr:`.InstanceState.deleted` attribute refers to a specific + state of the object that occurs between the "persistent" and + "detached" states; once the object is :term:`detached`, the + :attr:`.InstanceState.deleted` attribute **no longer returns + True**; in order to detect that a state was deleted, regardless + of whether or not the object is associated with a + :class:`.Session`, use the :attr:`.InstanceState.was_deleted` + accessor. + + .. versionadded: 1.1 + + .. seealso:: + + :ref:`session_object_states` + + """ + return self.key is not None and self._attached and self._deleted + + @property + def was_deleted(self) -> bool: + """Return True if this object is or was previously in the + "deleted" state and has not been reverted to persistent. + + This flag returns True once the object was deleted in flush. + When the object is expunged from the session either explicitly + or via transaction commit and enters the "detached" state, + this flag will continue to report True. + + .. seealso:: + + :attr:`.InstanceState.deleted` - refers to the "deleted" state + + :func:`.orm.util.was_deleted` - standalone function + + :ref:`session_object_states` + + """ + return self._deleted + + @property + def persistent(self) -> bool: + """Return ``True`` if the object is :term:`persistent`. + + An object that is in the persistent state is guaranteed to + be within the :attr:`.Session.identity_map` of its parent + :class:`.Session`. + + .. seealso:: + + :ref:`session_object_states` + + """ + return self.key is not None and self._attached and not self._deleted + + @property + def detached(self) -> bool: + """Return ``True`` if the object is :term:`detached`. + + .. seealso:: + + :ref:`session_object_states` + + """ + return self.key is not None and not self._attached + + @util.non_memoized_property + @util.preload_module("sqlalchemy.orm.session") + def _attached(self) -> bool: + return ( + self.session_id is not None + and self.session_id in util.preloaded.orm_session._sessions + ) + + def _track_last_known_value(self, key: str) -> None: + """Track the last known value of a particular key after expiration + operations. + + .. versionadded:: 1.3 + + """ + + lkv = self._last_known_values + if lkv is None: + self._last_known_values = lkv = {} + if key not in lkv: + lkv[key] = NO_VALUE + + @property + def session(self) -> Optional[Session]: + """Return the owning :class:`.Session` for this instance, + or ``None`` if none available. + + Note that the result here can in some cases be *different* + from that of ``obj in session``; an object that's been deleted + will report as not ``in session``, however if the transaction is + still in progress, this attribute will still refer to that session. + Only when the transaction is completed does the object become + fully detached under normal circumstances. + + .. seealso:: + + :attr:`_orm.InstanceState.async_session` + + """ + if self.session_id: + try: + return _sessions[self.session_id] + except KeyError: + pass + return None + + @property + def async_session(self) -> Optional[AsyncSession]: + """Return the owning :class:`_asyncio.AsyncSession` for this instance, + or ``None`` if none available. + + This attribute is only non-None when the :mod:`sqlalchemy.ext.asyncio` + API is in use for this ORM object. The returned + :class:`_asyncio.AsyncSession` object will be a proxy for the + :class:`_orm.Session` object that would be returned from the + :attr:`_orm.InstanceState.session` attribute for this + :class:`_orm.InstanceState`. + + .. 
versionadded:: 1.4.18 + + .. seealso:: + + :ref:`asyncio_toplevel` + + """ + if _async_provider is None: + return None + + sess = self.session + if sess is not None: + return _async_provider(sess) + else: + return None + + @property + def object(self) -> Optional[_O]: + """Return the mapped object represented by this + :class:`.InstanceState`. + + Returns None if the object has been garbage collected + + """ + return self.obj() + + @property + def identity(self) -> Optional[Tuple[Any, ...]]: + """Return the mapped identity of the mapped object. + This is the primary key identity as persisted by the ORM + which can always be passed directly to + :meth:`_query.Query.get`. + + Returns ``None`` if the object has no primary key identity. + + .. note:: + An object which is :term:`transient` or :term:`pending` + does **not** have a mapped identity until it is flushed, + even if its attributes include primary key values. + + """ + if self.key is None: + return None + else: + return self.key[1] + + @property + def identity_key(self) -> Optional[_IdentityKeyType[_O]]: + """Return the identity key for the mapped object. + + This is the key used to locate the object within + the :attr:`.Session.identity_map` mapping. It contains + the identity as returned by :attr:`.identity` within it. + + + """ + return self.key + + @util.memoized_property + def parents(self) -> Dict[int, Union[Literal[False], InstanceState[Any]]]: + return {} + + @util.memoized_property + def _pending_mutations(self) -> Dict[str, PendingCollection]: + return {} + + @util.memoized_property + def _empty_collections(self) -> Dict[str, _AdaptedCollectionProtocol]: + return {} + + @util.memoized_property + def mapper(self) -> Mapper[_O]: + """Return the :class:`_orm.Mapper` used for this mapped object.""" + return self.manager.mapper + + @property + def has_identity(self) -> bool: + """Return ``True`` if this object has an identity key. + + This should always have the same value as the + expression ``state.persistent`` or ``state.detached``. + + """ + return bool(self.key) + + @classmethod + def _detach_states( + self, + states: Iterable[InstanceState[_O]], + session: Session, + to_transient: bool = False, + ) -> None: + persistent_to_detached = ( + session.dispatch.persistent_to_detached or None + ) + deleted_to_detached = session.dispatch.deleted_to_detached or None + pending_to_transient = session.dispatch.pending_to_transient or None + persistent_to_transient = ( + session.dispatch.persistent_to_transient or None + ) + + for state in states: + deleted = state._deleted + pending = state.key is None + persistent = not pending and not deleted + + state.session_id = None + + if to_transient and state.key: + del state.key + if persistent: + if to_transient: + if persistent_to_transient is not None: + persistent_to_transient(session, state) + elif persistent_to_detached is not None: + persistent_to_detached(session, state) + elif deleted and deleted_to_detached is not None: + deleted_to_detached(session, state) + elif pending and pending_to_transient is not None: + pending_to_transient(session, state) + + state._strong_obj = None + + def _detach(self, session: Optional[Session] = None) -> None: + if session: + InstanceState._detach_states([self], session) + else: + self.session_id = self._strong_obj = None + + def _dispose(self) -> None: + # used by the test suite, apparently + self._detach() + + def _cleanup(self, ref: weakref.ref[_O]) -> None: + """Weakref callback cleanup. 
+ + This callable cleans out the state when it is being garbage + collected. + + this _cleanup **assumes** that there are no strong refs to us! + Will not work otherwise! + + """ + + # Python builtins become undefined during interpreter shutdown. + # Guard against exceptions during this phase, as the method cannot + # proceed in any case if builtins have been undefined. + if dict is None: + return + + instance_dict = self._instance_dict() + if instance_dict is not None: + instance_dict._fast_discard(self) + del self._instance_dict + + # we can't possibly be in instance_dict._modified + # b.c. this is weakref cleanup only, that set + # is strong referencing! + # assert self not in instance_dict._modified + + self.session_id = self._strong_obj = None + + @property + def dict(self) -> _InstanceDict: + """Return the instance dict used by the object. + + Under normal circumstances, this is always synonymous + with the ``__dict__`` attribute of the mapped object, + unless an alternative instrumentation system has been + configured. + + In the case that the actual object has been garbage + collected, this accessor returns a blank dictionary. + + """ + o = self.obj() + if o is not None: + return base.instance_dict(o) + else: + return {} + + def _initialize_instance(*mixed: Any, **kwargs: Any) -> None: + self, instance, args = mixed[0], mixed[1], mixed[2:] # noqa + manager = self.manager + + manager.dispatch.init(self, args, kwargs) + + try: + manager.original_init(*mixed[1:], **kwargs) + except: + with util.safe_reraise(): + manager.dispatch.init_failure(self, args, kwargs) + + def get_history(self, key: str, passive: PassiveFlag) -> History: + return self.manager[key].impl.get_history(self, self.dict, passive) + + def get_impl(self, key: str) -> AttributeImpl: + return self.manager[key].impl + + def _get_pending_mutation(self, key: str) -> PendingCollection: + if key not in self._pending_mutations: + self._pending_mutations[key] = PendingCollection() + return self._pending_mutations[key] + + def __getstate__(self) -> Dict[str, Any]: + state_dict: Dict[str, Any] = { + "instance": self.obj(), + "class_": self.class_, + "committed_state": self.committed_state, + "expired_attributes": self.expired_attributes, + } + state_dict.update( + (k, self.__dict__[k]) + for k in ( + "_pending_mutations", + "modified", + "expired", + "callables", + "key", + "parents", + "load_options", + "class_", + "expired_attributes", + "info", + ) + if k in self.__dict__ + ) + if self.load_path: + state_dict["load_path"] = self.load_path.serialize() + + state_dict["manager"] = self.manager._serialize(self, state_dict) + + return state_dict + + def __setstate__(self, state_dict: Dict[str, Any]) -> None: + inst = state_dict["instance"] + if inst is not None: + self.obj = weakref.ref(inst, self._cleanup) + self.class_ = inst.__class__ + else: + self.obj = lambda: None # type: ignore + self.class_ = state_dict["class_"] + + self.committed_state = state_dict.get("committed_state", {}) + self._pending_mutations = state_dict.get("_pending_mutations", {}) + self.parents = state_dict.get("parents", {}) + self.modified = state_dict.get("modified", False) + self.expired = state_dict.get("expired", False) + if "info" in state_dict: + self.info.update(state_dict["info"]) + if "callables" in state_dict: + self.callables = state_dict["callables"] + + self.expired_attributes = state_dict["expired_attributes"] + else: + if "expired_attributes" in state_dict: + self.expired_attributes = state_dict["expired_attributes"] + else: + 
self.expired_attributes = set() + + self.__dict__.update( + [ + (k, state_dict[k]) + for k in ("key", "load_options") + if k in state_dict + ] + ) + if self.key: + self.identity_token = self.key[2] + + if "load_path" in state_dict: + self.load_path = PathRegistry.deserialize(state_dict["load_path"]) + + state_dict["manager"](self, inst, state_dict) + + def _reset(self, dict_: _InstanceDict, key: str) -> None: + """Remove the given attribute and any + callables associated with it.""" + + old = dict_.pop(key, None) + manager_impl = self.manager[key].impl + if old is not None and is_collection_impl(manager_impl): + manager_impl._invalidate_collection(old) + self.expired_attributes.discard(key) + if self.callables: + self.callables.pop(key, None) + + def _copy_callables(self, from_: InstanceState[Any]) -> None: + if "callables" in from_.__dict__: + self.callables = dict(from_.callables) + + @classmethod + def _instance_level_callable_processor( + cls, manager: ClassManager[_O], fn: _LoaderCallable, key: Any + ) -> _InstallLoaderCallableProto[_O]: + impl = manager[key].impl + if is_collection_impl(impl): + fixed_impl = impl + + def _set_callable( + state: InstanceState[_O], dict_: _InstanceDict, row: Row[Any] + ) -> None: + if "callables" not in state.__dict__: + state.callables = {} + old = dict_.pop(key, None) + if old is not None: + fixed_impl._invalidate_collection(old) + state.callables[key] = fn + + else: + + def _set_callable( + state: InstanceState[_O], dict_: _InstanceDict, row: Row[Any] + ) -> None: + if "callables" not in state.__dict__: + state.callables = {} + state.callables[key] = fn + + return _set_callable + + def _expire( + self, dict_: _InstanceDict, modified_set: Set[InstanceState[Any]] + ) -> None: + self.expired = True + if self.modified: + modified_set.discard(self) + self.committed_state.clear() + self.modified = False + + self._strong_obj = None + + if "_pending_mutations" in self.__dict__: + del self.__dict__["_pending_mutations"] + + if "parents" in self.__dict__: + del self.__dict__["parents"] + + self.expired_attributes.update( + [impl.key for impl in self.manager._loader_impls] + ) + + if self.callables: + # the per state loader callables we can remove here are + # LoadDeferredColumns, which undefers a column at the instance + # level that is mapped with deferred, and LoadLazyAttribute, + # which lazy loads a relationship at the instance level that + # is mapped with "noload" or perhaps "immediateload". + # Before 1.4, only column-based + # attributes could be considered to be "expired", so here they + # were the only ones "unexpired", which means to make them deferred + # again. For the moment, as of 1.4 we also apply the same + # treatment relationships now, that is, an instance level lazy + # loader is reset in the same way as a column loader. 
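+            # drop the per-instance callables for attributes just marked
+            # expired, so that class-level deferred / lazy loaders take
+            # effect again on next access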
+ for k in self.expired_attributes.intersection(self.callables): + del self.callables[k] + + for k in self.manager._collection_impl_keys.intersection(dict_): + collection = dict_.pop(k) + collection._sa_adapter.invalidated = True + + if self._last_known_values: + self._last_known_values.update( + {k: dict_[k] for k in self._last_known_values if k in dict_} + ) + + for key in self.manager._all_key_set.intersection(dict_): + del dict_[key] + + self.manager.dispatch.expire(self, None) + + def _expire_attributes( + self, + dict_: _InstanceDict, + attribute_names: Iterable[str], + no_loader: bool = False, + ) -> None: + pending = self.__dict__.get("_pending_mutations", None) + + callables = self.callables + + for key in attribute_names: + impl = self.manager[key].impl + if impl.accepts_scalar_loader: + if no_loader and (impl.callable_ or key in callables): + continue + + self.expired_attributes.add(key) + if callables and key in callables: + del callables[key] + old = dict_.pop(key, NO_VALUE) + if is_collection_impl(impl) and old is not NO_VALUE: + impl._invalidate_collection(old) + + lkv = self._last_known_values + if lkv is not None and key in lkv and old is not NO_VALUE: + lkv[key] = old + + self.committed_state.pop(key, None) + if pending: + pending.pop(key, None) + + self.manager.dispatch.expire(self, attribute_names) + + def _load_expired( + self, state: InstanceState[_O], passive: PassiveFlag + ) -> LoaderCallableStatus: + """__call__ allows the InstanceState to act as a deferred + callable for loading expired attributes, which is also + serializable (picklable). + + """ + + if not passive & SQL_OK: + return PASSIVE_NO_RESULT + + toload = self.expired_attributes.intersection(self.unmodified) + toload = toload.difference( + attr + for attr in toload + if not self.manager[attr].impl.load_on_unexpire + ) + + self.manager.expired_attribute_loader(self, toload, passive) + + # if the loader failed, or this + # instance state didn't have an identity, + # the attributes still might be in the callables + # dict. ensure they are removed. + self.expired_attributes.clear() + + return ATTR_WAS_SET + + @property + def unmodified(self) -> Set[str]: + """Return the set of keys which have no uncommitted changes""" + + return set(self.manager).difference(self.committed_state) + + def unmodified_intersection(self, keys: Iterable[str]) -> Set[str]: + """Return self.unmodified.intersection(keys).""" + + return ( + set(keys) + .intersection(self.manager) + .difference(self.committed_state) + ) + + @property + def unloaded(self) -> Set[str]: + """Return the set of keys which do not have a loaded value. + + This includes expired attributes and any other attribute that was never + populated or modified. + + """ + return ( + set(self.manager) + .difference(self.committed_state) + .difference(self.dict) + ) + + @property + @util.deprecated( + "2.0", + "The :attr:`.InstanceState.unloaded_expirable` attribute is " + "deprecated. Please use :attr:`.InstanceState.unloaded`.", + ) + def unloaded_expirable(self) -> Set[str]: + """Synonymous with :attr:`.InstanceState.unloaded`. + + This attribute was added as an implementation-specific detail at some + point and should be considered to be private. 
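+
+        A migration sketch (``obj`` is assumed to be a mapped
+        instance)::
+
+            from sqlalchemy import inspect
+
+            # use .unloaded rather than this deprecated accessor
+            unloaded_keys = inspect(obj).unloaded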
+ + """ + return self.unloaded + + @property + def _unloaded_non_object(self) -> Set[str]: + return self.unloaded.intersection( + attr + for attr in self.manager + if self.manager[attr].impl.accepts_scalar_loader + ) + + def _modified_event( + self, + dict_: _InstanceDict, + attr: Optional[AttributeImpl], + previous: Any, + collection: bool = False, + is_userland: bool = False, + ) -> None: + if attr: + if not attr.send_modified_events: + return + if is_userland and attr.key not in dict_: + raise sa_exc.InvalidRequestError( + "Can't flag attribute '%s' modified; it's not present in " + "the object state" % attr.key + ) + if attr.key not in self.committed_state or is_userland: + if collection: + if TYPE_CHECKING: + assert is_collection_impl(attr) + if previous is NEVER_SET: + if attr.key in dict_: + previous = dict_[attr.key] + + if previous not in (None, NO_VALUE, NEVER_SET): + previous = attr.copy(previous) + self.committed_state[attr.key] = previous + + lkv = self._last_known_values + if lkv is not None and attr.key in lkv: + lkv[attr.key] = NO_VALUE + + # assert self._strong_obj is None or self.modified + + if (self.session_id and self._strong_obj is None) or not self.modified: + self.modified = True + instance_dict = self._instance_dict() + if instance_dict: + has_modified = bool(instance_dict._modified) + instance_dict._modified.add(self) + else: + has_modified = False + + # only create _strong_obj link if attached + # to a session + + inst = self.obj() + if self.session_id: + self._strong_obj = inst + + # if identity map already had modified objects, + # assume autobegin already occurred, else check + # for autobegin + if not has_modified: + # inline of autobegin, to ensure session transaction + # snapshot is established + try: + session = _sessions[self.session_id] + except KeyError: + pass + else: + if session._transaction is None: + session._autobegin_t() + + if inst is None and attr: + raise orm_exc.ObjectDereferencedError( + "Can't emit change event for attribute '%s' - " + "parent object of type %s has been garbage " + "collected." + % (self.manager[attr.key], base.state_class_str(self)) + ) + + def _commit(self, dict_: _InstanceDict, keys: Iterable[str]) -> None: + """Commit attributes. + + This is used by a partial-attribute load operation to mark committed + those attributes which were refreshed from the database. + + Attributes marked as "expired" can potentially remain "expired" after + this step if a value was not populated in state.dict. + + """ + for key in keys: + self.committed_state.pop(key, None) + + self.expired = False + + self.expired_attributes.difference_update( + set(keys).intersection(dict_) + ) + + # the per-keys commit removes object-level callables, + # while that of commit_all does not. it's not clear + # if this behavior has a clear rationale, however tests do + # ensure this is what it does. + if self.callables: + for key in ( + set(self.callables).intersection(keys).intersection(dict_) + ): + del self.callables[key] + + def _commit_all( + self, dict_: _InstanceDict, instance_dict: Optional[IdentityMap] = None + ) -> None: + """commit all attributes unconditionally. + + This is used after a flush() or a full load/refresh + to remove all pending state from the instance. + + - all attributes are marked as "committed" + - the "strong dirty reference" is removed + - the "modified" flag is set to False + - any "expired" markers for scalar attributes loaded are removed. 
+ - lazy load callables for objects / collections *stay* + + Attributes marked as "expired" can potentially remain + "expired" after this step if a value was not populated in state.dict. + + """ + self._commit_all_states([(self, dict_)], instance_dict) + + @classmethod + def _commit_all_states( + self, + iter_: Iterable[Tuple[InstanceState[Any], _InstanceDict]], + instance_dict: Optional[IdentityMap] = None, + ) -> None: + """Mass / highly inlined version of commit_all().""" + + for state, dict_ in iter_: + state_dict = state.__dict__ + + state.committed_state.clear() + + if "_pending_mutations" in state_dict: + del state_dict["_pending_mutations"] + + state.expired_attributes.difference_update(dict_) + + if instance_dict and state.modified: + instance_dict._modified.discard(state) + + state.modified = state.expired = False + state._strong_obj = None + + +class AttributeState: + """Provide an inspection interface corresponding + to a particular attribute on a particular mapped object. + + The :class:`.AttributeState` object is accessed + via the :attr:`.InstanceState.attrs` collection + of a particular :class:`.InstanceState`:: + + from sqlalchemy import inspect + + insp = inspect(some_mapped_object) + attr_state = insp.attrs.some_attribute + + """ + + __slots__ = ("state", "key") + + state: InstanceState[Any] + key: str + + def __init__(self, state: InstanceState[Any], key: str): + self.state = state + self.key = key + + @property + def loaded_value(self) -> Any: + """The current value of this attribute as loaded from the database. + + If the value has not been loaded, or is otherwise not present + in the object's dictionary, returns NO_VALUE. + + """ + return self.state.dict.get(self.key, NO_VALUE) + + @property + def value(self) -> Any: + """Return the value of this attribute. + + This operation is equivalent to accessing the object's + attribute directly or via ``getattr()``, and will fire + off any pending loader callables if needed. + + """ + return self.state.manager[self.key].__get__( + self.state.obj(), self.state.class_ + ) + + @property + def history(self) -> History: + """Return the current **pre-flush** change history for + this attribute, via the :class:`.History` interface. + + This method will **not** emit loader callables if the value of the + attribute is unloaded. + + .. note:: + + The attribute history system tracks changes on a **per flush + basis**. Each time the :class:`.Session` is flushed, the history + of each attribute is reset to empty. The :class:`.Session` by + default autoflushes each time a :class:`_query.Query` is invoked. + For + options on how to control this, see :ref:`session_flushing`. + + + .. seealso:: + + :meth:`.AttributeState.load_history` - retrieve history + using loader callables if the value is not locally present. + + :func:`.attributes.get_history` - underlying function + + """ + return self.state.get_history(self.key, PASSIVE_NO_INITIALIZE) + + def load_history(self) -> History: + """Return the current **pre-flush** change history for + this attribute, via the :class:`.History` interface. + + This method **will** emit loader callables if the value of the + attribute is unloaded. + + .. note:: + + The attribute history system tracks changes on a **per flush + basis**. Each time the :class:`.Session` is flushed, the history + of each attribute is reset to empty. The :class:`.Session` by + default autoflushes each time a :class:`_query.Query` is invoked. + For + options on how to control this, see :ref:`session_flushing`. + + .. 
seealso:: + + :attr:`.AttributeState.history` + + :func:`.attributes.get_history` - underlying function + + """ + return self.state.get_history(self.key, PASSIVE_OFF ^ INIT_OK) + + +class PendingCollection: + """A writable placeholder for an unloaded collection. + + Stores items appended to and removed from a collection that has not yet + been loaded. When the collection is loaded, the changes stored in + PendingCollection are applied to it to produce the final result. + + """ + + __slots__ = ("deleted_items", "added_items") + + deleted_items: util.IdentitySet + added_items: util.OrderedIdentitySet + + def __init__(self) -> None: + self.deleted_items = util.IdentitySet() + self.added_items = util.OrderedIdentitySet() + + def merge_with_history(self, history: History) -> History: + return history._merge(self.added_items, self.deleted_items) + + def append(self, value: Any) -> None: + if value in self.deleted_items: + self.deleted_items.remove(value) + else: + self.added_items.add(value) + + def remove(self, value: Any) -> None: + if value in self.added_items: + self.added_items.remove(value) + else: + self.deleted_items.add(value) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/state_changes.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/state_changes.py new file mode 100644 index 00000000..56963c6a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/state_changes.py @@ -0,0 +1,198 @@ +# orm/state_changes.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""State tracking utilities used by :class:`_orm.Session`. + +""" + +from __future__ import annotations + +import contextlib +from enum import Enum +from typing import Any +from typing import Callable +from typing import cast +from typing import Iterator +from typing import NoReturn +from typing import Optional +from typing import Tuple +from typing import TypeVar +from typing import Union + +from .. import exc as sa_exc +from .. import util +from ..util.typing import Literal + +_F = TypeVar("_F", bound=Callable[..., Any]) + + +class _StateChangeState(Enum): + pass + + +class _StateChangeStates(_StateChangeState): + ANY = 1 + NO_CHANGE = 2 + CHANGE_IN_PROGRESS = 3 + + +class _StateChange: + """Supplies state assertion decorators. + + The current use case is for the :class:`_orm.SessionTransaction` class. The + :class:`_StateChange` class itself is agnostic of the + :class:`_orm.SessionTransaction` class so could in theory be generalized + for other systems as well. + + """ + + _next_state: _StateChangeState = _StateChangeStates.ANY + _state: _StateChangeState = _StateChangeStates.NO_CHANGE + _current_fn: Optional[Callable[..., Any]] = None + + def _raise_for_prerequisite_state( + self, operation_name: str, state: _StateChangeState + ) -> NoReturn: + raise sa_exc.IllegalStateChangeError( + f"Can't run operation '{operation_name}()' when Session " + f"is in state {state!r}", + code="isce", + ) + + @classmethod + def declare_states( + cls, + prerequisite_states: Union[ + Literal[_StateChangeStates.ANY], Tuple[_StateChangeState, ...] + ], + moves_to: _StateChangeState, + ) -> Callable[[_F], _F]: + """Method decorator declaring valid states. + + :param prerequisite_states: sequence of acceptable prerequisite + states. 
Can be the single constant _State.ANY to indicate no
+         prerequisite state
+
+        :param moves_to: the expected state at the end of the method, assuming
+         no exceptions raised.  Can be the constant _State.NO_CHANGE to
+         indicate state should not change at the end of the method.
+
+        """
+        assert prerequisite_states, "no prerequisite states sent"
+        has_prerequisite_states = (
+            prerequisite_states is not _StateChangeStates.ANY
+        )
+
+        prerequisite_state_collection = cast(
+            "Tuple[_StateChangeState, ...]", prerequisite_states
+        )
+        expect_state_change = moves_to is not _StateChangeStates.NO_CHANGE
+
+        @util.decorator
+        def _go(fn: _F, self: Any, *arg: Any, **kw: Any) -> Any:
+            current_state = self._state
+
+            if (
+                has_prerequisite_states
+                and current_state not in prerequisite_state_collection
+            ):
+                self._raise_for_prerequisite_state(fn.__name__, current_state)
+
+            next_state = self._next_state
+            existing_fn = self._current_fn
+            expect_state = moves_to if expect_state_change else current_state
+
+            if (
+                # destination states are restricted
+                next_state is not _StateChangeStates.ANY
+                # method seeks to change state
+                and expect_state_change
+                # destination state incorrect
+                and next_state is not expect_state
+            ):
+                if existing_fn and next_state in (
+                    _StateChangeStates.NO_CHANGE,
+                    _StateChangeStates.CHANGE_IN_PROGRESS,
+                ):
+                    raise sa_exc.IllegalStateChangeError(
+                        f"Method '{fn.__name__}()' can't be called here; "
+                        f"method '{existing_fn.__name__}()' is already "
+                        f"in progress and this would cause an unexpected "
+                        f"state change to {moves_to!r}",
+                        code="isce",
+                    )
+                else:
+                    raise sa_exc.IllegalStateChangeError(
+                        f"Can't run operation '{fn.__name__}()' here; "
+                        f"will move to state {moves_to!r} where we are "
+                        f"expecting {next_state!r}",
+                        code="isce",
+                    )
+
+            self._current_fn = fn
+            self._next_state = _StateChangeStates.CHANGE_IN_PROGRESS
+            try:
+                ret_value = fn(self, *arg, **kw)
+            except:
+                raise
+            else:
+                if self._state is expect_state:
+                    return ret_value
+
+                if self._state is current_state:
+                    raise sa_exc.IllegalStateChangeError(
+                        f"Method '{fn.__name__}()' failed to "
+                        "change state "
+                        f"to {moves_to!r} as expected",
+                        code="isce",
+                    )
+                elif existing_fn:
+                    raise sa_exc.IllegalStateChangeError(
+                        f"While method '{existing_fn.__name__}()' was "
+                        "running, "
+                        f"method '{fn.__name__}()' caused an "
+                        "unexpected "
+                        f"state change to {self._state!r}",
+                        code="isce",
+                    )
+                else:
+                    raise sa_exc.IllegalStateChangeError(
+                        f"Method '{fn.__name__}()' caused an unexpected "
+                        f"state change to {self._state!r}",
+                        code="isce",
+                    )
+
+            finally:
+                self._next_state = next_state
+                self._current_fn = existing_fn
+
+        return _go
+
+    @contextlib.contextmanager
+    def _expect_state(self, expected: _StateChangeState) -> Iterator[Any]:
+        """Called within a method that changes states.
+
+        The method must also use the ``@declare_states()`` decorator.
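+
+        An illustrative sketch of the intended pattern (the state names
+        and method names here are hypothetical)::
+
+            @_StateChange.declare_states((_STATE_A,), _STATE_B)
+            def transition(self):
+                with self._expect_state(_STATE_B):
+                    self._inner_operation_that_moves_to_state_b()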
+ + """ + assert self._next_state is _StateChangeStates.CHANGE_IN_PROGRESS, ( + "Unexpected call to _expect_state outside of " + "state-changing method" + ) + + self._next_state = expected + try: + yield + except: + raise + else: + if self._state is not expected: + raise sa_exc.IllegalStateChangeError( + f"Unexpected state change to {self._state!r}", code="isce" + ) + finally: + self._next_state = _StateChangeStates.CHANGE_IN_PROGRESS diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/strategies.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/strategies.py new file mode 100644 index 00000000..790ce28e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/strategies.py @@ -0,0 +1,3402 @@ +# orm/strategies.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +"""sqlalchemy.orm.interfaces.LoaderStrategy + implementations, and related MapperOptions.""" + +from __future__ import annotations + +import collections +import itertools +from typing import Any +from typing import Dict +from typing import Tuple +from typing import TYPE_CHECKING + +from . import attributes +from . import exc as orm_exc +from . import interfaces +from . import loading +from . import path_registry +from . import properties +from . import query +from . import relationships +from . import unitofwork +from . import util as orm_util +from .base import _DEFER_FOR_STATE +from .base import _RAISE_FOR_STATE +from .base import _SET_DEFERRED_EXPIRED +from .base import ATTR_WAS_SET +from .base import LoaderCallableStatus +from .base import PASSIVE_OFF +from .base import PassiveFlag +from .context import _column_descriptions +from .context import ORMCompileState +from .context import ORMSelectCompileState +from .context import QueryContext +from .interfaces import LoaderStrategy +from .interfaces import StrategizedProperty +from .session import _state_session +from .state import InstanceState +from .strategy_options import Load +from .util import _none_set +from .util import AliasedClass +from .. import event +from .. import exc as sa_exc +from .. import inspect +from .. import log +from .. import sql +from .. 
import util +from ..sql import util as sql_util +from ..sql import visitors +from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL +from ..sql.selectable import Select + +if TYPE_CHECKING: + from .relationships import RelationshipProperty + from ..sql.elements import ColumnElement + + +def _register_attribute( + prop, + mapper, + useobject, + compare_function=None, + typecallable=None, + callable_=None, + proxy_property=None, + active_history=False, + impl_class=None, + **kw, +): + listen_hooks = [] + + uselist = useobject and prop.uselist + + if useobject and prop.single_parent: + listen_hooks.append(single_parent_validator) + + if prop.key in prop.parent.validators: + fn, opts = prop.parent.validators[prop.key] + listen_hooks.append( + lambda desc, prop: orm_util._validator_events( + desc, prop.key, fn, **opts + ) + ) + + if useobject: + listen_hooks.append(unitofwork.track_cascade_events) + + # need to assemble backref listeners + # after the singleparentvalidator, mapper validator + if useobject: + backref = prop.back_populates + if backref and prop._effective_sync_backref: + listen_hooks.append( + lambda desc, prop: attributes.backref_listeners( + desc, backref, uselist + ) + ) + + # a single MapperProperty is shared down a class inheritance + # hierarchy, so we set up attribute instrumentation and backref event + # for each mapper down the hierarchy. + + # typically, "mapper" is the same as prop.parent, due to the way + # the configure_mappers() process runs, however this is not strongly + # enforced, and in the case of a second configure_mappers() run the + # mapper here might not be prop.parent; also, a subclass mapper may + # be called here before a superclass mapper. That is, can't depend + # on mappers not already being set up so we have to check each one. + + for m in mapper.self_and_descendants: + if prop is m._props.get( + prop.key + ) and not m.class_manager._attr_has_impl(prop.key): + desc = attributes.register_attribute_impl( + m.class_, + prop.key, + parent_token=prop, + uselist=uselist, + compare_function=compare_function, + useobject=useobject, + trackparent=useobject + and ( + prop.single_parent + or prop.direction is interfaces.ONETOMANY + ), + typecallable=typecallable, + callable_=callable_, + active_history=active_history, + impl_class=impl_class, + send_modified_events=not useobject or not prop.viewonly, + doc=prop.doc, + **kw, + ) + + for hook in listen_hooks: + hook(desc, prop) + + +@properties.ColumnProperty.strategy_for(instrument=False, deferred=False) +class UninstrumentedColumnLoader(LoaderStrategy): + """Represent a non-instrumented MapperProperty. + + The polymorphic_on argument of mapper() often results in this, + if the argument is against the with_polymorphic selectable. 
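+
+    An illustrative sketch of a mapping that can produce this (the
+    names here are hypothetical)::
+
+        pjoin = polymorphic_union(
+            {"manager": managers_table, "engineer": engineers_table},
+            "type",
+        )
+        mapper_registry.map_imperatively(
+            Employee,
+            employees_table,
+            with_polymorphic=("*", pjoin),
+            polymorphic_on=pjoin.c.type,
+        )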
+ + """ + + __slots__ = ("columns",) + + def __init__(self, parent, strategy_key): + super().__init__(parent, strategy_key) + self.columns = self.parent_property.columns + + def setup_query( + self, + compile_state, + query_entity, + path, + loadopt, + adapter, + column_collection=None, + **kwargs, + ): + for c in self.columns: + if adapter: + c = adapter.columns[c] + compile_state._append_dedupe_col_collection(c, column_collection) + + def create_row_processor( + self, + context, + query_entity, + path, + loadopt, + mapper, + result, + adapter, + populators, + ): + pass + + +@log.class_logger +@properties.ColumnProperty.strategy_for(instrument=True, deferred=False) +class ColumnLoader(LoaderStrategy): + """Provide loading behavior for a :class:`.ColumnProperty`.""" + + __slots__ = "columns", "is_composite" + + def __init__(self, parent, strategy_key): + super().__init__(parent, strategy_key) + self.columns = self.parent_property.columns + self.is_composite = hasattr(self.parent_property, "composite_class") + + def setup_query( + self, + compile_state, + query_entity, + path, + loadopt, + adapter, + column_collection, + memoized_populators, + check_for_adapt=False, + **kwargs, + ): + for c in self.columns: + if adapter: + if check_for_adapt: + c = adapter.adapt_check_present(c) + if c is None: + return + else: + c = adapter.columns[c] + + compile_state._append_dedupe_col_collection(c, column_collection) + + fetch = self.columns[0] + if adapter: + fetch = adapter.columns[fetch] + if fetch is None: + # None happens here only for dml bulk_persistence cases + # when context.DMLReturningColFilter is used + return + + memoized_populators[self.parent_property] = fetch + + def init_class_attribute(self, mapper): + self.is_class_level = True + coltype = self.columns[0].type + # TODO: check all columns ? check for foreign key as well? + active_history = ( + self.parent_property.active_history + or self.columns[0].primary_key + or ( + mapper.version_id_col is not None + and mapper._columntoproperty.get(mapper.version_id_col, None) + is self.parent_property + ) + ) + + _register_attribute( + self.parent_property, + mapper, + useobject=False, + compare_function=coltype.compare_values, + active_history=active_history, + ) + + def create_row_processor( + self, + context, + query_entity, + path, + loadopt, + mapper, + result, + adapter, + populators, + ): + # look through list of columns represented here + # to see which, if any, is present in the row. + + for col in self.columns: + if adapter: + col = adapter.columns[col] + getter = result._getter(col, False) + if getter: + populators["quick"].append((self.key, getter)) + break + else: + populators["expire"].append((self.key, True)) + + +@log.class_logger +@properties.ColumnProperty.strategy_for(query_expression=True) +class ExpressionColumnLoader(ColumnLoader): + def __init__(self, parent, strategy_key): + super().__init__(parent, strategy_key) + + # compare to the "default" expression that is mapped in + # the column. If it's sql.null, we don't need to render + # unless an expr is passed in the options. 
+ null = sql.null().label(None) + self._have_default_expression = any( + not c.compare(null) for c in self.parent_property.columns + ) + + def setup_query( + self, + compile_state, + query_entity, + path, + loadopt, + adapter, + column_collection, + memoized_populators, + **kwargs, + ): + columns = None + if loadopt and loadopt._extra_criteria: + columns = loadopt._extra_criteria + + elif self._have_default_expression: + columns = self.parent_property.columns + + if columns is None: + return + + for c in columns: + if adapter: + c = adapter.columns[c] + compile_state._append_dedupe_col_collection(c, column_collection) + + fetch = columns[0] + if adapter: + fetch = adapter.columns[fetch] + if fetch is None: + # None is not expected to be the result of any + # adapter implementation here, however there may be theoretical + # usages of returning() with context.DMLReturningColFilter + return + + memoized_populators[self.parent_property] = fetch + + def create_row_processor( + self, + context, + query_entity, + path, + loadopt, + mapper, + result, + adapter, + populators, + ): + # look through list of columns represented here + # to see which, if any, is present in the row. + if loadopt and loadopt._extra_criteria: + columns = loadopt._extra_criteria + + for col in columns: + if adapter: + col = adapter.columns[col] + getter = result._getter(col, False) + if getter: + populators["quick"].append((self.key, getter)) + break + else: + populators["expire"].append((self.key, True)) + + def init_class_attribute(self, mapper): + self.is_class_level = True + + _register_attribute( + self.parent_property, + mapper, + useobject=False, + compare_function=self.columns[0].type.compare_values, + accepts_scalar_loader=False, + ) + + +@log.class_logger +@properties.ColumnProperty.strategy_for(deferred=True, instrument=True) +@properties.ColumnProperty.strategy_for( + deferred=True, instrument=True, raiseload=True +) +@properties.ColumnProperty.strategy_for(do_nothing=True) +class DeferredColumnLoader(LoaderStrategy): + """Provide loading behavior for a deferred :class:`.ColumnProperty`.""" + + __slots__ = "columns", "group", "raiseload" + + def __init__(self, parent, strategy_key): + super().__init__(parent, strategy_key) + if hasattr(self.parent_property, "composite_class"): + raise NotImplementedError( + "Deferred loading for composite types not implemented yet" + ) + self.raiseload = self.strategy_opts.get("raiseload", False) + self.columns = self.parent_property.columns + self.group = self.parent_property.group + + def create_row_processor( + self, + context, + query_entity, + path, + loadopt, + mapper, + result, + adapter, + populators, + ): + # for a DeferredColumnLoader, this method is only used during a + # "row processor only" query; see test_deferred.py -> + # tests with "rowproc_only" in their name. As of the 1.0 series, + # loading._instance_processor doesn't use a "row processing" function + # to populate columns, instead it uses data in the "populators" + # dictionary. Normally, the DeferredColumnLoader.setup_query() + # sets up that data in the "memoized_populators" dictionary + # and "create_row_processor()" here is never invoked. 
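+        # below: a refresh operation that names this attribute in
+        # _only_load_props loads it undeferred; otherwise install the
+        # deferred (or raiseload) callable on the instance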
+ + if ( + context.refresh_state + and context.query._compile_options._only_load_props + and self.key in context.query._compile_options._only_load_props + ): + self.parent_property._get_strategy( + (("deferred", False), ("instrument", True)) + ).create_row_processor( + context, + query_entity, + path, + loadopt, + mapper, + result, + adapter, + populators, + ) + + elif not self.is_class_level: + if self.raiseload: + set_deferred_for_local_state = ( + self.parent_property._raise_column_loader + ) + else: + set_deferred_for_local_state = ( + self.parent_property._deferred_column_loader + ) + populators["new"].append((self.key, set_deferred_for_local_state)) + else: + populators["expire"].append((self.key, False)) + + def init_class_attribute(self, mapper): + self.is_class_level = True + + _register_attribute( + self.parent_property, + mapper, + useobject=False, + compare_function=self.columns[0].type.compare_values, + callable_=self._load_for_state, + load_on_unexpire=False, + ) + + def setup_query( + self, + compile_state, + query_entity, + path, + loadopt, + adapter, + column_collection, + memoized_populators, + only_load_props=None, + **kw, + ): + if ( + ( + compile_state.compile_options._render_for_subquery + and self.parent_property._renders_in_subqueries + ) + or ( + loadopt + and set(self.columns).intersection( + self.parent._should_undefer_in_wildcard + ) + ) + or ( + loadopt + and self.group + and loadopt.local_opts.get( + "undefer_group_%s" % self.group, False + ) + ) + or (only_load_props and self.key in only_load_props) + ): + self.parent_property._get_strategy( + (("deferred", False), ("instrument", True)) + ).setup_query( + compile_state, + query_entity, + path, + loadopt, + adapter, + column_collection, + memoized_populators, + **kw, + ) + elif self.is_class_level: + memoized_populators[self.parent_property] = _SET_DEFERRED_EXPIRED + elif not self.raiseload: + memoized_populators[self.parent_property] = _DEFER_FOR_STATE + else: + memoized_populators[self.parent_property] = _RAISE_FOR_STATE + + def _load_for_state(self, state, passive): + if not state.key: + return LoaderCallableStatus.ATTR_EMPTY + + if not passive & PassiveFlag.SQL_OK: + return LoaderCallableStatus.PASSIVE_NO_RESULT + + localparent = state.manager.mapper + + if self.group: + toload = [ + p.key + for p in localparent.iterate_properties + if isinstance(p, StrategizedProperty) + and isinstance(p.strategy, DeferredColumnLoader) + and p.group == self.group + ] + else: + toload = [self.key] + + # narrow the keys down to just those which have no history + group = [k for k in toload if k in state.unmodified] + + session = _state_session(state) + if session is None: + raise orm_exc.DetachedInstanceError( + "Parent instance %s is not bound to a Session; " + "deferred load operation of attribute '%s' cannot proceed" + % (orm_util.state_str(state), self.key) + ) + + if self.raiseload: + self._invoke_raise_load(state, passive, "raise") + + loading.load_scalar_attributes( + state.mapper, state, set(group), PASSIVE_OFF + ) + + return LoaderCallableStatus.ATTR_WAS_SET + + def _invoke_raise_load(self, state, passive, lazy): + raise sa_exc.InvalidRequestError( + "'%s' is not available due to raiseload=True" % (self,) + ) + + +class LoadDeferredColumns: + """serializable loader object used by DeferredColumnLoader""" + + def __init__(self, key: str, raiseload: bool = False): + self.key = key + self.raiseload = raiseload + + def __call__(self, state, passive=attributes.PASSIVE_OFF): + key = self.key + + localparent = 
state.manager.mapper
+ prop = localparent._props[key]
+ if self.raiseload:
+ strategy_key = (
+ ("deferred", True),
+ ("instrument", True),
+ ("raiseload", True),
+ )
+ else:
+ strategy_key = (("deferred", True), ("instrument", True))
+ strategy = prop._get_strategy(strategy_key)
+ return strategy._load_for_state(state, passive)
+
+
+class AbstractRelationshipLoader(LoaderStrategy):
+ """LoaderStrategies which deal with related objects."""
+
+ __slots__ = "mapper", "target", "uselist", "entity"
+
+ def __init__(self, parent, strategy_key):
+ super().__init__(parent, strategy_key)
+ self.mapper = self.parent_property.mapper
+ self.entity = self.parent_property.entity
+ self.target = self.parent_property.target
+ self.uselist = self.parent_property.uselist
+
+ def _immediateload_create_row_processor(
+ self,
+ context,
+ query_entity,
+ path,
+ loadopt,
+ mapper,
+ result,
+ adapter,
+ populators,
+ ):
+ return self.parent_property._get_strategy(
+ (("lazy", "immediate"),)
+ ).create_row_processor(
+ context,
+ query_entity,
+ path,
+ loadopt,
+ mapper,
+ result,
+ adapter,
+ populators,
+ )
+
+
+@log.class_logger
+@relationships.RelationshipProperty.strategy_for(do_nothing=True)
+class DoNothingLoader(LoaderStrategy):
+ """Relationship loader that makes no change to the object's state.
+
+ Compared to NoLoader, this loader does not initialize the
+ collection/attribute to empty/none; the usual default LazyLoader will
+ take effect.
+
+ """
+
+
+@log.class_logger
+@relationships.RelationshipProperty.strategy_for(lazy="noload")
+@relationships.RelationshipProperty.strategy_for(lazy=None)
+class NoLoader(AbstractRelationshipLoader):
+ """Provide loading behavior for a :class:`.Relationship`
+ with "lazy=None".
+
+ """
+
+ __slots__ = ()
+
+ def init_class_attribute(self, mapper):
+ self.is_class_level = True
+
+ _register_attribute(
+ self.parent_property,
+ mapper,
+ useobject=True,
+ typecallable=self.parent_property.collection_class,
+ )
+
+ def create_row_processor(
+ self,
+ context,
+ query_entity,
+ path,
+ loadopt,
+ mapper,
+ result,
+ adapter,
+ populators,
+ ):
+ def invoke_no_load(state, dict_, row):
+ if self.uselist:
+ attributes.init_state_collection(state, dict_, self.key)
+ else:
+ dict_[self.key] = None
+
+ populators["new"].append((self.key, invoke_no_load))
+
+
+@log.class_logger
+@relationships.RelationshipProperty.strategy_for(lazy=True)
+@relationships.RelationshipProperty.strategy_for(lazy="select")
+@relationships.RelationshipProperty.strategy_for(lazy="raise")
+@relationships.RelationshipProperty.strategy_for(lazy="raise_on_sql")
+@relationships.RelationshipProperty.strategy_for(lazy="baked_select")
+class LazyLoader(
+ AbstractRelationshipLoader, util.MemoizedSlots, log.Identified
+):
+ """Provide loading behavior for a :class:`.Relationship`
+ with "lazy=True", that is, loads when first accessed.
+
+ """
+
+ __slots__ = (
+ "_lazywhere",
+ "_rev_lazywhere",
+ "_lazyload_reverse_option",
+ "_order_by",
+ "use_get",
+ "is_aliased_class",
+ "_bind_to_col",
+ "_equated_columns",
+ "_rev_bind_to_col",
+ "_rev_equated_columns",
+ "_simple_lazy_clause",
+ "_raise_always",
+ "_raise_on_sql",
+ )
+
+ _lazywhere: ColumnElement[bool]
+ _bind_to_col: Dict[str, ColumnElement[Any]]
+ _rev_lazywhere: ColumnElement[bool]
+ _rev_bind_to_col: Dict[str, ColumnElement[Any]]
+
+ parent_property: RelationshipProperty[Any]
+
+ def __init__(
+ self, parent: RelationshipProperty[Any], strategy_key: Tuple[Any, ...]
+ ): + super().__init__(parent, strategy_key) + self._raise_always = self.strategy_opts["lazy"] == "raise" + self._raise_on_sql = self.strategy_opts["lazy"] == "raise_on_sql" + + self.is_aliased_class = inspect(self.entity).is_aliased_class + + join_condition = self.parent_property._join_condition + ( + self._lazywhere, + self._bind_to_col, + self._equated_columns, + ) = join_condition.create_lazy_clause() + + ( + self._rev_lazywhere, + self._rev_bind_to_col, + self._rev_equated_columns, + ) = join_condition.create_lazy_clause(reverse_direction=True) + + if self.parent_property.order_by: + self._order_by = [ + sql_util._deep_annotate(elem, {"_orm_adapt": True}) + for elem in util.to_list(self.parent_property.order_by) + ] + else: + self._order_by = None + + self.logger.info("%s lazy loading clause %s", self, self._lazywhere) + + # determine if our "lazywhere" clause is the same as the mapper's + # get() clause. then we can just use mapper.get() + # + # TODO: the "not self.uselist" can be taken out entirely; a m2o + # load that populates for a list (very unusual, but is possible with + # the API) can still set for "None" and the attribute system will + # populate as an empty list. + self.use_get = ( + not self.is_aliased_class + and not self.uselist + and self.entity._get_clause[0].compare( + self._lazywhere, + use_proxies=True, + compare_keys=False, + equivalents=self.mapper._equivalent_columns, + ) + ) + + if self.use_get: + for col in list(self._equated_columns): + if col in self.mapper._equivalent_columns: + for c in self.mapper._equivalent_columns[col]: + self._equated_columns[c] = self._equated_columns[col] + + self.logger.info( + "%s will use Session.get() to optimize instance loads", self + ) + + def init_class_attribute(self, mapper): + self.is_class_level = True + + _legacy_inactive_history_style = ( + self.parent_property._legacy_inactive_history_style + ) + + if self.parent_property.active_history: + active_history = True + _deferred_history = False + + elif ( + self.parent_property.direction is not interfaces.MANYTOONE + or not self.use_get + ): + if _legacy_inactive_history_style: + active_history = True + _deferred_history = False + else: + active_history = False + _deferred_history = True + else: + active_history = _deferred_history = False + + _register_attribute( + self.parent_property, + mapper, + useobject=True, + callable_=self._load_for_state, + typecallable=self.parent_property.collection_class, + active_history=active_history, + _deferred_history=_deferred_history, + ) + + def _memoized_attr__simple_lazy_clause(self): + lazywhere = sql_util._deep_annotate( + self._lazywhere, {"_orm_adapt": True} + ) + + criterion, bind_to_col = (lazywhere, self._bind_to_col) + + params = [] + + def visit_bindparam(bindparam): + bindparam.unique = False + + visitors.traverse(criterion, {}, {"bindparam": visit_bindparam}) + + def visit_bindparam(bindparam): + if bindparam._identifying_key in bind_to_col: + params.append( + ( + bindparam.key, + bind_to_col[bindparam._identifying_key], + None, + ) + ) + elif bindparam.callable is None: + params.append((bindparam.key, None, bindparam.value)) + + criterion = visitors.cloned_traverse( + criterion, {}, {"bindparam": visit_bindparam} + ) + + return criterion, params + + def _generate_lazy_clause(self, state, passive): + criterion, param_keys = self._simple_lazy_clause + + if state is None: + return sql_util.adapt_criterion_to_null( + criterion, [key for key, ident, value in param_keys] + ) + + mapper = self.parent_property.parent + + o = 
state.obj() # strong ref + dict_ = attributes.instance_dict(o) + + if passive & PassiveFlag.INIT_OK: + passive ^= PassiveFlag.INIT_OK + + params = {} + for key, ident, value in param_keys: + if ident is not None: + if passive and passive & PassiveFlag.LOAD_AGAINST_COMMITTED: + value = mapper._get_committed_state_attr_by_column( + state, dict_, ident, passive + ) + else: + value = mapper._get_state_attr_by_column( + state, dict_, ident, passive + ) + + params[key] = value + + return criterion, params + + def _invoke_raise_load(self, state, passive, lazy): + raise sa_exc.InvalidRequestError( + "'%s' is not available due to lazy='%s'" % (self, lazy) + ) + + def _load_for_state( + self, + state, + passive, + loadopt=None, + extra_criteria=(), + extra_options=(), + alternate_effective_path=None, + execution_options=util.EMPTY_DICT, + ): + if not state.key and ( + ( + not self.parent_property.load_on_pending + and not state._load_pending + ) + or not state.session_id + ): + return LoaderCallableStatus.ATTR_EMPTY + + pending = not state.key + primary_key_identity = None + + use_get = self.use_get and (not loadopt or not loadopt._extra_criteria) + + if (not passive & PassiveFlag.SQL_OK and not use_get) or ( + not passive & attributes.NON_PERSISTENT_OK and pending + ): + return LoaderCallableStatus.PASSIVE_NO_RESULT + + if ( + # we were given lazy="raise" + self._raise_always + # the no_raise history-related flag was not passed + and not passive & PassiveFlag.NO_RAISE + and ( + # if we are use_get and related_object_ok is disabled, + # which means we are at most looking in the identity map + # for history purposes or otherwise returning + # PASSIVE_NO_RESULT, don't raise. This is also a + # history-related flag + not use_get + or passive & PassiveFlag.RELATED_OBJECT_OK + ) + ): + self._invoke_raise_load(state, passive, "raise") + + session = _state_session(state) + if not session: + if passive & PassiveFlag.NO_RAISE: + return LoaderCallableStatus.PASSIVE_NO_RESULT + + raise orm_exc.DetachedInstanceError( + "Parent instance %s is not bound to a Session; " + "lazy load operation of attribute '%s' cannot proceed" + % (orm_util.state_str(state), self.key) + ) + + # if we have a simple primary key load, check the + # identity map without generating a Query at all + if use_get: + primary_key_identity = self._get_ident_for_use_get( + session, state, passive + ) + if LoaderCallableStatus.PASSIVE_NO_RESULT in primary_key_identity: + return LoaderCallableStatus.PASSIVE_NO_RESULT + elif LoaderCallableStatus.NEVER_SET in primary_key_identity: + return LoaderCallableStatus.NEVER_SET + + if _none_set.issuperset(primary_key_identity): + return None + + if ( + self.key in state.dict + and not passive & PassiveFlag.DEFERRED_HISTORY_LOAD + ): + return LoaderCallableStatus.ATTR_WAS_SET + + # look for this identity in the identity map. Delegate to the + # Query class in use, as it may have special rules for how it + # does this, including how it decides what the correct + # identity_token would be for this identity. 
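+ # (The lookup below may return an instance, None, or a sentinel such as
+ # PASSIVE_CLASS_MISMATCH; only when nothing usable is found and SQL is
+ # permitted do we fall through to _emit_lazyload() below.)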
+ + instance = session._identity_lookup( + self.entity, + primary_key_identity, + passive=passive, + lazy_loaded_from=state, + ) + + if instance is not None: + if instance is LoaderCallableStatus.PASSIVE_CLASS_MISMATCH: + return None + else: + return instance + elif ( + not passive & PassiveFlag.SQL_OK + or not passive & PassiveFlag.RELATED_OBJECT_OK + ): + return LoaderCallableStatus.PASSIVE_NO_RESULT + + return self._emit_lazyload( + session, + state, + primary_key_identity, + passive, + loadopt, + extra_criteria, + extra_options, + alternate_effective_path, + execution_options, + ) + + def _get_ident_for_use_get(self, session, state, passive): + instance_mapper = state.manager.mapper + + if passive & PassiveFlag.LOAD_AGAINST_COMMITTED: + get_attr = instance_mapper._get_committed_state_attr_by_column + else: + get_attr = instance_mapper._get_state_attr_by_column + + dict_ = state.dict + + return [ + get_attr(state, dict_, self._equated_columns[pk], passive=passive) + for pk in self.mapper.primary_key + ] + + @util.preload_module("sqlalchemy.orm.strategy_options") + def _emit_lazyload( + self, + session, + state, + primary_key_identity, + passive, + loadopt, + extra_criteria, + extra_options, + alternate_effective_path, + execution_options, + ): + strategy_options = util.preloaded.orm_strategy_options + + clauseelement = self.entity.__clause_element__() + stmt = Select._create_raw_select( + _raw_columns=[clauseelement], + _propagate_attrs=clauseelement._propagate_attrs, + _label_style=LABEL_STYLE_TABLENAME_PLUS_COL, + _compile_options=ORMCompileState.default_compile_options, + ) + load_options = QueryContext.default_load_options + + load_options += { + "_invoke_all_eagers": False, + "_lazy_loaded_from": state, + } + + if self.parent_property.secondary is not None: + stmt = stmt.select_from( + self.mapper, self.parent_property.secondary + ) + + pending = not state.key + + # don't autoflush on pending + if pending or passive & attributes.NO_AUTOFLUSH: + stmt._execution_options = util.immutabledict({"autoflush": False}) + + use_get = self.use_get + + if state.load_options or (loadopt and loadopt._extra_criteria): + if alternate_effective_path is None: + effective_path = state.load_path[self.parent_property] + else: + effective_path = alternate_effective_path[self.parent_property] + + opts = state.load_options + + if loadopt and loadopt._extra_criteria: + use_get = False + opts += ( + orm_util.LoaderCriteriaOption(self.entity, extra_criteria), + ) + + stmt._with_options = opts + elif alternate_effective_path is None: + # this path is used if there are not already any options + # in the query, but an event may want to add them + effective_path = state.mapper._path_registry[self.parent_property] + else: + # added by immediateloader + effective_path = alternate_effective_path[self.parent_property] + + if extra_options: + stmt._with_options += extra_options + + stmt._compile_options += {"_current_path": effective_path} + + if use_get: + if self._raise_on_sql and not passive & PassiveFlag.NO_RAISE: + self._invoke_raise_load(state, passive, "raise_on_sql") + + return loading.load_on_pk_identity( + session, + stmt, + primary_key_identity, + load_options=load_options, + execution_options=execution_options, + ) + + if self._order_by: + stmt._order_by_clauses = self._order_by + + def _lazyload_reverse(compile_context): + for rev in self.parent_property._reverse_property: + # reverse props that are MANYTOONE are loading *this* + # object from get(), so don't need to eager out to those. 
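+ # (i.e. for a bidirectional relationship, the many-to-one backref
+ # pointing at the object being loaded is switched to lazyload() below,
+ # rather than eagerly loading back into the parent we already have.)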
+ if ( + rev.direction is interfaces.MANYTOONE + and rev._use_get + and not isinstance(rev.strategy, LazyLoader) + ): + strategy_options.Load._construct_for_existing_path( + compile_context.compile_options._current_path[ + rev.parent + ] + ).lazyload(rev).process_compile_state(compile_context) + + stmt._with_context_options += ( + (_lazyload_reverse, self.parent_property), + ) + + lazy_clause, params = self._generate_lazy_clause(state, passive) + + if execution_options: + execution_options = util.EMPTY_DICT.merge_with( + execution_options, + { + "_sa_orm_load_options": load_options, + }, + ) + else: + execution_options = { + "_sa_orm_load_options": load_options, + } + + if ( + self.key in state.dict + and not passive & PassiveFlag.DEFERRED_HISTORY_LOAD + ): + return LoaderCallableStatus.ATTR_WAS_SET + + if pending: + if util.has_intersection(orm_util._none_set, params.values()): + return None + + elif util.has_intersection(orm_util._never_set, params.values()): + return None + + if self._raise_on_sql and not passive & PassiveFlag.NO_RAISE: + self._invoke_raise_load(state, passive, "raise_on_sql") + + stmt._where_criteria = (lazy_clause,) + + result = session.execute( + stmt, params, execution_options=execution_options + ) + + result = result.unique().scalars().all() + + if self.uselist: + return result + else: + l = len(result) + if l: + if l > 1: + util.warn( + "Multiple rows returned with " + "uselist=False for lazily-loaded attribute '%s' " + % self.parent_property + ) + + return result[0] + else: + return None + + def create_row_processor( + self, + context, + query_entity, + path, + loadopt, + mapper, + result, + adapter, + populators, + ): + key = self.key + + if ( + context.load_options._is_user_refresh + and context.query._compile_options._only_load_props + and self.key in context.query._compile_options._only_load_props + ): + return self._immediateload_create_row_processor( + context, + query_entity, + path, + loadopt, + mapper, + result, + adapter, + populators, + ) + + if not self.is_class_level or (loadopt and loadopt._extra_criteria): + # we are not the primary manager for this attribute + # on this class - set up a + # per-instance lazyloader, which will override the + # class-level behavior. + # this currently only happens when using a + # "lazyload" option on a "no load" + # attribute - "eager" attributes always have a + # class-level lazyloader installed. + set_lazy_callable = ( + InstanceState._instance_level_callable_processor + )( + mapper.class_manager, + LoadLazyAttribute( + key, + self, + loadopt, + ( + loadopt._generate_extra_criteria(context) + if loadopt._extra_criteria + else None + ), + ), + key, + ) + + populators["new"].append((self.key, set_lazy_callable)) + elif context.populate_existing or mapper.always_refresh: + + def reset_for_lazy_callable(state, dict_, row): + # we are the primary manager for this attribute on + # this class - reset its + # per-instance attribute state, so that the class-level + # lazy loader is + # executed when next referenced on this instance. + # this is needed in + # populate_existing() types of scenarios to reset + # any existing state. + state._reset(dict_, key) + + populators["new"].append((self.key, reset_for_lazy_callable)) + + +class LoadLazyAttribute: + """semi-serializable loader object used by LazyLoader + + Historically, this object would be carried along with instances that + needed to run lazyloaders, so it had to be serializable to support + cached instances. 
+ + this is no longer a general requirement, and the case where this object + is used is exactly the case where we can't really serialize easily, + which is when extra criteria in the loader option is present. + + We can't reliably serialize that as it refers to mapped entities and + AliasedClass objects that are local to the current process, which would + need to be matched up on deserialize e.g. the sqlalchemy.ext.serializer + approach. + + """ + + def __init__(self, key, initiating_strategy, loadopt, extra_criteria): + self.key = key + self.strategy_key = initiating_strategy.strategy_key + self.loadopt = loadopt + self.extra_criteria = extra_criteria + + def __getstate__(self): + if self.extra_criteria is not None: + util.warn( + "Can't reliably serialize a lazyload() option that " + "contains additional criteria; please use eager loading " + "for this case" + ) + return { + "key": self.key, + "strategy_key": self.strategy_key, + "loadopt": self.loadopt, + "extra_criteria": (), + } + + def __call__(self, state, passive=attributes.PASSIVE_OFF): + key = self.key + instance_mapper = state.manager.mapper + prop = instance_mapper._props[key] + strategy = prop._strategies[self.strategy_key] + + return strategy._load_for_state( + state, + passive, + loadopt=self.loadopt, + extra_criteria=self.extra_criteria, + ) + + +class PostLoader(AbstractRelationshipLoader): + """A relationship loader that emits a second SELECT statement.""" + + __slots__ = () + + def _setup_for_recursion(self, context, path, loadopt, join_depth=None): + effective_path = ( + context.compile_state.current_path or orm_util.PathRegistry.root + ) + path + + top_level_context = context._get_top_level_context() + execution_options = util.immutabledict( + {"sa_top_level_orm_context": top_level_context} + ) + + if loadopt: + recursion_depth = loadopt.local_opts.get("recursion_depth", None) + unlimited_recursion = recursion_depth == -1 + else: + recursion_depth = None + unlimited_recursion = False + + if recursion_depth is not None: + if not self.parent_property._is_self_referential: + raise sa_exc.InvalidRequestError( + f"recursion_depth option on relationship " + f"{self.parent_property} not valid for " + "non-self-referential relationship" + ) + recursion_depth = context.execution_options.get( + f"_recursion_depth_{id(self)}", recursion_depth + ) + + if not unlimited_recursion and recursion_depth < 0: + return ( + effective_path, + False, + execution_options, + recursion_depth, + ) + + if not unlimited_recursion: + execution_options = execution_options.union( + { + f"_recursion_depth_{id(self)}": recursion_depth - 1, + } + ) + + if loading.PostLoad.path_exists( + context, effective_path, self.parent_property + ): + return effective_path, False, execution_options, recursion_depth + + path_w_prop = path[self.parent_property] + effective_path_w_prop = effective_path[self.parent_property] + + if not path_w_prop.contains(context.attributes, "loader"): + if join_depth: + if effective_path_w_prop.length / 2 > join_depth: + return ( + effective_path, + False, + execution_options, + recursion_depth, + ) + elif effective_path_w_prop.contains_mapper(self.mapper): + return ( + effective_path, + False, + execution_options, + recursion_depth, + ) + + return effective_path, True, execution_options, recursion_depth + + +@relationships.RelationshipProperty.strategy_for(lazy="immediate") +class ImmediateLoader(PostLoader): + __slots__ = ("join_depth",) + + def __init__(self, parent, strategy_key): + super().__init__(parent, strategy_key) + 
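+ # join_depth mirrors relationship(join_depth=...); it is passed to
+ # _setup_for_recursion() in create_row_processor() below to cut off
+ # self-referential immediate loads.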
self.join_depth = self.parent_property.join_depth
+
+ def init_class_attribute(self, mapper):
+ self.parent_property._get_strategy(
+ (("lazy", "select"),)
+ ).init_class_attribute(mapper)
+
+ def create_row_processor(
+ self,
+ context,
+ query_entity,
+ path,
+ loadopt,
+ mapper,
+ result,
+ adapter,
+ populators,
+ ):
+ if not context.compile_state.compile_options._enable_eagerloads:
+ return
+
+ (
+ effective_path,
+ run_loader,
+ execution_options,
+ recursion_depth,
+ ) = self._setup_for_recursion(context, path, loadopt, self.join_depth)
+
+ if not run_loader:
+ # this will not emit SQL and will only emit for a many-to-one
+ # "use get" load. the "_RELATED" part means it may return an
+ # instance even if it's expired, since this is a mutually-recursive
+ # load operation.
+ flags = attributes.PASSIVE_NO_FETCH_RELATED | PassiveFlag.NO_RAISE
+ else:
+ flags = attributes.PASSIVE_OFF | PassiveFlag.NO_RAISE
+
+ loading.PostLoad.callable_for_path(
+ context,
+ effective_path,
+ self.parent,
+ self.parent_property,
+ self._load_for_path,
+ loadopt,
+ flags,
+ recursion_depth,
+ execution_options,
+ )
+
+ def _load_for_path(
+ self,
+ context,
+ path,
+ states,
+ load_only,
+ loadopt,
+ flags,
+ recursion_depth,
+ execution_options,
+ ):
+ if recursion_depth:
+ new_opt = Load(loadopt.path.entity)
+ new_opt.context = (
+ loadopt,
+ loadopt._recurse(),
+ )
+ alternate_effective_path = path._truncate_recursive()
+ extra_options = (new_opt,)
+ else:
+ new_opt = None
+ alternate_effective_path = path
+ extra_options = ()
+
+ key = self.key
+ lazyloader = self.parent_property._get_strategy((("lazy", "select"),))
+ for state, overwrite in states:
+ dict_ = state.dict
+
+ if overwrite or key not in dict_:
+ value = lazyloader._load_for_state(
+ state,
+ flags,
+ extra_options=extra_options,
+ alternate_effective_path=alternate_effective_path,
+ execution_options=execution_options,
+ )
+ if value not in (
+ ATTR_WAS_SET,
+ LoaderCallableStatus.PASSIVE_NO_RESULT,
+ ):
+ state.get_impl(key).set_committed_value(
+ state, dict_, value
+ )
+
+
+@log.class_logger
+@relationships.RelationshipProperty.strategy_for(lazy="subquery")
+class SubqueryLoader(PostLoader):
+ __slots__ = ("join_depth",)
+
+ def __init__(self, parent, strategy_key):
+ super().__init__(parent, strategy_key)
+ self.join_depth = self.parent_property.join_depth
+
+ def init_class_attribute(self, mapper):
+ self.parent_property._get_strategy(
+ (("lazy", "select"),)
+ ).init_class_attribute(mapper)
+
+ def _get_leftmost(
+ self,
+ orig_query_entity_index,
+ subq_path,
+ current_compile_state,
+ is_root,
+ ):
+ given_subq_path = subq_path
+ subq_path = subq_path.path
+ subq_mapper = orm_util._class_to_mapper(subq_path[0])
+
+ # determine attributes of the leftmost mapper
+ if (
+ self.parent.isa(subq_mapper)
+ and self.parent_property is subq_path[1]
+ ):
+ leftmost_mapper, leftmost_prop = self.parent, self.parent_property
+ else:
+ leftmost_mapper, leftmost_prop = subq_mapper, subq_path[1]
+
+ if is_root:
+ # the subq_path is also coming from cached state, so when we start
+ # building up this path, it has to also be converted to be in terms
+ # of the current state.
this is for the specific case of the entity + # is an AliasedClass against a subquery that's not otherwise going + # to adapt + new_subq_path = current_compile_state._entities[ + orig_query_entity_index + ].entity_zero._path_registry[leftmost_prop] + additional = len(subq_path) - len(new_subq_path) + if additional: + new_subq_path += path_registry.PathRegistry.coerce( + subq_path[-additional:] + ) + else: + new_subq_path = given_subq_path + + leftmost_cols = leftmost_prop.local_columns + + leftmost_attr = [ + getattr( + new_subq_path.path[0].entity, + leftmost_mapper._columntoproperty[c].key, + ) + for c in leftmost_cols + ] + + return leftmost_mapper, leftmost_attr, leftmost_prop, new_subq_path + + def _generate_from_original_query( + self, + orig_compile_state, + orig_query, + leftmost_mapper, + leftmost_attr, + leftmost_relationship, + orig_entity, + ): + # reformat the original query + # to look only for significant columns + q = orig_query._clone().correlate(None) + + # LEGACY: make a Query back from the select() !! + # This suits at least two legacy cases: + # 1. applications which expect before_compile() to be called + # below when we run .subquery() on this query (Keystone) + # 2. applications which are doing subqueryload with complex + # from_self() queries, as query.subquery() / .statement + # has to do the full compile context for multiply-nested + # from_self() (Neutron) - see test_subqload_from_self + # for demo. + q2 = query.Query.__new__(query.Query) + q2.__dict__.update(q.__dict__) + q = q2 + + # set the query's "FROM" list explicitly to what the + # FROM list would be in any case, as we will be limiting + # the columns in the SELECT list which may no longer include + # all entities mentioned in things like WHERE, JOIN, etc. + if not q._from_obj: + q._enable_assertions = False + q.select_from.non_generative( + q, + *{ + ent["entity"] + for ent in _column_descriptions( + orig_query, compile_state=orig_compile_state + ) + if ent["entity"] is not None + }, + ) + + # select from the identity columns of the outer (specifically, these + # are the 'local_cols' of the property). This will remove other + # columns from the query that might suggest the right entity which is + # why we do set select_from above. The attributes we have are + # coerced and adapted using the original query's adapter, which is + # needed only for the case of adapting a subclass column to + # that of a polymorphic selectable, e.g. we have + # Engineer.primary_language and the entity is Person. All other + # adaptations, e.g. from_self, select_entity_from(), will occur + # within the new query when it compiles, as the compile_state we are + # using here is only a partial one. If the subqueryload is from a + # with_polymorphic() or other aliased() object, left_attr will already + # be the correct attributes so no adaptation is needed. 
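+ # (Concrete illustration with hypothetical names: for a one-to-many
+ # User.addresses, leftmost_attr holds the User.id column(s) -- the
+ # "local" side of the join -- which become the only columns SELECTed
+ # by the subquery built below.)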
+ target_cols = orig_compile_state._adapt_col_list( + [ + sql.coercions.expect(sql.roles.ColumnsClauseRole, o) + for o in leftmost_attr + ], + orig_compile_state._get_current_adapter(), + ) + q._raw_columns = target_cols + + distinct_target_key = leftmost_relationship.distinct_target_key + + if distinct_target_key is True: + q._distinct = True + elif distinct_target_key is None: + # if target_cols refer to a non-primary key or only + # part of a composite primary key, set the q as distinct + for t in {c.table for c in target_cols}: + if not set(target_cols).issuperset(t.primary_key): + q._distinct = True + break + + # don't need ORDER BY if no limit/offset + if not q._has_row_limiting_clause: + q._order_by_clauses = () + + if q._distinct is True and q._order_by_clauses: + # the logic to automatically add the order by columns to the query + # when distinct is True is deprecated in the query + to_add = sql_util.expand_column_list_from_order_by( + target_cols, q._order_by_clauses + ) + if to_add: + q._set_entities(target_cols + to_add) + + # the original query now becomes a subquery + # which we'll join onto. + # LEGACY: as "q" is a Query, the before_compile() event is invoked + # here. + embed_q = q.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL).subquery() + left_alias = orm_util.AliasedClass( + leftmost_mapper, embed_q, use_mapper_path=True + ) + return left_alias + + def _prep_for_joins(self, left_alias, subq_path): + # figure out what's being joined. a.k.a. the fun part + to_join = [] + pairs = list(subq_path.pairs()) + + for i, (mapper, prop) in enumerate(pairs): + if i > 0: + # look at the previous mapper in the chain - + # if it is as or more specific than this prop's + # mapper, use that instead. + # note we have an assumption here that + # the non-first element is always going to be a mapper, + # not an AliasedClass + + prev_mapper = pairs[i - 1][1].mapper + to_append = prev_mapper if prev_mapper.isa(mapper) else mapper + else: + to_append = mapper + + to_join.append((to_append, prop.key)) + + # determine the immediate parent class we are joining from, + # which needs to be aliased. + + if len(to_join) < 2: + # in the case of a one level eager load, this is the + # leftmost "left_alias". 
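+ # (single hop: to_join holds one (mapper, key) pair, so the join
+ # starts directly from the aliased leftmost entity.)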
+ parent_alias = left_alias + else: + info = inspect(to_join[-1][0]) + if info.is_aliased_class: + parent_alias = info.entity + else: + # alias a plain mapper as we may be + # joining multiple times + parent_alias = orm_util.AliasedClass( + info.entity, use_mapper_path=True + ) + + local_cols = self.parent_property.local_columns + + local_attr = [ + getattr(parent_alias, self.parent._columntoproperty[c].key) + for c in local_cols + ] + return to_join, local_attr, parent_alias + + def _apply_joins( + self, q, to_join, left_alias, parent_alias, effective_entity + ): + ltj = len(to_join) + if ltj == 1: + to_join = [ + getattr(left_alias, to_join[0][1]).of_type(effective_entity) + ] + elif ltj == 2: + to_join = [ + getattr(left_alias, to_join[0][1]).of_type(parent_alias), + getattr(parent_alias, to_join[-1][1]).of_type( + effective_entity + ), + ] + elif ltj > 2: + middle = [ + ( + ( + orm_util.AliasedClass(item[0]) + if not inspect(item[0]).is_aliased_class + else item[0].entity + ), + item[1], + ) + for item in to_join[1:-1] + ] + inner = [] + + while middle: + item = middle.pop(0) + attr = getattr(item[0], item[1]) + if middle: + attr = attr.of_type(middle[0][0]) + else: + attr = attr.of_type(parent_alias) + + inner.append(attr) + + to_join = ( + [getattr(left_alias, to_join[0][1]).of_type(inner[0].parent)] + + inner + + [ + getattr(parent_alias, to_join[-1][1]).of_type( + effective_entity + ) + ] + ) + + for attr in to_join: + q = q.join(attr) + + return q + + def _setup_options( + self, + context, + q, + subq_path, + rewritten_path, + orig_query, + effective_entity, + loadopt, + ): + # note that because the subqueryload object + # does not re-use the cached query, instead always making + # use of the current invoked query, while we have two queries + # here (orig and context.query), they are both non-cached + # queries and we can transfer the options as is without + # adjusting for new criteria. Some work on #6881 / #6889 + # brought this into question. + new_options = orig_query._with_options + + if loadopt and loadopt._extra_criteria: + new_options += ( + orm_util.LoaderCriteriaOption( + self.entity, + loadopt._generate_extra_criteria(context), + ), + ) + + # propagate loader options etc. to the new query. + # these will fire relative to subq_path. + q = q._with_current_path(rewritten_path) + q = q.options(*new_options) + + return q + + def _setup_outermost_orderby(self, q): + if self.parent_property.order_by: + + def _setup_outermost_orderby(compile_context): + compile_context.eager_order_by += tuple( + util.to_list(self.parent_property.order_by) + ) + + q = q._add_context_option( + _setup_outermost_orderby, self.parent_property + ) + + return q + + class _SubqCollections: + """Given a :class:`_query.Query` used to emit the "subquery load", + provide a load interface that executes the query at the + first moment a value is needed. 
+ + """ + + __slots__ = ( + "session", + "execution_options", + "load_options", + "params", + "subq", + "_data", + ) + + def __init__(self, context, subq): + # avoid creating a cycle by storing context + # even though that's preferable + self.session = context.session + self.execution_options = context.execution_options + self.load_options = context.load_options + self.params = context.params or {} + self.subq = subq + self._data = None + + def get(self, key, default): + if self._data is None: + self._load() + return self._data.get(key, default) + + def _load(self): + self._data = collections.defaultdict(list) + + q = self.subq + assert q.session is None + + q = q.with_session(self.session) + + if self.load_options._populate_existing: + q = q.populate_existing() + # to work with baked query, the parameters may have been + # updated since this query was created, so take these into account + + rows = list(q.params(self.params)) + for k, v in itertools.groupby(rows, lambda x: x[1:]): + self._data[k].extend(vv[0] for vv in v) + + def loader(self, state, dict_, row): + if self._data is None: + self._load() + + def _setup_query_from_rowproc( + self, + context, + query_entity, + path, + entity, + loadopt, + adapter, + ): + compile_state = context.compile_state + if ( + not compile_state.compile_options._enable_eagerloads + or compile_state.compile_options._for_refresh_state + ): + return + + orig_query_entity_index = compile_state._entities.index(query_entity) + context.loaders_require_buffering = True + + path = path[self.parent_property] + + # build up a path indicating the path from the leftmost + # entity to the thing we're subquery loading. + with_poly_entity = path.get( + compile_state.attributes, "path_with_polymorphic", None + ) + if with_poly_entity is not None: + effective_entity = with_poly_entity + else: + effective_entity = self.entity + + subq_path, rewritten_path = context.query._execution_options.get( + ("subquery_paths", None), + (orm_util.PathRegistry.root, orm_util.PathRegistry.root), + ) + is_root = subq_path is orm_util.PathRegistry.root + subq_path = subq_path + path + rewritten_path = rewritten_path + path + + # use the current query being invoked, not the compile state + # one. this is so that we get the current parameters. however, + # it means we can't use the existing compile state, we have to make + # a new one. other approaches include possibly using the + # compiled query but swapping the params, seems only marginally + # less time spent but more complicated + orig_query = context.query._execution_options.get( + ("orig_query", SubqueryLoader), context.query + ) + + # make a new compile_state for the query that's probably cached, but + # we're sort of undoing a bit of that caching :( + compile_state_cls = ORMCompileState._get_plugin_class_for_plugin( + orig_query, "orm" + ) + + if orig_query._is_lambda_element: + if context.load_options._lazy_loaded_from is None: + util.warn( + 'subqueryloader for "%s" must invoke lambda callable ' + "at %r in " + "order to produce a new query, decreasing the efficiency " + "of caching for this statement. Consider using " + "selectinload() for more effective full-lambda caching" + % (self, orig_query) + ) + orig_query = orig_query._resolved + + # this is the more "quick" version, however it's not clear how + # much of this we need. in particular I can't get a test to + # fail if the "set_base_alias" is missing and not sure why that is. 
+ orig_compile_state = compile_state_cls._create_entities_collection(
+ orig_query, legacy=False
+ )
+
+ (
+ leftmost_mapper,
+ leftmost_attr,
+ leftmost_relationship,
+ rewritten_path,
+ ) = self._get_leftmost(
+ orig_query_entity_index,
+ rewritten_path,
+ orig_compile_state,
+ is_root,
+ )
+
+ # generate a new Query from the original, then
+ # produce a subquery from it.
+ left_alias = self._generate_from_original_query(
+ orig_compile_state,
+ orig_query,
+ leftmost_mapper,
+ leftmost_attr,
+ leftmost_relationship,
+ entity,
+ )
+
+ # generate another Query that will join the
+ # left alias to the target relationships.
+ # basically doing a longhand
+ # "from_self()". (from_self() itself not quite industrial
+ # strength enough for all contingencies...but very close)
+
+ q = query.Query(effective_entity)
+
+ q._execution_options = context.query._execution_options.merge_with(
+ context.execution_options,
+ {
+ ("orig_query", SubqueryLoader): orig_query,
+ ("subquery_paths", None): (subq_path, rewritten_path),
+ },
+ )
+
+ q = q._set_enable_single_crit(False)
+ to_join, local_attr, parent_alias = self._prep_for_joins(
+ left_alias, subq_path
+ )
+
+ q = q.add_columns(*local_attr)
+ q = self._apply_joins(
+ q, to_join, left_alias, parent_alias, effective_entity
+ )
+
+ q = self._setup_options(
+ context,
+ q,
+ subq_path,
+ rewritten_path,
+ orig_query,
+ effective_entity,
+ loadopt,
+ )
+ q = self._setup_outermost_orderby(q)
+
+ return q
+
+ def create_row_processor(
+ self,
+ context,
+ query_entity,
+ path,
+ loadopt,
+ mapper,
+ result,
+ adapter,
+ populators,
+ ):
+ if (
+ loadopt
+ and context.compile_state.statement is not None
+ and context.compile_state.statement.is_dml
+ ):
+ util.warn_deprecated(
+ "The subqueryload loader option is not compatible with DML "
+ "statements such as INSERT, UPDATE. Only SELECT may be used. "
+ "This warning will become an exception in a future release.",
+ "2.0",
+ )
+
+ if context.refresh_state:
+ return self._immediateload_create_row_processor(
+ context,
+ query_entity,
+ path,
+ loadopt,
+ mapper,
+ result,
+ adapter,
+ populators,
+ )
+
+ _, run_loader, _, _ = self._setup_for_recursion(
+ context, path, loadopt, self.join_depth
+ )
+ if not run_loader:
+ return
+
+ if not isinstance(context.compile_state, ORMSelectCompileState):
+ # issue 7505 - subqueryload() in 1.3 and previous would silently
+ # degrade for from_statement() without warning. this behavior
+ # is restored here
+ return
+
+ if not self.parent.class_manager[self.key].impl.supports_population:
+ raise sa_exc.InvalidRequestError(
+ "'%s' does not support object "
+ "population - eager loading cannot be applied." % self
+ )
+
+ # a little dance here as the "path" is still something that only
+ # semi-tracks the exact series of things we are loading, still not
+ # telling us about with_polymorphic() and stuff like that when it's at
+ # the root.. the initial MapperEntity is more accurate for this case.
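+ # (the isa checks below just confirm that the entity being loaded is,
+ # or inherits from, this loader's parent mapper; otherwise this row
+ # processor does not apply.)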
+ if len(path) == 1: + if not orm_util._entity_isa(query_entity.entity_zero, self.parent): + return + elif not orm_util._entity_isa(path[-1], self.parent): + return + + subq = self._setup_query_from_rowproc( + context, + query_entity, + path, + path[-1], + loadopt, + adapter, + ) + + if subq is None: + return + + assert subq.session is None + + path = path[self.parent_property] + + local_cols = self.parent_property.local_columns + + # cache the loaded collections in the context + # so that inheriting mappers don't re-load when they + # call upon create_row_processor again + collections = path.get(context.attributes, "collections") + if collections is None: + collections = self._SubqCollections(context, subq) + path.set(context.attributes, "collections", collections) + + if adapter: + local_cols = [adapter.columns[c] for c in local_cols] + + if self.uselist: + self._create_collection_loader( + context, result, collections, local_cols, populators + ) + else: + self._create_scalar_loader( + context, result, collections, local_cols, populators + ) + + def _create_collection_loader( + self, context, result, collections, local_cols, populators + ): + tuple_getter = result._tuple_getter(local_cols) + + def load_collection_from_subq(state, dict_, row): + collection = collections.get(tuple_getter(row), ()) + state.get_impl(self.key).set_committed_value( + state, dict_, collection + ) + + def load_collection_from_subq_existing_row(state, dict_, row): + if self.key not in dict_: + load_collection_from_subq(state, dict_, row) + + populators["new"].append((self.key, load_collection_from_subq)) + populators["existing"].append( + (self.key, load_collection_from_subq_existing_row) + ) + + if context.invoke_all_eagers: + populators["eager"].append((self.key, collections.loader)) + + def _create_scalar_loader( + self, context, result, collections, local_cols, populators + ): + tuple_getter = result._tuple_getter(local_cols) + + def load_scalar_from_subq(state, dict_, row): + collection = collections.get(tuple_getter(row), (None,)) + if len(collection) > 1: + util.warn( + "Multiple rows returned with " + "uselist=False for eagerly-loaded attribute '%s' " % self + ) + + scalar = collection[0] + state.get_impl(self.key).set_committed_value(state, dict_, scalar) + + def load_scalar_from_subq_existing_row(state, dict_, row): + if self.key not in dict_: + load_scalar_from_subq(state, dict_, row) + + populators["new"].append((self.key, load_scalar_from_subq)) + populators["existing"].append( + (self.key, load_scalar_from_subq_existing_row) + ) + if context.invoke_all_eagers: + populators["eager"].append((self.key, collections.loader)) + + +@log.class_logger +@relationships.RelationshipProperty.strategy_for(lazy="joined") +@relationships.RelationshipProperty.strategy_for(lazy=False) +class JoinedLoader(AbstractRelationshipLoader): + """Provide loading behavior for a :class:`.Relationship` + using joined eager loading. 
+ + """ + + __slots__ = "join_depth" + + def __init__(self, parent, strategy_key): + super().__init__(parent, strategy_key) + self.join_depth = self.parent_property.join_depth + + def init_class_attribute(self, mapper): + self.parent_property._get_strategy( + (("lazy", "select"),) + ).init_class_attribute(mapper) + + def setup_query( + self, + compile_state, + query_entity, + path, + loadopt, + adapter, + column_collection=None, + parentmapper=None, + chained_from_outerjoin=False, + **kwargs, + ): + """Add a left outer join to the statement that's being constructed.""" + + if not compile_state.compile_options._enable_eagerloads: + return + elif ( + loadopt + and compile_state.statement is not None + and compile_state.statement.is_dml + ): + util.warn_deprecated( + "The joinedload loader option is not compatible with DML " + "statements such as INSERT, UPDATE. Only SELECT may be used." + "This warning will become an exception in a future release.", + "2.0", + ) + elif self.uselist: + compile_state.multi_row_eager_loaders = True + + path = path[self.parent_property] + + with_polymorphic = None + + user_defined_adapter = ( + self._init_user_defined_eager_proc( + loadopt, compile_state, compile_state.attributes + ) + if loadopt + else False + ) + + if user_defined_adapter is not False: + # setup an adapter but dont create any JOIN, assume it's already + # in the query + ( + clauses, + adapter, + add_to_collection, + ) = self._setup_query_on_user_defined_adapter( + compile_state, + query_entity, + path, + adapter, + user_defined_adapter, + ) + + # don't do "wrap" for multi-row, we want to wrap + # limited/distinct SELECT, + # because we want to put the JOIN on the outside. + + else: + # if not via query option, check for + # a cycle + if not path.contains(compile_state.attributes, "loader"): + if self.join_depth: + if path.length / 2 > self.join_depth: + return + elif path.contains_mapper(self.mapper): + return + + # add the JOIN and create an adapter + ( + clauses, + adapter, + add_to_collection, + chained_from_outerjoin, + ) = self._generate_row_adapter( + compile_state, + query_entity, + path, + loadopt, + adapter, + column_collection, + parentmapper, + chained_from_outerjoin, + ) + + # for multi-row, we want to wrap limited/distinct SELECT, + # because we want to put the JOIN on the outside. + compile_state.eager_adding_joins = True + + with_poly_entity = path.get( + compile_state.attributes, "path_with_polymorphic", None + ) + if with_poly_entity is not None: + with_polymorphic = inspect( + with_poly_entity + ).with_polymorphic_mappers + else: + with_polymorphic = None + + path = path[self.entity] + + loading._setup_entity_query( + compile_state, + self.mapper, + query_entity, + path, + clauses, + add_to_collection, + with_polymorphic=with_polymorphic, + parentmapper=self.mapper, + chained_from_outerjoin=chained_from_outerjoin, + ) + + has_nones = util.NONE_SET.intersection(compile_state.secondary_columns) + + if has_nones: + if with_poly_entity is not None: + raise sa_exc.InvalidRequestError( + "Detected unaliased columns when generating joined " + "load. Make sure to use aliased=True or flat=True " + "when using joined loading with with_polymorphic()." 
+ ) + else: + compile_state.secondary_columns = [ + c for c in compile_state.secondary_columns if c is not None + ] + + def _init_user_defined_eager_proc( + self, loadopt, compile_state, target_attributes + ): + # check if the opt applies at all + if "eager_from_alias" not in loadopt.local_opts: + # nope + return False + + path = loadopt.path.parent + + # the option applies. check if the "user_defined_eager_row_processor" + # has been built up. + adapter = path.get( + compile_state.attributes, "user_defined_eager_row_processor", False + ) + if adapter is not False: + # just return it + return adapter + + # otherwise figure it out. + alias = loadopt.local_opts["eager_from_alias"] + root_mapper, prop = path[-2:] + + if alias is not None: + if isinstance(alias, str): + alias = prop.target.alias(alias) + adapter = orm_util.ORMAdapter( + orm_util._TraceAdaptRole.JOINEDLOAD_USER_DEFINED_ALIAS, + prop.mapper, + selectable=alias, + equivalents=prop.mapper._equivalent_columns, + limit_on_entity=False, + ) + else: + if path.contains( + compile_state.attributes, "path_with_polymorphic" + ): + with_poly_entity = path.get( + compile_state.attributes, "path_with_polymorphic" + ) + adapter = orm_util.ORMAdapter( + orm_util._TraceAdaptRole.JOINEDLOAD_PATH_WITH_POLYMORPHIC, + with_poly_entity, + equivalents=prop.mapper._equivalent_columns, + ) + else: + adapter = compile_state._polymorphic_adapters.get( + prop.mapper, None + ) + path.set( + target_attributes, + "user_defined_eager_row_processor", + adapter, + ) + + return adapter + + def _setup_query_on_user_defined_adapter( + self, context, entity, path, adapter, user_defined_adapter + ): + # apply some more wrapping to the "user defined adapter" + # if we are setting up the query for SQL render. + adapter = entity._get_entity_clauses(context) + + if adapter and user_defined_adapter: + user_defined_adapter = user_defined_adapter.wrap(adapter) + path.set( + context.attributes, + "user_defined_eager_row_processor", + user_defined_adapter, + ) + elif adapter: + user_defined_adapter = adapter + path.set( + context.attributes, + "user_defined_eager_row_processor", + user_defined_adapter, + ) + + add_to_collection = context.primary_columns + return user_defined_adapter, adapter, add_to_collection + + def _generate_row_adapter( + self, + compile_state, + entity, + path, + loadopt, + adapter, + column_collection, + parentmapper, + chained_from_outerjoin, + ): + with_poly_entity = path.get( + compile_state.attributes, "path_with_polymorphic", None + ) + if with_poly_entity: + to_adapt = with_poly_entity + else: + insp = inspect(self.entity) + if insp.is_aliased_class: + alt_selectable = insp.selectable + else: + alt_selectable = None + + to_adapt = orm_util.AliasedClass( + self.mapper, + alias=( + alt_selectable._anonymous_fromclause(flat=True) + if alt_selectable is not None + else None + ), + flat=True, + use_mapper_path=True, + ) + + to_adapt_insp = inspect(to_adapt) + + clauses = to_adapt_insp._memo( + ("joinedloader_ormadapter", self), + orm_util.ORMAdapter, + orm_util._TraceAdaptRole.JOINEDLOAD_MEMOIZED_ADAPTER, + to_adapt_insp, + equivalents=self.mapper._equivalent_columns, + adapt_required=True, + allow_label_resolve=False, + anonymize_labels=True, + ) + + assert clauses.is_aliased_class + + innerjoin = ( + loadopt.local_opts.get("innerjoin", self.parent_property.innerjoin) + if loadopt is not None + else self.parent_property.innerjoin + ) + + if not innerjoin: + # if this is an outer join, all non-nested eager joins from + # this path must also be outer 
joins + chained_from_outerjoin = True + + compile_state.create_eager_joins.append( + ( + self._create_eager_join, + entity, + path, + adapter, + parentmapper, + clauses, + innerjoin, + chained_from_outerjoin, + loadopt._extra_criteria if loadopt else (), + ) + ) + + add_to_collection = compile_state.secondary_columns + path.set(compile_state.attributes, "eager_row_processor", clauses) + + return clauses, adapter, add_to_collection, chained_from_outerjoin + + def _create_eager_join( + self, + compile_state, + query_entity, + path, + adapter, + parentmapper, + clauses, + innerjoin, + chained_from_outerjoin, + extra_criteria, + ): + if parentmapper is None: + localparent = query_entity.mapper + else: + localparent = parentmapper + + # whether or not the Query will wrap the selectable in a subquery, + # and then attach eager load joins to that (i.e., in the case of + # LIMIT/OFFSET etc.) + should_nest_selectable = ( + compile_state.multi_row_eager_loaders + and compile_state._should_nest_selectable + ) + + query_entity_key = None + + if ( + query_entity not in compile_state.eager_joins + and not should_nest_selectable + and compile_state.from_clauses + ): + indexes = sql_util.find_left_clause_that_matches_given( + compile_state.from_clauses, query_entity.selectable + ) + + if len(indexes) > 1: + # for the eager load case, I can't reproduce this right + # now. For query.join() I can. + raise sa_exc.InvalidRequestError( + "Can't identify which query entity in which to joined " + "eager load from. Please use an exact match when " + "specifying the join path." + ) + + if indexes: + clause = compile_state.from_clauses[indexes[0]] + # join to an existing FROM clause on the query. + # key it to its list index in the eager_joins dict. + # Query._compile_context will adapt as needed and + # append to the FROM clause of the select(). + query_entity_key, default_towrap = indexes[0], clause + + if query_entity_key is None: + query_entity_key, default_towrap = ( + query_entity, + query_entity.selectable, + ) + + towrap = compile_state.eager_joins.setdefault( + query_entity_key, default_towrap + ) + + if adapter: + if getattr(adapter, "is_aliased_class", False): + # joining from an adapted entity. The adapted entity + # might be a "with_polymorphic", so resolve that to our + # specific mapper's entity before looking for our attribute + # name on it. + efm = adapter.aliased_insp._entity_for_mapper( + localparent + if localparent.isa(self.parent) + else self.parent + ) + + # look for our attribute on the adapted entity, else fall back + # to our straight property + onclause = getattr(efm.entity, self.key, self.parent_property) + else: + onclause = getattr( + orm_util.AliasedClass( + self.parent, adapter.selectable, use_mapper_path=True + ), + self.key, + self.parent_property, + ) + + else: + onclause = self.parent_property + + assert clauses.is_aliased_class + + attach_on_outside = ( + not chained_from_outerjoin + or not innerjoin + or innerjoin == "unnested" + or query_entity.entity_zero.represents_outer_join + ) + + extra_join_criteria = extra_criteria + additional_entity_criteria = compile_state.global_attributes.get( + ("additional_entity_criteria", self.mapper), () + ) + if additional_entity_criteria: + extra_join_criteria += tuple( + ae._resolve_where_criteria(self.mapper) + for ae in additional_entity_criteria + if ae.propagate_to_loaders + ) + + if attach_on_outside: + # this is the "classic" eager join case. 
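+ # (roughly SELECT ... FROM parent LEFT OUTER JOIN child ON <primaryjoin>;
+ # per the isouter expression below, an inner join is rendered only when
+ # innerjoin is configured and nothing earlier in the chain forced outer.)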
+ eagerjoin = orm_util._ORMJoin(
+ towrap,
+ clauses.aliased_insp,
+ onclause,
+ isouter=not innerjoin
+ or query_entity.entity_zero.represents_outer_join
+ or (chained_from_outerjoin and isinstance(towrap, sql.Join)),
+ _left_memo=self.parent,
+ _right_memo=path[self.mapper],
+ _extra_criteria=extra_join_criteria,
+ )
+ else:
+ # all other cases are innerjoin=='nested' approach
+ eagerjoin = self._splice_nested_inner_join(
+ path, towrap, clauses, onclause, extra_join_criteria
+ )
+
+ compile_state.eager_joins[query_entity_key] = eagerjoin
+
+ # send a hint to the Query as to where it may "splice" this join
+ eagerjoin.stop_on = query_entity.selectable
+
+ if not parentmapper:
+ # for parentclause that is the non-eager end of the join,
+ # ensure all the parent cols in the primaryjoin are actually
+ # in the
+ # columns clause (i.e. are not deferred), so that aliasing applied
+ # by the Query propagates those columns outward.
+ # This has the effect
+ # of "undefering" those columns.
+ for col in sql_util._find_columns(
+ self.parent_property.primaryjoin
+ ):
+ if localparent.persist_selectable.c.contains_column(col):
+ if adapter:
+ col = adapter.columns[col]
+ compile_state._append_dedupe_col_collection(
+ col, compile_state.primary_columns
+ )
+
+ if self.parent_property.order_by:
+ compile_state.eager_order_by += tuple(
+ (eagerjoin._target_adapter.copy_and_process)(
+ util.to_list(self.parent_property.order_by)
+ )
+ )
+
+ def _splice_nested_inner_join(
+ self,
+ path,
+ join_obj,
+ clauses,
+ onclause,
+ extra_criteria,
+ splicing=False,
+ detected_existing_path=None,
+ ):
+ # recursive fn to splice a nested join into an existing one.
+ # splicing=False means this is the outermost call, and it
+ # should return a value. splicing=<from object> is the recursive
+ # form, where it can return None to indicate the end of the recursion
+
+ if splicing is False:
+ # first call is always handed a join object
+ # from the outside
+ assert isinstance(join_obj, orm_util._ORMJoin)
+ elif isinstance(join_obj, sql.selectable.FromGrouping):
+ return self._splice_nested_inner_join(
+ path,
+ join_obj.element,
+ clauses,
+ onclause,
+ extra_criteria,
+ splicing,
+ )
+ elif not isinstance(join_obj, orm_util._ORMJoin):
+ if path[-2].isa(splicing):
+
+ if detected_existing_path:
+ # TODO: refine this into a more efficient method
+ if not detected_existing_path.contains_mapper(splicing):
+ return None
+ elif path_registry.PathRegistry.coerce(
+ detected_existing_path[len(path) :]
+ ).contains_mapper(splicing):
+ return None
+
+ return orm_util._ORMJoin(
+ join_obj,
+ clauses.aliased_insp,
+ onclause,
+ isouter=False,
+ _left_memo=splicing,
+ _right_memo=path[path[-1].mapper],
+ _extra_criteria=extra_criteria,
+ )
+ else:
+ return None
+
+ target_join = self._splice_nested_inner_join(
+ path,
+ join_obj.right,
+ clauses,
+ onclause,
+ extra_criteria,
+ # NOTE: this is the one place _right_memo is consumed
+ splicing=(
+ join_obj._right_memo[-1].mapper
+ if join_obj._right_memo is not None
+ else None
+ ),
+ )
+ if target_join is None:
+ right_splice = False
+ target_join = self._splice_nested_inner_join(
+ path,
+ join_obj.left,
+ clauses,
+ onclause,
+ extra_criteria,
+ join_obj._left_memo,
+ detected_existing_path=join_obj._right_memo,
+ )
+
+ if target_join is None:
+ # should only return None when recursively called,
+ # e.g.
splicing refers to a from obj + assert ( + splicing is not False + ), "assertion failed attempting to produce joined eager loads" + return None + else: + right_splice = True + + if right_splice: + # for a right splice, attempt to flatten out + # a JOIN b JOIN c JOIN .. to avoid needless + # parenthesis nesting + if not join_obj.isouter and not target_join.isouter: + eagerjoin = join_obj._splice_into_center(target_join) + else: + eagerjoin = orm_util._ORMJoin( + join_obj.left, + target_join, + join_obj.onclause, + isouter=join_obj.isouter, + _left_memo=join_obj._left_memo, + ) + else: + eagerjoin = orm_util._ORMJoin( + target_join, + join_obj.right, + join_obj.onclause, + isouter=join_obj.isouter, + _right_memo=join_obj._right_memo, + ) + + eagerjoin._target_adapter = target_join._target_adapter + return eagerjoin + + def _create_eager_adapter(self, context, result, adapter, path, loadopt): + compile_state = context.compile_state + + user_defined_adapter = ( + self._init_user_defined_eager_proc( + loadopt, compile_state, context.attributes + ) + if loadopt + else False + ) + + if user_defined_adapter is not False: + decorator = user_defined_adapter + # user defined eagerloads are part of the "primary" + # portion of the load. + # the adapters applied to the Query should be honored. + if compile_state.compound_eager_adapter and decorator: + decorator = decorator.wrap( + compile_state.compound_eager_adapter + ) + elif compile_state.compound_eager_adapter: + decorator = compile_state.compound_eager_adapter + else: + decorator = path.get( + compile_state.attributes, "eager_row_processor" + ) + if decorator is None: + return False + + if self.mapper._result_has_identity_key(result, decorator): + return decorator + else: + # no identity key - don't return a row + # processor, will cause a degrade to lazy + return False + + def create_row_processor( + self, + context, + query_entity, + path, + loadopt, + mapper, + result, + adapter, + populators, + ): + + if not context.compile_state.compile_options._enable_eagerloads: + return + + if not self.parent.class_manager[self.key].impl.supports_population: + raise sa_exc.InvalidRequestError( + "'%s' does not support object " + "population - eager loading cannot be applied." % self + ) + + if self.uselist: + context.loaders_require_uniquing = True + + our_path = path[self.parent_property] + + eager_adapter = self._create_eager_adapter( + context, result, adapter, our_path, loadopt + ) + + if eager_adapter is not False: + key = self.key + + _instance = loading._instance_processor( + query_entity, + self.mapper, + context, + result, + our_path[self.entity], + eager_adapter, + ) + + if not self.uselist: + self._create_scalar_loader(context, key, _instance, populators) + else: + self._create_collection_loader( + context, key, _instance, populators + ) + else: + self.parent_property._get_strategy( + (("lazy", "select"),) + ).create_row_processor( + context, + query_entity, + path, + loadopt, + mapper, + result, + adapter, + populators, + ) + + def _create_collection_loader(self, context, key, _instance, populators): + def load_collection_from_joined_new_row(state, dict_, row): + # note this must unconditionally clear out any existing collection. + # an existing collection would be present only in the case of + # populate_existing(). 
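+            # init_state_collection() performs that reset; the
+            # UniqueAppender then filters out duplicate child instances
+            # produced by the denormalized JOIN rows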
+ collection = attributes.init_state_collection(state, dict_, key) + result_list = util.UniqueAppender( + collection, "append_without_event" + ) + context.attributes[(state, key)] = result_list + inst = _instance(row) + if inst is not None: + result_list.append(inst) + + def load_collection_from_joined_existing_row(state, dict_, row): + if (state, key) in context.attributes: + result_list = context.attributes[(state, key)] + else: + # appender_key can be absent from context.attributes + # with isnew=False when self-referential eager loading + # is used; the same instance may be present in two + # distinct sets of result columns + collection = attributes.init_state_collection( + state, dict_, key + ) + result_list = util.UniqueAppender( + collection, "append_without_event" + ) + context.attributes[(state, key)] = result_list + inst = _instance(row) + if inst is not None: + result_list.append(inst) + + def load_collection_from_joined_exec(state, dict_, row): + _instance(row) + + populators["new"].append( + (self.key, load_collection_from_joined_new_row) + ) + populators["existing"].append( + (self.key, load_collection_from_joined_existing_row) + ) + if context.invoke_all_eagers: + populators["eager"].append( + (self.key, load_collection_from_joined_exec) + ) + + def _create_scalar_loader(self, context, key, _instance, populators): + def load_scalar_from_joined_new_row(state, dict_, row): + # set a scalar object instance directly on the parent + # object, bypassing InstrumentedAttribute event handlers. + dict_[key] = _instance(row) + + def load_scalar_from_joined_existing_row(state, dict_, row): + # call _instance on the row, even though the object has + # been created, so that we further descend into properties + existing = _instance(row) + + # conflicting value already loaded, this shouldn't happen + if key in dict_: + if existing is not dict_[key]: + util.warn( + "Multiple rows returned with " + "uselist=False for eagerly-loaded attribute '%s' " + % self + ) + else: + # this case is when one row has multiple loads of the + # same entity (e.g. via aliasing), one has an attribute + # that the other doesn't. 
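+                # take the value from the current row in that case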
+ dict_[key] = existing + + def load_scalar_from_joined_exec(state, dict_, row): + _instance(row) + + populators["new"].append((self.key, load_scalar_from_joined_new_row)) + populators["existing"].append( + (self.key, load_scalar_from_joined_existing_row) + ) + if context.invoke_all_eagers: + populators["eager"].append( + (self.key, load_scalar_from_joined_exec) + ) + + +@log.class_logger +@relationships.RelationshipProperty.strategy_for(lazy="selectin") +class SelectInLoader(PostLoader, util.MemoizedSlots): + __slots__ = ( + "join_depth", + "omit_join", + "_parent_alias", + "_query_info", + "_fallback_query_info", + ) + + query_info = collections.namedtuple( + "queryinfo", + [ + "load_only_child", + "load_with_join", + "in_expr", + "pk_cols", + "zero_idx", + "child_lookup_cols", + ], + ) + + _chunksize = 500 + + def __init__(self, parent, strategy_key): + super().__init__(parent, strategy_key) + self.join_depth = self.parent_property.join_depth + is_m2o = self.parent_property.direction is interfaces.MANYTOONE + + if self.parent_property.omit_join is not None: + self.omit_join = self.parent_property.omit_join + else: + lazyloader = self.parent_property._get_strategy( + (("lazy", "select"),) + ) + if is_m2o: + self.omit_join = lazyloader.use_get + else: + self.omit_join = self.parent._get_clause[0].compare( + lazyloader._rev_lazywhere, + use_proxies=True, + compare_keys=False, + equivalents=self.parent._equivalent_columns, + ) + + if self.omit_join: + if is_m2o: + self._query_info = self._init_for_omit_join_m2o() + self._fallback_query_info = self._init_for_join() + else: + self._query_info = self._init_for_omit_join() + else: + self._query_info = self._init_for_join() + + def _init_for_omit_join(self): + pk_to_fk = dict( + self.parent_property._join_condition.local_remote_pairs + ) + pk_to_fk.update( + (equiv, pk_to_fk[k]) + for k in list(pk_to_fk) + for equiv in self.parent._equivalent_columns.get(k, ()) + ) + + pk_cols = fk_cols = [ + pk_to_fk[col] for col in self.parent.primary_key if col in pk_to_fk + ] + if len(fk_cols) > 1: + in_expr = sql.tuple_(*fk_cols) + zero_idx = False + else: + in_expr = fk_cols[0] + zero_idx = True + + return self.query_info(False, False, in_expr, pk_cols, zero_idx, None) + + def _init_for_omit_join_m2o(self): + pk_cols = self.mapper.primary_key + if len(pk_cols) > 1: + in_expr = sql.tuple_(*pk_cols) + zero_idx = False + else: + in_expr = pk_cols[0] + zero_idx = True + + lazyloader = self.parent_property._get_strategy((("lazy", "select"),)) + lookup_cols = [lazyloader._equated_columns[pk] for pk in pk_cols] + + return self.query_info( + True, False, in_expr, pk_cols, zero_idx, lookup_cols + ) + + def _init_for_join(self): + self._parent_alias = AliasedClass(self.parent.class_) + pa_insp = inspect(self._parent_alias) + pk_cols = [ + pa_insp._adapt_element(col) for col in self.parent.primary_key + ] + if len(pk_cols) > 1: + in_expr = sql.tuple_(*pk_cols) + zero_idx = False + else: + in_expr = pk_cols[0] + zero_idx = True + return self.query_info(False, True, in_expr, pk_cols, zero_idx, None) + + def init_class_attribute(self, mapper): + self.parent_property._get_strategy( + (("lazy", "select"),) + ).init_class_attribute(mapper) + + def create_row_processor( + self, + context, + query_entity, + path, + loadopt, + mapper, + result, + adapter, + populators, + ): + if context.refresh_state: + return self._immediateload_create_row_processor( + context, + query_entity, + path, + loadopt, + mapper, + result, + adapter, + populators, + ) + + ( + effective_path, + 
run_loader, + execution_options, + recursion_depth, + ) = self._setup_for_recursion( + context, path, loadopt, join_depth=self.join_depth + ) + + if not run_loader: + return + + if not context.compile_state.compile_options._enable_eagerloads: + return + + if not self.parent.class_manager[self.key].impl.supports_population: + raise sa_exc.InvalidRequestError( + "'%s' does not support object " + "population - eager loading cannot be applied." % self + ) + + # a little dance here as the "path" is still something that only + # semi-tracks the exact series of things we are loading, still not + # telling us about with_polymorphic() and stuff like that when it's at + # the root.. the initial MapperEntity is more accurate for this case. + if len(path) == 1: + if not orm_util._entity_isa(query_entity.entity_zero, self.parent): + return + elif not orm_util._entity_isa(path[-1], self.parent): + return + + selectin_path = effective_path + + path_w_prop = path[self.parent_property] + + # build up a path indicating the path from the leftmost + # entity to the thing we're subquery loading. + with_poly_entity = path_w_prop.get( + context.attributes, "path_with_polymorphic", None + ) + if with_poly_entity is not None: + effective_entity = inspect(with_poly_entity) + else: + effective_entity = self.entity + + loading.PostLoad.callable_for_path( + context, + selectin_path, + self.parent, + self.parent_property, + self._load_for_path, + effective_entity, + loadopt, + recursion_depth, + execution_options, + ) + + def _load_for_path( + self, + context, + path, + states, + load_only, + effective_entity, + loadopt, + recursion_depth, + execution_options, + ): + if load_only and self.key not in load_only: + return + + query_info = self._query_info + + if query_info.load_only_child: + our_states = collections.defaultdict(list) + none_states = [] + + mapper = self.parent + + for state, overwrite in states: + state_dict = state.dict + related_ident = tuple( + mapper._get_state_attr_by_column( + state, + state_dict, + lk, + passive=attributes.PASSIVE_NO_FETCH, + ) + for lk in query_info.child_lookup_cols + ) + # if the loaded parent objects do not have the foreign key + # to the related item loaded, then degrade into the joined + # version of selectinload + if LoaderCallableStatus.PASSIVE_NO_RESULT in related_ident: + query_info = self._fallback_query_info + break + + # organize states into lists keyed to particular foreign + # key values. + if None not in related_ident: + our_states[related_ident].append( + (state, state_dict, overwrite) + ) + else: + # For FK values that have None, add them to a + # separate collection that will be populated separately + none_states.append((state, state_dict, overwrite)) + + # note the above conditional may have changed query_info + if not query_info.load_only_child: + our_states = [ + (state.key[1], state, state.dict, overwrite) + for state, overwrite in states + ] + + pk_cols = query_info.pk_cols + in_expr = query_info.in_expr + + if not query_info.load_with_join: + # in "omit join" mode, the primary key column and the + # "in" expression are in terms of the related entity. So + # if the related entity is polymorphic or otherwise aliased, + # we need to adapt our "pk_cols" and "in_expr" to that + # entity. in non-"omit join" mode, these are against the + # parent entity and do not need adaption. 
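+            # aliased entities (e.g. with_polymorphic) carry their own
+            # column adapters for this purpose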
+ if effective_entity.is_aliased_class: + pk_cols = [ + effective_entity._adapt_element(col) for col in pk_cols + ] + in_expr = effective_entity._adapt_element(in_expr) + + bundle_ent = orm_util.Bundle("pk", *pk_cols) + bundle_sql = bundle_ent.__clause_element__() + + entity_sql = effective_entity.__clause_element__() + q = Select._create_raw_select( + _raw_columns=[bundle_sql, entity_sql], + _label_style=LABEL_STYLE_TABLENAME_PLUS_COL, + _compile_options=ORMCompileState.default_compile_options, + _propagate_attrs={ + "compile_state_plugin": "orm", + "plugin_subject": effective_entity, + }, + ) + + if not query_info.load_with_join: + # the Bundle we have in the "omit_join" case is against raw, non + # annotated columns, so to ensure the Query knows its primary + # entity, we add it explicitly. If we made the Bundle against + # annotated columns, we hit a performance issue in this specific + # case, which is detailed in issue #4347. + q = q.select_from(effective_entity) + else: + # in the non-omit_join case, the Bundle is against the annotated/ + # mapped column of the parent entity, but the #4347 issue does not + # occur in this case. + q = q.select_from(self._parent_alias).join( + getattr(self._parent_alias, self.parent_property.key).of_type( + effective_entity + ) + ) + + q = q.filter(in_expr.in_(sql.bindparam("primary_keys"))) + + # a test which exercises what these comments talk about is + # test_selectin_relations.py -> test_twolevel_selectin_w_polymorphic + # + # effective_entity above is given to us in terms of the cached + # statement, namely this one: + orig_query = context.compile_state.select_statement + + # the actual statement that was requested is this one: + # context_query = context.user_passed_query + # + # that's not the cached one, however. So while it is of the identical + # structure, if it has entities like AliasedInsp, which we get from + # aliased() or with_polymorphic(), the AliasedInsp will likely be a + # different object identity each time, and will not match up + # hashing-wise to the corresponding AliasedInsp that's in the + # cached query, meaning it won't match on paths and loader lookups + # and loaders like this one will be skipped if it is used in options. + # + # as it turns out, standard loader options like selectinload(), + # lazyload() that have a path need + # to come from the cached query so that the AliasedInsp etc. objects + # that are in the query line up with the object that's in the path + # of the strategy object. however other options like + # with_loader_criteria() that doesn't have a path (has a fixed entity) + # and needs to have access to the latest closure state in order to + # be correct, we need to use the uncached one. + # + # as of #8399 we let the loader option itself figure out what it + # wants to do given cached and uncached version of itself. + + effective_path = path[self.parent_property] + + if orig_query is context.user_passed_query: + new_options = orig_query._with_options + else: + cached_options = orig_query._with_options + uncached_options = context.user_passed_query._with_options + + # propagate compile state options from the original query, + # updating their "extra_criteria" as necessary. 
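+            # each cached option reconciles itself against its uncached
+            # counterpart here.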
+ # note this will create a different cache key than + # "orig" options if extra_criteria is present, because the copy + # of extra_criteria will have different boundparam than that of + # the QueryableAttribute in the path + new_options = [ + orig_opt._adapt_cached_option_to_uncached_option( + context, uncached_opt + ) + for orig_opt, uncached_opt in zip( + cached_options, uncached_options + ) + ] + + if loadopt and loadopt._extra_criteria: + new_options += ( + orm_util.LoaderCriteriaOption( + effective_entity, + loadopt._generate_extra_criteria(context), + ), + ) + + if recursion_depth is not None: + effective_path = effective_path._truncate_recursive() + + q = q.options(*new_options) + + q = q._update_compile_options({"_current_path": effective_path}) + if context.populate_existing: + q = q.execution_options(populate_existing=True) + + if self.parent_property.order_by: + if not query_info.load_with_join: + eager_order_by = self.parent_property.order_by + if effective_entity.is_aliased_class: + eager_order_by = [ + effective_entity._adapt_element(elem) + for elem in eager_order_by + ] + q = q.order_by(*eager_order_by) + else: + + def _setup_outermost_orderby(compile_context): + compile_context.eager_order_by += tuple( + util.to_list(self.parent_property.order_by) + ) + + q = q._add_context_option( + _setup_outermost_orderby, self.parent_property + ) + + if query_info.load_only_child: + self._load_via_child( + our_states, + none_states, + query_info, + q, + context, + execution_options, + ) + else: + self._load_via_parent( + our_states, query_info, q, context, execution_options + ) + + def _load_via_child( + self, + our_states, + none_states, + query_info, + q, + context, + execution_options, + ): + uselist = self.uselist + + # this sort is really for the benefit of the unit tests + our_keys = sorted(our_states) + while our_keys: + chunk = our_keys[0 : self._chunksize] + our_keys = our_keys[self._chunksize :] + data = { + k: v + for k, v in context.session.execute( + q, + params={ + "primary_keys": [ + key[0] if query_info.zero_idx else key + for key in chunk + ] + }, + execution_options=execution_options, + ).unique() + } + + for key in chunk: + # for a real foreign key and no concurrent changes to the + # DB while running this method, "key" is always present in + # data. However, for primaryjoins without real foreign keys + # a non-None primaryjoin condition may still refer to no + # related object. 
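+                # hence .get() with a None default rather than indexed
+                # access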
+ related_obj = data.get(key, None) + for state, dict_, overwrite in our_states[key]: + if not overwrite and self.key in dict_: + continue + + state.get_impl(self.key).set_committed_value( + state, + dict_, + related_obj if not uselist else [related_obj], + ) + # populate none states with empty value / collection + for state, dict_, overwrite in none_states: + if not overwrite and self.key in dict_: + continue + + # note it's OK if this is a uselist=True attribute, the empty + # collection will be populated + state.get_impl(self.key).set_committed_value(state, dict_, None) + + def _load_via_parent( + self, our_states, query_info, q, context, execution_options + ): + uselist = self.uselist + _empty_result = () if uselist else None + + while our_states: + chunk = our_states[0 : self._chunksize] + our_states = our_states[self._chunksize :] + + primary_keys = [ + key[0] if query_info.zero_idx else key + for key, state, state_dict, overwrite in chunk + ] + + data = collections.defaultdict(list) + for k, v in itertools.groupby( + context.session.execute( + q, + params={"primary_keys": primary_keys}, + execution_options=execution_options, + ).unique(), + lambda x: x[0], + ): + data[k].extend(vv[1] for vv in v) + + for key, state, state_dict, overwrite in chunk: + if not overwrite and self.key in state_dict: + continue + + collection = data.get(key, _empty_result) + + if not uselist and collection: + if len(collection) > 1: + util.warn( + "Multiple rows returned with " + "uselist=False for eagerly-loaded " + "attribute '%s' " % self + ) + state.get_impl(self.key).set_committed_value( + state, state_dict, collection[0] + ) + else: + # note that empty tuple set on uselist=False sets the + # value to None + state.get_impl(self.key).set_committed_value( + state, state_dict, collection + ) + + +def single_parent_validator(desc, prop): + def _do_check(state, value, oldvalue, initiator): + if value is not None and initiator.key == prop.key: + hasparent = initiator.hasparent(attributes.instance_state(value)) + if hasparent and oldvalue is not value: + raise sa_exc.InvalidRequestError( + "Instance %s is already associated with an instance " + "of %s via its %s attribute, and is only allowed a " + "single parent." 
+                    % (orm_util.instance_str(value), state.class_, prop),
+                    code="bbf1",
+                )
+        return value
+
+    def append(state, value, initiator):
+        return _do_check(state, value, None, initiator)
+
+    def set_(state, value, oldvalue, initiator):
+        return _do_check(state, value, oldvalue, initiator)
+
+    event.listen(
+        desc, "append", append, raw=True, retval=True, active_history=True
+    )
+    event.listen(desc, "set", set_, raw=True, retval=True, active_history=True)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/strategy_options.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/strategy_options.py
new file mode 100644
index 00000000..b4bfea14
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/strategy_options.py
@@ -0,0 +1,2569 @@
+# orm/strategy_options.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""
+
+"""
+
+from __future__ import annotations
+
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Iterable
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TypeVar
+from typing import Union
+
+from . import util as orm_util
+from ._typing import insp_is_aliased_class
+from ._typing import insp_is_attribute
+from ._typing import insp_is_mapper
+from ._typing import insp_is_mapper_property
+from .attributes import QueryableAttribute
+from .base import InspectionAttr
+from .interfaces import LoaderOption
+from .path_registry import _DEFAULT_TOKEN
+from .path_registry import _StrPathToken
+from .path_registry import _WILDCARD_TOKEN
+from .path_registry import AbstractEntityRegistry
+from .path_registry import path_is_property
+from .path_registry import PathRegistry
+from .path_registry import TokenRegistry
+from .util import _orm_full_deannotate
+from .util import AliasedInsp
+from .. import exc as sa_exc
+from .. import inspect
+from .. import util
+from ..sql import and_
+from ..sql import cache_key
+from ..sql import coercions
+from ..sql import roles
+from ..sql import traversals
+from ..sql import visitors
+from ..sql.base import _generative
+from ..util.typing import Final
+from ..util.typing import Literal
+from ..util.typing import Self
+
+_RELATIONSHIP_TOKEN: Final[Literal["relationship"]] = "relationship"
+_COLUMN_TOKEN: Final[Literal["column"]] = "column"
+
+_FN = TypeVar("_FN", bound="Callable[..., Any]")
+
+if typing.TYPE_CHECKING:
+    from ._typing import _EntityType
+    from ._typing import _InternalEntityType
+    from .context import _MapperEntity
+    from .context import ORMCompileState
+    from .context import QueryContext
+    from .interfaces import _StrategyKey
+    from .interfaces import MapperProperty
+    from .interfaces import ORMOption
+    from .mapper import Mapper
+    from .path_registry import _PathRepresentation
+    from ..sql._typing import _ColumnExpressionArgument
+    from ..sql._typing import _FromClauseArgument
+    from ..sql.cache_key import _CacheKeyTraversalType
+    from ..sql.cache_key import CacheKey
+
+
+_AttrType = Union[Literal["*"], "QueryableAttribute[Any]"]
+
+_WildcardKeyType = Literal["relationship", "column"]
+_StrategySpec = Dict[str, Any]
+_OptsType = Dict[str, Any]
+_AttrGroupType = Tuple[_AttrType, ...]
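+
+# an _AttrType is either a mapped attribute, e.g. ``User.addresses``, or
+# the wildcard string "*", which targets every attribute within the
+# given wildcard category ("relationship" or "column")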
+ + +class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption): + __slots__ = ("propagate_to_loaders",) + + _is_strategy_option = True + propagate_to_loaders: bool + + def contains_eager( + self, + attr: _AttrType, + alias: Optional[_FromClauseArgument] = None, + _is_chain: bool = False, + _propagate_to_loaders: bool = False, + ) -> Self: + r"""Indicate that the given attribute should be eagerly loaded from + columns stated manually in the query. + + This function is part of the :class:`_orm.Load` interface and supports + both method-chained and standalone operation. + + The option is used in conjunction with an explicit join that loads + the desired rows, i.e.:: + + sess.query(Order).join(Order.user).options( + contains_eager(Order.user) + ) + + The above query would join from the ``Order`` entity to its related + ``User`` entity, and the returned ``Order`` objects would have the + ``Order.user`` attribute pre-populated. + + It may also be used for customizing the entries in an eagerly loaded + collection; queries will normally want to use the + :ref:`orm_queryguide_populate_existing` execution option assuming the + primary collection of parent objects may already have been loaded:: + + sess.query(User).join(User.addresses).filter( + Address.email_address.like("%@aol.com") + ).options(contains_eager(User.addresses)).populate_existing() + + See the section :ref:`contains_eager` for complete usage details. + + .. seealso:: + + :ref:`loading_toplevel` + + :ref:`contains_eager` + + """ + if alias is not None: + if not isinstance(alias, str): + coerced_alias = coercions.expect(roles.FromClauseRole, alias) + else: + util.warn_deprecated( + "Passing a string name for the 'alias' argument to " + "'contains_eager()` is deprecated, and will not work in a " + "future release. Please use a sqlalchemy.alias() or " + "sqlalchemy.orm.aliased() construct.", + version="1.4", + ) + coerced_alias = alias + + elif getattr(attr, "_of_type", None): + assert isinstance(attr, QueryableAttribute) + ot: Optional[_InternalEntityType[Any]] = inspect(attr._of_type) + assert ot is not None + coerced_alias = ot.selectable + else: + coerced_alias = None + + cloned = self._set_relationship_strategy( + attr, + {"lazy": "joined"}, + propagate_to_loaders=_propagate_to_loaders, + opts={"eager_from_alias": coerced_alias}, + _reconcile_to_other=True if _is_chain else None, + ) + return cloned + + def load_only(self, *attrs: _AttrType, raiseload: bool = False) -> Self: + r"""Indicate that for a particular entity, only the given list + of column-based attribute names should be loaded; all others will be + deferred. + + This function is part of the :class:`_orm.Load` interface and supports + both method-chained and standalone operation. 
+
+        Example - given a class ``User``, load only the ``name`` and
+        ``fullname`` attributes::
+
+            session.query(User).options(load_only(User.name, User.fullname))
+
+        Example - given a relationship ``User.addresses -> Address``, specify
+        subquery loading for the ``User.addresses`` collection, but on each
+        ``Address`` object load only the ``email_address`` attribute::
+
+            session.query(User).options(
+                subqueryload(User.addresses).load_only(Address.email_address)
+            )
+
+        For a statement that has multiple entities, the lead entity can be
+        specifically referred to using the :class:`_orm.Load` constructor::
+
+            stmt = (
+                select(User, Address)
+                .join(User.addresses)
+                .options(
+                    Load(User).load_only(User.name, User.fullname),
+                    Load(Address).load_only(Address.email_address),
+                )
+            )
+
+        When used together with the
+        :ref:`populate_existing <orm_queryguide_populate_existing>`
+        execution option only the attributes listed will be refreshed.
+
+        :param \*attrs: Attributes to be loaded, all others will be deferred.
+
+        :param raiseload: raise :class:`.InvalidRequestError` rather than
+         lazy loading a value when a deferred attribute is accessed. Used
+         to prevent unwanted SQL from being emitted.
+
+         .. versionadded:: 2.0
+
+        .. seealso::
+
+            :ref:`orm_queryguide_column_deferral` - in the
+            :ref:`queryguide_toplevel`
+
+        """
+        cloned = self._set_column_strategy(
+            attrs,
+            {"deferred": False, "instrument": True},
+        )
+
+        wildcard_strategy = {"deferred": True, "instrument": True}
+        if raiseload:
+            wildcard_strategy["raiseload"] = True
+
+        cloned = cloned._set_column_strategy(
+            ("*",),
+            wildcard_strategy,
+        )
+        return cloned
+
+    def joinedload(
+        self,
+        attr: _AttrType,
+        innerjoin: Optional[bool] = None,
+    ) -> Self:
+        """Indicate that the given attribute should be loaded using joined
+        eager loading.
+
+        This function is part of the :class:`_orm.Load` interface and supports
+        both method-chained and standalone operation.
+
+        examples::
+
+            # joined-load the "orders" collection on "User"
+            select(User).options(joinedload(User.orders))
+
+            # joined-load Order.items and then Item.keywords
+            select(Order).options(
+                joinedload(Order.items).joinedload(Item.keywords)
+            )
+
+            # lazily load Order.items, but when Items are loaded,
+            # joined-load the keywords collection
+            select(Order).options(
+                lazyload(Order.items).joinedload(Item.keywords)
+            )
+
+        :param innerjoin: if ``True``, indicates that the joined eager load
+         should use an inner join instead of the default of left outer join::
+
+            select(Order).options(joinedload(Order.user, innerjoin=True))
+
+         In order to chain multiple eager joins together where some may be
+         OUTER and others INNER, right-nested joins are used to link them::
+
+            select(A).options(
+                joinedload(A.bs, innerjoin=False).joinedload(
+                    B.cs, innerjoin=True
+                )
+            )
+
+         The above query, linking A.bs via "outer" join and B.cs via "inner"
+         join would render the joins as "a LEFT OUTER JOIN (b JOIN c)". When
+         using older versions of SQLite (< 3.7.16), this form of JOIN is
+         translated to use full subqueries as this syntax is otherwise not
+         directly supported.
+
+         The ``innerjoin`` flag can also be stated with the term ``"unnested"``.
+ This indicates that an INNER JOIN should be used, *unless* the join + is linked to a LEFT OUTER JOIN to the left, in which case it + will render as LEFT OUTER JOIN. For example, supposing ``A.bs`` + is an outerjoin:: + + select(A).options( + joinedload(A.bs).joinedload(B.cs, innerjoin="unnested") + ) + + + The above join will render as "a LEFT OUTER JOIN b LEFT OUTER JOIN c", + rather than as "a LEFT OUTER JOIN (b JOIN c)". + + .. note:: The "unnested" flag does **not** affect the JOIN rendered + from a many-to-many association table, e.g. a table configured as + :paramref:`_orm.relationship.secondary`, to the target table; for + correctness of results, these joins are always INNER and are + therefore right-nested if linked to an OUTER join. + + .. note:: + + The joins produced by :func:`_orm.joinedload` are **anonymously + aliased**. The criteria by which the join proceeds cannot be + modified, nor can the ORM-enabled :class:`_sql.Select` or legacy + :class:`_query.Query` refer to these joins in any way, including + ordering. See :ref:`zen_of_eager_loading` for further detail. + + To produce a specific SQL JOIN which is explicitly available, use + :meth:`_sql.Select.join` and :meth:`_query.Query.join`. To combine + explicit JOINs with eager loading of collections, use + :func:`_orm.contains_eager`; see :ref:`contains_eager`. + + .. seealso:: + + :ref:`loading_toplevel` + + :ref:`joined_eager_loading` + + """ + loader = self._set_relationship_strategy( + attr, + {"lazy": "joined"}, + opts=( + {"innerjoin": innerjoin} + if innerjoin is not None + else util.EMPTY_DICT + ), + ) + return loader + + def subqueryload(self, attr: _AttrType) -> Self: + """Indicate that the given attribute should be loaded using + subquery eager loading. + + This function is part of the :class:`_orm.Load` interface and supports + both method-chained and standalone operation. + + examples:: + + # subquery-load the "orders" collection on "User" + select(User).options(subqueryload(User.orders)) + + # subquery-load Order.items and then Item.keywords + select(Order).options( + subqueryload(Order.items).subqueryload(Item.keywords) + ) + + # lazily load Order.items, but when Items are loaded, + # subquery-load the keywords collection + select(Order).options( + lazyload(Order.items).subqueryload(Item.keywords) + ) + + + .. seealso:: + + :ref:`loading_toplevel` + + :ref:`subquery_eager_loading` + + """ + return self._set_relationship_strategy(attr, {"lazy": "subquery"}) + + def selectinload( + self, + attr: _AttrType, + recursion_depth: Optional[int] = None, + ) -> Self: + """Indicate that the given attribute should be loaded using + SELECT IN eager loading. + + This function is part of the :class:`_orm.Load` interface and supports + both method-chained and standalone operation. + + examples:: + + # selectin-load the "orders" collection on "User" + select(User).options(selectinload(User.orders)) + + # selectin-load Order.items and then Item.keywords + select(Order).options( + selectinload(Order.items).selectinload(Item.keywords) + ) + + # lazily load Order.items, but when Items are loaded, + # selectin-load the keywords collection + select(Order).options( + lazyload(Order.items).selectinload(Item.keywords) + ) + + :param recursion_depth: optional int; when set to a positive integer + in conjunction with a self-referential relationship, + indicates "selectin" loading will continue that many levels deep + automatically until no items are found. + + .. 
note:: The :paramref:`_orm.selectinload.recursion_depth` option + currently supports only self-referential relationships. There + is not yet an option to automatically traverse recursive structures + with more than one relationship involved. + + Additionally, the :paramref:`_orm.selectinload.recursion_depth` + parameter is new and experimental and should be treated as "alpha" + status for the 2.0 series. + + .. versionadded:: 2.0 added + :paramref:`_orm.selectinload.recursion_depth` + + + .. seealso:: + + :ref:`loading_toplevel` + + :ref:`selectin_eager_loading` + + """ + return self._set_relationship_strategy( + attr, + {"lazy": "selectin"}, + opts={"recursion_depth": recursion_depth}, + ) + + def lazyload(self, attr: _AttrType) -> Self: + """Indicate that the given attribute should be loaded using "lazy" + loading. + + This function is part of the :class:`_orm.Load` interface and supports + both method-chained and standalone operation. + + .. seealso:: + + :ref:`loading_toplevel` + + :ref:`lazy_loading` + + """ + return self._set_relationship_strategy(attr, {"lazy": "select"}) + + def immediateload( + self, + attr: _AttrType, + recursion_depth: Optional[int] = None, + ) -> Self: + """Indicate that the given attribute should be loaded using + an immediate load with a per-attribute SELECT statement. + + The load is achieved using the "lazyloader" strategy and does not + fire off any additional eager loaders. + + The :func:`.immediateload` option is superseded in general + by the :func:`.selectinload` option, which performs the same task + more efficiently by emitting a SELECT for all loaded objects. + + This function is part of the :class:`_orm.Load` interface and supports + both method-chained and standalone operation. + + :param recursion_depth: optional int; when set to a positive integer + in conjunction with a self-referential relationship, + indicates "selectin" loading will continue that many levels deep + automatically until no items are found. + + .. note:: The :paramref:`_orm.immediateload.recursion_depth` option + currently supports only self-referential relationships. There + is not yet an option to automatically traverse recursive structures + with more than one relationship involved. + + .. warning:: This parameter is new and experimental and should be + treated as "alpha" status + + .. versionadded:: 2.0 added + :paramref:`_orm.immediateload.recursion_depth` + + + .. seealso:: + + :ref:`loading_toplevel` + + :ref:`selectin_eager_loading` + + """ + loader = self._set_relationship_strategy( + attr, + {"lazy": "immediate"}, + opts={"recursion_depth": recursion_depth}, + ) + return loader + + def noload(self, attr: _AttrType) -> Self: + """Indicate that the given relationship attribute should remain + unloaded. + + The relationship attribute will return ``None`` when accessed without + producing any loading effect. + + This function is part of the :class:`_orm.Load` interface and supports + both method-chained and standalone operation. + + :func:`_orm.noload` applies to :func:`_orm.relationship` attributes + only. + + .. legacy:: The :func:`_orm.noload` option is **legacy**. As it + forces collections to be empty, which invariably leads to + non-intuitive and difficult to predict results. There are no + legitimate uses for this option in modern SQLAlchemy. + + .. 
seealso:: + + :ref:`loading_toplevel` + + """ + + return self._set_relationship_strategy(attr, {"lazy": "noload"}) + + def raiseload(self, attr: _AttrType, sql_only: bool = False) -> Self: + """Indicate that the given attribute should raise an error if accessed. + + A relationship attribute configured with :func:`_orm.raiseload` will + raise an :exc:`~sqlalchemy.exc.InvalidRequestError` upon access. The + typical way this is useful is when an application is attempting to + ensure that all relationship attributes that are accessed in a + particular context would have been already loaded via eager loading. + Instead of having to read through SQL logs to ensure lazy loads aren't + occurring, this strategy will cause them to raise immediately. + + :func:`_orm.raiseload` applies to :func:`_orm.relationship` attributes + only. In order to apply raise-on-SQL behavior to a column-based + attribute, use the :paramref:`.orm.defer.raiseload` parameter on the + :func:`.defer` loader option. + + :param sql_only: if True, raise only if the lazy load would emit SQL, + but not if it is only checking the identity map, or determining that + the related value should just be None due to missing keys. When False, + the strategy will raise for all varieties of relationship loading. + + This function is part of the :class:`_orm.Load` interface and supports + both method-chained and standalone operation. + + .. seealso:: + + :ref:`loading_toplevel` + + :ref:`prevent_lazy_with_raiseload` + + :ref:`orm_queryguide_deferred_raiseload` + + """ + + return self._set_relationship_strategy( + attr, {"lazy": "raise_on_sql" if sql_only else "raise"} + ) + + def defaultload(self, attr: _AttrType) -> Self: + """Indicate an attribute should load using its predefined loader style. + + The behavior of this loading option is to not change the current + loading style of the attribute, meaning that the previously configured + one is used or, if no previous style was selected, the default + loading will be used. + + This method is used to link to other loader options further into + a chain of attributes without altering the loader style of the links + along the chain. For example, to set joined eager loading for an + element of an element:: + + session.query(MyClass).options( + defaultload(MyClass.someattribute).joinedload( + MyOtherClass.someotherattribute + ) + ) + + :func:`.defaultload` is also useful for setting column-level options on + a related class, namely that of :func:`.defer` and :func:`.undefer`:: + + session.scalars( + select(MyClass).options( + defaultload(MyClass.someattribute) + .defer("some_column") + .undefer("some_other_column") + ) + ) + + .. seealso:: + + :ref:`orm_queryguide_relationship_sub_options` + + :meth:`_orm.Load.options` + + """ + return self._set_relationship_strategy(attr, None) + + def defer(self, key: _AttrType, raiseload: bool = False) -> Self: + r"""Indicate that the given column-oriented attribute should be + deferred, e.g. not loaded until accessed. + + This function is part of the :class:`_orm.Load` interface and supports + both method-chained and standalone operation. + + e.g.:: + + from sqlalchemy.orm import defer + + session.query(MyClass).options( + defer(MyClass.attribute_one), + defer(MyClass.attribute_two) + ) + + To specify a deferred load of an attribute on a related class, + the path can be specified one token at a time, specifying the loading + style for each link along the chain. 
To leave the loading style + for a link unchanged, use :func:`_orm.defaultload`:: + + session.query(MyClass).options( + defaultload(MyClass.someattr).defer(RelatedClass.some_column) + ) + + Multiple deferral options related to a relationship can be bundled + at once using :meth:`_orm.Load.options`:: + + + select(MyClass).options( + defaultload(MyClass.someattr).options( + defer(RelatedClass.some_column), + defer(RelatedClass.some_other_column), + defer(RelatedClass.another_column) + ) + ) + + :param key: Attribute to be deferred. + + :param raiseload: raise :class:`.InvalidRequestError` rather than + lazy loading a value when the deferred attribute is accessed. Used + to prevent unwanted SQL from being emitted. + + .. versionadded:: 1.4 + + .. seealso:: + + :ref:`orm_queryguide_column_deferral` - in the + :ref:`queryguide_toplevel` + + :func:`_orm.load_only` + + :func:`_orm.undefer` + + """ + strategy = {"deferred": True, "instrument": True} + if raiseload: + strategy["raiseload"] = True + return self._set_column_strategy((key,), strategy) + + def undefer(self, key: _AttrType) -> Self: + r"""Indicate that the given column-oriented attribute should be + undeferred, e.g. specified within the SELECT statement of the entity + as a whole. + + The column being undeferred is typically set up on the mapping as a + :func:`.deferred` attribute. + + This function is part of the :class:`_orm.Load` interface and supports + both method-chained and standalone operation. + + Examples:: + + # undefer two columns + session.query(MyClass).options( + undefer(MyClass.col1), undefer(MyClass.col2) + ) + + # undefer all columns specific to a single class using Load + * + session.query(MyClass, MyOtherClass).options( + Load(MyClass).undefer("*") + ) + + # undefer a column on a related object + select(MyClass).options( + defaultload(MyClass.items).undefer(MyClass.text) + ) + + :param key: Attribute to be undeferred. + + .. seealso:: + + :ref:`orm_queryguide_column_deferral` - in the + :ref:`queryguide_toplevel` + + :func:`_orm.defer` + + :func:`_orm.undefer_group` + + """ + return self._set_column_strategy( + (key,), {"deferred": False, "instrument": True} + ) + + def undefer_group(self, name: str) -> Self: + """Indicate that columns within the given deferred group name should be + undeferred. + + The columns being undeferred are set up on the mapping as + :func:`.deferred` attributes and include a "group" name. + + E.g:: + + session.query(MyClass).options(undefer_group("large_attrs")) + + To undefer a group of attributes on a related entity, the path can be + spelled out using relationship loader options, such as + :func:`_orm.defaultload`:: + + select(MyClass).options( + defaultload("someattr").undefer_group("large_attrs") + ) + + .. seealso:: + + :ref:`orm_queryguide_column_deferral` - in the + :ref:`queryguide_toplevel` + + :func:`_orm.defer` + + :func:`_orm.undefer` + + """ + return self._set_column_strategy( + (_WILDCARD_TOKEN,), None, {f"undefer_group_{name}": True} + ) + + def with_expression( + self, + key: _AttrType, + expression: _ColumnExpressionArgument[Any], + ) -> Self: + r"""Apply an ad-hoc SQL expression to a "deferred expression" + attribute. + + This option is used in conjunction with the + :func:`_orm.query_expression` mapper-level construct that indicates an + attribute which should be the target of an ad-hoc SQL expression. + + E.g.:: + + stmt = select(SomeClass).options( + with_expression(SomeClass.x_y_expr, SomeClass.x + SomeClass.y) + ) + + .. 
versionadded:: 1.2
+
+        :param key: Attribute to be populated
+
+        :param expression: SQL expression to be applied to the attribute.
+
+        .. seealso::
+
+            :ref:`orm_queryguide_with_expression` - background and usage
+            examples
+
+        """
+
+        expression = _orm_full_deannotate(
+            coercions.expect(roles.LabeledColumnExprRole, expression)
+        )
+
+        return self._set_column_strategy(
+            (key,), {"query_expression": True}, extra_criteria=(expression,)
+        )
+
+    def selectin_polymorphic(self, classes: Iterable[Type[Any]]) -> Self:
+        """Indicate an eager load should take place for all attributes
+        specific to a subclass.
+
+        This uses an additional SELECT with IN against all matched primary
+        key values, and is the per-query analogue to the ``"selectin"``
+        setting on the :paramref:`.mapper.polymorphic_load` parameter.
+
+        .. versionadded:: 1.2
+
+        .. seealso::
+
+            :ref:`polymorphic_selectin`
+
+        """
+        self = self._set_class_strategy(
+            {"selectinload_polymorphic": True},
+            opts={
+                "entities": tuple(
+                    sorted((inspect(cls) for cls in classes), key=id)
+                )
+            },
+        )
+        return self
+
+    @overload
+    def _coerce_strat(self, strategy: _StrategySpec) -> _StrategyKey: ...
+
+    @overload
+    def _coerce_strat(self, strategy: Literal[None]) -> None: ...
+
+    def _coerce_strat(
+        self, strategy: Optional[_StrategySpec]
+    ) -> Optional[_StrategyKey]:
+        if strategy is not None:
+            strategy_key = tuple(sorted(strategy.items()))
+        else:
+            strategy_key = None
+        return strategy_key
+
+    @_generative
+    def _set_relationship_strategy(
+        self,
+        attr: _AttrType,
+        strategy: Optional[_StrategySpec],
+        propagate_to_loaders: bool = True,
+        opts: Optional[_OptsType] = None,
+        _reconcile_to_other: Optional[bool] = None,
+    ) -> Self:
+        strategy_key = self._coerce_strat(strategy)
+
+        self._clone_for_bind_strategy(
+            (attr,),
+            strategy_key,
+            _RELATIONSHIP_TOKEN,
+            opts=opts,
+            propagate_to_loaders=propagate_to_loaders,
+            reconcile_to_other=_reconcile_to_other,
+        )
+        return self
+
+    @_generative
+    def _set_column_strategy(
+        self,
+        attrs: Tuple[_AttrType, ...],
+        strategy: Optional[_StrategySpec],
+        opts: Optional[_OptsType] = None,
+        extra_criteria: Optional[Tuple[Any, ...]] = None,
+    ) -> Self:
+        strategy_key = self._coerce_strat(strategy)
+
+        self._clone_for_bind_strategy(
+            attrs,
+            strategy_key,
+            _COLUMN_TOKEN,
+            opts=opts,
+            attr_group=attrs,
+            extra_criteria=extra_criteria,
+        )
+        return self
+
+    @_generative
+    def _set_generic_strategy(
+        self,
+        attrs: Tuple[_AttrType, ...],
+        strategy: _StrategySpec,
+        _reconcile_to_other: Optional[bool] = None,
+    ) -> Self:
+        strategy_key = self._coerce_strat(strategy)
+        self._clone_for_bind_strategy(
+            attrs,
+            strategy_key,
+            None,
+            propagate_to_loaders=True,
+            reconcile_to_other=_reconcile_to_other,
+        )
+        return self
+
+    @_generative
+    def _set_class_strategy(
+        self, strategy: _StrategySpec, opts: _OptsType
+    ) -> Self:
+        strategy_key = self._coerce_strat(strategy)
+
+        self._clone_for_bind_strategy(None, strategy_key, None, opts=opts)
+        return self
+
+    def _apply_to_parent(self, parent: Load) -> None:
+        """apply this :class:`_orm._AbstractLoad` object as a sub-option of
+        a :class:`_orm.Load` object.
+
+        Implementation is provided by subclasses.
+
+        """
+        raise NotImplementedError()
+
+    def options(self, *opts: _AbstractLoad) -> Self:
+        r"""Apply a series of options as sub-options to this
+        :class:`_orm._AbstractLoad` object.
+
+        Implementation is provided by subclasses.
+ + """ + raise NotImplementedError() + + def _clone_for_bind_strategy( + self, + attrs: Optional[Tuple[_AttrType, ...]], + strategy: Optional[_StrategyKey], + wildcard_key: Optional[_WildcardKeyType], + opts: Optional[_OptsType] = None, + attr_group: Optional[_AttrGroupType] = None, + propagate_to_loaders: bool = True, + reconcile_to_other: Optional[bool] = None, + extra_criteria: Optional[Tuple[Any, ...]] = None, + ) -> Self: + raise NotImplementedError() + + def process_compile_state_replaced_entities( + self, + compile_state: ORMCompileState, + mapper_entities: Sequence[_MapperEntity], + ) -> None: + if not compile_state.compile_options._enable_eagerloads: + return + + # process is being run here so that the options given are validated + # against what the lead entities were, as well as to accommodate + # for the entities having been replaced with equivalents + self._process( + compile_state, + mapper_entities, + not bool(compile_state.current_path), + ) + + def process_compile_state(self, compile_state: ORMCompileState) -> None: + if not compile_state.compile_options._enable_eagerloads: + return + + self._process( + compile_state, + compile_state._lead_mapper_entities, + not bool(compile_state.current_path) + and not compile_state.compile_options._for_refresh_state, + ) + + def _process( + self, + compile_state: ORMCompileState, + mapper_entities: Sequence[_MapperEntity], + raiseerr: bool, + ) -> None: + """implemented by subclasses""" + raise NotImplementedError() + + @classmethod + def _chop_path( + cls, + to_chop: _PathRepresentation, + path: PathRegistry, + debug: bool = False, + ) -> Optional[_PathRepresentation]: + i = -1 + + for i, (c_token, p_token) in enumerate( + zip(to_chop, path.natural_path) + ): + if isinstance(c_token, str): + if i == 0 and ( + c_token.endswith(f":{_DEFAULT_TOKEN}") + or c_token.endswith(f":{_WILDCARD_TOKEN}") + ): + return to_chop + elif ( + c_token != f"{_RELATIONSHIP_TOKEN}:{_WILDCARD_TOKEN}" + and c_token != p_token.key # type: ignore + ): + return None + + if c_token is p_token: + continue + elif ( + isinstance(c_token, InspectionAttr) + and insp_is_mapper(c_token) + and insp_is_mapper(p_token) + and c_token.isa(p_token) + ): + continue + + else: + return None + return to_chop[i + 1 :] + + +class Load(_AbstractLoad): + """Represents loader options which modify the state of a + ORM-enabled :class:`_sql.Select` or a legacy :class:`_query.Query` in + order to affect how various mapped attributes are loaded. + + The :class:`_orm.Load` object is in most cases used implicitly behind the + scenes when one makes use of a query option like :func:`_orm.joinedload`, + :func:`_orm.defer`, or similar. It typically is not instantiated directly + except for in some very specific cases. + + .. seealso:: + + :ref:`orm_queryguide_relationship_per_entity_wildcard` - illustrates an + example where direct use of :class:`_orm.Load` may be useful + + """ + + __slots__ = ( + "path", + "context", + "additional_source_entities", + ) + + _traverse_internals = [ + ("path", visitors.ExtendedInternalTraversal.dp_has_cache_key), + ( + "context", + visitors.InternalTraversal.dp_has_cache_key_list, + ), + ("propagate_to_loaders", visitors.InternalTraversal.dp_boolean), + ( + "additional_source_entities", + visitors.InternalTraversal.dp_has_cache_key_list, + ), + ] + _cache_key_traversal = None + + path: PathRegistry + context: Tuple[_LoadElement, ...] + additional_source_entities: Tuple[_InternalEntityType[Any], ...] 
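+
+    # "path" identifies the lead entity; "context" accumulates the
+    # individual _LoadElement objects generated as loader methods are
+    # chained onto this Load, e.g. Load(User).joinedload(User.addresses)
+    # produces one _AttributeStrategyLoad in "context"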
+ + def __init__(self, entity: _EntityType[Any]): + insp = cast("Union[Mapper[Any], AliasedInsp[Any]]", inspect(entity)) + insp._post_inspect + + self.path = insp._path_registry + self.context = () + self.propagate_to_loaders = False + self.additional_source_entities = () + + def __str__(self) -> str: + return f"Load({self.path[0]})" + + @classmethod + def _construct_for_existing_path( + cls, path: AbstractEntityRegistry + ) -> Load: + load = cls.__new__(cls) + load.path = path + load.context = () + load.propagate_to_loaders = False + load.additional_source_entities = () + return load + + def _adapt_cached_option_to_uncached_option( + self, context: QueryContext, uncached_opt: ORMOption + ) -> ORMOption: + if uncached_opt is self: + return self + return self._adjust_for_extra_criteria(context) + + def _prepend_path(self, path: PathRegistry) -> Load: + cloned = self._clone() + cloned.context = tuple( + element._prepend_path(path) for element in self.context + ) + return cloned + + def _adjust_for_extra_criteria(self, context: QueryContext) -> Load: + """Apply the current bound parameters in a QueryContext to all + occurrences "extra_criteria" stored within this ``Load`` object, + returning a new instance of this ``Load`` object. + + """ + + # avoid generating cache keys for the queries if we don't + # actually have any extra_criteria options, which is the + # common case + for value in self.context: + if value._extra_criteria: + break + else: + return self + + replacement_cache_key = context.user_passed_query._generate_cache_key() + + if replacement_cache_key is None: + return self + + orig_query = context.compile_state.select_statement + orig_cache_key = orig_query._generate_cache_key() + assert orig_cache_key is not None + + def process( + opt: _LoadElement, + replacement_cache_key: CacheKey, + orig_cache_key: CacheKey, + ) -> _LoadElement: + cloned_opt = opt._clone() + + cloned_opt._extra_criteria = tuple( + replacement_cache_key._apply_params_to_element( + orig_cache_key, crit + ) + for crit in cloned_opt._extra_criteria + ) + + return cloned_opt + + cloned = self._clone() + cloned.context = tuple( + ( + process(value, replacement_cache_key, orig_cache_key) + if value._extra_criteria + else value + ) + for value in self.context + ) + return cloned + + def _reconcile_query_entities_with_us(self, mapper_entities, raiseerr): + """called at process time to allow adjustment of the root + entity inside of _LoadElement objects. + + """ + path = self.path + + ezero = None + for ent in mapper_entities: + ezero = ent.entity_zero + if ezero and orm_util._entity_corresponds_to( + # technically this can be a token also, but this is + # safe to pass to _entity_corresponds_to() + ezero, + cast("_InternalEntityType[Any]", path[0]), + ): + return ezero + + return None + + def _process( + self, + compile_state: ORMCompileState, + mapper_entities: Sequence[_MapperEntity], + raiseerr: bool, + ) -> None: + reconciled_lead_entity = self._reconcile_query_entities_with_us( + mapper_entities, raiseerr + ) + + # if the context has a current path, this is a lazy load + has_current_path = bool(compile_state.compile_options._current_path) + + for loader in self.context: + # issue #11292 + # historically, propagate_to_loaders was only considered at + # object loading time, whether or not to carry along options + # onto an object's loaded state where it would be used by lazyload. 
+ # however, the defaultload() option needs to propagate in case + # its sub-options propagate_to_loaders, but its sub-options + # that dont propagate should not be applied for lazy loaders. + # so we check again + if has_current_path and not loader.propagate_to_loaders: + continue + loader.process_compile_state( + self, + compile_state, + mapper_entities, + reconciled_lead_entity, + raiseerr, + ) + + def _apply_to_parent(self, parent: Load) -> None: + """apply this :class:`_orm.Load` object as a sub-option of another + :class:`_orm.Load` object. + + This method is used by the :meth:`_orm.Load.options` method. + + """ + cloned = self._generate() + + assert cloned.propagate_to_loaders == self.propagate_to_loaders + + if not any( + orm_util._entity_corresponds_to_use_path_impl( + elem, cloned.path.odd_element(0) + ) + for elem in (parent.path.odd_element(-1),) + + parent.additional_source_entities + ): + if len(cloned.path) > 1: + attrname = cloned.path[1] + parent_entity = cloned.path[0] + else: + attrname = cloned.path[0] + parent_entity = cloned.path[0] + _raise_for_does_not_link(parent.path, attrname, parent_entity) + + cloned.path = PathRegistry.coerce(parent.path[0:-1] + cloned.path[:]) + + if self.context: + cloned.context = tuple( + value._prepend_path_from(parent) for value in self.context + ) + + if cloned.context: + parent.context += cloned.context + parent.additional_source_entities += ( + cloned.additional_source_entities + ) + + @_generative + def options(self, *opts: _AbstractLoad) -> Self: + r"""Apply a series of options as sub-options to this + :class:`_orm.Load` + object. + + E.g.:: + + query = session.query(Author) + query = query.options( + joinedload(Author.book).options( + load_only(Book.summary, Book.excerpt), + joinedload(Book.citations).options( + joinedload(Citation.author) + ) + ) + ) + + :param \*opts: A series of loader option objects (ultimately + :class:`_orm.Load` objects) which should be applied to the path + specified by this :class:`_orm.Load` object. + + .. versionadded:: 1.3.6 + + .. seealso:: + + :func:`.defaultload` + + :ref:`orm_queryguide_relationship_sub_options` + + """ + for opt in opts: + try: + opt._apply_to_parent(self) + except AttributeError as ae: + if not isinstance(opt, _AbstractLoad): + raise sa_exc.ArgumentError( + f"Loader option {opt} is not compatible with the " + "Load.options() method." 
+ ) from ae + else: + raise + return self + + def _clone_for_bind_strategy( + self, + attrs: Optional[Tuple[_AttrType, ...]], + strategy: Optional[_StrategyKey], + wildcard_key: Optional[_WildcardKeyType], + opts: Optional[_OptsType] = None, + attr_group: Optional[_AttrGroupType] = None, + propagate_to_loaders: bool = True, + reconcile_to_other: Optional[bool] = None, + extra_criteria: Optional[Tuple[Any, ...]] = None, + ) -> Self: + # for individual strategy that needs to propagate, set the whole + # Load container to also propagate, so that it shows up in + # InstanceState.load_options + if propagate_to_loaders: + self.propagate_to_loaders = True + + if self.path.is_token: + raise sa_exc.ArgumentError( + "Wildcard token cannot be followed by another entity" + ) + + elif path_is_property(self.path): + # re-use the lookup which will raise a nicely formatted + # LoaderStrategyException + if strategy: + self.path.prop._strategy_lookup(self.path.prop, strategy[0]) + else: + raise sa_exc.ArgumentError( + f"Mapped attribute '{self.path.prop}' does not " + "refer to a mapped entity" + ) + + if attrs is None: + load_element = _ClassStrategyLoad.create( + self.path, + None, + strategy, + wildcard_key, + opts, + propagate_to_loaders, + attr_group=attr_group, + reconcile_to_other=reconcile_to_other, + extra_criteria=extra_criteria, + ) + if load_element: + self.context += (load_element,) + assert opts is not None + self.additional_source_entities += cast( + "Tuple[_InternalEntityType[Any]]", opts["entities"] + ) + + else: + for attr in attrs: + if isinstance(attr, str): + load_element = _TokenStrategyLoad.create( + self.path, + attr, + strategy, + wildcard_key, + opts, + propagate_to_loaders, + attr_group=attr_group, + reconcile_to_other=reconcile_to_other, + extra_criteria=extra_criteria, + ) + else: + load_element = _AttributeStrategyLoad.create( + self.path, + attr, + strategy, + wildcard_key, + opts, + propagate_to_loaders, + attr_group=attr_group, + reconcile_to_other=reconcile_to_other, + extra_criteria=extra_criteria, + ) + + if load_element: + # for relationship options, update self.path on this Load + # object with the latest path. + if wildcard_key is _RELATIONSHIP_TOKEN: + self.path = load_element.path + self.context += (load_element,) + + # this seems to be effective for selectinloader, + # giving the extra match to one more level deep. 
+ # but does not work for immediateloader, which still + # must add additional options at load time + if load_element.local_opts.get("recursion_depth", False): + r1 = load_element._recurse() + self.context += (r1,) + + return self + + def __getstate__(self): + d = self._shallow_to_dict() + d["path"] = self.path.serialize() + return d + + def __setstate__(self, state): + state["path"] = PathRegistry.deserialize(state["path"]) + self._shallow_from_dict(state) + + +class _WildcardLoad(_AbstractLoad): + """represent a standalone '*' load operation""" + + __slots__ = ("strategy", "path", "local_opts") + + _traverse_internals = [ + ("strategy", visitors.ExtendedInternalTraversal.dp_plain_obj), + ("path", visitors.ExtendedInternalTraversal.dp_plain_obj), + ( + "local_opts", + visitors.ExtendedInternalTraversal.dp_string_multi_dict, + ), + ] + cache_key_traversal: _CacheKeyTraversalType = None + + strategy: Optional[Tuple[Any, ...]] + local_opts: _OptsType + path: Union[Tuple[()], Tuple[str]] + propagate_to_loaders = False + + def __init__(self) -> None: + self.path = () + self.strategy = None + self.local_opts = util.EMPTY_DICT + + def _clone_for_bind_strategy( + self, + attrs, + strategy, + wildcard_key, + opts=None, + attr_group=None, + propagate_to_loaders=True, + reconcile_to_other=None, + extra_criteria=None, + ): + assert attrs is not None + attr = attrs[0] + assert ( + wildcard_key + and isinstance(attr, str) + and attr in (_WILDCARD_TOKEN, _DEFAULT_TOKEN) + ) + + attr = f"{wildcard_key}:{attr}" + + self.strategy = strategy + self.path = (attr,) + if opts: + self.local_opts = util.immutabledict(opts) + + assert extra_criteria is None + + def options(self, *opts: _AbstractLoad) -> Self: + raise NotImplementedError("Star option does not support sub-options") + + def _apply_to_parent(self, parent: Load) -> None: + """apply this :class:`_orm._WildcardLoad` object as a sub-option of + a :class:`_orm.Load` object. + + This method is used by the :meth:`_orm.Load.options` method. Note + that :class:`_orm.WildcardLoad` itself can't have sub-options, but + it may be used as the sub-option of a :class:`_orm.Load` object. 
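+
+        E.g., an illustrative usage that reaches this method (assuming a
+        mapped ``User`` class with a ``User.addresses`` relationship)::
+
+            session.query(User).options(
+                joinedload(User.addresses).options(raiseload("*"))
+            )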
+ + """ + assert self.path + attr = self.path[0] + if attr.endswith(_DEFAULT_TOKEN): + attr = f"{attr.split(':')[0]}:{_WILDCARD_TOKEN}" + + effective_path = cast(AbstractEntityRegistry, parent.path).token(attr) + + assert effective_path.is_token + + loader = _TokenStrategyLoad.create( + effective_path, + None, + self.strategy, + None, + self.local_opts, + self.propagate_to_loaders, + ) + + parent.context += (loader,) + + def _process(self, compile_state, mapper_entities, raiseerr): + is_refresh = compile_state.compile_options._for_refresh_state + + if is_refresh and not self.propagate_to_loaders: + return + + entities = [ent.entity_zero for ent in mapper_entities] + current_path = compile_state.current_path + + start_path: _PathRepresentation = self.path + + if current_path: + # TODO: no cases in test suite where we actually get + # None back here + new_path = self._chop_path(start_path, current_path) + if new_path is None: + return + + # chop_path does not actually "chop" a wildcard token path, + # just returns it + assert new_path == start_path + + # start_path is a single-token tuple + assert start_path and len(start_path) == 1 + + token = start_path[0] + assert isinstance(token, str) + entity = self._find_entity_basestring(entities, token, raiseerr) + + if not entity: + return + + path_element = entity + + # transfer our entity-less state into a Load() object + # with a real entity path. Start with the lead entity + # we just located, then go through the rest of our path + # tokens and populate into the Load(). + + assert isinstance(token, str) + loader = _TokenStrategyLoad.create( + path_element._path_registry, + token, + self.strategy, + None, + self.local_opts, + self.propagate_to_loaders, + raiseerr=raiseerr, + ) + if not loader: + return + + assert loader.path.is_token + + # don't pass a reconciled lead entity here + loader.process_compile_state( + self, compile_state, mapper_entities, None, raiseerr + ) + + return loader + + def _find_entity_basestring( + self, + entities: Iterable[_InternalEntityType[Any]], + token: str, + raiseerr: bool, + ) -> Optional[_InternalEntityType[Any]]: + if token.endswith(f":{_WILDCARD_TOKEN}"): + if len(list(entities)) != 1: + if raiseerr: + raise sa_exc.ArgumentError( + "Can't apply wildcard ('*') or load_only() " + f"loader option to multiple entities " + f"{', '.join(str(ent) for ent in entities)}. Specify " + "loader options for each entity individually, such as " + f"""{ + ", ".join( + f"Load({ent}).some_option('*')" + for ent in entities + ) + }.""" + ) + elif token.endswith(_DEFAULT_TOKEN): + raiseerr = False + + for ent in entities: + # return only the first _MapperEntity when searching + # based on string prop name. Ideally object + # attributes are used to specify more exactly. + return ent + else: + if raiseerr: + raise sa_exc.ArgumentError( + "Query has only expression-based entities - " + f'can\'t find property named "{token}".' + ) + else: + return None + + def __getstate__(self) -> Dict[str, Any]: + d = self._shallow_to_dict() + return d + + def __setstate__(self, state: Dict[str, Any]) -> None: + self._shallow_from_dict(state) + + +class _LoadElement( + cache_key.HasCacheKey, traversals.HasShallowCopy, visitors.Traversible +): + """represents strategy information to select for a LoaderStrategy + and pass options to it. + + :class:`._LoadElement` objects provide the inner datastructure + stored by a :class:`_orm.Load` object and are also the object passed + to methods like :meth:`.LoaderStrategy.setup_query`. + + .. 
versionadded:: 2.0 + + """ + + __slots__ = ( + "path", + "strategy", + "propagate_to_loaders", + "local_opts", + "_extra_criteria", + "_reconcile_to_other", + ) + __visit_name__ = "load_element" + + _traverse_internals = [ + ("path", visitors.ExtendedInternalTraversal.dp_has_cache_key), + ("strategy", visitors.ExtendedInternalTraversal.dp_plain_obj), + ( + "local_opts", + visitors.ExtendedInternalTraversal.dp_string_multi_dict, + ), + ("_extra_criteria", visitors.InternalTraversal.dp_clauseelement_list), + ("propagate_to_loaders", visitors.InternalTraversal.dp_plain_obj), + ("_reconcile_to_other", visitors.InternalTraversal.dp_plain_obj), + ] + _cache_key_traversal = None + + _extra_criteria: Tuple[Any, ...] + + _reconcile_to_other: Optional[bool] + strategy: Optional[_StrategyKey] + path: PathRegistry + propagate_to_loaders: bool + + local_opts: util.immutabledict[str, Any] + + is_token_strategy: bool + is_class_strategy: bool + + def __hash__(self) -> int: + return id(self) + + def __eq__(self, other): + return traversals.compare(self, other) + + @property + def is_opts_only(self) -> bool: + return bool(self.local_opts and self.strategy is None) + + def _clone(self, **kw: Any) -> _LoadElement: + cls = self.__class__ + s = cls.__new__(cls) + + self._shallow_copy_to(s) + return s + + def _update_opts(self, **kw: Any) -> _LoadElement: + new = self._clone() + new.local_opts = new.local_opts.union(kw) + return new + + def __getstate__(self) -> Dict[str, Any]: + d = self._shallow_to_dict() + d["path"] = self.path.serialize() + return d + + def __setstate__(self, state: Dict[str, Any]) -> None: + state["path"] = PathRegistry.deserialize(state["path"]) + self._shallow_from_dict(state) + + def _raise_for_no_match(self, parent_loader, mapper_entities): + path = parent_loader.path + + found_entities = False + for ent in mapper_entities: + ezero = ent.entity_zero + if ezero: + found_entities = True + break + + if not found_entities: + raise sa_exc.ArgumentError( + "Query has only expression-based entities; " + f"attribute loader options for {path[0]} can't " + "be applied here." + ) + else: + raise sa_exc.ArgumentError( + f"Mapped class {path[0]} does not apply to any of the " + f"root entities in this query, e.g. " + f"""{ + ", ".join( + str(x.entity_zero) + for x in mapper_entities if x.entity_zero + )}. Please """ + "specify the full path " + "from one of the root entities to the target " + "attribute. " + ) + + def _adjust_effective_path_for_current_path( + self, effective_path: PathRegistry, current_path: PathRegistry + ) -> Optional[PathRegistry]: + """receives the 'current_path' entry from an :class:`.ORMCompileState` + instance, which is set during lazy loads and secondary loader strategy + loads, and adjusts the given path to be relative to the + current_path. + + E.g. 
given a loader path and current path:: + + lp: User -> orders -> Order -> items -> Item -> keywords -> Keyword + + cp: User -> orders -> Order -> items + + The adjusted path would be:: + + Item -> keywords -> Keyword + + + """ + chopped_start_path = Load._chop_path( + effective_path.natural_path, current_path + ) + if not chopped_start_path: + return None + + tokens_removed_from_start_path = len(effective_path) - len( + chopped_start_path + ) + + loader_lead_path_element = self.path[tokens_removed_from_start_path] + + effective_path = PathRegistry.coerce( + (loader_lead_path_element,) + chopped_start_path[1:] + ) + + return effective_path + + def _init_path( + self, path, attr, wildcard_key, attr_group, raiseerr, extra_criteria + ): + """Apply ORM attributes and/or wildcard to an existing path, producing + a new path. + + This method is used within the :meth:`.create` method to initialize + a :class:`._LoadElement` object. + + """ + raise NotImplementedError() + + def _prepare_for_compile_state( + self, + parent_loader, + compile_state, + mapper_entities, + reconciled_lead_entity, + raiseerr, + ): + """implemented by subclasses.""" + raise NotImplementedError() + + def process_compile_state( + self, + parent_loader, + compile_state, + mapper_entities, + reconciled_lead_entity, + raiseerr, + ): + """populate ORMCompileState.attributes with loader state for this + _LoadElement. + + """ + keys = self._prepare_for_compile_state( + parent_loader, + compile_state, + mapper_entities, + reconciled_lead_entity, + raiseerr, + ) + for key in keys: + if key in compile_state.attributes: + compile_state.attributes[key] = _LoadElement._reconcile( + self, compile_state.attributes[key] + ) + else: + compile_state.attributes[key] = self + + @classmethod + def create( + cls, + path: PathRegistry, + attr: Union[_AttrType, _StrPathToken, None], + strategy: Optional[_StrategyKey], + wildcard_key: Optional[_WildcardKeyType], + local_opts: Optional[_OptsType], + propagate_to_loaders: bool, + raiseerr: bool = True, + attr_group: Optional[_AttrGroupType] = None, + reconcile_to_other: Optional[bool] = None, + extra_criteria: Optional[Tuple[Any, ...]] = None, + ) -> _LoadElement: + """Create a new :class:`._LoadElement` object.""" + + opt = cls.__new__(cls) + opt.path = path + opt.strategy = strategy + opt.propagate_to_loaders = propagate_to_loaders + opt.local_opts = ( + util.immutabledict(local_opts) if local_opts else util.EMPTY_DICT + ) + opt._extra_criteria = () + + if reconcile_to_other is not None: + opt._reconcile_to_other = reconcile_to_other + elif strategy is None and not local_opts: + opt._reconcile_to_other = True + else: + opt._reconcile_to_other = None + + path = opt._init_path( + path, attr, wildcard_key, attr_group, raiseerr, extra_criteria + ) + + if not path: + return None # type: ignore + + assert opt.is_token_strategy == path.is_token + + opt.path = path + return opt + + def __init__(self) -> None: + raise NotImplementedError() + + def _recurse(self) -> _LoadElement: + cloned = self._clone() + cloned.path = PathRegistry.coerce(self.path[:] + self.path[-2:]) + + return cloned + + def _prepend_path_from(self, parent: Load) -> _LoadElement: + """adjust the path of this :class:`._LoadElement` to be + a subpath of that of the given parent :class:`_orm.Load` object's + path. + + This is used by the :meth:`_orm.Load._apply_to_parent` method, + which is in turn part of the :meth:`_orm.Load.options` method. 
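+
+        The given parent's path, minus its final element, is prepended to
+        this element's existing path (see ``_prepend_path``), so that the
+        sub-option is expressed in terms of the parent's full path.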
+ + """ + + if not any( + orm_util._entity_corresponds_to_use_path_impl( + elem, + self.path.odd_element(0), + ) + for elem in (parent.path.odd_element(-1),) + + parent.additional_source_entities + ): + raise sa_exc.ArgumentError( + f'Attribute "{self.path[1]}" does not link ' + f'from element "{parent.path[-1]}".' + ) + + return self._prepend_path(parent.path) + + def _prepend_path(self, path: PathRegistry) -> _LoadElement: + cloned = self._clone() + + assert cloned.strategy == self.strategy + assert cloned.local_opts == self.local_opts + assert cloned.is_class_strategy == self.is_class_strategy + + cloned.path = PathRegistry.coerce(path[0:-1] + cloned.path[:]) + + return cloned + + @staticmethod + def _reconcile( + replacement: _LoadElement, existing: _LoadElement + ) -> _LoadElement: + """define behavior for when two Load objects are to be put into + the context.attributes under the same key. + + :param replacement: ``_LoadElement`` that seeks to replace the + existing one + + :param existing: ``_LoadElement`` that is already present. + + """ + # mapper inheritance loading requires fine-grained "block other + # options" / "allow these options to be overridden" behaviors + # see test_poly_loading.py + + if replacement._reconcile_to_other: + return existing + elif replacement._reconcile_to_other is False: + return replacement + elif existing._reconcile_to_other: + return replacement + elif existing._reconcile_to_other is False: + return existing + + if existing is replacement: + return replacement + elif ( + existing.strategy == replacement.strategy + and existing.local_opts == replacement.local_opts + ): + return replacement + elif replacement.is_opts_only: + existing = existing._clone() + existing.local_opts = existing.local_opts.union( + replacement.local_opts + ) + existing._extra_criteria += replacement._extra_criteria + return existing + elif existing.is_opts_only: + replacement = replacement._clone() + replacement.local_opts = replacement.local_opts.union( + existing.local_opts + ) + replacement._extra_criteria += existing._extra_criteria + return replacement + elif replacement.path.is_token: + # use 'last one wins' logic for wildcard options. this is also + # kind of inconsistent vs. options that are specific paths which + # will raise as below + return replacement + + raise sa_exc.InvalidRequestError( + f"Loader strategies for {replacement.path} conflict" + ) + + +class _AttributeStrategyLoad(_LoadElement): + """Loader strategies against specific relationship or column paths. 
+ + e.g.:: + + joinedload(User.addresses) + defer(Order.name) + selectinload(User.orders).lazyload(Order.items) + + """ + + __slots__ = ("_of_type", "_path_with_polymorphic_path") + + __visit_name__ = "attribute_strategy_load_element" + + _traverse_internals = _LoadElement._traverse_internals + [ + ("_of_type", visitors.ExtendedInternalTraversal.dp_multi), + ( + "_path_with_polymorphic_path", + visitors.ExtendedInternalTraversal.dp_has_cache_key, + ), + ] + + _of_type: Union[Mapper[Any], AliasedInsp[Any], None] + _path_with_polymorphic_path: Optional[PathRegistry] + + is_class_strategy = False + is_token_strategy = False + + def _init_path( + self, path, attr, wildcard_key, attr_group, raiseerr, extra_criteria + ): + assert attr is not None + self._of_type = None + self._path_with_polymorphic_path = None + insp, _, prop = _parse_attr_argument(attr) + + if insp.is_property: + # direct property can be sent from internal strategy logic + # that sets up specific loaders, such as + # emit_lazyload->_lazyload_reverse + # prop = found_property = attr + prop = attr + path = path[prop] + + if path.has_entity: + path = path.entity_path + return path + + elif not insp.is_attribute: + # should not reach here; + assert False + + # here we assume we have user-passed InstrumentedAttribute + if not orm_util._entity_corresponds_to_use_path_impl( + path[-1], attr.parent + ): + if raiseerr: + if attr_group and attr is not attr_group[0]: + raise sa_exc.ArgumentError( + "Can't apply wildcard ('*') or load_only() " + "loader option to multiple entities in the " + "same option. Use separate options per entity." + ) + else: + _raise_for_does_not_link(path, str(attr), attr.parent) + else: + return None + + # note the essential logic of this attribute was very different in + # 1.4, where there were caching failures in e.g. + # test_relationship_criteria.py::RelationshipCriteriaTest:: + # test_selectinload_nested_criteria[True] if an existing + # "_extra_criteria" on a Load object were replaced with that coming + # from an attribute. This appears to have been an artifact of how + # _UnboundLoad / Load interacted together, which was opaque and + # poorly defined. + if extra_criteria: + assert not attr._extra_criteria + self._extra_criteria = extra_criteria + else: + self._extra_criteria = attr._extra_criteria + + if getattr(attr, "_of_type", None): + ac = attr._of_type + ext_info = inspect(ac) + self._of_type = ext_info + + self._path_with_polymorphic_path = path.entity_path[prop] + + path = path[prop][ext_info] + + else: + path = path[prop] + + if path.has_entity: + path = path.entity_path + + return path + + def _generate_extra_criteria(self, context): + """Apply the current bound parameters in a QueryContext to the + immediate "extra_criteria" stored with this Load object. + + Load objects are typically pulled from the cached version of + the statement from a QueryContext. The statement currently being + executed will have new values (and keys) for bound parameters in the + extra criteria which need to be applied by loader strategies when + they handle this criteria for a result set. 
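+
+        The translation works by generating cache keys for both the cached
+        statement and the currently executing statement, then applying the
+        current statement's parameter values to the cached criteria via
+        ``_apply_params_to_element()``.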
+ + """ + + assert ( + self._extra_criteria + ), "this should only be called if _extra_criteria is present" + + orig_query = context.compile_state.select_statement + current_query = context.query + + # NOTE: while it seems like we should not do the "apply" operation + # here if orig_query is current_query, skipping it in the "optimized" + # case causes the query to be different from a cache key perspective, + # because we are creating a copy of the criteria which is no longer + # the same identity of the _extra_criteria in the loader option + # itself. cache key logic produces a different key for + # (A, copy_of_A) vs. (A, A), because in the latter case it shortens + # the second part of the key to just indicate on identity. + + # if orig_query is current_query: + # not cached yet. just do the and_() + # return and_(*self._extra_criteria) + + k1 = orig_query._generate_cache_key() + k2 = current_query._generate_cache_key() + + return k2._apply_params_to_element(k1, and_(*self._extra_criteria)) + + def _set_of_type_info(self, context, current_path): + assert self._path_with_polymorphic_path + + pwpi = self._of_type + assert pwpi + if not pwpi.is_aliased_class: + pwpi = inspect( + orm_util.AliasedInsp._with_polymorphic_factory( + pwpi.mapper.base_mapper, + (pwpi.mapper,), + aliased=True, + _use_mapper_path=True, + ) + ) + start_path = self._path_with_polymorphic_path + if current_path: + new_path = self._adjust_effective_path_for_current_path( + start_path, current_path + ) + if new_path is None: + return + start_path = new_path + + key = ("path_with_polymorphic", start_path.natural_path) + if key in context: + existing_aliased_insp = context[key] + this_aliased_insp = pwpi + new_aliased_insp = existing_aliased_insp._merge_with( + this_aliased_insp + ) + context[key] = new_aliased_insp + else: + context[key] = pwpi + + def _prepare_for_compile_state( + self, + parent_loader, + compile_state, + mapper_entities, + reconciled_lead_entity, + raiseerr, + ): + # _AttributeStrategyLoad + + current_path = compile_state.current_path + is_refresh = compile_state.compile_options._for_refresh_state + assert not self.path.is_token + + if is_refresh and not self.propagate_to_loaders: + return [] + + if self._of_type: + # apply additional with_polymorphic alias that may have been + # generated. this has to happen even if this is a defaultload + self._set_of_type_info(compile_state.attributes, current_path) + + # omit setting loader attributes for a "defaultload" type of option + if not self.strategy and not self.local_opts: + return [] + + if raiseerr and not reconciled_lead_entity: + self._raise_for_no_match(parent_loader, mapper_entities) + + if self.path.has_entity: + effective_path = self.path.parent + else: + effective_path = self.path + + if current_path: + assert effective_path is not None + effective_path = self._adjust_effective_path_for_current_path( + effective_path, current_path + ) + if effective_path is None: + return [] + + return [("loader", cast(PathRegistry, effective_path).natural_path)] + + def __getstate__(self): + d = super().__getstate__() + + # can't pickle this. 
See + # test_pickled.py -> test_lazyload_extra_criteria_not_supported + # where we should be emitting a warning for the usual case where this + # would be non-None + d["_extra_criteria"] = () + + if self._path_with_polymorphic_path: + d["_path_with_polymorphic_path"] = ( + self._path_with_polymorphic_path.serialize() + ) + + if self._of_type: + if self._of_type.is_aliased_class: + d["_of_type"] = None + elif self._of_type.is_mapper: + d["_of_type"] = self._of_type.class_ + else: + assert False, "unexpected object for _of_type" + + return d + + def __setstate__(self, state): + super().__setstate__(state) + + if state.get("_path_with_polymorphic_path", None): + self._path_with_polymorphic_path = PathRegistry.deserialize( + state["_path_with_polymorphic_path"] + ) + else: + self._path_with_polymorphic_path = None + + if state.get("_of_type", None): + self._of_type = inspect(state["_of_type"]) + else: + self._of_type = None + + +class _TokenStrategyLoad(_LoadElement): + """Loader strategies against wildcard attributes + + e.g.:: + + raiseload('*') + Load(User).lazyload('*') + defer('*') + load_only(User.name, User.email) # will create a defer('*') + joinedload(User.addresses).raiseload('*') + + """ + + __visit_name__ = "token_strategy_load_element" + + inherit_cache = True + is_class_strategy = False + is_token_strategy = True + + def _init_path( + self, path, attr, wildcard_key, attr_group, raiseerr, extra_criteria + ): + # assert isinstance(attr, str) or attr is None + if attr is not None: + default_token = attr.endswith(_DEFAULT_TOKEN) + if attr.endswith(_WILDCARD_TOKEN) or default_token: + if wildcard_key: + attr = f"{wildcard_key}:{attr}" + + path = path.token(attr) + return path + else: + raise sa_exc.ArgumentError( + "Strings are not accepted for attribute names in loader " + "options; please use class-bound attributes directly." + ) + return path + + def _prepare_for_compile_state( + self, + parent_loader, + compile_state, + mapper_entities, + reconciled_lead_entity, + raiseerr, + ): + # _TokenStrategyLoad + + current_path = compile_state.current_path + is_refresh = compile_state.compile_options._for_refresh_state + + assert self.path.is_token + + if is_refresh and not self.propagate_to_loaders: + return [] + + # omit setting attributes for a "defaultload" type of option + if not self.strategy and not self.local_opts: + return [] + + effective_path = self.path + if reconciled_lead_entity: + effective_path = PathRegistry.coerce( + (reconciled_lead_entity,) + effective_path.path[1:] + ) + + if current_path: + new_effective_path = self._adjust_effective_path_for_current_path( + effective_path, current_path + ) + if new_effective_path is None: + return [] + effective_path = new_effective_path + + # for a wildcard token, expand out the path we set + # to encompass everything from the query entity on + # forward. not clear if this is necessary when current_path + # is set. 
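+        # e.g. (an illustrative reading of the call below): a token path
+        # whose lead entity is a subclass mapper also yields natural paths
+        # for its superclass mappers, so the option takes effect regardless
+        # of which level of the hierarchy the query names.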
+ + return [ + ("loader", natural_path) + for natural_path in ( + cast( + TokenRegistry, effective_path + )._generate_natural_for_superclasses() + ) + ] + + +class _ClassStrategyLoad(_LoadElement): + """Loader strategies that deals with a class as a target, not + an attribute path + + e.g.:: + + q = s.query(Person).options( + selectin_polymorphic(Person, [Engineer, Manager]) + ) + + """ + + inherit_cache = True + is_class_strategy = True + is_token_strategy = False + + __visit_name__ = "class_strategy_load_element" + + def _init_path( + self, path, attr, wildcard_key, attr_group, raiseerr, extra_criteria + ): + return path + + def _prepare_for_compile_state( + self, + parent_loader, + compile_state, + mapper_entities, + reconciled_lead_entity, + raiseerr, + ): + # _ClassStrategyLoad + + current_path = compile_state.current_path + is_refresh = compile_state.compile_options._for_refresh_state + + if is_refresh and not self.propagate_to_loaders: + return [] + + # omit setting attributes for a "defaultload" type of option + if not self.strategy and not self.local_opts: + return [] + + effective_path = self.path + + if current_path: + new_effective_path = self._adjust_effective_path_for_current_path( + effective_path, current_path + ) + if new_effective_path is None: + return [] + effective_path = new_effective_path + + return [("loader", effective_path.natural_path)] + + +def _generate_from_keys( + meth: Callable[..., _AbstractLoad], + keys: Tuple[_AttrType, ...], + chained: bool, + kw: Any, +) -> _AbstractLoad: + lead_element: Optional[_AbstractLoad] = None + + attr: Any + for is_default, _keys in (True, keys[0:-1]), (False, keys[-1:]): + for attr in _keys: + if isinstance(attr, str): + if attr.startswith("." + _WILDCARD_TOKEN): + util.warn_deprecated( + "The undocumented `.{WILDCARD}` format is " + "deprecated " + "and will be removed in a future version as " + "it is " + "believed to be unused. " + "If you have been using this functionality, " + "please " + "comment on Issue #4390 on the SQLAlchemy project " + "tracker.", + version="1.4", + ) + attr = attr[1:] + + if attr == _WILDCARD_TOKEN: + if is_default: + raise sa_exc.ArgumentError( + "Wildcard token cannot be followed by " + "another entity", + ) + + if lead_element is None: + lead_element = _WildcardLoad() + + lead_element = meth(lead_element, _DEFAULT_TOKEN, **kw) + + else: + raise sa_exc.ArgumentError( + "Strings are not accepted for attribute names in " + "loader options; please use class-bound " + "attributes directly.", + ) + else: + if lead_element is None: + _, lead_entity, _ = _parse_attr_argument(attr) + lead_element = Load(lead_entity) + + if is_default: + if not chained: + lead_element = lead_element.defaultload(attr) + else: + lead_element = meth( + lead_element, attr, _is_chain=True, **kw + ) + else: + lead_element = meth(lead_element, attr, **kw) + + assert lead_element + return lead_element + + +def _parse_attr_argument( + attr: _AttrType, +) -> Tuple[InspectionAttr, _InternalEntityType[Any], MapperProperty[Any]]: + """parse an attribute or wildcard argument to produce an + :class:`._AbstractLoad` instance. + + This is used by the standalone loader strategy functions like + ``joinedload()``, ``defer()``, etc. to produce :class:`_orm.Load` or + :class:`._WildcardLoad` objects. 
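+
+    E.g., for ``joinedload(User.addresses)`` (an illustrative mapped
+    class), ``User.addresses`` resolves here to its inspection object,
+    the ``User`` mapper as the lead entity, and the ``addresses``
+    relationship property.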
+ + """ + try: + # TODO: need to figure out this None thing being returned by + # inspect(), it should not have None as an option in most cases + # if at all + insp: InspectionAttr = inspect(attr) # type: ignore + except sa_exc.NoInspectionAvailable as err: + raise sa_exc.ArgumentError( + "expected ORM mapped attribute for loader strategy argument" + ) from err + + lead_entity: _InternalEntityType[Any] + + if insp_is_mapper_property(insp): + lead_entity = insp.parent + prop = insp + elif insp_is_attribute(insp): + lead_entity = insp.parent + prop = insp.prop + else: + raise sa_exc.ArgumentError( + "expected ORM mapped attribute for loader strategy argument" + ) + + return insp, lead_entity, prop + + +def loader_unbound_fn(fn: _FN) -> _FN: + """decorator that applies docstrings between standalone loader functions + and the loader methods on :class:`._AbstractLoad`. + + """ + bound_fn = getattr(_AbstractLoad, fn.__name__) + fn_doc = bound_fn.__doc__ + bound_fn.__doc__ = f"""Produce a new :class:`_orm.Load` object with the +:func:`_orm.{fn.__name__}` option applied. + +See :func:`_orm.{fn.__name__}` for usage examples. + +""" + + fn.__doc__ = fn_doc + return fn + + +# standalone functions follow. docstrings are filled in +# by the ``@loader_unbound_fn`` decorator. + + +@loader_unbound_fn +def contains_eager(*keys: _AttrType, **kw: Any) -> _AbstractLoad: + return _generate_from_keys(Load.contains_eager, keys, True, kw) + + +@loader_unbound_fn +def load_only(*attrs: _AttrType, raiseload: bool = False) -> _AbstractLoad: + # TODO: attrs against different classes. we likely have to + # add some extra state to Load of some kind + _, lead_element, _ = _parse_attr_argument(attrs[0]) + return Load(lead_element).load_only(*attrs, raiseload=raiseload) + + +@loader_unbound_fn +def joinedload(*keys: _AttrType, **kw: Any) -> _AbstractLoad: + return _generate_from_keys(Load.joinedload, keys, False, kw) + + +@loader_unbound_fn +def subqueryload(*keys: _AttrType) -> _AbstractLoad: + return _generate_from_keys(Load.subqueryload, keys, False, {}) + + +@loader_unbound_fn +def selectinload( + *keys: _AttrType, recursion_depth: Optional[int] = None +) -> _AbstractLoad: + return _generate_from_keys( + Load.selectinload, keys, False, {"recursion_depth": recursion_depth} + ) + + +@loader_unbound_fn +def lazyload(*keys: _AttrType) -> _AbstractLoad: + return _generate_from_keys(Load.lazyload, keys, False, {}) + + +@loader_unbound_fn +def immediateload( + *keys: _AttrType, recursion_depth: Optional[int] = None +) -> _AbstractLoad: + return _generate_from_keys( + Load.immediateload, keys, False, {"recursion_depth": recursion_depth} + ) + + +@loader_unbound_fn +def noload(*keys: _AttrType) -> _AbstractLoad: + return _generate_from_keys(Load.noload, keys, False, {}) + + +@loader_unbound_fn +def raiseload(*keys: _AttrType, **kw: Any) -> _AbstractLoad: + return _generate_from_keys(Load.raiseload, keys, False, kw) + + +@loader_unbound_fn +def defaultload(*keys: _AttrType) -> _AbstractLoad: + return _generate_from_keys(Load.defaultload, keys, False, {}) + + +@loader_unbound_fn +def defer( + key: _AttrType, *addl_attrs: _AttrType, raiseload: bool = False +) -> _AbstractLoad: + if addl_attrs: + util.warn_deprecated( + "The *addl_attrs on orm.defer is deprecated. 
Please use " + "method chaining in conjunction with defaultload() to " + "indicate a path.", + version="1.3", + ) + + if raiseload: + kw = {"raiseload": raiseload} + else: + kw = {} + + return _generate_from_keys(Load.defer, (key,) + addl_attrs, False, kw) + + +@loader_unbound_fn +def undefer(key: _AttrType, *addl_attrs: _AttrType) -> _AbstractLoad: + if addl_attrs: + util.warn_deprecated( + "The *addl_attrs on orm.undefer is deprecated. Please use " + "method chaining in conjunction with defaultload() to " + "indicate a path.", + version="1.3", + ) + return _generate_from_keys(Load.undefer, (key,) + addl_attrs, False, {}) + + +@loader_unbound_fn +def undefer_group(name: str) -> _AbstractLoad: + element = _WildcardLoad() + return element.undefer_group(name) + + +@loader_unbound_fn +def with_expression( + key: _AttrType, expression: _ColumnExpressionArgument[Any] +) -> _AbstractLoad: + return _generate_from_keys( + Load.with_expression, (key,), False, {"expression": expression} + ) + + +@loader_unbound_fn +def selectin_polymorphic( + base_cls: _EntityType[Any], classes: Iterable[Type[Any]] +) -> _AbstractLoad: + ul = Load(base_cls) + return ul.selectin_polymorphic(classes) + + +def _raise_for_does_not_link(path, attrname, parent_entity): + if len(path) > 1: + path_is_of_type = path[-1].entity is not path[-2].mapper.class_ + if insp_is_aliased_class(parent_entity): + parent_entity_str = str(parent_entity) + else: + parent_entity_str = parent_entity.class_.__name__ + + raise sa_exc.ArgumentError( + f'ORM mapped entity or attribute "{attrname}" does not ' + f'link from relationship "{path[-2]}%s".%s' + % ( + f".of_type({path[-1]})" if path_is_of_type else "", + ( + " Did you mean to use " + f'"{path[-2]}' + f'.of_type({parent_entity_str})" or "loadopt.options(' + f"selectin_polymorphic({path[-2].mapper.class_.__name__}, " + f'[{parent_entity_str}]), ...)" ?' + if not path_is_of_type + and not path[-1].is_aliased_class + and orm_util._entity_corresponds_to( + path.entity, inspect(parent_entity).mapper + ) + else "" + ), + ) + ) + else: + raise sa_exc.ArgumentError( + f'ORM mapped attribute "{attrname}" does not ' + f'link mapped class "{path[-1]}"' + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/sync.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/sync.py new file mode 100644 index 00000000..db09a3e9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/sync.py @@ -0,0 +1,164 @@ +# orm/sync.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + + +"""private module containing functions used for copying data +between instances based on join conditions. + +""" + +from __future__ import annotations + +from . import exc +from . 
import util as orm_util +from .base import PassiveFlag + + +def populate( + source, + source_mapper, + dest, + dest_mapper, + synchronize_pairs, + uowcommit, + flag_cascaded_pks, +): + source_dict = source.dict + dest_dict = dest.dict + + for l, r in synchronize_pairs: + try: + # inline of source_mapper._get_state_attr_by_column + prop = source_mapper._columntoproperty[l] + value = source.manager[prop.key].impl.get( + source, source_dict, PassiveFlag.PASSIVE_OFF + ) + except exc.UnmappedColumnError as err: + _raise_col_to_prop(False, source_mapper, l, dest_mapper, r, err) + + try: + # inline of dest_mapper._set_state_attr_by_column + prop = dest_mapper._columntoproperty[r] + dest.manager[prop.key].impl.set(dest, dest_dict, value, None) + except exc.UnmappedColumnError as err: + _raise_col_to_prop(True, source_mapper, l, dest_mapper, r, err) + + # technically the "r.primary_key" check isn't + # needed here, but we check for this condition to limit + # how often this logic is invoked for memory/performance + # reasons, since we only need this info for a primary key + # destination. + if ( + flag_cascaded_pks + and l.primary_key + and r.primary_key + and r.references(l) + ): + uowcommit.attributes[("pk_cascaded", dest, r)] = True + + +def bulk_populate_inherit_keys(source_dict, source_mapper, synchronize_pairs): + # a simplified version of populate() used by bulk insert mode + for l, r in synchronize_pairs: + try: + prop = source_mapper._columntoproperty[l] + value = source_dict[prop.key] + except exc.UnmappedColumnError as err: + _raise_col_to_prop(False, source_mapper, l, source_mapper, r, err) + + try: + prop = source_mapper._columntoproperty[r] + source_dict[prop.key] = value + except exc.UnmappedColumnError as err: + _raise_col_to_prop(True, source_mapper, l, source_mapper, r, err) + + +def clear(dest, dest_mapper, synchronize_pairs): + for l, r in synchronize_pairs: + if ( + r.primary_key + and dest_mapper._get_state_attr_by_column(dest, dest.dict, r) + not in orm_util._none_set + ): + raise AssertionError( + f"Dependency rule on column '{l}' " + "tried to blank-out primary key " + f"column '{r}' on instance '{orm_util.state_str(dest)}'" + ) + try: + dest_mapper._set_state_attr_by_column(dest, dest.dict, r, None) + except exc.UnmappedColumnError as err: + _raise_col_to_prop(True, None, l, dest_mapper, r, err) + + +def update(source, source_mapper, dest, old_prefix, synchronize_pairs): + for l, r in synchronize_pairs: + try: + oldvalue = source_mapper._get_committed_attr_by_column( + source.obj(), l + ) + value = source_mapper._get_state_attr_by_column( + source, source.dict, l, passive=PassiveFlag.PASSIVE_OFF + ) + except exc.UnmappedColumnError as err: + _raise_col_to_prop(False, source_mapper, l, None, r, err) + dest[r.key] = value + dest[old_prefix + r.key] = oldvalue + + +def populate_dict(source, source_mapper, dict_, synchronize_pairs): + for l, r in synchronize_pairs: + try: + value = source_mapper._get_state_attr_by_column( + source, source.dict, l, passive=PassiveFlag.PASSIVE_OFF + ) + except exc.UnmappedColumnError as err: + _raise_col_to_prop(False, source_mapper, l, None, r, err) + + dict_[r.key] = value + + +def source_modified(uowcommit, source, source_mapper, synchronize_pairs): + """return true if the source object has changes from an old to a + new value on the given synchronize pairs + + """ + for l, r in synchronize_pairs: + try: + prop = source_mapper._columntoproperty[l] + except exc.UnmappedColumnError as err: + _raise_col_to_prop(False, source_mapper, l, None, r, 
err) + history = uowcommit.get_attribute_history( + source, prop.key, PassiveFlag.PASSIVE_NO_INITIALIZE + ) + if bool(history.deleted): + return True + else: + return False + + +def _raise_col_to_prop( + isdest, source_mapper, source_column, dest_mapper, dest_column, err +): + if isdest: + raise exc.UnmappedColumnError( + "Can't execute sync rule for " + "destination column '%s'; mapper '%s' does not map " + "this column. Try using an explicit `foreign_keys` " + "collection which does not include this column (or use " + "a viewonly=True relation)." % (dest_column, dest_mapper) + ) from err + else: + raise exc.UnmappedColumnError( + "Can't execute sync rule for " + "source column '%s'; mapper '%s' does not map this " + "column. Try using an explicit `foreign_keys` " + "collection which does not include destination column " + "'%s' (or use a viewonly=True relation)." + % (source_column, source_mapper, dest_column) + ) from err diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/unitofwork.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/unitofwork.py new file mode 100644 index 00000000..7e2df2b0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/unitofwork.py @@ -0,0 +1,796 @@ +# orm/unitofwork.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +"""The internals for the unit of work system. + +The session's flush() process passes objects to a contextual object +here, which assembles flush tasks based on mappers and their properties, +organizes them in order of dependency, and executes. + +""" + +from __future__ import annotations + +from typing import Any +from typing import Dict +from typing import Optional +from typing import Set +from typing import TYPE_CHECKING + +from . import attributes +from . import exc as orm_exc +from . import util as orm_util +from .. import event +from .. import util +from ..util import topological + + +if TYPE_CHECKING: + from .dependency import DependencyProcessor + from .interfaces import MapperProperty + from .mapper import Mapper + from .session import Session + from .session import SessionTransaction + from .state import InstanceState + + +def track_cascade_events(descriptor, prop): + """Establish event listeners on object attributes which handle + cascade-on-set/append. 
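+
+    E.g. (illustrative): with the default ``save-update`` cascade on a
+    ``User.addresses`` relationship, appending a pending ``Address`` to
+    ``some_user.addresses`` while ``some_user`` is in a :class:`.Session`
+    causes the ``Address`` to be cascaded into that session by the
+    ``append`` listener established here.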
+ + """ + key = prop.key + + def append(state, item, initiator, **kw): + # process "save_update" cascade rules for when + # an instance is appended to the list of another instance + + if item is None: + return + + sess = state.session + if sess: + if sess._warn_on_events: + sess._flush_warning("collection append") + + prop = state.manager.mapper._props[key] + item_state = attributes.instance_state(item) + + if ( + prop._cascade.save_update + and (key == initiator.key) + and not sess._contains_state(item_state) + ): + sess._save_or_update_state(item_state) + return item + + def remove(state, item, initiator, **kw): + if item is None: + return + + sess = state.session + + prop = state.manager.mapper._props[key] + + if sess and sess._warn_on_events: + sess._flush_warning( + "collection remove" + if prop.uselist + else "related attribute delete" + ) + + if ( + item is not None + and item is not attributes.NEVER_SET + and item is not attributes.PASSIVE_NO_RESULT + and prop._cascade.delete_orphan + ): + # expunge pending orphans + item_state = attributes.instance_state(item) + + if prop.mapper._is_orphan(item_state): + if sess and item_state in sess._new: + sess.expunge(item) + else: + # the related item may or may not itself be in a + # Session, however the parent for which we are catching + # the event is not in a session, so memoize this on the + # item + item_state._orphaned_outside_of_session = True + + def set_(state, newvalue, oldvalue, initiator, **kw): + # process "save_update" cascade rules for when an instance + # is attached to another instance + if oldvalue is newvalue: + return newvalue + + sess = state.session + if sess: + if sess._warn_on_events: + sess._flush_warning("related attribute set") + + prop = state.manager.mapper._props[key] + if newvalue is not None: + newvalue_state = attributes.instance_state(newvalue) + if ( + prop._cascade.save_update + and (key == initiator.key) + and not sess._contains_state(newvalue_state) + ): + sess._save_or_update_state(newvalue_state) + + if ( + oldvalue is not None + and oldvalue is not attributes.NEVER_SET + and oldvalue is not attributes.PASSIVE_NO_RESULT + and prop._cascade.delete_orphan + ): + # possible to reach here with attributes.NEVER_SET ? + oldvalue_state = attributes.instance_state(oldvalue) + + if oldvalue_state in sess._new and prop.mapper._is_orphan( + oldvalue_state + ): + sess.expunge(oldvalue) + return newvalue + + event.listen( + descriptor, "append_wo_mutation", append, raw=True, include_key=True + ) + event.listen( + descriptor, "append", append, raw=True, retval=True, include_key=True + ) + event.listen( + descriptor, "remove", remove, raw=True, retval=True, include_key=True + ) + event.listen( + descriptor, "set", set_, raw=True, retval=True, include_key=True + ) + + +class UOWTransaction: + session: Session + transaction: SessionTransaction + attributes: Dict[str, Any] + deps: util.defaultdict[Mapper[Any], Set[DependencyProcessor]] + mappers: util.defaultdict[Mapper[Any], Set[InstanceState[Any]]] + + def __init__(self, session: Session): + self.session = session + + # dictionary used by external actors to + # store arbitrary state information. + self.attributes = {} + + # dictionary of mappers to sets of + # DependencyProcessors, which are also + # set to be part of the sorted flush actions, + # which have that mapper as a parent. + self.deps = util.defaultdict(set) + + # dictionary of mappers to sets of InstanceState + # items pending for flush which have that mapper + # as a parent. 
+ self.mappers = util.defaultdict(set) + + # a dictionary of Preprocess objects, which gather + # additional states impacted by the flush + # and determine if a flush action is needed + self.presort_actions = {} + + # dictionary of PostSortRec objects, each + # one issues work during the flush within + # a certain ordering. + self.postsort_actions = {} + + # a set of 2-tuples, each containing two + # PostSortRec objects where the second + # is dependent on the first being executed + # first + self.dependencies = set() + + # dictionary of InstanceState-> (isdelete, listonly) + # tuples, indicating if this state is to be deleted + # or insert/updated, or just refreshed + self.states = {} + + # tracks InstanceStates which will be receiving + # a "post update" call. Keys are mappers, + # values are a set of states and a set of the + # columns which should be included in the update. + self.post_update_states = util.defaultdict(lambda: (set(), set())) + + @property + def has_work(self): + return bool(self.states) + + def was_already_deleted(self, state): + """Return ``True`` if the given state is expired and was deleted + previously. + """ + if state.expired: + try: + state._load_expired(state, attributes.PASSIVE_OFF) + except orm_exc.ObjectDeletedError: + self.session._remove_newly_deleted([state]) + return True + return False + + def is_deleted(self, state): + """Return ``True`` if the given state is marked as deleted + within this uowtransaction.""" + + return state in self.states and self.states[state][0] + + def memo(self, key, callable_): + if key in self.attributes: + return self.attributes[key] + else: + self.attributes[key] = ret = callable_() + return ret + + def remove_state_actions(self, state): + """Remove pending actions for a state from the uowtransaction.""" + + isdelete = self.states[state][0] + + self.states[state] = (isdelete, True) + + def get_attribute_history( + self, state, key, passive=attributes.PASSIVE_NO_INITIALIZE + ): + """Facade to attributes.get_state_history(), including + caching of results.""" + + hashkey = ("history", state, key) + + # cache the objects, not the states; the strong reference here + # prevents newly loaded objects from being dereferenced during the + # flush process + + if hashkey in self.attributes: + history, state_history, cached_passive = self.attributes[hashkey] + # if the cached lookup was "passive" and now + # we want non-passive, do a non-passive lookup and re-cache + + if ( + not cached_passive & attributes.SQL_OK + and passive & attributes.SQL_OK + ): + impl = state.manager[key].impl + history = impl.get_history( + state, + state.dict, + attributes.PASSIVE_OFF + | attributes.LOAD_AGAINST_COMMITTED + | attributes.NO_RAISE, + ) + if history and impl.uses_objects: + state_history = history.as_state() + else: + state_history = history + self.attributes[hashkey] = (history, state_history, passive) + else: + impl = state.manager[key].impl + # TODO: store the history as (state, object) tuples + # so we don't have to keep converting here + history = impl.get_history( + state, + state.dict, + passive + | attributes.LOAD_AGAINST_COMMITTED + | attributes.NO_RAISE, + ) + if history and impl.uses_objects: + state_history = history.as_state() + else: + state_history = history + self.attributes[hashkey] = (history, state_history, passive) + + return state_history + + def has_dep(self, processor): + return (processor, True) in self.presort_actions + + def register_preprocessor(self, processor, fromparent): + key = (processor, fromparent) + if key not 
in self.presort_actions: + self.presort_actions[key] = Preprocess(processor, fromparent) + + def register_object( + self, + state: InstanceState[Any], + isdelete: bool = False, + listonly: bool = False, + cancel_delete: bool = False, + operation: Optional[str] = None, + prop: Optional[MapperProperty] = None, + ) -> bool: + if not self.session._contains_state(state): + # this condition is normal when objects are registered + # as part of a relationship cascade operation. it should + # not occur for the top-level register from Session.flush(). + if not state.deleted and operation is not None: + util.warn( + "Object of type %s not in session, %s operation " + "along '%s' will not proceed" + % (orm_util.state_class_str(state), operation, prop) + ) + return False + + if state not in self.states: + mapper = state.manager.mapper + + if mapper not in self.mappers: + self._per_mapper_flush_actions(mapper) + + self.mappers[mapper].add(state) + self.states[state] = (isdelete, listonly) + else: + if not listonly and (isdelete or cancel_delete): + self.states[state] = (isdelete, False) + return True + + def register_post_update(self, state, post_update_cols): + mapper = state.manager.mapper.base_mapper + states, cols = self.post_update_states[mapper] + states.add(state) + cols.update(post_update_cols) + + def _per_mapper_flush_actions(self, mapper): + saves = SaveUpdateAll(self, mapper.base_mapper) + deletes = DeleteAll(self, mapper.base_mapper) + self.dependencies.add((saves, deletes)) + + for dep in mapper._dependency_processors: + dep.per_property_preprocessors(self) + + for prop in mapper.relationships: + if prop.viewonly: + continue + dep = prop._dependency_processor + dep.per_property_preprocessors(self) + + @util.memoized_property + def _mapper_for_dep(self): + """return a dynamic mapping of (Mapper, DependencyProcessor) to + True or False, indicating if the DependencyProcessor operates + on objects of that Mapper. + + The result is stored in the dictionary persistently once + calculated. + + """ + return util.PopulateDict( + lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop + ) + + def filter_states_for_dep(self, dep, states): + """Filter the given list of InstanceStates to those relevant to the + given DependencyProcessor. + + """ + mapper_for_dep = self._mapper_for_dep + return [s for s in states if mapper_for_dep[(s.manager.mapper, dep)]] + + def states_for_mapper_hierarchy(self, mapper, isdelete, listonly): + checktup = (isdelete, listonly) + for mapper in mapper.base_mapper.self_and_descendants: + for state in self.mappers[mapper]: + if self.states[state] == checktup: + yield state + + def _generate_actions(self): + """Generate the full, unsorted collection of PostSortRecs as + well as dependency pairs for this UOWTransaction. + + """ + # execute presort_actions, until all states + # have been processed. a presort_action might + # add new states to the uow. + while True: + ret = False + for action in list(self.presort_actions.values()): + if action.execute(self): + ret = True + if not ret: + break + + # see if the graph of mapper dependencies has cycles. + self.cycles = cycles = topological.find_cycles( + self.dependencies, list(self.postsort_actions.values()) + ) + + if cycles: + # if yes, break the per-mapper actions into + # per-state actions + convert = { + rec: set(rec.per_state_flush_actions(self)) for rec in cycles + } + + # rewrite the existing dependencies to point to + # the per-state actions for those per-mapper actions + # that were broken up. 
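+            # illustrative: an edge (SaveUpdateAll(A), ProcessAll(dep))
+            # where SaveUpdateAll(A) is in a cycle becomes one edge per
+            # per-state action, e.g. (SaveUpdateState(a1), ProcessAll(dep)),
+            # (SaveUpdateState(a2), ProcessAll(dep)), and so on.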
+ for edge in list(self.dependencies): + if ( + None in edge + or edge[0].disabled + or edge[1].disabled + or cycles.issuperset(edge) + ): + self.dependencies.remove(edge) + elif edge[0] in cycles: + self.dependencies.remove(edge) + for dep in convert[edge[0]]: + self.dependencies.add((dep, edge[1])) + elif edge[1] in cycles: + self.dependencies.remove(edge) + for dep in convert[edge[1]]: + self.dependencies.add((edge[0], dep)) + + return { + a for a in self.postsort_actions.values() if not a.disabled + }.difference(cycles) + + def execute(self) -> None: + postsort_actions = self._generate_actions() + + postsort_actions = sorted( + postsort_actions, + key=lambda item: item.sort_key, + ) + # sort = topological.sort(self.dependencies, postsort_actions) + # print "--------------" + # print "\ndependencies:", self.dependencies + # print "\ncycles:", self.cycles + # print "\nsort:", list(sort) + # print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions) + + # execute + if self.cycles: + for subset in topological.sort_as_subsets( + self.dependencies, postsort_actions + ): + set_ = set(subset) + while set_: + n = set_.pop() + n.execute_aggregate(self, set_) + else: + for rec in topological.sort(self.dependencies, postsort_actions): + rec.execute(self) + + def finalize_flush_changes(self) -> None: + """Mark processed objects as clean / deleted after a successful + flush(). + + This method is called within the flush() method after the + execute() method has succeeded and the transaction has been committed. + + """ + if not self.states: + return + + states = set(self.states) + isdel = { + s for (s, (isdelete, listonly)) in self.states.items() if isdelete + } + other = states.difference(isdel) + if isdel: + self.session._remove_newly_deleted(isdel) + if other: + self.session._register_persistent(other) + + +class IterateMappersMixin: + __slots__ = () + + def _mappers(self, uow): + if self.fromparent: + return iter( + m + for m in self.dependency_processor.parent.self_and_descendants + if uow._mapper_for_dep[(m, self.dependency_processor)] + ) + else: + return self.dependency_processor.mapper.self_and_descendants + + +class Preprocess(IterateMappersMixin): + __slots__ = ( + "dependency_processor", + "fromparent", + "processed", + "setup_flush_actions", + ) + + def __init__(self, dependency_processor, fromparent): + self.dependency_processor = dependency_processor + self.fromparent = fromparent + self.processed = set() + self.setup_flush_actions = False + + def execute(self, uow): + delete_states = set() + save_states = set() + + for mapper in self._mappers(uow): + for state in uow.mappers[mapper].difference(self.processed): + (isdelete, listonly) = uow.states[state] + if not listonly: + if isdelete: + delete_states.add(state) + else: + save_states.add(state) + + if delete_states: + self.dependency_processor.presort_deletes(uow, delete_states) + self.processed.update(delete_states) + if save_states: + self.dependency_processor.presort_saves(uow, save_states) + self.processed.update(save_states) + + if delete_states or save_states: + if not self.setup_flush_actions and ( + self.dependency_processor.prop_has_changes( + uow, delete_states, True + ) + or self.dependency_processor.prop_has_changes( + uow, save_states, False + ) + ): + self.dependency_processor.per_property_flush_actions(uow) + self.setup_flush_actions = True + return True + else: + return False + + +class PostSortRec: + __slots__ = ("disabled",) + + def __new__(cls, uow, *args): + key = (cls,) + args + if key in uow.postsort_actions: + 
return uow.postsort_actions[key] + else: + uow.postsort_actions[key] = ret = object.__new__(cls) + ret.disabled = False + return ret + + def execute_aggregate(self, uow, recs): + self.execute(uow) + + +class ProcessAll(IterateMappersMixin, PostSortRec): + __slots__ = "dependency_processor", "isdelete", "fromparent", "sort_key" + + def __init__(self, uow, dependency_processor, isdelete, fromparent): + self.dependency_processor = dependency_processor + self.sort_key = ( + "ProcessAll", + self.dependency_processor.sort_key, + isdelete, + ) + self.isdelete = isdelete + self.fromparent = fromparent + uow.deps[dependency_processor.parent.base_mapper].add( + dependency_processor + ) + + def execute(self, uow): + states = self._elements(uow) + if self.isdelete: + self.dependency_processor.process_deletes(uow, states) + else: + self.dependency_processor.process_saves(uow, states) + + def per_state_flush_actions(self, uow): + # this is handled by SaveUpdateAll and DeleteAll, + # since a ProcessAll should unconditionally be pulled + # into per-state if either the parent/child mappers + # are part of a cycle + return iter([]) + + def __repr__(self): + return "%s(%s, isdelete=%s)" % ( + self.__class__.__name__, + self.dependency_processor, + self.isdelete, + ) + + def _elements(self, uow): + for mapper in self._mappers(uow): + for state in uow.mappers[mapper]: + (isdelete, listonly) = uow.states[state] + if isdelete == self.isdelete and not listonly: + yield state + + +class PostUpdateAll(PostSortRec): + __slots__ = "mapper", "isdelete", "sort_key" + + def __init__(self, uow, mapper, isdelete): + self.mapper = mapper + self.isdelete = isdelete + self.sort_key = ("PostUpdateAll", mapper._sort_key, isdelete) + + @util.preload_module("sqlalchemy.orm.persistence") + def execute(self, uow): + persistence = util.preloaded.orm_persistence + states, cols = uow.post_update_states[self.mapper] + states = [s for s in states if uow.states[s][0] == self.isdelete] + + persistence.post_update(self.mapper, states, uow, cols) + + +class SaveUpdateAll(PostSortRec): + __slots__ = ("mapper", "sort_key") + + def __init__(self, uow, mapper): + self.mapper = mapper + self.sort_key = ("SaveUpdateAll", mapper._sort_key) + assert mapper is mapper.base_mapper + + @util.preload_module("sqlalchemy.orm.persistence") + def execute(self, uow): + util.preloaded.orm_persistence.save_obj( + self.mapper, + uow.states_for_mapper_hierarchy(self.mapper, False, False), + uow, + ) + + def per_state_flush_actions(self, uow): + states = list( + uow.states_for_mapper_hierarchy(self.mapper, False, False) + ) + base_mapper = self.mapper.base_mapper + delete_all = DeleteAll(uow, base_mapper) + for state in states: + # keep saves before deletes - + # this ensures 'row switch' operations work + action = SaveUpdateState(uow, state) + uow.dependencies.add((action, delete_all)) + yield action + + for dep in uow.deps[self.mapper]: + states_for_prop = uow.filter_states_for_dep(dep, states) + dep.per_state_flush_actions(uow, states_for_prop, False) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, self.mapper) + + +class DeleteAll(PostSortRec): + __slots__ = ("mapper", "sort_key") + + def __init__(self, uow, mapper): + self.mapper = mapper + self.sort_key = ("DeleteAll", mapper._sort_key) + assert mapper is mapper.base_mapper + + @util.preload_module("sqlalchemy.orm.persistence") + def execute(self, uow): + util.preloaded.orm_persistence.delete_obj( + self.mapper, + uow.states_for_mapper_hierarchy(self.mapper, True, False), + uow, + ) + 
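+    # if a dependency cycle was detected, DeleteAll is broken out below
+    # into one DeleteState per instance; those deletes are ordered after
+    # the corresponding SaveUpdateAll so that "row switch" operations
+    # continue to work (see per_state_flush_actions)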
+ def per_state_flush_actions(self, uow): + states = list( + uow.states_for_mapper_hierarchy(self.mapper, True, False) + ) + base_mapper = self.mapper.base_mapper + save_all = SaveUpdateAll(uow, base_mapper) + for state in states: + # keep saves before deletes - + # this ensures 'row switch' operations work + action = DeleteState(uow, state) + uow.dependencies.add((save_all, action)) + yield action + + for dep in uow.deps[self.mapper]: + states_for_prop = uow.filter_states_for_dep(dep, states) + dep.per_state_flush_actions(uow, states_for_prop, True) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, self.mapper) + + +class ProcessState(PostSortRec): + __slots__ = "dependency_processor", "isdelete", "state", "sort_key" + + def __init__(self, uow, dependency_processor, isdelete, state): + self.dependency_processor = dependency_processor + self.sort_key = ("ProcessState", dependency_processor.sort_key) + self.isdelete = isdelete + self.state = state + + def execute_aggregate(self, uow, recs): + cls_ = self.__class__ + dependency_processor = self.dependency_processor + isdelete = self.isdelete + our_recs = [ + r + for r in recs + if r.__class__ is cls_ + and r.dependency_processor is dependency_processor + and r.isdelete is isdelete + ] + recs.difference_update(our_recs) + states = [self.state] + [r.state for r in our_recs] + if isdelete: + dependency_processor.process_deletes(uow, states) + else: + dependency_processor.process_saves(uow, states) + + def __repr__(self): + return "%s(%s, %s, delete=%s)" % ( + self.__class__.__name__, + self.dependency_processor, + orm_util.state_str(self.state), + self.isdelete, + ) + + +class SaveUpdateState(PostSortRec): + __slots__ = "state", "mapper", "sort_key" + + def __init__(self, uow, state): + self.state = state + self.mapper = state.mapper.base_mapper + self.sort_key = ("ProcessState", self.mapper._sort_key) + + @util.preload_module("sqlalchemy.orm.persistence") + def execute_aggregate(self, uow, recs): + persistence = util.preloaded.orm_persistence + cls_ = self.__class__ + mapper = self.mapper + our_recs = [ + r for r in recs if r.__class__ is cls_ and r.mapper is mapper + ] + recs.difference_update(our_recs) + persistence.save_obj( + mapper, [self.state] + [r.state for r in our_recs], uow + ) + + def __repr__(self): + return "%s(%s)" % ( + self.__class__.__name__, + orm_util.state_str(self.state), + ) + + +class DeleteState(PostSortRec): + __slots__ = "state", "mapper", "sort_key" + + def __init__(self, uow, state): + self.state = state + self.mapper = state.mapper.base_mapper + self.sort_key = ("DeleteState", self.mapper._sort_key) + + @util.preload_module("sqlalchemy.orm.persistence") + def execute_aggregate(self, uow, recs): + persistence = util.preloaded.orm_persistence + cls_ = self.__class__ + mapper = self.mapper + our_recs = [ + r for r in recs if r.__class__ is cls_ and r.mapper is mapper + ] + recs.difference_update(our_recs) + states = [self.state] + [r.state for r in our_recs] + persistence.delete_obj( + mapper, [s for s in states if uow.states[s][0]], uow + ) + + def __repr__(self): + return "%s(%s)" % ( + self.__class__.__name__, + orm_util.state_str(self.state), + ) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/util.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/util.py new file mode 100644 index 00000000..2b4ac3c9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/util.py @@ -0,0 +1,2423 @@ +# orm/util.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and 
contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +from __future__ import annotations + +import enum +import functools +import re +import types +import typing +from typing import AbstractSet +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import FrozenSet +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Match +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import weakref + +from . import attributes # noqa +from . import exc +from ._typing import _O +from ._typing import insp_is_aliased_class +from ._typing import insp_is_mapper +from ._typing import prop_is_relationship +from .base import _class_to_mapper as _class_to_mapper +from .base import _MappedAnnotationBase +from .base import _never_set as _never_set # noqa: F401 +from .base import _none_set as _none_set # noqa: F401 +from .base import attribute_str as attribute_str # noqa: F401 +from .base import class_mapper as class_mapper +from .base import DynamicMapped +from .base import InspectionAttr as InspectionAttr +from .base import instance_str as instance_str # noqa: F401 +from .base import Mapped +from .base import object_mapper as object_mapper +from .base import object_state as object_state # noqa: F401 +from .base import opt_manager_of_class +from .base import ORMDescriptor +from .base import state_attribute_str as state_attribute_str # noqa: F401 +from .base import state_class_str as state_class_str # noqa: F401 +from .base import state_str as state_str # noqa: F401 +from .base import WriteOnlyMapped +from .interfaces import CriteriaOption +from .interfaces import MapperProperty as MapperProperty +from .interfaces import ORMColumnsClauseRole +from .interfaces import ORMEntityColumnsClauseRole +from .interfaces import ORMFromClauseRole +from .path_registry import PathRegistry as PathRegistry +from .. import event +from .. import exc as sa_exc +from .. import inspection +from .. import sql +from .. 
import util +from ..engine.result import result_tuple +from ..sql import coercions +from ..sql import expression +from ..sql import lambdas +from ..sql import roles +from ..sql import util as sql_util +from ..sql import visitors +from ..sql._typing import is_selectable +from ..sql.annotation import SupportsCloneAnnotations +from ..sql.base import ColumnCollection +from ..sql.cache_key import HasCacheKey +from ..sql.cache_key import MemoizedHasCacheKey +from ..sql.elements import ColumnElement +from ..sql.elements import KeyedColumnElement +from ..sql.selectable import FromClause +from ..util.langhelpers import MemoizedSlots +from ..util.typing import de_stringify_annotation as _de_stringify_annotation +from ..util.typing import ( + de_stringify_union_elements as _de_stringify_union_elements, +) +from ..util.typing import eval_name_only as _eval_name_only +from ..util.typing import fixup_container_fwd_refs +from ..util.typing import is_origin_of_cls +from ..util.typing import Literal +from ..util.typing import Protocol +from ..util.typing import typing_get_origin + +if typing.TYPE_CHECKING: + from ._typing import _EntityType + from ._typing import _IdentityKeyType + from ._typing import _InternalEntityType + from ._typing import _ORMCOLEXPR + from .context import _MapperEntity + from .context import ORMCompileState + from .mapper import Mapper + from .path_registry import AbstractEntityRegistry + from .query import Query + from .relationships import RelationshipProperty + from ..engine import Row + from ..engine import RowMapping + from ..sql._typing import _CE + from ..sql._typing import _ColumnExpressionArgument + from ..sql._typing import _EquivalentColumnMap + from ..sql._typing import _FromClauseArgument + from ..sql._typing import _OnClauseArgument + from ..sql._typing import _PropagateAttrsType + from ..sql.annotation import _SA + from ..sql.base import ReadOnlyColumnCollection + from ..sql.elements import BindParameter + from ..sql.selectable import _ColumnsClauseElement + from ..sql.selectable import Select + from ..sql.selectable import Selectable + from ..sql.visitors import anon_map + from ..util.typing import _AnnotationScanType + from ..util.typing import ArgsTypeProcotol + +_T = TypeVar("_T", bound=Any) + +all_cascades = frozenset( + ( + "delete", + "delete-orphan", + "all", + "merge", + "expunge", + "save-update", + "refresh-expire", + "none", + ) +) + + +_de_stringify_partial = functools.partial( + functools.partial, + locals_=util.immutabledict( + { + "Mapped": Mapped, + "WriteOnlyMapped": WriteOnlyMapped, + "DynamicMapped": DynamicMapped, + } + ), +) + +# partial is practically useless as we have to write out the whole +# function and maintain the signature anyway + + +class _DeStringifyAnnotation(Protocol): + def __call__( + self, + cls: Type[Any], + annotation: _AnnotationScanType, + originating_module: str, + *, + str_cleanup_fn: Optional[Callable[[str, str], str]] = None, + include_generic: bool = False, + ) -> Type[Any]: ... + + +de_stringify_annotation = cast( + _DeStringifyAnnotation, _de_stringify_partial(_de_stringify_annotation) +) + + +class _DeStringifyUnionElements(Protocol): + def __call__( + self, + cls: Type[Any], + annotation: ArgsTypeProcotol, + originating_module: str, + *, + str_cleanup_fn: Optional[Callable[[str, str], str]] = None, + ) -> Type[Any]: ... 
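+# A self-contained sketch (hypothetical names, illustrative only) of the
+# typing pattern used above and below: ``functools.partial`` erases the
+# keyword signature for type checkers, so the partial is cast() to a
+# typing.Protocol that restates the full ``__call__`` signature::
+#
+#     import functools
+#     from typing import Protocol, cast
+#
+#     def _greet(name: str, *, excited: bool = False) -> str:
+#         return "hello %s%s" % (name, "!" if excited else "")
+#
+#     class _Greeter(Protocol):
+#         def __call__(self, name: str, *, excited: bool = False) -> str: ...
+#
+#     greet = cast(_Greeter, functools.partial(_greet))
+#     greet("world", excited=True)  # checker still sees the keyword arg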
+ + +de_stringify_union_elements = cast( + _DeStringifyUnionElements, + _de_stringify_partial(_de_stringify_union_elements), +) + + +class _EvalNameOnly(Protocol): + def __call__(self, name: str, module_name: str) -> Any: ... + + +eval_name_only = cast(_EvalNameOnly, _de_stringify_partial(_eval_name_only)) + + +class CascadeOptions(FrozenSet[str]): + """Keeps track of the options sent to + :paramref:`.relationship.cascade`""" + + _add_w_all_cascades = all_cascades.difference( + ["all", "none", "delete-orphan"] + ) + _allowed_cascades = all_cascades + + _viewonly_cascades = ["expunge", "all", "none", "refresh-expire", "merge"] + + __slots__ = ( + "save_update", + "delete", + "refresh_expire", + "merge", + "expunge", + "delete_orphan", + ) + + save_update: bool + delete: bool + refresh_expire: bool + merge: bool + expunge: bool + delete_orphan: bool + + def __new__( + cls, value_list: Optional[Union[Iterable[str], str]] + ) -> CascadeOptions: + if isinstance(value_list, str) or value_list is None: + return cls.from_string(value_list) # type: ignore + values = set(value_list) + if values.difference(cls._allowed_cascades): + raise sa_exc.ArgumentError( + "Invalid cascade option(s): %s" + % ", ".join( + [ + repr(x) + for x in sorted( + values.difference(cls._allowed_cascades) + ) + ] + ) + ) + + if "all" in values: + values.update(cls._add_w_all_cascades) + if "none" in values: + values.clear() + values.discard("all") + + self = super().__new__(cls, values) + self.save_update = "save-update" in values + self.delete = "delete" in values + self.refresh_expire = "refresh-expire" in values + self.merge = "merge" in values + self.expunge = "expunge" in values + self.delete_orphan = "delete-orphan" in values + + if self.delete_orphan and not self.delete: + util.warn("The 'delete-orphan' cascade option requires 'delete'.") + return self + + def __repr__(self): + return "CascadeOptions(%r)" % (",".join([x for x in sorted(self)])) + + @classmethod + def from_string(cls, arg): + values = [c for c in re.split(r"\s*,\s*", arg or "") if c] + return cls(values) + + +def _validator_events(desc, key, validator, include_removes, include_backrefs): + """Runs a validation method on an attribute value to be set or + appended. 
+ """ + + if not include_backrefs: + + def detect_is_backref(state, initiator): + impl = state.manager[key].impl + return initiator.impl is not impl + + if include_removes: + + def append(state, value, initiator): + if initiator.op is not attributes.OP_BULK_REPLACE and ( + include_backrefs or not detect_is_backref(state, initiator) + ): + return validator(state.obj(), key, value, False) + else: + return value + + def bulk_set(state, values, initiator): + if include_backrefs or not detect_is_backref(state, initiator): + obj = state.obj() + values[:] = [ + validator(obj, key, value, False) for value in values + ] + + def set_(state, value, oldvalue, initiator): + if include_backrefs or not detect_is_backref(state, initiator): + return validator(state.obj(), key, value, False) + else: + return value + + def remove(state, value, initiator): + if include_backrefs or not detect_is_backref(state, initiator): + validator(state.obj(), key, value, True) + + else: + + def append(state, value, initiator): + if initiator.op is not attributes.OP_BULK_REPLACE and ( + include_backrefs or not detect_is_backref(state, initiator) + ): + return validator(state.obj(), key, value) + else: + return value + + def bulk_set(state, values, initiator): + if include_backrefs or not detect_is_backref(state, initiator): + obj = state.obj() + values[:] = [validator(obj, key, value) for value in values] + + def set_(state, value, oldvalue, initiator): + if include_backrefs or not detect_is_backref(state, initiator): + return validator(state.obj(), key, value) + else: + return value + + event.listen(desc, "append", append, raw=True, retval=True) + event.listen(desc, "bulk_replace", bulk_set, raw=True) + event.listen(desc, "set", set_, raw=True, retval=True) + if include_removes: + event.listen(desc, "remove", remove, raw=True, retval=True) + + +def polymorphic_union( + table_map, typecolname, aliasname="p_union", cast_nulls=True +): + """Create a ``UNION`` statement used by a polymorphic mapper. + + See :ref:`concrete_inheritance` for an example of how + this is used. + + :param table_map: mapping of polymorphic identities to + :class:`_schema.Table` objects. + :param typecolname: string name of a "discriminator" column, which will be + derived from the query, producing the polymorphic identity for + each row. If ``None``, no polymorphic discriminator is generated. + :param aliasname: name of the :func:`~sqlalchemy.sql.expression.alias()` + construct generated. + :param cast_nulls: if True, non-existent columns, which are represented + as labeled NULLs, will be passed into CAST. This is a legacy behavior + that is problematic on some backends such as Oracle - in which case it + can be set to False. 
+ + """ + + colnames: util.OrderedSet[str] = util.OrderedSet() + colnamemaps = {} + types = {} + for key in table_map: + table = table_map[key] + + table = coercions.expect( + roles.StrictFromClauseRole, table, allow_select=True + ) + table_map[key] = table + + m = {} + for c in table.c: + if c.key == typecolname: + raise sa_exc.InvalidRequestError( + "Polymorphic union can't use '%s' as the discriminator " + "column due to mapped column %r; please apply the " + "'typecolname' " + "argument; this is available on " + "ConcreteBase as '_concrete_discriminator_name'" + % (typecolname, c) + ) + colnames.add(c.key) + m[c.key] = c + types[c.key] = c.type + colnamemaps[table] = m + + def col(name, table): + try: + return colnamemaps[table][name] + except KeyError: + if cast_nulls: + return sql.cast(sql.null(), types[name]).label(name) + else: + return sql.type_coerce(sql.null(), types[name]).label(name) + + result = [] + for type_, table in table_map.items(): + if typecolname is not None: + result.append( + sql.select( + *( + [col(name, table) for name in colnames] + + [ + sql.literal_column( + sql_util._quote_ddl_expr(type_) + ).label(typecolname) + ] + ) + ).select_from(table) + ) + else: + result.append( + sql.select( + *[col(name, table) for name in colnames] + ).select_from(table) + ) + return sql.union_all(*result).alias(aliasname) + + +def identity_key( + class_: Optional[Type[_T]] = None, + ident: Union[Any, Tuple[Any, ...]] = None, + *, + instance: Optional[_T] = None, + row: Optional[Union[Row[Any], RowMapping]] = None, + identity_token: Optional[Any] = None, +) -> _IdentityKeyType[_T]: + r"""Generate "identity key" tuples, as are used as keys in the + :attr:`.Session.identity_map` dictionary. + + This function has several call styles: + + * ``identity_key(class, ident, identity_token=token)`` + + This form receives a mapped class and a primary key scalar or + tuple as an argument. + + E.g.:: + + >>> identity_key(MyClass, (1, 2)) + (, (1, 2), None) + + :param class: mapped class (must be a positional argument) + :param ident: primary key, may be a scalar or tuple argument. + :param identity_token: optional identity token + + .. versionadded:: 1.2 added identity_token + + + * ``identity_key(instance=instance)`` + + This form will produce the identity key for a given instance. The + instance need not be persistent, only that its primary key attributes + are populated (else the key will contain ``None`` for those missing + values). + + E.g.:: + + >>> instance = MyClass(1, 2) + >>> identity_key(instance=instance) + (, (1, 2), None) + + In this form, the given instance is ultimately run though + :meth:`_orm.Mapper.identity_key_from_instance`, which will have the + effect of performing a database check for the corresponding row + if the object is expired. + + :param instance: object instance (must be given as a keyword arg) + + * ``identity_key(class, row=row, identity_token=token)`` + + This form is similar to the class/tuple form, except is passed a + database result row as a :class:`.Row` or :class:`.RowMapping` object. + + E.g.:: + + >>> row = engine.execute(\ + text("select * from table where a=1 and b=2")\ + ).first() + >>> identity_key(MyClass, row=row) + (, (1, 2), None) + + :param class: mapped class (must be a positional argument) + :param row: :class:`.Row` row returned by a :class:`_engine.CursorResult` + (must be given as a keyword arg) + :param identity_token: optional identity token + + .. 
versionadded:: 1.2 added identity_token + + """ + if class_ is not None: + mapper = class_mapper(class_) + if row is None: + if ident is None: + raise sa_exc.ArgumentError("ident or row is required") + return mapper.identity_key_from_primary_key( + tuple(util.to_list(ident)), identity_token=identity_token + ) + else: + return mapper.identity_key_from_row( + row, identity_token=identity_token + ) + elif instance is not None: + mapper = object_mapper(instance) + return mapper.identity_key_from_instance(instance) + else: + raise sa_exc.ArgumentError("class or instance is required") + + +class _TraceAdaptRole(enum.Enum): + """Enumeration of all the use cases for ORMAdapter. + + ORMAdapter remains one of the most complicated aspects of the ORM, as it is + used for in-place adaption of column expressions to be applied to a SELECT, + replacing :class:`.Table` and other objects that are mapped to classes with + aliases of those tables in the case of joined eager loading, or in the case + of polymorphic loading as used with concrete mappings or other custom "with + polymorphic" parameters, with whole user-defined subqueries. The + enumerations provide an overview of all the use cases used by ORMAdapter, a + layer of formality as to the introduction of new ORMAdapter use cases (of + which none are anticipated), as well as a means to trace the origins of a + particular ORMAdapter within runtime debugging. + + SQLAlchemy 2.0 has greatly scaled back ORM features which relied heavily on + open-ended statement adaption, including the ``Query.with_polymorphic()`` + method and the ``Query.select_from_entity()`` methods, favoring + user-explicit aliasing schemes using the ``aliased()`` and + ``with_polymorphic()`` standalone constructs; these still use adaption, + however the adaption is applied in a narrower scope. + + """ + + # aliased() use that is used to adapt individual attributes at query + # construction time + ALIASED_INSP = enum.auto() + + # joinedload cases; typically adapt an ON clause of a relationship + # join + JOINEDLOAD_USER_DEFINED_ALIAS = enum.auto() + JOINEDLOAD_PATH_WITH_POLYMORPHIC = enum.auto() + JOINEDLOAD_MEMOIZED_ADAPTER = enum.auto() + + # polymorphic cases - these are complex ones that replace FROM + # clauses, replacing tables with subqueries + MAPPER_POLYMORPHIC_ADAPTER = enum.auto() + WITH_POLYMORPHIC_ADAPTER = enum.auto() + WITH_POLYMORPHIC_ADAPTER_RIGHT_JOIN = enum.auto() + DEPRECATED_JOIN_ADAPT_RIGHT_SIDE = enum.auto() + + # the from_statement() case, used only to adapt individual attributes + # from a given statement to local ORM attributes at result fetching + # time. assigned to ORMCompileState._from_obj_alias + ADAPT_FROM_STATEMENT = enum.auto() + + # the joinedload for queries that have LIMIT/OFFSET/DISTINCT case; + # the query is placed inside of a subquery with the LIMIT/OFFSET/etc., + # joinedloads are then placed on the outside. + # assigned to ORMCompileState.compound_eager_adapter + COMPOUND_EAGER_STATEMENT = enum.auto() + + # the legacy Query._set_select_from() case. + # this is needed for Query's set operations (i.e. UNION, etc. ) + # as well as "legacy from_self()", which while removed from 2.0 as + # public API, is used for the Query.count() method. 
this one + # still does full statement traversal + # assigned to ORMCompileState._from_obj_alias + LEGACY_SELECT_FROM_ALIAS = enum.auto() + + +class ORMStatementAdapter(sql_util.ColumnAdapter): + """ColumnAdapter which includes a role attribute.""" + + __slots__ = ("role",) + + def __init__( + self, + role: _TraceAdaptRole, + selectable: Selectable, + *, + equivalents: Optional[_EquivalentColumnMap] = None, + adapt_required: bool = False, + allow_label_resolve: bool = True, + anonymize_labels: bool = False, + adapt_on_names: bool = False, + adapt_from_selectables: Optional[AbstractSet[FromClause]] = None, + ): + self.role = role + super().__init__( + selectable, + equivalents=equivalents, + adapt_required=adapt_required, + allow_label_resolve=allow_label_resolve, + anonymize_labels=anonymize_labels, + adapt_on_names=adapt_on_names, + adapt_from_selectables=adapt_from_selectables, + ) + + +class ORMAdapter(sql_util.ColumnAdapter): + """ColumnAdapter subclass which excludes adaptation of entities from + non-matching mappers. + + """ + + __slots__ = ("role", "mapper", "is_aliased_class", "aliased_insp") + + is_aliased_class: bool + aliased_insp: Optional[AliasedInsp[Any]] + + def __init__( + self, + role: _TraceAdaptRole, + entity: _InternalEntityType[Any], + *, + equivalents: Optional[_EquivalentColumnMap] = None, + adapt_required: bool = False, + allow_label_resolve: bool = True, + anonymize_labels: bool = False, + selectable: Optional[Selectable] = None, + limit_on_entity: bool = True, + adapt_on_names: bool = False, + adapt_from_selectables: Optional[AbstractSet[FromClause]] = None, + ): + self.role = role + self.mapper = entity.mapper + if selectable is None: + selectable = entity.selectable + if insp_is_aliased_class(entity): + self.is_aliased_class = True + self.aliased_insp = entity + else: + self.is_aliased_class = False + self.aliased_insp = None + + super().__init__( + selectable, + equivalents, + adapt_required=adapt_required, + allow_label_resolve=allow_label_resolve, + anonymize_labels=anonymize_labels, + include_fn=self._include_fn if limit_on_entity else None, + adapt_on_names=adapt_on_names, + adapt_from_selectables=adapt_from_selectables, + ) + + def _include_fn(self, elem): + entity = elem._annotations.get("parentmapper", None) + + return not entity or entity.isa(self.mapper) or self.mapper.isa(entity) + + +class AliasedClass( + inspection.Inspectable["AliasedInsp[_O]"], ORMColumnsClauseRole[_O] +): + r"""Represents an "aliased" form of a mapped class for usage with Query. + + The ORM equivalent of a :func:`~sqlalchemy.sql.expression.alias` + construct, this object mimics the mapped class using a + ``__getattr__`` scheme and maintains a reference to a + real :class:`~sqlalchemy.sql.expression.Alias` object. + + A primary purpose of :class:`.AliasedClass` is to serve as an alternate + within a SQL statement generated by the ORM, such that an existing + mapped entity can be used in multiple contexts. A simple example:: + + # find all pairs of users with the same name + user_alias = aliased(User) + session.query(User, user_alias).\ + join((user_alias, User.id > user_alias.id)).\ + filter(User.name == user_alias.name) + + :class:`.AliasedClass` is also capable of mapping an existing mapped + class to an entirely new selectable, provided this selectable is column- + compatible with the existing mapped selectable, and it can also be + configured in a mapping as the target of a :func:`_orm.relationship`. + See the links below for examples. 
+ + The :class:`.AliasedClass` object is constructed typically using the + :func:`_orm.aliased` function. It also is produced with additional + configuration when using the :func:`_orm.with_polymorphic` function. + + The resulting object is an instance of :class:`.AliasedClass`. + This object implements an attribute scheme which produces the + same attribute and method interface as the original mapped + class, allowing :class:`.AliasedClass` to be compatible + with any attribute technique which works on the original class, + including hybrid attributes (see :ref:`hybrids_toplevel`). + + The :class:`.AliasedClass` can be inspected for its underlying + :class:`_orm.Mapper`, aliased selectable, and other information + using :func:`_sa.inspect`:: + + from sqlalchemy import inspect + my_alias = aliased(MyClass) + insp = inspect(my_alias) + + The resulting inspection object is an instance of :class:`.AliasedInsp`. + + + .. seealso:: + + :func:`.aliased` + + :func:`.with_polymorphic` + + :ref:`relationship_aliased_class` + + :ref:`relationship_to_window_function` + + + """ + + __name__: str + + def __init__( + self, + mapped_class_or_ac: _EntityType[_O], + alias: Optional[FromClause] = None, + name: Optional[str] = None, + flat: bool = False, + adapt_on_names: bool = False, + with_polymorphic_mappers: Optional[Sequence[Mapper[Any]]] = None, + with_polymorphic_discriminator: Optional[ColumnElement[Any]] = None, + base_alias: Optional[AliasedInsp[Any]] = None, + use_mapper_path: bool = False, + represents_outer_join: bool = False, + ): + insp = cast( + "_InternalEntityType[_O]", inspection.inspect(mapped_class_or_ac) + ) + mapper = insp.mapper + + nest_adapters = False + + if alias is None: + if insp.is_aliased_class and insp.selectable._is_subquery: + alias = insp.selectable.alias() + else: + alias = ( + mapper._with_polymorphic_selectable._anonymous_fromclause( + name=name, + flat=flat, + ) + ) + elif insp.is_aliased_class: + nest_adapters = True + + assert alias is not None + self._aliased_insp = AliasedInsp( + self, + insp, + alias, + name, + ( + with_polymorphic_mappers + if with_polymorphic_mappers + else mapper.with_polymorphic_mappers + ), + ( + with_polymorphic_discriminator + if with_polymorphic_discriminator is not None + else mapper.polymorphic_on + ), + base_alias, + use_mapper_path, + adapt_on_names, + represents_outer_join, + nest_adapters, + ) + + self.__name__ = f"aliased({mapper.class_.__name__})" + + @classmethod + def _reconstitute_from_aliased_insp( + cls, aliased_insp: AliasedInsp[_O] + ) -> AliasedClass[_O]: + obj = cls.__new__(cls) + obj.__name__ = f"aliased({aliased_insp.mapper.class_.__name__})" + obj._aliased_insp = aliased_insp + + if aliased_insp._is_with_polymorphic: + for sub_aliased_insp in aliased_insp._with_polymorphic_entities: + if sub_aliased_insp is not aliased_insp: + ent = AliasedClass._reconstitute_from_aliased_insp( + sub_aliased_insp + ) + setattr(obj, sub_aliased_insp.class_.__name__, ent) + + return obj + + def __getattr__(self, key: str) -> Any: + try: + _aliased_insp = self.__dict__["_aliased_insp"] + except KeyError: + raise AttributeError() + else: + target = _aliased_insp._target + # maintain all getattr mechanics + attr = getattr(target, key) + + # attribute is a method, that will be invoked against a + # "self"; so just return a new method with the same function and + # new self + if hasattr(attr, "__call__") and hasattr(attr, "__self__"): + return types.MethodType(attr.__func__, self) + + # attribute is a descriptor, that will be invoked against 
a
+        # "self"; so invoke the descriptor against this self
+        if hasattr(attr, "__get__"):
+            attr = attr.__get__(None, self)
+
+        # attributes within the QueryableAttribute system will want this
+        # to be invoked so the object can be adapted
+        if hasattr(attr, "adapt_to_entity"):
+            attr = attr.adapt_to_entity(_aliased_insp)
+            setattr(self, key, attr)
+
+        return attr
+
+    def _get_from_serialized(
+        self, key: str, mapped_class: _O, aliased_insp: AliasedInsp[_O]
+    ) -> Any:
+        # this method is only used in terms of the
+        # sqlalchemy.ext.serializer extension
+        attr = getattr(mapped_class, key)
+        if hasattr(attr, "__call__") and hasattr(attr, "__self__"):
+            return types.MethodType(attr.__func__, self)
+
+        # attribute is a descriptor, that will be invoked against a
+        # "self"; so invoke the descriptor against this self
+        if hasattr(attr, "__get__"):
+            attr = attr.__get__(None, self)
+
+        # attributes within the QueryableAttribute system will want this
+        # to be invoked so the object can be adapted
+        if hasattr(attr, "adapt_to_entity"):
+            aliased_insp._weak_entity = weakref.ref(self)
+            attr = attr.adapt_to_entity(aliased_insp)
+            setattr(self, key, attr)
+
+        return attr
+
+    def __repr__(self) -> str:
+        return "<AliasedClass at 0x%x; %s>" % (
+            id(self),
+            self._aliased_insp._target.__name__,
+        )
+
+    def __str__(self) -> str:
+        return str(self._aliased_insp)
+
+
+@inspection._self_inspects
+class AliasedInsp(
+    ORMEntityColumnsClauseRole[_O],
+    ORMFromClauseRole,
+    HasCacheKey,
+    InspectionAttr,
+    MemoizedSlots,
+    inspection.Inspectable["AliasedInsp[_O]"],
+    Generic[_O],
+):
+    """Provide an inspection interface for an
+    :class:`.AliasedClass` object.
+
+    The :class:`.AliasedInsp` object is returned
+    given an :class:`.AliasedClass` using the
+    :func:`_sa.inspect` function::
+
+        from sqlalchemy import inspect
+        from sqlalchemy.orm import aliased
+
+        my_alias = aliased(MyMappedClass)
+        insp = inspect(my_alias)
+
+    Attributes on :class:`.AliasedInsp`
+    include:
+
+    * ``entity`` - the :class:`.AliasedClass` represented.
+    * ``mapper`` - the :class:`_orm.Mapper` mapping the underlying class.
+    * ``selectable`` - the :class:`_expression.Alias`
+      construct which ultimately
+      represents an aliased :class:`_schema.Table` or
+      :class:`_expression.Select`
+      construct.
+    * ``name`` - the name of the alias. Also is used as the attribute
+      name when returned in a result tuple from :class:`_query.Query`.
+    * ``with_polymorphic_mappers`` - collection of :class:`_orm.Mapper`
+      objects
+      indicating all those mappers expressed in the select construct
+      for the :class:`.AliasedClass`.
+    * ``polymorphic_on`` - an alternate column or SQL expression which
+      will be used as the "discriminator" for a polymorphic load.
+
+    ..
seealso:: + + :ref:`inspection_toplevel` + + """ + + __slots__ = ( + "__weakref__", + "_weak_entity", + "mapper", + "selectable", + "name", + "_adapt_on_names", + "with_polymorphic_mappers", + "polymorphic_on", + "_use_mapper_path", + "_base_alias", + "represents_outer_join", + "persist_selectable", + "local_table", + "_is_with_polymorphic", + "_with_polymorphic_entities", + "_adapter", + "_target", + "__clause_element__", + "_memoized_values", + "_all_column_expressions", + "_nest_adapters", + ) + + _cache_key_traversal = [ + ("name", visitors.ExtendedInternalTraversal.dp_string), + ("_adapt_on_names", visitors.ExtendedInternalTraversal.dp_boolean), + ("_use_mapper_path", visitors.ExtendedInternalTraversal.dp_boolean), + ("_target", visitors.ExtendedInternalTraversal.dp_inspectable), + ("selectable", visitors.ExtendedInternalTraversal.dp_clauseelement), + ( + "with_polymorphic_mappers", + visitors.InternalTraversal.dp_has_cache_key_list, + ), + ("polymorphic_on", visitors.InternalTraversal.dp_clauseelement), + ] + + mapper: Mapper[_O] + selectable: FromClause + _adapter: ORMAdapter + with_polymorphic_mappers: Sequence[Mapper[Any]] + _with_polymorphic_entities: Sequence[AliasedInsp[Any]] + + _weak_entity: weakref.ref[AliasedClass[_O]] + """the AliasedClass that refers to this AliasedInsp""" + + _target: Union[Type[_O], AliasedClass[_O]] + """the thing referenced by the AliasedClass/AliasedInsp. + + In the vast majority of cases, this is the mapped class. However + it may also be another AliasedClass (alias of alias). + + """ + + def __init__( + self, + entity: AliasedClass[_O], + inspected: _InternalEntityType[_O], + selectable: FromClause, + name: Optional[str], + with_polymorphic_mappers: Optional[Sequence[Mapper[Any]]], + polymorphic_on: Optional[ColumnElement[Any]], + _base_alias: Optional[AliasedInsp[Any]], + _use_mapper_path: bool, + adapt_on_names: bool, + represents_outer_join: bool, + nest_adapters: bool, + ): + mapped_class_or_ac = inspected.entity + mapper = inspected.mapper + + self._weak_entity = weakref.ref(entity) + self.mapper = mapper + self.selectable = self.persist_selectable = self.local_table = ( + selectable + ) + self.name = name + self.polymorphic_on = polymorphic_on + self._base_alias = weakref.ref(_base_alias or self) + self._use_mapper_path = _use_mapper_path + self.represents_outer_join = represents_outer_join + self._nest_adapters = nest_adapters + + if with_polymorphic_mappers: + self._is_with_polymorphic = True + self.with_polymorphic_mappers = with_polymorphic_mappers + self._with_polymorphic_entities = [] + for poly in self.with_polymorphic_mappers: + if poly is not mapper: + ent = AliasedClass( + poly.class_, + selectable, + base_alias=self, + adapt_on_names=adapt_on_names, + use_mapper_path=_use_mapper_path, + ) + + setattr(self.entity, poly.class_.__name__, ent) + self._with_polymorphic_entities.append(ent._aliased_insp) + + else: + self._is_with_polymorphic = False + self.with_polymorphic_mappers = [mapper] + + self._adapter = ORMAdapter( + _TraceAdaptRole.ALIASED_INSP, + mapper, + selectable=selectable, + equivalents=mapper._equivalent_columns, + adapt_on_names=adapt_on_names, + anonymize_labels=True, + # make sure the adapter doesn't try to grab other tables that + # are not even the thing we are mapping, such as embedded + # selectables in subqueries or CTEs. 
See issue #6060 + adapt_from_selectables={ + m.selectable + for m in self.with_polymorphic_mappers + if not adapt_on_names + }, + limit_on_entity=False, + ) + + if nest_adapters: + # supports "aliased class of aliased class" use case + assert isinstance(inspected, AliasedInsp) + self._adapter = inspected._adapter.wrap(self._adapter) + + self._adapt_on_names = adapt_on_names + self._target = mapped_class_or_ac + + @classmethod + def _alias_factory( + cls, + element: Union[_EntityType[_O], FromClause], + alias: Optional[FromClause] = None, + name: Optional[str] = None, + flat: bool = False, + adapt_on_names: bool = False, + ) -> Union[AliasedClass[_O], FromClause]: + if isinstance(element, FromClause): + if adapt_on_names: + raise sa_exc.ArgumentError( + "adapt_on_names only applies to ORM elements" + ) + if name: + return element.alias(name=name, flat=flat) + else: + return coercions.expect( + roles.AnonymizedFromClauseRole, element, flat=flat + ) + else: + return AliasedClass( + element, + alias=alias, + flat=flat, + name=name, + adapt_on_names=adapt_on_names, + ) + + @classmethod + def _with_polymorphic_factory( + cls, + base: Union[Type[_O], Mapper[_O]], + classes: Union[Literal["*"], Iterable[_EntityType[Any]]], + selectable: Union[Literal[False, None], FromClause] = False, + flat: bool = False, + polymorphic_on: Optional[ColumnElement[Any]] = None, + aliased: bool = False, + innerjoin: bool = False, + adapt_on_names: bool = False, + name: Optional[str] = None, + _use_mapper_path: bool = False, + ) -> AliasedClass[_O]: + primary_mapper = _class_to_mapper(base) + + if selectable not in (None, False) and flat: + raise sa_exc.ArgumentError( + "the 'flat' and 'selectable' arguments cannot be passed " + "simultaneously to with_polymorphic()" + ) + + mappers, selectable = primary_mapper._with_polymorphic_args( + classes, selectable, innerjoin=innerjoin + ) + if aliased or flat: + assert selectable is not None + selectable = selectable._anonymous_fromclause(flat=flat) + + return AliasedClass( + base, + selectable, + name=name, + with_polymorphic_mappers=mappers, + adapt_on_names=adapt_on_names, + with_polymorphic_discriminator=polymorphic_on, + use_mapper_path=_use_mapper_path, + represents_outer_join=not innerjoin, + ) + + @property + def entity(self) -> AliasedClass[_O]: + # to eliminate reference cycles, the AliasedClass is held weakly. + # this produces some situations where the AliasedClass gets lost, + # particularly when one is created internally and only the AliasedInsp + # is passed around. + # to work around this case, we just generate a new one when we need + # it, as it is a simple class with very little initial state on it. 
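+        # a short sketch (``MyClass`` is a hypothetical mapped class) of
+        # the round trip this property supports; the AliasedClass below
+        # is rebuilt on demand when the weak reference has been cleared::
+        #
+        #     from sqlalchemy import inspect
+        #     from sqlalchemy.orm import aliased
+        #
+        #     insp = inspect(aliased(MyClass))
+        #     insp.entity  # AliasedClass, regenerated here if collected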
+ ent = self._weak_entity() + if ent is None: + ent = AliasedClass._reconstitute_from_aliased_insp(self) + self._weak_entity = weakref.ref(ent) + return ent + + is_aliased_class = True + "always returns True" + + def _memoized_method___clause_element__(self) -> FromClause: + return self.selectable._annotate( + { + "parentmapper": self.mapper, + "parententity": self, + "entity_namespace": self, + } + )._set_propagate_attrs( + {"compile_state_plugin": "orm", "plugin_subject": self} + ) + + @property + def entity_namespace(self) -> AliasedClass[_O]: + return self.entity + + @property + def class_(self) -> Type[_O]: + """Return the mapped class ultimately represented by this + :class:`.AliasedInsp`.""" + return self.mapper.class_ + + @property + def _path_registry(self) -> AbstractEntityRegistry: + if self._use_mapper_path: + return self.mapper._path_registry + else: + return PathRegistry.per_mapper(self) + + def __getstate__(self) -> Dict[str, Any]: + return { + "entity": self.entity, + "mapper": self.mapper, + "alias": self.selectable, + "name": self.name, + "adapt_on_names": self._adapt_on_names, + "with_polymorphic_mappers": self.with_polymorphic_mappers, + "with_polymorphic_discriminator": self.polymorphic_on, + "base_alias": self._base_alias(), + "use_mapper_path": self._use_mapper_path, + "represents_outer_join": self.represents_outer_join, + "nest_adapters": self._nest_adapters, + } + + def __setstate__(self, state: Dict[str, Any]) -> None: + self.__init__( # type: ignore + state["entity"], + state["mapper"], + state["alias"], + state["name"], + state["with_polymorphic_mappers"], + state["with_polymorphic_discriminator"], + state["base_alias"], + state["use_mapper_path"], + state["adapt_on_names"], + state["represents_outer_join"], + state["nest_adapters"], + ) + + def _merge_with(self, other: AliasedInsp[_O]) -> AliasedInsp[_O]: + # assert self._is_with_polymorphic + # assert other._is_with_polymorphic + + primary_mapper = other.mapper + + assert self.mapper is primary_mapper + + our_classes = util.to_set( + mp.class_ for mp in self.with_polymorphic_mappers + ) + new_classes = {mp.class_ for mp in other.with_polymorphic_mappers} + if our_classes == new_classes: + return other + else: + classes = our_classes.union(new_classes) + + mappers, selectable = primary_mapper._with_polymorphic_args( + classes, None, innerjoin=not other.represents_outer_join + ) + selectable = selectable._anonymous_fromclause(flat=True) + return AliasedClass( + primary_mapper, + selectable, + with_polymorphic_mappers=mappers, + with_polymorphic_discriminator=other.polymorphic_on, + use_mapper_path=other._use_mapper_path, + represents_outer_join=other.represents_outer_join, + )._aliased_insp + + def _adapt_element( + self, expr: _ORMCOLEXPR, key: Optional[str] = None + ) -> _ORMCOLEXPR: + assert isinstance(expr, ColumnElement) + d: Dict[str, Any] = { + "parententity": self, + "parentmapper": self.mapper, + } + if key: + d["proxy_key"] = key + + # IMO mypy should see this one also as returning the same type + # we put into it, but it's not + return ( + self._adapter.traverse(expr) + ._annotate(d) + ._set_propagate_attrs( + {"compile_state_plugin": "orm", "plugin_subject": self} + ) + ) + + if TYPE_CHECKING: + # establish compatibility with the _ORMAdapterProto protocol, + # which in turn is compatible with _CoreAdapterProto. + + def _orm_adapt_element( + self, + obj: _CE, + key: Optional[str] = None, + ) -> _CE: ... 
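+    # The TYPE_CHECKING/else split above and below is a common typing
+    # pattern; a minimal module-level sketch with hypothetical names::
+    #
+    #     from typing import TYPE_CHECKING, TypeVar
+    #
+    #     _X = TypeVar("_X")
+    #
+    #     def _impl(obj):  # the single runtime implementation
+    #         return obj
+    #
+    #     if TYPE_CHECKING:
+    #         # signature seen only by the type checker
+    #         def public(obj: _X) -> _X: ...
+    #     else:
+    #         public = _impl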
+
+    else:
+        _orm_adapt_element = _adapt_element
+
+    def _entity_for_mapper(self, mapper):
+        self_poly = self.with_polymorphic_mappers
+        if mapper in self_poly:
+            if mapper is self.mapper:
+                return self
+            else:
+                return getattr(
+                    self.entity, mapper.class_.__name__
+                )._aliased_insp
+        elif mapper.isa(self.mapper):
+            return self
+        else:
+            assert False, "mapper %s doesn't correspond to %s" % (mapper, self)
+
+    def _memoized_attr__get_clause(self):
+        onclause, replacemap = self.mapper._get_clause
+        return (
+            self._adapter.traverse(onclause),
+            {
+                self._adapter.traverse(col): param
+                for col, param in replacemap.items()
+            },
+        )
+
+    def _memoized_attr__memoized_values(self):
+        return {}
+
+    def _memoized_attr__all_column_expressions(self):
+        if self._is_with_polymorphic:
+            cols_plus_keys = self.mapper._columns_plus_keys(
+                [ent.mapper for ent in self._with_polymorphic_entities]
+            )
+        else:
+            cols_plus_keys = self.mapper._columns_plus_keys()
+
+        cols_plus_keys = [
+            (key, self._adapt_element(col)) for key, col in cols_plus_keys
+        ]
+
+        return ColumnCollection(cols_plus_keys)
+
+    def _memo(self, key, callable_, *args, **kw):
+        if key in self._memoized_values:
+            return self._memoized_values[key]
+        else:
+            self._memoized_values[key] = value = callable_(*args, **kw)
+            return value
+
+    def __repr__(self):
+        if self.with_polymorphic_mappers:
+            with_poly = "(%s)" % ", ".join(
+                mp.class_.__name__ for mp in self.with_polymorphic_mappers
+            )
+        else:
+            with_poly = ""
+        return "<AliasedInsp at 0x%x; %s%s>" % (
+            id(self),
+            self.class_.__name__,
+            with_poly,
+        )
+
+    def __str__(self):
+        if self._is_with_polymorphic:
+            return "with_polymorphic(%s, [%s])" % (
+                self._target.__name__,
+                ", ".join(
+                    mp.class_.__name__
+                    for mp in self.with_polymorphic_mappers
+                    if mp is not self.mapper
+                ),
+            )
+        else:
+            return "aliased(%s)" % (self._target.__name__,)
+
+
+class _WrapUserEntity:
+    """A wrapper used within the loader_criteria lambda caller so that
+    we can bypass declared_attr descriptors on unmapped mixins, which
+    normally emit a warning for such use.
+
+    might also be useful for other per-lambda instrumentations should
+    the need arise.
+
+    """
+
+    __slots__ = ("subject",)
+
+    def __init__(self, subject):
+        self.subject = subject
+
+    @util.preload_module("sqlalchemy.orm.decl_api")
+    def __getattribute__(self, name):
+        decl_api = util.preloaded.orm.decl_api
+
+        subject = object.__getattribute__(self, "subject")
+        if name in subject.__dict__ and isinstance(
+            subject.__dict__[name], decl_api.declared_attr
+        ):
+            return subject.__dict__[name].fget(subject)
+        else:
+            return getattr(subject, name)
+
+
+class LoaderCriteriaOption(CriteriaOption):
+    """Add additional WHERE criteria to the load for all occurrences of
+    a particular entity.
+
+    :class:`_orm.LoaderCriteriaOption` is invoked using the
+    :func:`_orm.with_loader_criteria` function; see that function for
+    details.
+
+    ..
versionadded:: 1.4 + + """ + + __slots__ = ( + "root_entity", + "entity", + "deferred_where_criteria", + "where_criteria", + "_where_crit_orig", + "include_aliases", + "propagate_to_loaders", + ) + + _traverse_internals = [ + ("root_entity", visitors.ExtendedInternalTraversal.dp_plain_obj), + ("entity", visitors.ExtendedInternalTraversal.dp_has_cache_key), + ("where_criteria", visitors.InternalTraversal.dp_clauseelement), + ("include_aliases", visitors.InternalTraversal.dp_boolean), + ("propagate_to_loaders", visitors.InternalTraversal.dp_boolean), + ] + + root_entity: Optional[Type[Any]] + entity: Optional[_InternalEntityType[Any]] + where_criteria: Union[ColumnElement[bool], lambdas.DeferredLambdaElement] + deferred_where_criteria: bool + include_aliases: bool + propagate_to_loaders: bool + + _where_crit_orig: Any + + def __init__( + self, + entity_or_base: _EntityType[Any], + where_criteria: Union[ + _ColumnExpressionArgument[bool], + Callable[[Any], _ColumnExpressionArgument[bool]], + ], + loader_only: bool = False, + include_aliases: bool = False, + propagate_to_loaders: bool = True, + track_closure_variables: bool = True, + ): + entity = cast( + "_InternalEntityType[Any]", + inspection.inspect(entity_or_base, False), + ) + if entity is None: + self.root_entity = cast("Type[Any]", entity_or_base) + self.entity = None + else: + self.root_entity = None + self.entity = entity + + self._where_crit_orig = where_criteria + if callable(where_criteria): + if self.root_entity is not None: + wrap_entity = self.root_entity + else: + assert entity is not None + wrap_entity = entity.entity + + self.deferred_where_criteria = True + self.where_criteria = lambdas.DeferredLambdaElement( + where_criteria, + roles.WhereHavingRole, + lambda_args=(_WrapUserEntity(wrap_entity),), + opts=lambdas.LambdaOptions( + track_closure_variables=track_closure_variables + ), + ) + else: + self.deferred_where_criteria = False + self.where_criteria = coercions.expect( + roles.WhereHavingRole, where_criteria + ) + + self.include_aliases = include_aliases + self.propagate_to_loaders = propagate_to_loaders + + @classmethod + def _unreduce( + cls, entity, where_criteria, include_aliases, propagate_to_loaders + ): + return LoaderCriteriaOption( + entity, + where_criteria, + include_aliases=include_aliases, + propagate_to_loaders=propagate_to_loaders, + ) + + def __reduce__(self): + return ( + LoaderCriteriaOption._unreduce, + ( + self.entity.class_ if self.entity else self.root_entity, + self._where_crit_orig, + self.include_aliases, + self.propagate_to_loaders, + ), + ) + + def _all_mappers(self) -> Iterator[Mapper[Any]]: + if self.entity: + yield from self.entity.mapper.self_and_descendants + else: + assert self.root_entity + stack = list(self.root_entity.__subclasses__()) + while stack: + subclass = stack.pop(0) + ent = cast( + "_InternalEntityType[Any]", + inspection.inspect(subclass, raiseerr=False), + ) + if ent: + yield from ent.mapper.self_and_descendants + else: + stack.extend(subclass.__subclasses__()) + + def _should_include(self, compile_state: ORMCompileState) -> bool: + if ( + compile_state.select_statement._annotations.get( + "for_loader_criteria", None + ) + is self + ): + return False + return True + + def _resolve_where_criteria( + self, ext_info: _InternalEntityType[Any] + ) -> ColumnElement[bool]: + if self.deferred_where_criteria: + crit = cast( + "ColumnElement[bool]", + self.where_criteria._resolve_with_args(ext_info.entity), + ) + else: + crit = self.where_criteria # type: ignore + assert 
isinstance(crit, ColumnElement) + return sql_util._deep_annotate( + crit, + {"for_loader_criteria": self}, + detect_subquery_cols=True, + ind_cols_on_fromclause=True, + ) + + def process_compile_state_replaced_entities( + self, + compile_state: ORMCompileState, + mapper_entities: Iterable[_MapperEntity], + ) -> None: + self.process_compile_state(compile_state) + + def process_compile_state(self, compile_state: ORMCompileState) -> None: + """Apply a modification to a given :class:`.CompileState`.""" + + # if options to limit the criteria to immediate query only, + # use compile_state.attributes instead + + self.get_global_criteria(compile_state.global_attributes) + + def get_global_criteria(self, attributes: Dict[Any, Any]) -> None: + for mp in self._all_mappers(): + load_criteria = attributes.setdefault( + ("additional_entity_criteria", mp), [] + ) + + load_criteria.append(self) + + +inspection._inspects(AliasedClass)(lambda target: target._aliased_insp) + + +@inspection._inspects(type) +def _inspect_mc( + class_: Type[_O], +) -> Optional[Mapper[_O]]: + try: + class_manager = opt_manager_of_class(class_) + if class_manager is None or not class_manager.is_mapped: + return None + mapper = class_manager.mapper + except exc.NO_STATE: + return None + else: + return mapper + + +GenericAlias = type(List[Any]) + + +@inspection._inspects(GenericAlias) +def _inspect_generic_alias( + class_: Type[_O], +) -> Optional[Mapper[_O]]: + origin = cast("Type[_O]", typing_get_origin(class_)) + return _inspect_mc(origin) + + +@inspection._self_inspects +class Bundle( + ORMColumnsClauseRole[_T], + SupportsCloneAnnotations, + MemoizedHasCacheKey, + inspection.Inspectable["Bundle[_T]"], + InspectionAttr, +): + """A grouping of SQL expressions that are returned by a :class:`.Query` + under one namespace. + + The :class:`.Bundle` essentially allows nesting of the tuple-based + results returned by a column-oriented :class:`_query.Query` object. + It also + is extensible via simple subclassing, where the primary capability + to override is that of how the set of expressions should be returned, + allowing post-processing as well as custom return types, without + involving ORM identity-mapped classes. + + .. seealso:: + + :ref:`bundles` + + + """ + + single_entity = False + """If True, queries for a single Bundle will be returned as a single + entity, rather than an element within a keyed tuple.""" + + is_clause_element = False + + is_mapper = False + + is_aliased_class = False + + is_bundle = True + + _propagate_attrs: _PropagateAttrsType = util.immutabledict() + + proxy_set = util.EMPTY_SET # type: ignore + + exprs: List[_ColumnsClauseElement] + + def __init__( + self, name: str, *exprs: _ColumnExpressionArgument[Any], **kw: Any + ): + r"""Construct a new :class:`.Bundle`. + + e.g.:: + + bn = Bundle("mybundle", MyClass.x, MyClass.y) + + for row in session.query(bn).filter( + bn.c.x == 5).filter(bn.c.y == 4): + print(row.mybundle.x, row.mybundle.y) + + :param name: name of the bundle. + :param \*exprs: columns or SQL expressions comprising the bundle. + :param single_entity=False: if True, rows for this :class:`.Bundle` + can be returned as a "single entity" outside of any enclosing tuple + in the same manner as a mapped entity. 
+ + """ + self.name = self._label = name + coerced_exprs = [ + coercions.expect( + roles.ColumnsClauseRole, expr, apply_propagate_attrs=self + ) + for expr in exprs + ] + self.exprs = coerced_exprs + + self.c = self.columns = ColumnCollection( + (getattr(col, "key", col._label), col) + for col in [e._annotations.get("bundle", e) for e in coerced_exprs] + ).as_readonly() + self.single_entity = kw.pop("single_entity", self.single_entity) + + def _gen_cache_key( + self, anon_map: anon_map, bindparams: List[BindParameter[Any]] + ) -> Tuple[Any, ...]: + return (self.__class__, self.name, self.single_entity) + tuple( + [expr._gen_cache_key(anon_map, bindparams) for expr in self.exprs] + ) + + @property + def mapper(self) -> Optional[Mapper[Any]]: + mp: Optional[Mapper[Any]] = self.exprs[0]._annotations.get( + "parentmapper", None + ) + return mp + + @property + def entity(self) -> Optional[_InternalEntityType[Any]]: + ie: Optional[_InternalEntityType[Any]] = self.exprs[ + 0 + ]._annotations.get("parententity", None) + return ie + + @property + def entity_namespace( + self, + ) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]: + return self.c + + columns: ReadOnlyColumnCollection[str, KeyedColumnElement[Any]] + + """A namespace of SQL expressions referred to by this :class:`.Bundle`. + + e.g.:: + + bn = Bundle("mybundle", MyClass.x, MyClass.y) + + q = sess.query(bn).filter(bn.c.x == 5) + + Nesting of bundles is also supported:: + + b1 = Bundle("b1", + Bundle('b2', MyClass.a, MyClass.b), + Bundle('b3', MyClass.x, MyClass.y) + ) + + q = sess.query(b1).filter( + b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9) + + .. seealso:: + + :attr:`.Bundle.c` + + """ + + c: ReadOnlyColumnCollection[str, KeyedColumnElement[Any]] + """An alias for :attr:`.Bundle.columns`.""" + + def _clone(self, **kw): + cloned = self.__class__.__new__(self.__class__) + cloned.__dict__.update(self.__dict__) + return cloned + + def __clause_element__(self): + # ensure existing entity_namespace remains + annotations = {"bundle": self, "entity_namespace": self} + annotations.update(self._annotations) + + plugin_subject = self.exprs[0]._propagate_attrs.get( + "plugin_subject", self.entity + ) + return ( + expression.ClauseList( + _literal_as_text_role=roles.ColumnsClauseRole, + group=False, + *[e._annotations.get("bundle", e) for e in self.exprs], + ) + ._annotate(annotations) + ._set_propagate_attrs( + # the Bundle *must* use the orm plugin no matter what. the + # subject can be None but it's much better if it's not. + { + "compile_state_plugin": "orm", + "plugin_subject": plugin_subject, + } + ) + ) + + @property + def clauses(self): + return self.__clause_element__().clauses + + def label(self, name): + """Provide a copy of this :class:`.Bundle` passing a new label.""" + + cloned = self._clone() + cloned.name = name + return cloned + + def create_row_processor( + self, + query: Select[Any], + procs: Sequence[Callable[[Row[Any]], Any]], + labels: Sequence[str], + ) -> Callable[[Row[Any]], Any]: + """Produce the "row processing" function for this :class:`.Bundle`. + + May be overridden by subclasses to provide custom behaviors when + results are fetched. The method is passed the statement object and a + set of "row processor" functions at query execution time; these + processor functions when given a result row will return the individual + attribute value, which can then be adapted into any kind of return data + structure. 
+ + The example below illustrates replacing the usual :class:`.Row` + return structure with a straight Python dictionary:: + + from sqlalchemy.orm import Bundle + + class DictBundle(Bundle): + def create_row_processor(self, query, procs, labels): + 'Override create_row_processor to return values as + dictionaries' + + def proc(row): + return dict( + zip(labels, (proc(row) for proc in procs)) + ) + return proc + + A result from the above :class:`_orm.Bundle` will return dictionary + values:: + + bn = DictBundle('mybundle', MyClass.data1, MyClass.data2) + for row in session.execute(select(bn)).where(bn.c.data1 == 'd1'): + print(row.mybundle['data1'], row.mybundle['data2']) + + """ + keyed_tuple = result_tuple(labels, [() for l in labels]) + + def proc(row: Row[Any]) -> Any: + return keyed_tuple([proc(row) for proc in procs]) + + return proc + + +def _orm_annotate(element: _SA, exclude: Optional[Any] = None) -> _SA: + """Deep copy the given ClauseElement, annotating each element with the + "_orm_adapt" flag. + + Elements within the exclude collection will be cloned but not annotated. + + """ + return sql_util._deep_annotate(element, {"_orm_adapt": True}, exclude) + + +def _orm_deannotate(element: _SA) -> _SA: + """Remove annotations that link a column to a particular mapping. + + Note this doesn't affect "remote" and "foreign" annotations + passed by the :func:`_orm.foreign` and :func:`_orm.remote` + annotators. + + """ + + return sql_util._deep_deannotate( + element, values=("_orm_adapt", "parententity") + ) + + +def _orm_full_deannotate(element: _SA) -> _SA: + return sql_util._deep_deannotate(element) + + +class _ORMJoin(expression.Join): + """Extend Join to support ORM constructs as input.""" + + __visit_name__ = expression.Join.__visit_name__ + + inherit_cache = True + + def __init__( + self, + left: _FromClauseArgument, + right: _FromClauseArgument, + onclause: Optional[_OnClauseArgument] = None, + isouter: bool = False, + full: bool = False, + _left_memo: Optional[Any] = None, + _right_memo: Optional[Any] = None, + _extra_criteria: Tuple[ColumnElement[bool], ...] 
= (), + ): + left_info = cast( + "Union[FromClause, _InternalEntityType[Any]]", + inspection.inspect(left), + ) + + right_info = cast( + "Union[FromClause, _InternalEntityType[Any]]", + inspection.inspect(right), + ) + adapt_to = right_info.selectable + + # used by joined eager loader + self._left_memo = _left_memo + self._right_memo = _right_memo + + if isinstance(onclause, attributes.QueryableAttribute): + if TYPE_CHECKING: + assert isinstance( + onclause.comparator, RelationshipProperty.Comparator + ) + on_selectable = onclause.comparator._source_selectable() + prop = onclause.property + _extra_criteria += onclause._extra_criteria + elif isinstance(onclause, MapperProperty): + # used internally by joined eager loader...possibly not ideal + prop = onclause + on_selectable = prop.parent.selectable + else: + prop = None + on_selectable = None + + left_selectable = left_info.selectable + if prop: + adapt_from: Optional[FromClause] + if sql_util.clause_is_present(on_selectable, left_selectable): + adapt_from = on_selectable + else: + assert isinstance(left_selectable, FromClause) + adapt_from = left_selectable + + ( + pj, + sj, + source, + dest, + secondary, + target_adapter, + ) = prop._create_joins( + source_selectable=adapt_from, + dest_selectable=adapt_to, + source_polymorphic=True, + of_type_entity=right_info, + alias_secondary=True, + extra_criteria=_extra_criteria, + ) + + if sj is not None: + if isouter: + # note this is an inner join from secondary->right + right = sql.join(secondary, right, sj) + onclause = pj + else: + left = sql.join(left, secondary, pj, isouter) + onclause = sj + else: + onclause = pj + + self._target_adapter = target_adapter + + # we don't use the normal coercions logic for _ORMJoin + # (probably should), so do some gymnastics to get the entity. + # logic here is for #8721, which was a major bug in 1.4 + # for almost two years, not reported/fixed until 1.4.43 (!) + if is_selectable(left_info): + parententity = left_selectable._annotations.get( + "parententity", None + ) + elif insp_is_mapper(left_info) or insp_is_aliased_class(left_info): + parententity = left_info + else: + parententity = None + + if parententity is not None: + self._annotations = self._annotations.union( + {"parententity": parententity} + ) + + augment_onclause = bool(_extra_criteria) and not prop + expression.Join.__init__(self, left, right, onclause, isouter, full) + + assert self.onclause is not None + + if augment_onclause: + self.onclause &= sql.and_(*_extra_criteria) + + if ( + not prop + and getattr(right_info, "mapper", None) + and right_info.mapper.single # type: ignore + ): + right_info = cast("_InternalEntityType[Any]", right_info) + # if single inheritance target and we are using a manual + # or implicit ON clause, augment it the same way we'd augment the + # WHERE. + single_crit = right_info.mapper._single_table_criterion + if single_crit is not None: + if insp_is_aliased_class(right_info): + single_crit = right_info._adapter.traverse(single_crit) + self.onclause = self.onclause & single_crit + + def _splice_into_center(self, other): + """Splice a join into the center. 
+ + Given join(a, b) and join(b, c), return join(a, b).join(c) + + """ + leftmost = other + while isinstance(leftmost, sql.Join): + leftmost = leftmost.left + + assert self.right is leftmost + + left = _ORMJoin( + self.left, + other.left, + self.onclause, + isouter=self.isouter, + _left_memo=self._left_memo, + _right_memo=None, + ) + + return _ORMJoin( + left, + other.right, + other.onclause, + isouter=other.isouter, + _right_memo=other._right_memo, + ) + + def join( + self, + right: _FromClauseArgument, + onclause: Optional[_OnClauseArgument] = None, + isouter: bool = False, + full: bool = False, + ) -> _ORMJoin: + return _ORMJoin(self, right, onclause, full=full, isouter=isouter) + + def outerjoin( + self, + right: _FromClauseArgument, + onclause: Optional[_OnClauseArgument] = None, + full: bool = False, + ) -> _ORMJoin: + return _ORMJoin(self, right, onclause, isouter=True, full=full) + + +def with_parent( + instance: object, + prop: attributes.QueryableAttribute[Any], + from_entity: Optional[_EntityType[Any]] = None, +) -> ColumnElement[bool]: + """Create filtering criterion that relates this query's primary entity + to the given related instance, using established + :func:`_orm.relationship()` + configuration. + + E.g.:: + + stmt = select(Address).where(with_parent(some_user, User.addresses)) + + + The SQL rendered is the same as that rendered when a lazy loader + would fire off from the given parent on that attribute, meaning + that the appropriate state is taken from the parent object in + Python without the need to render joins to the parent table + in the rendered statement. + + The given property may also make use of :meth:`_orm.PropComparator.of_type` + to indicate the left side of the criteria:: + + + a1 = aliased(Address) + a2 = aliased(Address) + stmt = select(a1, a2).where( + with_parent(u1, User.addresses.of_type(a2)) + ) + + The above use is equivalent to using the + :func:`_orm.with_parent.from_entity` argument:: + + a1 = aliased(Address) + a2 = aliased(Address) + stmt = select(a1, a2).where( + with_parent(u1, User.addresses, from_entity=a2) + ) + + :param instance: + An instance which has some :func:`_orm.relationship`. + + :param property: + Class-bound attribute, which indicates + what relationship from the instance should be used to reconcile the + parent/child relationship. + + :param from_entity: + Entity in which to consider as the left side. This defaults to the + "zero" entity of the :class:`_query.Query` itself. + + .. versionadded:: 1.2 + + """ + prop_t: RelationshipProperty[Any] + + if isinstance(prop, str): + raise sa_exc.ArgumentError( + "with_parent() accepts class-bound mapped attributes, not strings" + ) + elif isinstance(prop, attributes.QueryableAttribute): + if prop._of_type: + from_entity = prop._of_type + mapper_property = prop.property + if mapper_property is None or not prop_is_relationship( + mapper_property + ): + raise sa_exc.ArgumentError( + f"Expected relationship property for with_parent(), " + f"got {mapper_property}" + ) + prop_t = mapper_property + else: + prop_t = prop + + return prop_t._with_parent(instance, from_entity=from_entity) + + +def has_identity(object_: object) -> bool: + """Return True if the given object has a database + identity. + + This typically corresponds to the object being + in either the persistent or detached state. + + .. 
seealso:: + + :func:`.was_deleted` + + """ + state = attributes.instance_state(object_) + return state.has_identity + + +def was_deleted(object_: object) -> bool: + """Return True if the given object was deleted + within a session flush. + + This is regardless of whether or not the object is + persistent or detached. + + .. seealso:: + + :attr:`.InstanceState.was_deleted` + + """ + + state = attributes.instance_state(object_) + return state.was_deleted + + +def _entity_corresponds_to( + given: _InternalEntityType[Any], entity: _InternalEntityType[Any] +) -> bool: + """determine if 'given' corresponds to 'entity', in terms + of an entity passed to Query that would match the same entity + being referred to elsewhere in the query. + + """ + if insp_is_aliased_class(entity): + if insp_is_aliased_class(given): + if entity._base_alias() is given._base_alias(): + return True + return False + elif insp_is_aliased_class(given): + if given._use_mapper_path: + return entity in given.with_polymorphic_mappers + else: + return entity is given + + assert insp_is_mapper(given) + return entity.common_parent(given) + + +def _entity_corresponds_to_use_path_impl( + given: _InternalEntityType[Any], entity: _InternalEntityType[Any] +) -> bool: + """determine if 'given' corresponds to 'entity', in terms + of a path of loader options where a mapped attribute is taken to + be a member of a parent entity. + + e.g.:: + + someoption(A).someoption(A.b) # -> fn(A, A) -> True + someoption(A).someoption(C.d) # -> fn(A, C) -> False + + a1 = aliased(A) + someoption(a1).someoption(A.b) # -> fn(a1, A) -> False + someoption(a1).someoption(a1.b) # -> fn(a1, a1) -> True + + wp = with_polymorphic(A, [A1, A2]) + someoption(wp).someoption(A1.foo) # -> fn(wp, A1) -> False + someoption(wp).someoption(wp.A1.foo) # -> fn(wp, wp.A1) -> True + + + """ + if insp_is_aliased_class(given): + return ( + insp_is_aliased_class(entity) + and not entity._use_mapper_path + and (given is entity or entity in given._with_polymorphic_entities) + ) + elif not insp_is_aliased_class(entity): + return given.isa(entity.mapper) + else: + return ( + entity._use_mapper_path + and given in entity.with_polymorphic_mappers + ) + + +def _entity_isa(given: _InternalEntityType[Any], mapper: Mapper[Any]) -> bool: + """determine if 'given' "is a" mapper, in terms of the given + would load rows of type 'mapper'. + + """ + if given.is_aliased_class: + return mapper in given.with_polymorphic_mappers or given.mapper.isa( + mapper + ) + elif given.with_polymorphic_mappers: + return mapper in given.with_polymorphic_mappers or given.isa(mapper) + else: + return given.isa(mapper) + + +def _getitem(iterable_query: Query[Any], item: Any) -> Any: + """calculate __getitem__ in terms of an iterable query object + that also has a slice() method. 
+ + """ + + def _no_negative_indexes(): + raise IndexError( + "negative indexes are not accepted by SQL " + "index / slice operators" + ) + + if isinstance(item, slice): + start, stop, step = util.decode_slice(item) + + if ( + isinstance(stop, int) + and isinstance(start, int) + and stop - start <= 0 + ): + return [] + + elif (isinstance(start, int) and start < 0) or ( + isinstance(stop, int) and stop < 0 + ): + _no_negative_indexes() + + res = iterable_query.slice(start, stop) + if step is not None: + return list(res)[None : None : item.step] + else: + return list(res) + else: + if item == -1: + _no_negative_indexes() + else: + return list(iterable_query[item : item + 1])[0] + + +def _is_mapped_annotation( + raw_annotation: _AnnotationScanType, + cls: Type[Any], + originating_cls: Type[Any], +) -> bool: + try: + annotated = de_stringify_annotation( + cls, raw_annotation, originating_cls.__module__ + ) + except NameError: + # in most cases, at least within our own tests, we can raise + # here, which is more accurate as it prevents us from returning + # false negatives. However, in the real world, try to avoid getting + # involved with end-user annotations that have nothing to do with us. + # see issue #8888 where we bypass using this function in the case + # that we want to detect an unresolvable Mapped[] type. + return False + else: + return is_origin_of_cls(annotated, _MappedAnnotationBase) + + +class _CleanupError(Exception): + pass + + +def _cleanup_mapped_str_annotation( + annotation: str, originating_module: str +) -> str: + # fix up an annotation that comes in as the form: + # 'Mapped[List[Address]]' so that it instead looks like: + # 'Mapped[List["Address"]]' , which will allow us to get + # "Address" as a string + + # additionally, resolve symbols for these names since this is where + # we'd have to do it + + inner: Optional[Match[str]] + + mm = re.match(r"^(.+?)\[(.+)\]$", annotation) + + if not mm: + return annotation + + # ticket #8759. Resolve the Mapped name to a real symbol. + # originally this just checked the name. + try: + obj = eval_name_only(mm.group(1), originating_module) + except NameError as ne: + raise _CleanupError( + f'For annotation "{annotation}", could not resolve ' + f'container type "{mm.group(1)}". ' + "Please ensure this type is imported at the module level " + "outside of TYPE_CHECKING blocks" + ) from ne + + if obj is typing.ClassVar: + real_symbol = "ClassVar" + else: + try: + if issubclass(obj, _MappedAnnotationBase): + real_symbol = obj.__name__ + else: + return annotation + except TypeError: + # avoid isinstance(obj, type) check, just catch TypeError + return annotation + + # note: if one of the codepaths above didn't define real_symbol and + # then didn't return, real_symbol raises UnboundLocalError + # which is actually a NameError, and the calling routines don't + # notice this since they are catching NameError anyway. Just in case + # this is being modified in the future, something to be aware of. 
+ + stack = [] + inner = mm + while True: + stack.append(real_symbol if mm is inner else inner.group(1)) + g2 = inner.group(2) + inner = re.match(r"^(.+?)\[(.+)\]$", g2) + if inner is None: + stack.append(g2) + break + + # stacks we want to rewrite, that is, quote the last entry which + # we think is a relationship class name: + # + # ['Mapped', 'List', 'Address'] + # ['Mapped', 'A'] + # + # stacks we dont want to rewrite, which are generally MappedColumn + # use cases: + # + # ['Mapped', "'Optional[Dict[str, str]]'"] + # ['Mapped', 'dict[str, str] | None'] + + if ( + # avoid already quoted symbols such as + # ['Mapped', "'Optional[Dict[str, str]]'"] + not re.match(r"""^["'].*["']$""", stack[-1]) + # avoid further generics like Dict[] such as + # ['Mapped', 'dict[str, str] | None'] + and not re.match(r".*\[.*\]", stack[-1]) + ): + stripchars = "\"' " + stack[-1] = ", ".join( + f'"{elem.strip(stripchars)}"' for elem in stack[-1].split(",") + ) + + annotation = "[".join(stack) + ("]" * (len(stack) - 1)) + + return annotation + + +def _extract_mapped_subtype( + raw_annotation: Optional[_AnnotationScanType], + cls: type, + originating_module: str, + key: str, + attr_cls: Type[Any], + required: bool, + is_dataclass_field: bool, + expect_mapped: bool = True, + raiseerr: bool = True, +) -> Optional[Tuple[Union[_AnnotationScanType, str], Optional[type]]]: + """given an annotation, figure out if it's ``Mapped[something]`` and if + so, return the ``something`` part. + + Includes error raise scenarios and other options. + + """ + + if raw_annotation is None: + if required: + raise sa_exc.ArgumentError( + f"Python typing annotation is required for attribute " + f'"{cls.__name__}.{key}" when primary argument(s) for ' + f'"{attr_cls.__name__}" construct are None or not present' + ) + return None + + try: + annotated = de_stringify_annotation( + cls, + raw_annotation, + originating_module, + str_cleanup_fn=_cleanup_mapped_str_annotation, + ) + except _CleanupError as ce: + raise sa_exc.ArgumentError( + f"Could not interpret annotation {raw_annotation}. " + "Check that it uses names that are correctly imported at the " + "module level. See chained stack trace for more hints." + ) from ce + except NameError as ne: + if raiseerr and "Mapped[" in raw_annotation: # type: ignore + raise sa_exc.ArgumentError( + f"Could not interpret annotation {raw_annotation}. " + "Check that it uses names that are correctly imported at the " + "module level. See chained stack trace for more hints." + ) from ne + + annotated = raw_annotation # type: ignore + + if is_dataclass_field: + return annotated, None + else: + if not hasattr(annotated, "__origin__") or not is_origin_of_cls( + annotated, _MappedAnnotationBase + ): + if expect_mapped: + if not raiseerr: + return None + + origin = getattr(annotated, "__origin__", None) + if origin is typing.ClassVar: + return None + + # check for other kind of ORM descriptor like AssociationProxy, + # don't raise for that (issue #9957) + elif isinstance(origin, type) and issubclass( + origin, ORMDescriptor + ): + return None + + raise sa_exc.ArgumentError( + f'Type annotation for "{cls.__name__}.{key}" ' + "can't be correctly interpreted for " + "Annotated Declarative Table form. ORM annotations " + "should normally make use of the ``Mapped[]`` generic " + "type, or other ORM-compatible generic type, as a " + "container for the actual type, which indicates the " + "intent that the attribute is mapped. 
" + "Class variables that are not intended to be mapped " + "by the ORM should use ClassVar[]. " + "To allow Annotated Declarative to disregard legacy " + "annotations which don't use Mapped[] to pass, set " + '"__allow_unmapped__ = True" on the class or a ' + "superclass this class.", + code="zlpr", + ) + + else: + return annotated, None + + if len(annotated.__args__) != 1: + raise sa_exc.ArgumentError( + "Expected sub-type for Mapped[] annotation" + ) + + return ( + # fix dict/list/set args to be ForwardRef, see #11814 + fixup_container_fwd_refs(annotated.__args__[0]), + annotated.__origin__, + ) + + +def _mapper_property_as_plain_name(prop: Type[Any]) -> str: + if hasattr(prop, "_mapper_property_name"): + name = prop._mapper_property_name() + else: + name = None + return util.clsname_as_plain_name(prop, name) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/writeonly.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/writeonly.py new file mode 100644 index 00000000..5680cc70 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/writeonly.py @@ -0,0 +1,678 @@ +# orm/writeonly.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Write-only collection API. + +This is an alternate mapped attribute style that only supports single-item +collection mutation operations. To read the collection, a select() +object must be executed each time. + +.. versionadded:: 2.0 + + +""" + +from __future__ import annotations + +from typing import Any +from typing import Collection +from typing import Dict +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import List +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from sqlalchemy.sql import bindparam +from . import attributes +from . import interfaces +from . import relationships +from . import strategies +from .base import NEVER_SET +from .base import object_mapper +from .base import PassiveFlag +from .base import RelationshipDirection +from .. import exc +from .. import inspect +from .. import log +from .. import util +from ..sql import delete +from ..sql import insert +from ..sql import select +from ..sql import update +from ..sql.dml import Delete +from ..sql.dml import Insert +from ..sql.dml import Update +from ..util.typing import Literal + +if TYPE_CHECKING: + from . 
import QueryableAttribute + from ._typing import _InstanceDict + from .attributes import AttributeEventToken + from .base import LoaderCallableStatus + from .collections import _AdaptedCollectionProtocol + from .collections import CollectionAdapter + from .mapper import Mapper + from .relationships import _RelationshipOrderByArg + from .state import InstanceState + from .util import AliasedClass + from ..event import _Dispatch + from ..sql.selectable import FromClause + from ..sql.selectable import Select + +_T = TypeVar("_T", bound=Any) + + +class WriteOnlyHistory(Generic[_T]): + """Overrides AttributeHistory to receive append/remove events directly.""" + + unchanged_items: util.OrderedIdentitySet + added_items: util.OrderedIdentitySet + deleted_items: util.OrderedIdentitySet + _reconcile_collection: bool + + def __init__( + self, + attr: WriteOnlyAttributeImpl, + state: InstanceState[_T], + passive: PassiveFlag, + apply_to: Optional[WriteOnlyHistory[_T]] = None, + ) -> None: + if apply_to: + if passive & PassiveFlag.SQL_OK: + raise exc.InvalidRequestError( + f"Attribute {attr} can't load the existing state from the " + "database for this operation; full iteration is not " + "permitted. If this is a delete operation, configure " + f"passive_deletes=True on the {attr} relationship in " + "order to resolve this error." + ) + + self.unchanged_items = apply_to.unchanged_items + self.added_items = apply_to.added_items + self.deleted_items = apply_to.deleted_items + self._reconcile_collection = apply_to._reconcile_collection + else: + self.deleted_items = util.OrderedIdentitySet() + self.added_items = util.OrderedIdentitySet() + self.unchanged_items = util.OrderedIdentitySet() + self._reconcile_collection = False + + @property + def added_plus_unchanged(self) -> List[_T]: + return list(self.added_items.union(self.unchanged_items)) + + @property + def all_items(self) -> List[_T]: + return list( + self.added_items.union(self.unchanged_items).union( + self.deleted_items + ) + ) + + def as_history(self) -> attributes.History: + if self._reconcile_collection: + added = self.added_items.difference(self.unchanged_items) + deleted = self.deleted_items.intersection(self.unchanged_items) + unchanged = self.unchanged_items.difference(deleted) + else: + added, unchanged, deleted = ( + self.added_items, + self.unchanged_items, + self.deleted_items, + ) + return attributes.History(list(added), list(unchanged), list(deleted)) + + def indexed(self, index: Union[int, slice]) -> Union[List[_T], _T]: + return list(self.added_items)[index] + + def add_added(self, value: _T) -> None: + self.added_items.add(value) + + def add_removed(self, value: _T) -> None: + if value in self.added_items: + self.added_items.remove(value) + else: + self.deleted_items.add(value) + + +class WriteOnlyAttributeImpl( + attributes.HasCollectionAdapter, attributes.AttributeImpl +): + uses_objects: bool = True + default_accepts_scalar_loader: bool = False + supports_population: bool = False + _supports_dynamic_iteration: bool = False + collection: bool = False + dynamic: bool = True + order_by: _RelationshipOrderByArg = () + collection_history_cls: Type[WriteOnlyHistory[Any]] = WriteOnlyHistory + + query_class: Type[WriteOnlyCollection[Any]] + + def __init__( + self, + class_: Union[Type[Any], AliasedClass[Any]], + key: str, + dispatch: _Dispatch[QueryableAttribute[Any]], + target_mapper: Mapper[_T], + order_by: _RelationshipOrderByArg, + **kw: Any, + ): + super().__init__(class_, key, None, dispatch, **kw) + self.target_mapper = 
target_mapper + self.query_class = WriteOnlyCollection + if order_by: + self.order_by = tuple(order_by) + + def get( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, + ) -> Union[util.OrderedIdentitySet, WriteOnlyCollection[Any]]: + if not passive & PassiveFlag.SQL_OK: + return self._get_collection_history( + state, PassiveFlag.PASSIVE_NO_INITIALIZE + ).added_items + else: + return self.query_class(self, state) + + @overload + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: Literal[None] = ..., + passive: Literal[PassiveFlag.PASSIVE_OFF] = ..., + ) -> CollectionAdapter: ... + + @overload + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: _AdaptedCollectionProtocol = ..., + passive: PassiveFlag = ..., + ) -> CollectionAdapter: ... + + @overload + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: Optional[_AdaptedCollectionProtocol] = ..., + passive: PassiveFlag = ..., + ) -> Union[ + Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter + ]: ... + + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: Optional[_AdaptedCollectionProtocol] = None, + passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, + ) -> Union[ + Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter + ]: + data: Collection[Any] + if not passive & PassiveFlag.SQL_OK: + data = self._get_collection_history(state, passive).added_items + else: + history = self._get_collection_history(state, passive) + data = history.added_plus_unchanged + return DynamicCollectionAdapter(data) # type: ignore[return-value] + + @util.memoized_property + def _append_token( # type:ignore[override] + self, + ) -> attributes.AttributeEventToken: + return attributes.AttributeEventToken(self, attributes.OP_APPEND) + + @util.memoized_property + def _remove_token( # type:ignore[override] + self, + ) -> attributes.AttributeEventToken: + return attributes.AttributeEventToken(self, attributes.OP_REMOVE) + + def fire_append_event( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + collection_history: Optional[WriteOnlyHistory[Any]] = None, + ) -> None: + if collection_history is None: + collection_history = self._modified_event(state, dict_) + + collection_history.add_added(value) + + for fn in self.dispatch.append: + value = fn(state, value, initiator or self._append_token) + + if self.trackparent and value is not None: + self.sethasparent(attributes.instance_state(value), state, True) + + def fire_remove_event( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + collection_history: Optional[WriteOnlyHistory[Any]] = None, + ) -> None: + if collection_history is None: + collection_history = self._modified_event(state, dict_) + + collection_history.add_removed(value) + + if self.trackparent and value is not None: + self.sethasparent(attributes.instance_state(value), state, False) + + for fn in self.dispatch.remove: + fn(state, value, initiator or self._remove_token) + + def _modified_event( + self, state: InstanceState[Any], dict_: _InstanceDict + ) -> WriteOnlyHistory[Any]: + if self.key not in state.committed_state: + state.committed_state[self.key] = self.collection_history_cls( + self, state, PassiveFlag.PASSIVE_NO_FETCH + ) + + state._modified_event(dict_, self, 
NEVER_SET) + + # this is a hack to allow the entities.ComparableEntity fixture + # to work + dict_[self.key] = True + return state.committed_state[self.key] # type: ignore[no-any-return] + + def set( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken] = None, + passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, + check_old: Any = None, + pop: bool = False, + _adapt: bool = True, + ) -> None: + if initiator and initiator.parent_token is self.parent_token: + return + + if pop and value is None: + return + + iterable = value + new_values = list(iterable) + if state.has_identity: + if not self._supports_dynamic_iteration: + raise exc.InvalidRequestError( + f'Collection "{self}" does not support implicit ' + "iteration; collection replacement operations " + "can't be used" + ) + old_collection = util.IdentitySet( + self.get(state, dict_, passive=passive) + ) + + collection_history = self._modified_event(state, dict_) + if not state.has_identity: + old_collection = collection_history.added_items + else: + old_collection = old_collection.union( + collection_history.added_items + ) + + constants = old_collection.intersection(new_values) + additions = util.IdentitySet(new_values).difference(constants) + removals = old_collection.difference(constants) + + for member in new_values: + if member in additions: + self.fire_append_event( + state, + dict_, + member, + None, + collection_history=collection_history, + ) + + for member in removals: + self.fire_remove_event( + state, + dict_, + member, + None, + collection_history=collection_history, + ) + + def delete(self, *args: Any, **kwargs: Any) -> NoReturn: + raise NotImplementedError() + + def set_committed_value( + self, state: InstanceState[Any], dict_: _InstanceDict, value: Any + ) -> NoReturn: + raise NotImplementedError( + "Dynamic attributes don't support collection population." 
+ ) + + def get_history( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PassiveFlag.PASSIVE_NO_FETCH, + ) -> attributes.History: + c = self._get_collection_history(state, passive) + return c.as_history() + + def get_all_pending( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + passive: PassiveFlag = PassiveFlag.PASSIVE_NO_INITIALIZE, + ) -> List[Tuple[InstanceState[Any], Any]]: + c = self._get_collection_history(state, passive) + return [(attributes.instance_state(x), x) for x in c.all_items] + + def _get_collection_history( + self, state: InstanceState[Any], passive: PassiveFlag + ) -> WriteOnlyHistory[Any]: + c: WriteOnlyHistory[Any] + if self.key in state.committed_state: + c = state.committed_state[self.key] + else: + c = self.collection_history_cls( + self, state, PassiveFlag.PASSIVE_NO_FETCH + ) + + if state.has_identity and (passive & PassiveFlag.INIT_OK): + return self.collection_history_cls( + self, state, passive, apply_to=c + ) + else: + return c + + def append( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + passive: PassiveFlag = PassiveFlag.PASSIVE_NO_FETCH, + ) -> None: + if initiator is not self: + self.fire_append_event(state, dict_, value, initiator) + + def remove( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + passive: PassiveFlag = PassiveFlag.PASSIVE_NO_FETCH, + ) -> None: + if initiator is not self: + self.fire_remove_event(state, dict_, value, initiator) + + def pop( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken], + passive: PassiveFlag = PassiveFlag.PASSIVE_NO_FETCH, + ) -> None: + self.remove(state, dict_, value, initiator, passive=passive) + + +@log.class_logger +@relationships.RelationshipProperty.strategy_for(lazy="write_only") +class WriteOnlyLoader(strategies.AbstractRelationshipLoader, log.Identified): + impl_class = WriteOnlyAttributeImpl + + def init_class_attribute(self, mapper: Mapper[Any]) -> None: + self.is_class_level = True + if not self.uselist or self.parent_property.direction not in ( + interfaces.ONETOMANY, + interfaces.MANYTOMANY, + ): + raise exc.InvalidRequestError( + "On relationship %s, 'dynamic' loaders cannot be used with " + "many-to-one/one-to-one relationships and/or " + "uselist=False." % self.parent_property + ) + + strategies._register_attribute( # type: ignore[no-untyped-call] + self.parent_property, + mapper, + useobject=True, + impl_class=self.impl_class, + target_mapper=self.parent_property.mapper, + order_by=self.parent_property.order_by, + query_class=self.parent_property.query_class, + ) + + +class DynamicCollectionAdapter: + """simplified CollectionAdapter for internal API consistency""" + + data: Collection[Any] + + def __init__(self, data: Collection[Any]): + self.data = data + + def __iter__(self) -> Iterator[Any]: + return iter(self.data) + + def _reset_empty(self) -> None: + pass + + def __len__(self) -> int: + return len(self.data) + + def __bool__(self) -> bool: + return True + + +class AbstractCollectionWriter(Generic[_T]): + """Virtual collection which includes append/remove methods that synchronize + into the attribute event system. + + """ + + if not TYPE_CHECKING: + __slots__ = () + + instance: _T + _from_obj: Tuple[FromClause, ...] 
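+
+    # usage sketch (hypothetical ``Account``/``Entry`` mapped classes, not
+    # from this module): a relationship configured with lazy="write_only"
+    # exposes the WriteOnlyCollection subclass defined below, which stages
+    # mutations and requires an explicit SELECT to read:
+    #
+    #     account = session.get(Account, 1)
+    #     account.entries.add(Entry(amount=10))   # persisted on next flush
+    #     rows = session.scalars(account.entries.select()).all()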
+
+    def __init__(self, attr: WriteOnlyAttributeImpl, state: InstanceState[_T]):
+        instance = state.obj()
+        if TYPE_CHECKING:
+            assert instance
+        self.instance = instance
+        self.attr = attr
+
+        mapper = object_mapper(instance)
+        prop = mapper._props[self.attr.key]
+
+        if prop.secondary is not None:
+            # this is a hack right now. The Query only knows how to
+            # make subsequent joins() without a given left-hand side
+            # from self._from_obj[0]. We need to ensure prop.secondary
+            # is in the FROM. So we purposely put the mapper selectable
+            # in _from_obj[0] to ensure a user-defined join() later on
+            # doesn't fail, and secondary is then in _from_obj[1].
+
+            # note also, we are using the official ORM-annotated selectable
+            # from __clause_element__(), see #7868
+            self._from_obj = (prop.mapper.__clause_element__(), prop.secondary)
+        else:
+            self._from_obj = ()
+
+        self._where_criteria = (
+            prop._with_parent(instance, alias_secondary=False),
+        )
+
+        if self.attr.order_by:
+            self._order_by_clauses = self.attr.order_by
+        else:
+            self._order_by_clauses = ()
+
+    def _add_all_impl(self, iterator: Iterable[_T]) -> None:
+        for item in iterator:
+            self.attr.append(
+                attributes.instance_state(self.instance),
+                attributes.instance_dict(self.instance),
+                item,
+                None,
+            )
+
+    def _remove_impl(self, item: _T) -> None:
+        self.attr.remove(
+            attributes.instance_state(self.instance),
+            attributes.instance_dict(self.instance),
+            item,
+            None,
+        )
+
+
+class WriteOnlyCollection(AbstractCollectionWriter[_T]):
+    """Write-only collection which can synchronize changes into the
+    attribute event system.
+
+    The :class:`.WriteOnlyCollection` is used in a mapping by
+    using the ``"write_only"`` lazy loading strategy with
+    :func:`_orm.relationship`. For background on this configuration,
+    see :ref:`write_only_relationship`.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :ref:`write_only_relationship`
+
+    """
+
+    __slots__ = (
+        "instance",
+        "attr",
+        "_where_criteria",
+        "_from_obj",
+        "_order_by_clauses",
+    )
+
+    def __iter__(self) -> NoReturn:
+        raise TypeError(
+            "WriteOnly collections don't support iteration in-place; "
+            "to query for collection items, use the select() method to "
+            "produce a SQL statement and execute it with session.scalars()."
+        )
+
+    def select(self) -> Select[Tuple[_T]]:
+        """Produce a :class:`_sql.Select` construct that represents the
+        rows within this instance-local :class:`_orm.WriteOnlyCollection`.
+
+        """
+        stmt = select(self.attr.target_mapper).where(*self._where_criteria)
+        if self._from_obj:
+            stmt = stmt.select_from(*self._from_obj)
+        if self._order_by_clauses:
+            stmt = stmt.order_by(*self._order_by_clauses)
+        return stmt
+
+    def insert(self) -> Insert:
+        """For one-to-many collections, produce a :class:`_dml.Insert` which
+        will insert new rows in terms of this instance-local
+        :class:`_orm.WriteOnlyCollection`.
+
+        This construct is only supported for a :class:`_orm.Relationship`
+        that does **not** include the :paramref:`_orm.relationship.secondary`
+        parameter. For relationships that refer to a many-to-many table,
+        use ordinary bulk insert techniques to produce new objects, then
+        use :meth:`_orm.AbstractCollectionWriter.add_all` to associate them
+        with the collection.
+ + + """ + + state = inspect(self.instance) + mapper = state.mapper + prop = mapper._props[self.attr.key] + + if prop.direction is not RelationshipDirection.ONETOMANY: + raise exc.InvalidRequestError( + "Write only bulk INSERT only supported for one-to-many " + "collections; for many-to-many, use a separate bulk " + "INSERT along with add_all()." + ) + + dict_: Dict[str, Any] = {} + + for l, r in prop.synchronize_pairs: + fn = prop._get_attr_w_warn_on_none( + mapper, + state, + state.dict, + l, + ) + + dict_[r.key] = bindparam(None, callable_=fn) + + return insert(self.attr.target_mapper).values(**dict_) + + def update(self) -> Update: + """Produce a :class:`_dml.Update` which will refer to rows in terms + of this instance-local :class:`_orm.WriteOnlyCollection`. + + """ + return update(self.attr.target_mapper).where(*self._where_criteria) + + def delete(self) -> Delete: + """Produce a :class:`_dml.Delete` which will refer to rows in terms + of this instance-local :class:`_orm.WriteOnlyCollection`. + + """ + return delete(self.attr.target_mapper).where(*self._where_criteria) + + def add_all(self, iterator: Iterable[_T]) -> None: + """Add an iterable of items to this :class:`_orm.WriteOnlyCollection`. + + The given items will be persisted to the database in terms of + the parent instance's collection on the next flush. + + """ + self._add_all_impl(iterator) + + def add(self, item: _T) -> None: + """Add an item to this :class:`_orm.WriteOnlyCollection`. + + The given item will be persisted to the database in terms of + the parent instance's collection on the next flush. + + """ + self._add_all_impl([item]) + + def remove(self, item: _T) -> None: + """Remove an item from this :class:`_orm.WriteOnlyCollection`. + + The given item will be removed from the parent instance's collection on + the next flush. + + """ + self._remove_impl(item) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__init__.py new file mode 100644 index 00000000..29fd6529 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__init__.py @@ -0,0 +1,44 @@ +# pool/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + + +"""Connection pooling for DB-API connections. + +Provides a number of connection pool implementations for a variety of +usage scenarios and thread behavior requirements imposed by the +application, DB-API or database itself. + +Also provides a DB-API 2.0 connection proxying mechanism allowing +regular DB-API connect() methods to be transparently managed by a +SQLAlchemy connection pool. +""" + +from . 
import events +from .base import _AdhocProxiedConnection as _AdhocProxiedConnection +from .base import _ConnectionFairy as _ConnectionFairy +from .base import _ConnectionRecord +from .base import _CreatorFnType as _CreatorFnType +from .base import _CreatorWRecFnType as _CreatorWRecFnType +from .base import _finalize_fairy +from .base import _ResetStyleArgType as _ResetStyleArgType +from .base import ConnectionPoolEntry as ConnectionPoolEntry +from .base import ManagesConnection as ManagesConnection +from .base import Pool as Pool +from .base import PoolProxiedConnection as PoolProxiedConnection +from .base import PoolResetState as PoolResetState +from .base import reset_commit as reset_commit +from .base import reset_none as reset_none +from .base import reset_rollback as reset_rollback +from .impl import AssertionPool as AssertionPool +from .impl import AsyncAdaptedQueuePool as AsyncAdaptedQueuePool +from .impl import ( + FallbackAsyncAdaptedQueuePool as FallbackAsyncAdaptedQueuePool, +) +from .impl import NullPool as NullPool +from .impl import QueuePool as QueuePool +from .impl import SingletonThreadPool as SingletonThreadPool +from .impl import StaticPool as StaticPool diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..585bc0a1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__pycache__/base.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__pycache__/base.cpython-312.pyc new file mode 100644 index 00000000..a0ba0d22 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__pycache__/base.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__pycache__/events.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__pycache__/events.cpython-312.pyc new file mode 100644 index 00000000..b035ea9a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__pycache__/events.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__pycache__/impl.cpython-312.pyc b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__pycache__/impl.cpython-312.pyc new file mode 100644 index 00000000..3fb33899 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/__pycache__/impl.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py new file mode 100644 index 00000000..98d20278 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py @@ -0,0 +1,1515 @@ +# pool/base.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + + +"""Base constructs for connection pools. 
+ +""" + +from __future__ import annotations + +from collections import deque +import dataclasses +from enum import Enum +import threading +import time +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import Deque +from typing import Dict +from typing import List +from typing import Optional +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union +import weakref + +from .. import event +from .. import exc +from .. import log +from .. import util +from ..util.typing import Literal +from ..util.typing import Protocol + +if TYPE_CHECKING: + from ..engine.interfaces import DBAPIConnection + from ..engine.interfaces import DBAPICursor + from ..engine.interfaces import Dialect + from ..event import _DispatchCommon + from ..event import _ListenerFnType + from ..event import dispatcher + from ..sql._typing import _InfoType + + +@dataclasses.dataclass(frozen=True) +class PoolResetState: + """describes the state of a DBAPI connection as it is being passed to + the :meth:`.PoolEvents.reset` connection pool event. + + .. versionadded:: 2.0.0b3 + + """ + + __slots__ = ("transaction_was_reset", "terminate_only", "asyncio_safe") + + transaction_was_reset: bool + """Indicates if the transaction on the DBAPI connection was already + essentially "reset" back by the :class:`.Connection` object. + + This boolean is True if the :class:`.Connection` had transactional + state present upon it, which was then not closed using the + :meth:`.Connection.rollback` or :meth:`.Connection.commit` method; + instead, the transaction was closed inline within the + :meth:`.Connection.close` method so is guaranteed to remain non-present + when this event is reached. + + """ + + terminate_only: bool + """indicates if the connection is to be immediately terminated and + not checked in to the pool. + + This occurs for connections that were invalidated, as well as asyncio + connections that were not cleanly handled by the calling code that + are instead being garbage collected. In the latter case, + operations can't be safely run on asyncio connections within garbage + collection as there is not necessarily an event loop present. + + """ + + asyncio_safe: bool + """Indicates if the reset operation is occurring within a scope where + an enclosing event loop is expected to be present for asyncio applications. + + Will be False in the case that the connection is being garbage collected. + + """ + + +class ResetStyle(Enum): + """Describe options for "reset on return" behaviors.""" + + reset_rollback = 0 + reset_commit = 1 + reset_none = 2 + + +_ResetStyleArgType = Union[ + ResetStyle, + Literal[True, None, False, "commit", "rollback"], +] +reset_rollback, reset_commit, reset_none = list(ResetStyle) + + +class _ConnDialect: + """partial implementation of :class:`.Dialect` + which provides DBAPI connection methods. + + When a :class:`_pool.Pool` is combined with an :class:`_engine.Engine`, + the :class:`_engine.Engine` replaces this with its own + :class:`.Dialect`. 
+ + """ + + is_async = False + has_terminate = False + + def do_rollback(self, dbapi_connection: PoolProxiedConnection) -> None: + dbapi_connection.rollback() + + def do_commit(self, dbapi_connection: PoolProxiedConnection) -> None: + dbapi_connection.commit() + + def do_terminate(self, dbapi_connection: DBAPIConnection) -> None: + dbapi_connection.close() + + def do_close(self, dbapi_connection: DBAPIConnection) -> None: + dbapi_connection.close() + + def _do_ping_w_event(self, dbapi_connection: DBAPIConnection) -> bool: + raise NotImplementedError( + "The ping feature requires that a dialect is " + "passed to the connection pool." + ) + + def get_driver_connection(self, connection: DBAPIConnection) -> Any: + return connection + + +class _AsyncConnDialect(_ConnDialect): + is_async = True + + +class _CreatorFnType(Protocol): + def __call__(self) -> DBAPIConnection: ... + + +class _CreatorWRecFnType(Protocol): + def __call__(self, rec: ConnectionPoolEntry) -> DBAPIConnection: ... + + +class Pool(log.Identified, event.EventTarget): + """Abstract base class for connection pools.""" + + dispatch: dispatcher[Pool] + echo: log._EchoFlagType + + _orig_logging_name: Optional[str] + _dialect: Union[_ConnDialect, Dialect] = _ConnDialect() + _creator_arg: Union[_CreatorFnType, _CreatorWRecFnType] + _invoke_creator: _CreatorWRecFnType + _invalidate_time: float + + def __init__( + self, + creator: Union[_CreatorFnType, _CreatorWRecFnType], + recycle: int = -1, + echo: log._EchoFlagType = None, + logging_name: Optional[str] = None, + reset_on_return: _ResetStyleArgType = True, + events: Optional[List[Tuple[_ListenerFnType, str]]] = None, + dialect: Optional[Union[_ConnDialect, Dialect]] = None, + pre_ping: bool = False, + _dispatch: Optional[_DispatchCommon[Pool]] = None, + ): + """ + Construct a Pool. + + :param creator: a callable function that returns a DB-API + connection object. The function will be called with + parameters. + + :param recycle: If set to a value other than -1, number of + seconds between connection recycling, which means upon + checkout, if this timeout is surpassed the connection will be + closed and replaced with a newly opened connection. Defaults to -1. + + :param logging_name: String identifier which will be used within + the "name" field of logging records generated within the + "sqlalchemy.pool" logger. Defaults to a hexstring of the object's + id. + + :param echo: if True, the connection pool will log + informational output such as when connections are invalidated + as well as when connections are recycled to the default log handler, + which defaults to ``sys.stdout`` for output.. If set to the string + ``"debug"``, the logging will include pool checkouts and checkins. + + The :paramref:`_pool.Pool.echo` parameter can also be set from the + :func:`_sa.create_engine` call by using the + :paramref:`_sa.create_engine.echo_pool` parameter. + + .. seealso:: + + :ref:`dbengine_logging` - further detail on how to configure + logging. + + :param reset_on_return: Determine steps to take on + connections as they are returned to the pool, which were + not otherwise handled by a :class:`_engine.Connection`. + Available from :func:`_sa.create_engine` via the + :paramref:`_sa.create_engine.pool_reset_on_return` parameter. + + :paramref:`_pool.Pool.reset_on_return` can have any of these values: + + * ``"rollback"`` - call rollback() on the connection, + to release locks and transaction resources. + This is the default value. The vast majority + of use cases should leave this value set. 
+ * ``"commit"`` - call commit() on the connection, + to release locks and transaction resources. + A commit here may be desirable for databases that + cache query plans if a commit is emitted, + such as Microsoft SQL Server. However, this + value is more dangerous than 'rollback' because + any data changes present on the transaction + are committed unconditionally. + * ``None`` - don't do anything on the connection. + This setting may be appropriate if the database / DBAPI + works in pure "autocommit" mode at all times, or if + a custom reset handler is established using the + :meth:`.PoolEvents.reset` event handler. + + * ``True`` - same as 'rollback', this is here for + backwards compatibility. + * ``False`` - same as None, this is here for + backwards compatibility. + + For further customization of reset on return, the + :meth:`.PoolEvents.reset` event hook may be used which can perform + any connection activity desired on reset. + + .. seealso:: + + :ref:`pool_reset_on_return` + + :meth:`.PoolEvents.reset` + + :param events: a list of 2-tuples, each of the form + ``(callable, target)`` which will be passed to :func:`.event.listen` + upon construction. Provided here so that event listeners + can be assigned via :func:`_sa.create_engine` before dialect-level + listeners are applied. + + :param dialect: a :class:`.Dialect` that will handle the job + of calling rollback(), close(), or commit() on DBAPI connections. + If omitted, a built-in "stub" dialect is used. Applications that + make use of :func:`_sa.create_engine` should not use this parameter + as it is handled by the engine creation strategy. + + :param pre_ping: if True, the pool will emit a "ping" (typically + "SELECT 1", but is dialect-specific) on the connection + upon checkout, to test if the connection is alive or not. If not, + the connection is transparently re-connected and upon success, all + other pooled connections established prior to that timestamp are + invalidated. Requires that a dialect is passed as well to + interpret the disconnection error. + + .. 
versionadded:: 1.2 + + """ + if logging_name: + self.logging_name = self._orig_logging_name = logging_name + else: + self._orig_logging_name = None + + log.instance_logger(self, echoflag=echo) + self._creator = creator + self._recycle = recycle + self._invalidate_time = 0 + self._pre_ping = pre_ping + self._reset_on_return = util.parse_user_argument_for_enum( + reset_on_return, + { + ResetStyle.reset_rollback: ["rollback", True], + ResetStyle.reset_none: ["none", None, False], + ResetStyle.reset_commit: ["commit"], + }, + "reset_on_return", + ) + + self.echo = echo + + if _dispatch: + self.dispatch._update(_dispatch, only_propagate=False) + if dialect: + self._dialect = dialect + if events: + for fn, target in events: + event.listen(self, target, fn) + + @util.hybridproperty + def _is_asyncio(self) -> bool: + return self._dialect.is_async + + @property + def _creator(self) -> Union[_CreatorFnType, _CreatorWRecFnType]: + return self._creator_arg + + @_creator.setter + def _creator( + self, creator: Union[_CreatorFnType, _CreatorWRecFnType] + ) -> None: + self._creator_arg = creator + + # mypy seems to get super confused assigning functions to + # attributes + self._invoke_creator = self._should_wrap_creator(creator) + + @_creator.deleter + def _creator(self) -> None: + # needed for mock testing + del self._creator_arg + del self._invoke_creator + + def _should_wrap_creator( + self, creator: Union[_CreatorFnType, _CreatorWRecFnType] + ) -> _CreatorWRecFnType: + """Detect if creator accepts a single argument, or is sent + as a legacy style no-arg function. + + """ + + try: + argspec = util.get_callable_argspec(self._creator, no_self=True) + except TypeError: + creator_fn = cast(_CreatorFnType, creator) + return lambda rec: creator_fn() + + if argspec.defaults is not None: + defaulted = len(argspec.defaults) + else: + defaulted = 0 + positionals = len(argspec[0]) - defaulted + + # look for the exact arg signature that DefaultStrategy + # sends us + if (argspec[0], argspec[3]) == (["connection_record"], (None,)): + return cast(_CreatorWRecFnType, creator) + # or just a single positional + elif positionals == 1: + return cast(_CreatorWRecFnType, creator) + # all other cases, just wrap and assume legacy "creator" callable + # thing + else: + creator_fn = cast(_CreatorFnType, creator) + return lambda rec: creator_fn() + + def _close_connection( + self, connection: DBAPIConnection, *, terminate: bool = False + ) -> None: + self.logger.debug( + "%s connection %r", + "Hard-closing" if terminate else "Closing", + connection, + ) + try: + if terminate: + self._dialect.do_terminate(connection) + else: + self._dialect.do_close(connection) + except BaseException as e: + self.logger.error( + f"Exception {'terminating' if terminate else 'closing'} " + f"connection %r", + connection, + exc_info=True, + ) + if not isinstance(e, Exception): + raise + + def _create_connection(self) -> ConnectionPoolEntry: + """Called by subclasses to create a new ConnectionRecord.""" + + return _ConnectionRecord(self) + + def _invalidate( + self, + connection: PoolProxiedConnection, + exception: Optional[BaseException] = None, + _checkin: bool = True, + ) -> None: + """Mark all connections established within the generation + of the given connection as invalidated. + + If this pool's last invalidate time is before when the given + connection was created, update the timestamp til now. Otherwise, + no action is performed. 
+ + Connections with a start time prior to this pool's invalidation + time will be recycled upon next checkout. + """ + rec = getattr(connection, "_connection_record", None) + if not rec or self._invalidate_time < rec.starttime: + self._invalidate_time = time.time() + if _checkin and getattr(connection, "is_valid", False): + connection.invalidate(exception) + + def recreate(self) -> Pool: + """Return a new :class:`_pool.Pool`, of the same class as this one + and configured with identical creation arguments. + + This method is used in conjunction with :meth:`dispose` + to close out an entire :class:`_pool.Pool` and create a new one in + its place. + + """ + + raise NotImplementedError() + + def dispose(self) -> None: + """Dispose of this pool. + + This method leaves the possibility of checked-out connections + remaining open, as it only affects connections that are + idle in the pool. + + .. seealso:: + + :meth:`Pool.recreate` + + """ + + raise NotImplementedError() + + def connect(self) -> PoolProxiedConnection: + """Return a DBAPI connection from the pool. + + The connection is instrumented such that when its + ``close()`` method is called, the connection will be returned to + the pool. + + """ + return _ConnectionFairy._checkout(self) + + def _return_conn(self, record: ConnectionPoolEntry) -> None: + """Given a _ConnectionRecord, return it to the :class:`_pool.Pool`. + + This method is called when an instrumented DBAPI connection + has its ``close()`` method called. + + """ + self._do_return_conn(record) + + def _do_get(self) -> ConnectionPoolEntry: + """Implementation for :meth:`get`, supplied by subclasses.""" + + raise NotImplementedError() + + def _do_return_conn(self, record: ConnectionPoolEntry) -> None: + """Implementation for :meth:`return_conn`, supplied by subclasses.""" + + raise NotImplementedError() + + def status(self) -> str: + raise NotImplementedError() + + +class ManagesConnection: + """Common base for the two connection-management interfaces + :class:`.PoolProxiedConnection` and :class:`.ConnectionPoolEntry`. + + These two objects are typically exposed in the public facing API + via the connection pool event hooks, documented at :class:`.PoolEvents`. + + .. versionadded:: 2.0 + + """ + + __slots__ = () + + dbapi_connection: Optional[DBAPIConnection] + """A reference to the actual DBAPI connection being tracked. + + This is a :pep:`249`-compliant object that for traditional sync-style + dialects is provided by the third-party + DBAPI implementation in use. For asyncio dialects, the implementation + is typically an adapter object provided by the SQLAlchemy dialect + itself; the underlying asyncio object is available via the + :attr:`.ManagesConnection.driver_connection` attribute. + + SQLAlchemy's interface for the DBAPI connection is based on the + :class:`.DBAPIConnection` protocol object + + .. seealso:: + + :attr:`.ManagesConnection.driver_connection` + + :ref:`faq_dbapi_connection` + + """ + + driver_connection: Optional[Any] + """The "driver level" connection object as used by the Python + DBAPI or database driver. + + For traditional :pep:`249` DBAPI implementations, this object will + be the same object as that of + :attr:`.ManagesConnection.dbapi_connection`. For an asyncio database + driver, this will be the ultimate "connection" object used by that + driver, such as the ``asyncpg.Connection`` object which will not have + standard pep-249 methods. + + .. versionadded:: 1.4.24 + + .. 
seealso::
+
+        :attr:`.ManagesConnection.dbapi_connection`
+
+        :ref:`faq_dbapi_connection`
+
+    """
+
+    @util.ro_memoized_property
+    def info(self) -> _InfoType:
+        """Info dictionary associated with the underlying DBAPI connection
+        referred to by this :class:`.ManagesConnection` instance, allowing
+        user-defined data to be associated with the connection.
+
+        The data in this dictionary is persistent for the lifespan
+        of the DBAPI connection itself, including across pool checkins
+        and checkouts. When the connection is invalidated
+        and replaced with a new one, this dictionary is cleared.
+
+        For a :class:`.PoolProxiedConnection` instance that's not associated
+        with a :class:`.ConnectionPoolEntry`, such as if it were detached, the
+        attribute returns a dictionary that is local to that
+        :class:`.PoolProxiedConnection`. Therefore the
+        :attr:`.ManagesConnection.info` attribute will always provide a Python
+        dictionary.
+
+        .. seealso::
+
+            :attr:`.ManagesConnection.record_info`
+
+
+        """
+        raise NotImplementedError()
+
+    @util.ro_memoized_property
+    def record_info(self) -> Optional[_InfoType]:
+        """Persistent info dictionary associated with this
+        :class:`.ManagesConnection`.
+
+        Unlike the :attr:`.ManagesConnection.info` dictionary, the lifespan
+        of this dictionary is that of the :class:`.ConnectionPoolEntry`
+        which owns it; therefore this dictionary will persist across
+        reconnects and connection invalidation for a particular entry
+        in the connection pool.
+
+        For a :class:`.PoolProxiedConnection` instance that's not associated
+        with a :class:`.ConnectionPoolEntry`, such as if it were detached, the
+        attribute returns None. Contrast to the :attr:`.ManagesConnection.info`
+        dictionary which is never None.
+
+
+        .. seealso::
+
+            :attr:`.ManagesConnection.info`
+
+        """
+        raise NotImplementedError()
+
+    def invalidate(
+        self, e: Optional[BaseException] = None, soft: bool = False
+    ) -> None:
+        """Mark the managed connection as invalidated.
+
+        :param e: an exception object indicating a reason for the invalidation.
+
+        :param soft: if True, the connection isn't closed; instead, this
+         connection will be recycled on next checkout.
+
+        .. seealso::
+
+            :ref:`pool_connection_invalidation`
+
+
+        """
+        raise NotImplementedError()
+
+
+class ConnectionPoolEntry(ManagesConnection):
+    """Interface for the object that maintains an individual database
+    connection on behalf of a :class:`_pool.Pool` instance.
+
+    The :class:`.ConnectionPoolEntry` object represents the long term
+    maintenance of a particular connection for a pool, including expiring or
+    invalidating that connection to have it replaced with a new one, which will
+    continue to be maintained by that same :class:`.ConnectionPoolEntry`
+    instance. Compared to :class:`.PoolProxiedConnection`, which is the
+    short-term, per-checkout connection manager, this object lasts for the
+    lifespan of a particular "slot" within a connection pool.
+
+    The :class:`.ConnectionPoolEntry` object is mostly visible to public-facing
+    API code when it is delivered to connection pool event hooks, such as
+    :meth:`_events.PoolEvents.connect` and :meth:`_events.PoolEvents.checkout`.
+
+    .. versionadded:: 2.0 :class:`.ConnectionPoolEntry` provides the public
+       facing interface for the :class:`._ConnectionRecord` internal class.
+
+    """
+
+    __slots__ = ()
+
+    @property
+    def in_use(self) -> bool:
+        """Return True if the connection is currently checked out"""
+
+        raise NotImplementedError()
+
+    def close(self) -> None:
+        """Close the DBAPI connection managed by this connection pool entry."""
+        raise NotImplementedError()
+
+
+class _ConnectionRecord(ConnectionPoolEntry):
+    """Maintains a position in a connection pool which references a pooled
+    connection.
+
+    This is an internal object used by the :class:`_pool.Pool` implementation
+    to provide context management to a DBAPI connection maintained by
+    that :class:`_pool.Pool`. The public facing interface for this class
+    is described by the :class:`.ConnectionPoolEntry` class. See that
+    class for public API details.
+
+    .. seealso::
+
+        :class:`.ConnectionPoolEntry`
+
+        :class:`.PoolProxiedConnection`
+
+    """
+
+    __slots__ = (
+        "__pool",
+        "fairy_ref",
+        "finalize_callback",
+        "fresh",
+        "starttime",
+        "dbapi_connection",
+        "__weakref__",
+        "__dict__",
+    )
+
+    finalize_callback: Deque[Callable[[DBAPIConnection], None]]
+    fresh: bool
+    fairy_ref: Optional[weakref.ref[_ConnectionFairy]]
+    starttime: float
+
+    def __init__(self, pool: Pool, connect: bool = True):
+        self.fresh = False
+        self.fairy_ref = None
+        self.starttime = 0
+        self.dbapi_connection = None
+
+        self.__pool = pool
+        if connect:
+            self.__connect()
+        self.finalize_callback = deque()
+
+    dbapi_connection: Optional[DBAPIConnection]
+
+    @property
+    def driver_connection(self) -> Optional[Any]:  # type: ignore[override]  # mypy#4125  # noqa: E501
+        if self.dbapi_connection is None:
+            return None
+        else:
+            return self.__pool._dialect.get_driver_connection(
+                self.dbapi_connection
+            )
+
+    @property
+    @util.deprecated(
+        "2.0",
+        "The _ConnectionRecord.connection attribute is deprecated; "
+        "please use 'driver_connection'",
+    )
+    def connection(self) -> Optional[DBAPIConnection]:
+        return self.dbapi_connection
+
+    _soft_invalidate_time: float = 0
+
+    @util.ro_memoized_property
+    def info(self) -> _InfoType:
+        return {}
+
+    @util.ro_memoized_property
+    def record_info(self) -> Optional[_InfoType]:
+        return {}
+
+    @classmethod
+    def checkout(cls, pool: Pool) -> _ConnectionFairy:
+        if TYPE_CHECKING:
+            rec = cast(_ConnectionRecord, pool._do_get())
+        else:
+            rec = pool._do_get()
+
+        try:
+            dbapi_connection = rec.get_connection()
+        except BaseException as err:
+            with util.safe_reraise():
+                rec._checkin_failed(err, _fairy_was_created=False)
+
+            # not reached, for code linters only
+            raise
+
+        echo = pool._should_log_debug()
+        fairy = _ConnectionFairy(pool, dbapi_connection, rec, echo)
+
+        rec.fairy_ref = ref = weakref.ref(
+            fairy,
+            lambda ref: (
+                _finalize_fairy(
+                    None, rec, pool, ref, echo, transaction_was_reset=False
+                )
+                if _finalize_fairy is not None
+                else None
+            ),
+        )
+        _strong_ref_connection_records[ref] = rec
+        if echo:
+            pool.logger.debug(
+                "Connection %r checked out from pool", dbapi_connection
+            )
+        return fairy
+
+    def _checkin_failed(
+        self, err: BaseException, _fairy_was_created: bool = True
+    ) -> None:
+        self.invalidate(e=err)
+        self.checkin(
+            _fairy_was_created=_fairy_was_created,
+        )
+
+    def checkin(self, _fairy_was_created: bool = True) -> None:
+        if self.fairy_ref is None and _fairy_was_created:
+            # _fairy_was_created is False for the initial get connection phase;
+            # meaning there was no _ConnectionFairy and we must unconditionally
+            # do a checkin.
+ # + # otherwise, if fairy_was_created==True, if fairy_ref is None here + # that means we were checked in already, so this looks like + # a double checkin. + util.warn("Double checkin attempted on %s" % self) + return + self.fairy_ref = None + connection = self.dbapi_connection + pool = self.__pool + while self.finalize_callback: + finalizer = self.finalize_callback.pop() + if connection is not None: + finalizer(connection) + if pool.dispatch.checkin: + pool.dispatch.checkin(connection, self) + + pool._return_conn(self) + + @property + def in_use(self) -> bool: + return self.fairy_ref is not None + + @property + def last_connect_time(self) -> float: + return self.starttime + + def close(self) -> None: + if self.dbapi_connection is not None: + self.__close() + + def invalidate( + self, e: Optional[BaseException] = None, soft: bool = False + ) -> None: + # already invalidated + if self.dbapi_connection is None: + return + if soft: + self.__pool.dispatch.soft_invalidate( + self.dbapi_connection, self, e + ) + else: + self.__pool.dispatch.invalidate(self.dbapi_connection, self, e) + if e is not None: + self.__pool.logger.info( + "%sInvalidate connection %r (reason: %s:%s)", + "Soft " if soft else "", + self.dbapi_connection, + e.__class__.__name__, + e, + ) + else: + self.__pool.logger.info( + "%sInvalidate connection %r", + "Soft " if soft else "", + self.dbapi_connection, + ) + + if soft: + self._soft_invalidate_time = time.time() + else: + self.__close(terminate=True) + self.dbapi_connection = None + + def get_connection(self) -> DBAPIConnection: + recycle = False + + # NOTE: the various comparisons here are assuming that measurable time + # passes between these state changes. however, time.time() is not + # guaranteed to have sub-second precision. comparisons of + # "invalidation time" to "starttime" should perhaps use >= so that the + # state change can take place assuming no measurable time has passed, + # however this does not guarantee correct behavior here as if time + # continues to not pass, it will try to reconnect repeatedly until + # these timestamps diverge, so in that sense using > is safer. Per + # https://stackoverflow.com/a/1938096/34549, Windows time.time() may be + # within 16 milliseconds accuracy, so unit tests for connection + # invalidation need a sleep of at least this long between initial start + # time and invalidation for the logic below to work reliably. 
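+        # summarizing the branches below: reconnect outright if there is no
+        # current connection; otherwise flag a recycle when the pool-wide
+        # recycle timeout has elapsed, when a pool-wide invalidation is
+        # newer than this record's start time, or when a soft invalidation
+        # on this record is newer than its start time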
+ + if self.dbapi_connection is None: + self.info.clear() + self.__connect() + elif ( + self.__pool._recycle > -1 + and time.time() - self.starttime > self.__pool._recycle + ): + self.__pool.logger.info( + "Connection %r exceeded timeout; recycling", + self.dbapi_connection, + ) + recycle = True + elif self.__pool._invalidate_time > self.starttime: + self.__pool.logger.info( + "Connection %r invalidated due to pool invalidation; " + + "recycling", + self.dbapi_connection, + ) + recycle = True + elif self._soft_invalidate_time > self.starttime: + self.__pool.logger.info( + "Connection %r invalidated due to local soft invalidation; " + + "recycling", + self.dbapi_connection, + ) + recycle = True + + if recycle: + self.__close(terminate=True) + self.info.clear() + + self.__connect() + + assert self.dbapi_connection is not None + return self.dbapi_connection + + def _is_hard_or_soft_invalidated(self) -> bool: + return ( + self.dbapi_connection is None + or self.__pool._invalidate_time > self.starttime + or (self._soft_invalidate_time > self.starttime) + ) + + def __close(self, *, terminate: bool = False) -> None: + self.finalize_callback.clear() + if self.__pool.dispatch.close: + self.__pool.dispatch.close(self.dbapi_connection, self) + assert self.dbapi_connection is not None + self.__pool._close_connection( + self.dbapi_connection, terminate=terminate + ) + self.dbapi_connection = None + + def __connect(self) -> None: + pool = self.__pool + + # ensure any existing connection is removed, so that if + # creator fails, this attribute stays None + self.dbapi_connection = None + try: + self.starttime = time.time() + self.dbapi_connection = connection = pool._invoke_creator(self) + pool.logger.debug("Created new connection %r", connection) + self.fresh = True + except BaseException as e: + with util.safe_reraise(): + pool.logger.debug("Error on connect(): %s", e) + else: + # in SQLAlchemy 1.4 the first_connect event is not used by + # the engine, so this will usually not be set + if pool.dispatch.first_connect: + pool.dispatch.first_connect.for_modify( + pool.dispatch + ).exec_once_unless_exception(self.dbapi_connection, self) + + # init of the dialect now takes place within the connect + # event, so ensure a mutex is used on the first run + pool.dispatch.connect.for_modify( + pool.dispatch + )._exec_w_sync_on_first_run(self.dbapi_connection, self) + + +def _finalize_fairy( + dbapi_connection: Optional[DBAPIConnection], + connection_record: Optional[_ConnectionRecord], + pool: Pool, + ref: Optional[ + weakref.ref[_ConnectionFairy] + ], # this is None when called directly, not by the gc + echo: Optional[log._EchoFlagType], + transaction_was_reset: bool = False, + fairy: Optional[_ConnectionFairy] = None, +) -> None: + """Cleanup for a :class:`._ConnectionFairy` whether or not it's already + been garbage collected. + + When using an async dialect no IO can happen here (without using + a dedicated thread), since this is called outside the greenlet + context and with an already running loop. In this case function + will only log a message and raise a warning. 
+ """ + + is_gc_cleanup = ref is not None + + if is_gc_cleanup: + assert ref is not None + _strong_ref_connection_records.pop(ref, None) + assert connection_record is not None + if connection_record.fairy_ref is not ref: + return + assert dbapi_connection is None + dbapi_connection = connection_record.dbapi_connection + + elif fairy: + _strong_ref_connection_records.pop(weakref.ref(fairy), None) + + # null pool is not _is_asyncio but can be used also with async dialects + dont_restore_gced = pool._dialect.is_async + + if dont_restore_gced: + detach = connection_record is None or is_gc_cleanup + can_manipulate_connection = not is_gc_cleanup + can_close_or_terminate_connection = ( + not pool._dialect.is_async or pool._dialect.has_terminate + ) + requires_terminate_for_close = ( + pool._dialect.is_async and pool._dialect.has_terminate + ) + + else: + detach = connection_record is None + can_manipulate_connection = can_close_or_terminate_connection = True + requires_terminate_for_close = False + + if dbapi_connection is not None: + if connection_record and echo: + pool.logger.debug( + "Connection %r being returned to pool", dbapi_connection + ) + + try: + if not fairy: + assert connection_record is not None + fairy = _ConnectionFairy( + pool, + dbapi_connection, + connection_record, + echo, + ) + assert fairy.dbapi_connection is dbapi_connection + + fairy._reset( + pool, + transaction_was_reset=transaction_was_reset, + terminate_only=detach, + asyncio_safe=can_manipulate_connection, + ) + + if detach: + if connection_record: + fairy._pool = pool + fairy.detach() + + if can_close_or_terminate_connection: + if pool.dispatch.close_detached: + pool.dispatch.close_detached(dbapi_connection) + + pool._close_connection( + dbapi_connection, + terminate=requires_terminate_for_close, + ) + + except BaseException as e: + pool.logger.error( + "Exception during reset or similar", exc_info=True + ) + if connection_record: + connection_record.invalidate(e=e) + if not isinstance(e, Exception): + raise + finally: + if detach and is_gc_cleanup and dont_restore_gced: + message = ( + "The garbage collector is trying to clean up " + f"non-checked-in connection {dbapi_connection!r}, " + f"""which will be { + 'dropped, as it cannot be safely terminated' + if not can_close_or_terminate_connection + else 'terminated' + }. """ + "Please ensure that SQLAlchemy pooled connections are " + "returned to " + "the pool explicitly, either by calling ``close()`` " + "or by using appropriate context managers to manage " + "their lifecycle." + ) + pool.logger.error(message) + util.warn(message) + + if connection_record and connection_record.fairy_ref is not None: + connection_record.checkin() + + # give gc some help. See + # test/engine/test_pool.py::PoolEventsTest::test_checkin_event_gc[True] + # which actually started failing when pytest warnings plugin was + # turned on, due to util.warn() above + if fairy is not None: + fairy.dbapi_connection = None # type: ignore + fairy._connection_record = None + del dbapi_connection + del connection_record + del fairy + + +# a dictionary of the _ConnectionFairy weakrefs to _ConnectionRecord, so that +# GC under pypy will call ConnectionFairy finalizers. linked directly to the +# weakref that will empty itself when collected so that it should not create +# any unmanaged memory references. 
+_strong_ref_connection_records: Dict[ + weakref.ref[_ConnectionFairy], _ConnectionRecord +] = {} + + +class PoolProxiedConnection(ManagesConnection): + """A connection-like adapter for a :pep:`249` DBAPI connection, which + includes additional methods specific to the :class:`.Pool` implementation. + + :class:`.PoolProxiedConnection` is the public-facing interface for the + internal :class:`._ConnectionFairy` implementation object; users familiar + with :class:`._ConnectionFairy` can consider this object to be equivalent. + + .. versionadded:: 2.0 :class:`.PoolProxiedConnection` provides the public- + facing interface for the :class:`._ConnectionFairy` internal class. + + """ + + __slots__ = () + + if typing.TYPE_CHECKING: + + def commit(self) -> None: ... + + def cursor(self) -> DBAPICursor: ... + + def rollback(self) -> None: ... + + @property + def is_valid(self) -> bool: + """Return True if this :class:`.PoolProxiedConnection` still refers + to an active DBAPI connection.""" + + raise NotImplementedError() + + @property + def is_detached(self) -> bool: + """Return True if this :class:`.PoolProxiedConnection` is detached + from its pool.""" + + raise NotImplementedError() + + def detach(self) -> None: + """Separate this connection from its Pool. + + This means that the connection will no longer be returned to the + pool when closed, and will instead be literally closed. The + associated :class:`.ConnectionPoolEntry` is de-associated from this + DBAPI connection. + + Note that any overall connection limiting constraints imposed by a + Pool implementation may be violated after a detach, as the detached + connection is removed from the pool's knowledge and control. + + """ + + raise NotImplementedError() + + def close(self) -> None: + """Release this connection back to the pool. + + The :meth:`.PoolProxiedConnection.close` method shadows the + :pep:`249` ``.close()`` method, altering its behavior to instead + :term:`release` the proxied connection back to the connection pool. + + Upon release to the pool, whether the connection stays "opened" and + pooled in the Python process, versus actually closed out and removed + from the Python process, is based on the pool implementation in use and + its configuration and current state. + + """ + raise NotImplementedError() + + +class _AdhocProxiedConnection(PoolProxiedConnection): + """provides the :class:`.PoolProxiedConnection` interface for cases where + the DBAPI connection is not actually proxied. + + This is used by the engine internals to pass a consistent + :class:`.PoolProxiedConnection` object to consuming dialects in response to + pool events that may not always have the :class:`._ConnectionFairy` + available. + + """ + + __slots__ = ("dbapi_connection", "_connection_record", "_is_valid") + + dbapi_connection: DBAPIConnection + _connection_record: ConnectionPoolEntry + + def __init__( + self, + dbapi_connection: DBAPIConnection, + connection_record: ConnectionPoolEntry, + ): + self.dbapi_connection = dbapi_connection + self._connection_record = connection_record + self._is_valid = True + + @property + def driver_connection(self) -> Any: # type: ignore[override] # mypy#4125 + return self._connection_record.driver_connection + + @property + def connection(self) -> DBAPIConnection: + return self.dbapi_connection + + @property + def is_valid(self) -> bool: + """Implement is_valid state attribute. + + for the adhoc proxied connection it's assumed the connection is valid + as there is no "invalidate" routine. 
+ + """ + return self._is_valid + + def invalidate( + self, e: Optional[BaseException] = None, soft: bool = False + ) -> None: + self._is_valid = False + + @util.ro_non_memoized_property + def record_info(self) -> Optional[_InfoType]: + return self._connection_record.record_info + + def cursor(self, *args: Any, **kwargs: Any) -> DBAPICursor: + return self.dbapi_connection.cursor(*args, **kwargs) + + def __getattr__(self, key: Any) -> Any: + return getattr(self.dbapi_connection, key) + + +class _ConnectionFairy(PoolProxiedConnection): + """Proxies a DBAPI connection and provides return-on-dereference + support. + + This is an internal object used by the :class:`_pool.Pool` implementation + to provide context management to a DBAPI connection delivered by + that :class:`_pool.Pool`. The public facing interface for this class + is described by the :class:`.PoolProxiedConnection` class. See that + class for public API details. + + The name "fairy" is inspired by the fact that the + :class:`._ConnectionFairy` object's lifespan is transitory, as it lasts + only for the length of a specific DBAPI connection being checked out from + the pool, and additionally that as a transparent proxy, it is mostly + invisible. + + .. seealso:: + + :class:`.PoolProxiedConnection` + + :class:`.ConnectionPoolEntry` + + + """ + + __slots__ = ( + "dbapi_connection", + "_connection_record", + "_echo", + "_pool", + "_counter", + "__weakref__", + "__dict__", + ) + + pool: Pool + dbapi_connection: DBAPIConnection + _echo: log._EchoFlagType + + def __init__( + self, + pool: Pool, + dbapi_connection: DBAPIConnection, + connection_record: _ConnectionRecord, + echo: log._EchoFlagType, + ): + self._pool = pool + self._counter = 0 + self.dbapi_connection = dbapi_connection + self._connection_record = connection_record + self._echo = echo + + _connection_record: Optional[_ConnectionRecord] + + @property + def driver_connection(self) -> Optional[Any]: # type: ignore[override] # mypy#4125 # noqa: E501 + if self._connection_record is None: + return None + return self._connection_record.driver_connection + + @property + @util.deprecated( + "2.0", + "The _ConnectionFairy.connection attribute is deprecated; " + "please use 'driver_connection'", + ) + def connection(self) -> DBAPIConnection: + return self.dbapi_connection + + @classmethod + def _checkout( + cls, + pool: Pool, + threadconns: Optional[threading.local] = None, + fairy: Optional[_ConnectionFairy] = None, + ) -> _ConnectionFairy: + if not fairy: + fairy = _ConnectionRecord.checkout(pool) + + if threadconns is not None: + threadconns.current = weakref.ref(fairy) + + assert ( + fairy._connection_record is not None + ), "can't 'checkout' a detached connection fairy" + assert ( + fairy.dbapi_connection is not None + ), "can't 'checkout' an invalidated connection fairy" + + fairy._counter += 1 + if ( + not pool.dispatch.checkout and not pool._pre_ping + ) or fairy._counter != 1: + return fairy + + # Pool listeners can trigger a reconnection on checkout, as well + # as the pre-pinger. + # there are three attempts made here, but note that if the database + # is not accessible from a connection standpoint, those won't proceed + # here. 
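+        # ("three attempts" counts the connection obtained at checkout plus
+        # up to two replacement connections governed by the counter below)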
+ + attempts = 2 + + while attempts > 0: + connection_is_fresh = fairy._connection_record.fresh + fairy._connection_record.fresh = False + try: + if pool._pre_ping: + if not connection_is_fresh: + if fairy._echo: + pool.logger.debug( + "Pool pre-ping on connection %s", + fairy.dbapi_connection, + ) + result = pool._dialect._do_ping_w_event( + fairy.dbapi_connection + ) + if not result: + if fairy._echo: + pool.logger.debug( + "Pool pre-ping on connection %s failed, " + "will invalidate pool", + fairy.dbapi_connection, + ) + raise exc.InvalidatePoolError() + elif fairy._echo: + pool.logger.debug( + "Connection %s is fresh, skipping pre-ping", + fairy.dbapi_connection, + ) + + pool.dispatch.checkout( + fairy.dbapi_connection, fairy._connection_record, fairy + ) + return fairy + except exc.DisconnectionError as e: + if e.invalidate_pool: + pool.logger.info( + "Disconnection detected on checkout, " + "invalidating all pooled connections prior to " + "current timestamp (reason: %r)", + e, + ) + fairy._connection_record.invalidate(e) + pool._invalidate(fairy, e, _checkin=False) + else: + pool.logger.info( + "Disconnection detected on checkout, " + "invalidating individual connection %s (reason: %r)", + fairy.dbapi_connection, + e, + ) + fairy._connection_record.invalidate(e) + try: + fairy.dbapi_connection = ( + fairy._connection_record.get_connection() + ) + except BaseException as err: + with util.safe_reraise(): + fairy._connection_record._checkin_failed( + err, + _fairy_was_created=True, + ) + + # prevent _ConnectionFairy from being carried + # in the stack trace. Do this after the + # connection record has been checked in, so that + # if the del triggers a finalize fairy, it won't + # try to checkin a second time. + del fairy + + # never called, this is for code linters + raise + + attempts -= 1 + except BaseException as be_outer: + with util.safe_reraise(): + rec = fairy._connection_record + if rec is not None: + rec._checkin_failed( + be_outer, + _fairy_was_created=True, + ) + + # prevent _ConnectionFairy from being carried + # in the stack trace, see above + del fairy + + # never called, this is for code linters + raise + + pool.logger.info("Reconnection attempts exhausted on checkout") + fairy.invalidate() + raise exc.InvalidRequestError("This connection is closed") + + def _checkout_existing(self) -> _ConnectionFairy: + return _ConnectionFairy._checkout(self._pool, fairy=self) + + def _checkin(self, transaction_was_reset: bool = False) -> None: + _finalize_fairy( + self.dbapi_connection, + self._connection_record, + self._pool, + None, + self._echo, + transaction_was_reset=transaction_was_reset, + fairy=self, + ) + + def _close(self) -> None: + self._checkin() + + def _reset( + self, + pool: Pool, + transaction_was_reset: bool, + terminate_only: bool, + asyncio_safe: bool, + ) -> None: + if pool.dispatch.reset: + pool.dispatch.reset( + self.dbapi_connection, + self._connection_record, + PoolResetState( + transaction_was_reset=transaction_was_reset, + terminate_only=terminate_only, + asyncio_safe=asyncio_safe, + ), + ) + + if not asyncio_safe: + return + + if pool._reset_on_return is reset_rollback: + if transaction_was_reset: + if self._echo: + pool.logger.debug( + "Connection %s reset, transaction already reset", + self.dbapi_connection, + ) + else: + if self._echo: + pool.logger.debug( + "Connection %s rollback-on-return", + self.dbapi_connection, + ) + pool._dialect.do_rollback(self) + elif pool._reset_on_return is reset_commit: + if self._echo: + pool.logger.debug( + "Connection %s 
commit-on-return", + self.dbapi_connection, + ) + pool._dialect.do_commit(self) + + @property + def _logger(self) -> log._IdentifiedLoggerType: + return self._pool.logger + + @property + def is_valid(self) -> bool: + return self.dbapi_connection is not None + + @property + def is_detached(self) -> bool: + return self._connection_record is None + + @util.ro_memoized_property + def info(self) -> _InfoType: + if self._connection_record is None: + return {} + else: + return self._connection_record.info + + @util.ro_non_memoized_property + def record_info(self) -> Optional[_InfoType]: + if self._connection_record is None: + return None + else: + return self._connection_record.record_info + + def invalidate( + self, e: Optional[BaseException] = None, soft: bool = False + ) -> None: + if self.dbapi_connection is None: + util.warn("Can't invalidate an already-closed connection.") + return + if self._connection_record: + self._connection_record.invalidate(e=e, soft=soft) + if not soft: + # prevent any rollback / reset actions etc. on + # the connection + self.dbapi_connection = None # type: ignore + + # finalize + self._checkin() + + def cursor(self, *args: Any, **kwargs: Any) -> DBAPICursor: + assert self.dbapi_connection is not None + return self.dbapi_connection.cursor(*args, **kwargs) + + def __getattr__(self, key: str) -> Any: + return getattr(self.dbapi_connection, key) + + def detach(self) -> None: + if self._connection_record is not None: + rec = self._connection_record + rec.fairy_ref = None + rec.dbapi_connection = None + # TODO: should this be _return_conn? + self._pool._do_return_conn(self._connection_record) + + # can't get the descriptor assignment to work here + # in pylance. mypy is OK w/ it + self.info = self.info.copy() # type: ignore + + self._connection_record = None + + if self._pool.dispatch.detach: + self._pool.dispatch.detach(self.dbapi_connection, rec) + + def close(self) -> None: + self._counter -= 1 + if self._counter == 0: + self._checkin() + + def _close_special(self, transaction_reset: bool = False) -> None: + self._counter -= 1 + if self._counter == 0: + self._checkin(transaction_was_reset=transaction_reset) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/pool/events.py b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/events.py new file mode 100644 index 00000000..4b4f4e47 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/events.py @@ -0,0 +1,370 @@ +# pool/events.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from __future__ import annotations + +import typing +from typing import Any +from typing import Optional +from typing import Type +from typing import Union + +from .base import ConnectionPoolEntry +from .base import Pool +from .base import PoolProxiedConnection +from .base import PoolResetState +from .. import event +from .. import util + +if typing.TYPE_CHECKING: + from ..engine import Engine + from ..engine.interfaces import DBAPIConnection + + +class PoolEvents(event.Events[Pool]): + """Available events for :class:`_pool.Pool`. + + The methods here define the name of an event as well + as the names of members that are passed to listener + functions. 
+ + e.g.:: + + from sqlalchemy import event + + def my_on_checkout(dbapi_conn, connection_rec, connection_proxy): + "handle an on checkout event" + + event.listen(Pool, 'checkout', my_on_checkout) + + In addition to accepting the :class:`_pool.Pool` class and + :class:`_pool.Pool` instances, :class:`_events.PoolEvents` also accepts + :class:`_engine.Engine` objects and the :class:`_engine.Engine` class as + targets, which will be resolved to the ``.pool`` attribute of the + given engine or the :class:`_pool.Pool` class:: + + engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test") + + # will associate with engine.pool + event.listen(engine, 'checkout', my_on_checkout) + + """ # noqa: E501 + + _target_class_doc = "SomeEngineOrPool" + _dispatch_target = Pool + + @util.preload_module("sqlalchemy.engine") + @classmethod + def _accept_with( + cls, + target: Union[Pool, Type[Pool], Engine, Type[Engine]], + identifier: str, + ) -> Optional[Union[Pool, Type[Pool]]]: + if not typing.TYPE_CHECKING: + Engine = util.preloaded.engine.Engine + + if isinstance(target, type): + if issubclass(target, Engine): + return Pool + else: + assert issubclass(target, Pool) + return target + elif isinstance(target, Engine): + return target.pool + elif isinstance(target, Pool): + return target + elif hasattr(target, "_no_async_engine_events"): + target._no_async_engine_events() + else: + return None + + @classmethod + def _listen( + cls, + event_key: event._EventKey[Pool], + **kw: Any, + ) -> None: + target = event_key.dispatch_target + + kw.setdefault("asyncio", target._is_asyncio) + + event_key.base_listen(**kw) + + def connect( + self, + dbapi_connection: DBAPIConnection, + connection_record: ConnectionPoolEntry, + ) -> None: + """Called at the moment a particular DBAPI connection is first + created for a given :class:`_pool.Pool`. + + This event allows one to capture the point directly after which + the DBAPI module-level ``.connect()`` method has been used in order + to produce a new DBAPI connection. + + :param dbapi_connection: a DBAPI connection. + The :attr:`.ConnectionPoolEntry.dbapi_connection` attribute. + + :param connection_record: the :class:`.ConnectionPoolEntry` managing + the DBAPI connection. + + """ + + def first_connect( + self, + dbapi_connection: DBAPIConnection, + connection_record: ConnectionPoolEntry, + ) -> None: + """Called exactly once for the first time a DBAPI connection is + checked out from a particular :class:`_pool.Pool`. + + The rationale for :meth:`_events.PoolEvents.first_connect` + is to determine + information about a particular series of database connections based + on the settings used for all connections. Since a particular + :class:`_pool.Pool` + refers to a single "creator" function (which in terms + of a :class:`_engine.Engine` + refers to the URL and connection options used), + it is typically valid to make observations about a single connection + that can be safely assumed to be valid about all subsequent + connections, such as the database version, the server and client + encoding settings, collation settings, and many others. + + :param dbapi_connection: a DBAPI connection. + The :attr:`.ConnectionPoolEntry.dbapi_connection` attribute. + + :param connection_record: the :class:`.ConnectionPoolEntry` managing + the DBAPI connection. 
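+
+        For illustration, a minimal sketch of a listener for this event; the
+        event name and arguments are as documented here, and the PRAGMA is
+        just an arbitrary per-pool setting::
+
+            from sqlalchemy import create_engine, event
+
+            engine = create_engine("sqlite://")
+
+            @event.listens_for(engine, "first_connect")
+            def receive_first_connect(dbapi_connection, connection_record):
+                # invoked exactly once, for the first connection
+                # produced by this pool
+                cursor = dbapi_connection.cursor()
+                cursor.execute("PRAGMA case_sensitive_like=ON")
+                cursor.close()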
+
+        """
+
+    def checkout(
+        self,
+        dbapi_connection: DBAPIConnection,
+        connection_record: ConnectionPoolEntry,
+        connection_proxy: PoolProxiedConnection,
+    ) -> None:
+        """Called when a connection is retrieved from the Pool.
+
+        :param dbapi_connection: a DBAPI connection.
+         The :attr:`.ConnectionPoolEntry.dbapi_connection` attribute.
+
+        :param connection_record: the :class:`.ConnectionPoolEntry` managing
+         the DBAPI connection.
+
+        :param connection_proxy: the :class:`.PoolProxiedConnection` object
+         which will proxy the public interface of the DBAPI connection for the
+         lifespan of the checkout.
+
+        If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the
+        current connection will be disposed and a fresh connection retrieved.
+        Processing of all checkout listeners will abort and restart
+        using the new connection.
+
+        .. seealso:: :meth:`_events.ConnectionEvents.engine_connect`
+           - a similar event
+           which occurs upon creation of a new :class:`_engine.Connection`.
+
+        """
+
+    def checkin(
+        self,
+        dbapi_connection: Optional[DBAPIConnection],
+        connection_record: ConnectionPoolEntry,
+    ) -> None:
+        """Called when a connection returns to the pool.
+
+        Note that the connection may be closed, and may be None if the
+        connection has been invalidated.  ``checkin`` will not be called
+        for detached connections.  (They do not return to the pool.)
+
+        :param dbapi_connection: a DBAPI connection.
+         The :attr:`.ConnectionPoolEntry.dbapi_connection` attribute.
+
+        :param connection_record: the :class:`.ConnectionPoolEntry` managing
+         the DBAPI connection.
+
+        """
+
+    @event._legacy_signature(
+        "2.0",
+        ["dbapi_connection", "connection_record"],
+        lambda dbapi_connection, connection_record, reset_state: (
+            dbapi_connection,
+            connection_record,
+        ),
+    )
+    def reset(
+        self,
+        dbapi_connection: DBAPIConnection,
+        connection_record: ConnectionPoolEntry,
+        reset_state: PoolResetState,
+    ) -> None:
+        """Called before the "reset" action occurs for a pooled connection.
+
+        This event represents when the ``rollback()`` method is called on
+        the DBAPI connection before it is returned to the pool or discarded.
+        A custom "reset" strategy may be implemented using this event hook,
+        which may also be combined with disabling the default "reset"
+        behavior using the :paramref:`_pool.Pool.reset_on_return` parameter.
+
+        The primary difference between the :meth:`_events.PoolEvents.reset`
+        and :meth:`_events.PoolEvents.checkin` events is that
+        :meth:`_events.PoolEvents.reset` is called not just for pooled
+        connections that are being returned to the pool, but also for
+        connections that were detached using the
+        :meth:`_engine.Connection.detach` method as well as asyncio
+        connections that are being discarded due to garbage collection
+        taking place on connections before the connection was checked in.
+
+        Note that the event **is not** invoked for connections that were
+        invalidated using :meth:`_engine.Connection.invalidate`.  These
+        events may be intercepted using the
+        :meth:`.PoolEvents.soft_invalidate` and :meth:`.PoolEvents.invalidate`
+        event hooks, and all "connection close" events may be intercepted
+        using :meth:`.PoolEvents.close`.
+
+        The :meth:`_events.PoolEvents.reset` event is usually followed by the
+        :meth:`_events.PoolEvents.checkin` event, except in those
+        cases where the connection is discarded immediately after reset.
+
+        :param dbapi_connection: a DBAPI connection.
+         The :attr:`.ConnectionPoolEntry.dbapi_connection` attribute.
+ + :param connection_record: the :class:`.ConnectionPoolEntry` managing + the DBAPI connection. + + :param reset_state: :class:`.PoolResetState` instance which provides + information about the circumstances under which the connection + is being reset. + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`pool_reset_on_return` + + :meth:`_events.ConnectionEvents.rollback` + + :meth:`_events.ConnectionEvents.commit` + + """ + + def invalidate( + self, + dbapi_connection: DBAPIConnection, + connection_record: ConnectionPoolEntry, + exception: Optional[BaseException], + ) -> None: + """Called when a DBAPI connection is to be "invalidated". + + This event is called any time the + :meth:`.ConnectionPoolEntry.invalidate` method is invoked, either from + API usage or via "auto-invalidation", without the ``soft`` flag. + + The event occurs before a final attempt to call ``.close()`` on the + connection occurs. + + :param dbapi_connection: a DBAPI connection. + The :attr:`.ConnectionPoolEntry.dbapi_connection` attribute. + + :param connection_record: the :class:`.ConnectionPoolEntry` managing + the DBAPI connection. + + :param exception: the exception object corresponding to the reason + for this invalidation, if any. May be ``None``. + + .. seealso:: + + :ref:`pool_connection_invalidation` + + """ + + def soft_invalidate( + self, + dbapi_connection: DBAPIConnection, + connection_record: ConnectionPoolEntry, + exception: Optional[BaseException], + ) -> None: + """Called when a DBAPI connection is to be "soft invalidated". + + This event is called any time the + :meth:`.ConnectionPoolEntry.invalidate` + method is invoked with the ``soft`` flag. + + Soft invalidation refers to when the connection record that tracks + this connection will force a reconnect after the current connection + is checked in. It does not actively close the dbapi_connection + at the point at which it is called. + + :param dbapi_connection: a DBAPI connection. + The :attr:`.ConnectionPoolEntry.dbapi_connection` attribute. + + :param connection_record: the :class:`.ConnectionPoolEntry` managing + the DBAPI connection. + + :param exception: the exception object corresponding to the reason + for this invalidation, if any. May be ``None``. + + """ + + def close( + self, + dbapi_connection: DBAPIConnection, + connection_record: ConnectionPoolEntry, + ) -> None: + """Called when a DBAPI connection is closed. + + The event is emitted before the close occurs. + + The close of a connection can fail; typically this is because + the connection is already closed. If the close operation fails, + the connection is discarded. + + The :meth:`.close` event corresponds to a connection that's still + associated with the pool. To intercept close events for detached + connections use :meth:`.close_detached`. + + :param dbapi_connection: a DBAPI connection. + The :attr:`.ConnectionPoolEntry.dbapi_connection` attribute. + + :param connection_record: the :class:`.ConnectionPoolEntry` managing + the DBAPI connection. + + """ + + def detach( + self, + dbapi_connection: DBAPIConnection, + connection_record: ConnectionPoolEntry, + ) -> None: + """Called when a DBAPI connection is "detached" from a pool. + + This event is emitted after the detach occurs. The connection + is no longer associated with the given connection record. + + :param dbapi_connection: a DBAPI connection. + The :attr:`.ConnectionPoolEntry.dbapi_connection` attribute. + + :param connection_record: the :class:`.ConnectionPoolEntry` managing + the DBAPI connection. 
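+
+        For illustration, a minimal sketch of a listener for this event; the
+        event name and arguments are as documented here::
+
+            from sqlalchemy import create_engine, event
+
+            engine = create_engine("sqlite://")
+
+            @event.listens_for(engine, "detach")
+            def receive_detach(dbapi_connection, connection_record):
+                # the DBAPI connection has already been de-associated
+                # from connection_record at this point
+                ...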
+ + """ + + def close_detached(self, dbapi_connection: DBAPIConnection) -> None: + """Called when a detached DBAPI connection is closed. + + The event is emitted before the close occurs. + + The close of a connection can fail; typically this is because + the connection is already closed. If the close operation fails, + the connection is discarded. + + :param dbapi_connection: a DBAPI connection. + The :attr:`.ConnectionPoolEntry.dbapi_connection` attribute. + + """ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/pool/impl.py b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/impl.py new file mode 100644 index 00000000..157455cb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/pool/impl.py @@ -0,0 +1,581 @@ +# pool/impl.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + + +"""Pool implementation classes. + +""" +from __future__ import annotations + +import threading +import traceback +import typing +from typing import Any +from typing import cast +from typing import List +from typing import Optional +from typing import Set +from typing import Type +from typing import TYPE_CHECKING +from typing import Union +import weakref + +from .base import _AsyncConnDialect +from .base import _ConnectionFairy +from .base import _ConnectionRecord +from .base import _CreatorFnType +from .base import _CreatorWRecFnType +from .base import ConnectionPoolEntry +from .base import Pool +from .base import PoolProxiedConnection +from .. import exc +from .. import util +from ..util import chop_traceback +from ..util import queue as sqla_queue +from ..util.typing import Literal + +if typing.TYPE_CHECKING: + from ..engine.interfaces import DBAPIConnection + + +class QueuePool(Pool): + """A :class:`_pool.Pool` + that imposes a limit on the number of open connections. + + :class:`.QueuePool` is the default pooling implementation used for + all :class:`_engine.Engine` objects other than SQLite with a ``:memory:`` + database. + + The :class:`.QueuePool` class **is not compatible** with asyncio and + :func:`_asyncio.create_async_engine`. The + :class:`.AsyncAdaptedQueuePool` class is used automatically when + using :func:`_asyncio.create_async_engine`, if no other kind of pool + is specified. + + .. seealso:: + + :class:`.AsyncAdaptedQueuePool` + + """ + + _is_asyncio = False # type: ignore[assignment] + + _queue_class: Type[sqla_queue.QueueCommon[ConnectionPoolEntry]] = ( + sqla_queue.Queue + ) + + _pool: sqla_queue.QueueCommon[ConnectionPoolEntry] + + def __init__( + self, + creator: Union[_CreatorFnType, _CreatorWRecFnType], + pool_size: int = 5, + max_overflow: int = 10, + timeout: float = 30.0, + use_lifo: bool = False, + **kw: Any, + ): + r""" + Construct a QueuePool. + + :param creator: a callable function that returns a DB-API + connection object, same as that of :paramref:`_pool.Pool.creator`. + + :param pool_size: The size of the pool to be maintained, + defaults to 5. This is the largest number of connections that + will be kept persistently in the pool. Note that the pool + begins with no connections; once this number of connections + is requested, that number of connections will remain. + ``pool_size`` can be set to 0 to indicate no size limit; to + disable pooling, use a :class:`~sqlalchemy.pool.NullPool` + instead. + + :param max_overflow: The maximum overflow size of the + pool. 
When the number of checked-out connections reaches the + size set in pool_size, additional connections will be + returned up to this limit. When those additional connections + are returned to the pool, they are disconnected and + discarded. It follows then that the total number of + simultaneous connections the pool will allow is pool_size + + `max_overflow`, and the total number of "sleeping" + connections the pool will allow is pool_size. `max_overflow` + can be set to -1 to indicate no overflow limit; no limit + will be placed on the total number of concurrent + connections. Defaults to 10. + + :param timeout: The number of seconds to wait before giving up + on returning a connection. Defaults to 30.0. This can be a float + but is subject to the limitations of Python time functions which + may not be reliable in the tens of milliseconds. + + :param use_lifo: use LIFO (last-in-first-out) when retrieving + connections instead of FIFO (first-in-first-out). Using LIFO, a + server-side timeout scheme can reduce the number of connections used + during non-peak periods of use. When planning for server-side + timeouts, ensure that a recycle or pre-ping strategy is in use to + gracefully handle stale connections. + + .. versionadded:: 1.3 + + .. seealso:: + + :ref:`pool_use_lifo` + + :ref:`pool_disconnects` + + :param \**kw: Other keyword arguments including + :paramref:`_pool.Pool.recycle`, :paramref:`_pool.Pool.echo`, + :paramref:`_pool.Pool.reset_on_return` and others are passed to the + :class:`_pool.Pool` constructor. + + """ + + Pool.__init__(self, creator, **kw) + self._pool = self._queue_class(pool_size, use_lifo=use_lifo) + self._overflow = 0 - pool_size + self._max_overflow = -1 if pool_size == 0 else max_overflow + self._timeout = timeout + self._overflow_lock = threading.Lock() + + def _do_return_conn(self, record: ConnectionPoolEntry) -> None: + try: + self._pool.put(record, False) + except sqla_queue.Full: + try: + record.close() + finally: + self._dec_overflow() + + def _do_get(self) -> ConnectionPoolEntry: + use_overflow = self._max_overflow > -1 + + wait = use_overflow and self._overflow >= self._max_overflow + try: + return self._pool.get(wait, self._timeout) + except sqla_queue.Empty: + # don't do things inside of "except Empty", because when we say + # we timed out or can't connect and raise, Python 3 tells + # people the real error is queue.Empty which it isn't. 
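+            # (fall through and re-check the overflow counters below:
+            # either retry the queue, raise TimeoutError, or open an
+            # overflow connection)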
+ pass + if use_overflow and self._overflow >= self._max_overflow: + if not wait: + return self._do_get() + else: + raise exc.TimeoutError( + "QueuePool limit of size %d overflow %d reached, " + "connection timed out, timeout %0.2f" + % (self.size(), self.overflow(), self._timeout), + code="3o7r", + ) + + if self._inc_overflow(): + try: + return self._create_connection() + except: + with util.safe_reraise(): + self._dec_overflow() + raise + else: + return self._do_get() + + def _inc_overflow(self) -> bool: + if self._max_overflow == -1: + self._overflow += 1 + return True + with self._overflow_lock: + if self._overflow < self._max_overflow: + self._overflow += 1 + return True + else: + return False + + def _dec_overflow(self) -> Literal[True]: + if self._max_overflow == -1: + self._overflow -= 1 + return True + with self._overflow_lock: + self._overflow -= 1 + return True + + def recreate(self) -> QueuePool: + self.logger.info("Pool recreating") + return self.__class__( + self._creator, + pool_size=self._pool.maxsize, + max_overflow=self._max_overflow, + pre_ping=self._pre_ping, + use_lifo=self._pool.use_lifo, + timeout=self._timeout, + recycle=self._recycle, + echo=self.echo, + logging_name=self._orig_logging_name, + reset_on_return=self._reset_on_return, + _dispatch=self.dispatch, + dialect=self._dialect, + ) + + def dispose(self) -> None: + while True: + try: + conn = self._pool.get(False) + conn.close() + except sqla_queue.Empty: + break + + self._overflow = 0 - self.size() + self.logger.info("Pool disposed. %s", self.status()) + + def status(self) -> str: + return ( + "Pool size: %d Connections in pool: %d " + "Current Overflow: %d Current Checked out " + "connections: %d" + % ( + self.size(), + self.checkedin(), + self.overflow(), + self.checkedout(), + ) + ) + + def size(self) -> int: + return self._pool.maxsize + + def timeout(self) -> float: + return self._timeout + + def checkedin(self) -> int: + return self._pool.qsize() + + def overflow(self) -> int: + return self._overflow if self._pool.maxsize else 0 + + def checkedout(self) -> int: + return self._pool.maxsize - self._pool.qsize() + self._overflow + + +class AsyncAdaptedQueuePool(QueuePool): + """An asyncio-compatible version of :class:`.QueuePool`. + + This pool is used by default when using :class:`.AsyncEngine` engines that + were generated from :func:`_asyncio.create_async_engine`. It uses an + asyncio-compatible queue implementation that does not use + ``threading.Lock``. + + The arguments and operation of :class:`.AsyncAdaptedQueuePool` are + otherwise identical to that of :class:`.QueuePool`. + + """ + + _is_asyncio = True # type: ignore[assignment] + _queue_class: Type[sqla_queue.QueueCommon[ConnectionPoolEntry]] = ( + sqla_queue.AsyncAdaptedQueue + ) + + _dialect = _AsyncConnDialect() + + +class FallbackAsyncAdaptedQueuePool(AsyncAdaptedQueuePool): + _queue_class = sqla_queue.FallbackAsyncAdaptedQueue + + +class NullPool(Pool): + """A Pool which does not pool connections. + + Instead it literally opens and closes the underlying DB-API connection + per each connection open/close. + + Reconnect-related functions such as ``recycle`` and connection + invalidation are not supported by this Pool implementation, since + no connections are held persistently. + + The :class:`.NullPool` class **is compatible** with asyncio and + :func:`_asyncio.create_async_engine`. 
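+
+    For illustration, a minimal sketch of selecting this pool explicitly;
+    ``poolclass`` is the standard :func:`_sa.create_engine` parameter::
+
+        from sqlalchemy import create_engine
+        from sqlalchemy.pool import NullPool
+
+        engine = create_engine(
+            "postgresql+psycopg2://scott:tiger@localhost/test",
+            poolclass=NullPool,
+        )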
+ + """ + + def status(self) -> str: + return "NullPool" + + def _do_return_conn(self, record: ConnectionPoolEntry) -> None: + record.close() + + def _do_get(self) -> ConnectionPoolEntry: + return self._create_connection() + + def recreate(self) -> NullPool: + self.logger.info("Pool recreating") + + return self.__class__( + self._creator, + recycle=self._recycle, + echo=self.echo, + logging_name=self._orig_logging_name, + reset_on_return=self._reset_on_return, + pre_ping=self._pre_ping, + _dispatch=self.dispatch, + dialect=self._dialect, + ) + + def dispose(self) -> None: + pass + + +class SingletonThreadPool(Pool): + """A Pool that maintains one connection per thread. + + Maintains one connection per each thread, never moving a connection to a + thread other than the one which it was created in. + + .. warning:: the :class:`.SingletonThreadPool` will call ``.close()`` + on arbitrary connections that exist beyond the size setting of + ``pool_size``, e.g. if more unique **thread identities** + than what ``pool_size`` states are used. This cleanup is + non-deterministic and not sensitive to whether or not the connections + linked to those thread identities are currently in use. + + :class:`.SingletonThreadPool` may be improved in a future release, + however in its current status it is generally used only for test + scenarios using a SQLite ``:memory:`` database and is not recommended + for production use. + + The :class:`.SingletonThreadPool` class **is not compatible** with asyncio + and :func:`_asyncio.create_async_engine`. + + + Options are the same as those of :class:`_pool.Pool`, as well as: + + :param pool_size: The number of threads in which to maintain connections + at once. Defaults to five. + + :class:`.SingletonThreadPool` is used by the SQLite dialect + automatically when a memory-based database is used. + See :ref:`sqlite_toplevel`. 
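+
+    For illustration, a minimal sketch; ``pool_size`` is forwarded to this
+    pool's constructor by :func:`_sa.create_engine`::
+
+        from sqlalchemy import create_engine
+        from sqlalchemy.pool import SingletonThreadPool
+
+        engine = create_engine(
+            "sqlite://",
+            poolclass=SingletonThreadPool,
+            pool_size=5,
+        )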
+ + """ + + _is_asyncio = False # type: ignore[assignment] + + def __init__( + self, + creator: Union[_CreatorFnType, _CreatorWRecFnType], + pool_size: int = 5, + **kw: Any, + ): + Pool.__init__(self, creator, **kw) + self._conn = threading.local() + self._fairy = threading.local() + self._all_conns: Set[ConnectionPoolEntry] = set() + self.size = pool_size + + def recreate(self) -> SingletonThreadPool: + self.logger.info("Pool recreating") + return self.__class__( + self._creator, + pool_size=self.size, + recycle=self._recycle, + echo=self.echo, + pre_ping=self._pre_ping, + logging_name=self._orig_logging_name, + reset_on_return=self._reset_on_return, + _dispatch=self.dispatch, + dialect=self._dialect, + ) + + def dispose(self) -> None: + """Dispose of this pool.""" + + for conn in self._all_conns: + try: + conn.close() + except Exception: + # pysqlite won't even let you close a conn from a thread + # that didn't create it + pass + + self._all_conns.clear() + + def _cleanup(self) -> None: + while len(self._all_conns) >= self.size: + c = self._all_conns.pop() + c.close() + + def status(self) -> str: + return "SingletonThreadPool id:%d size: %d" % ( + id(self), + len(self._all_conns), + ) + + def _do_return_conn(self, record: ConnectionPoolEntry) -> None: + try: + del self._fairy.current + except AttributeError: + pass + + def _do_get(self) -> ConnectionPoolEntry: + try: + if TYPE_CHECKING: + c = cast(ConnectionPoolEntry, self._conn.current()) + else: + c = self._conn.current() + if c: + return c + except AttributeError: + pass + c = self._create_connection() + self._conn.current = weakref.ref(c) + if len(self._all_conns) >= self.size: + self._cleanup() + self._all_conns.add(c) + return c + + def connect(self) -> PoolProxiedConnection: + # vendored from Pool to include the now removed use_threadlocal + # behavior + try: + rec = cast(_ConnectionFairy, self._fairy.current()) + except AttributeError: + pass + else: + if rec is not None: + return rec._checkout_existing() + + return _ConnectionFairy._checkout(self, self._fairy) + + +class StaticPool(Pool): + """A Pool of exactly one connection, used for all requests. + + Reconnect-related functions such as ``recycle`` and connection + invalidation (which is also used to support auto-reconnect) are only + partially supported right now and may not yield good results. + + The :class:`.StaticPool` class **is compatible** with asyncio and + :func:`_asyncio.create_async_engine`. 
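+
+    For illustration, a minimal sketch of the common pattern of sharing a
+    single SQLite ``:memory:`` connection across threads; ``connect_args``
+    is the standard :func:`_sa.create_engine` parameter::
+
+        from sqlalchemy import create_engine
+        from sqlalchemy.pool import StaticPool
+
+        engine = create_engine(
+            "sqlite://",
+            connect_args={"check_same_thread": False},
+            poolclass=StaticPool,
+        )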
+ + """ + + @util.memoized_property + def connection(self) -> _ConnectionRecord: + return _ConnectionRecord(self) + + def status(self) -> str: + return "StaticPool" + + def dispose(self) -> None: + if ( + "connection" in self.__dict__ + and self.connection.dbapi_connection is not None + ): + self.connection.close() + del self.__dict__["connection"] + + def recreate(self) -> StaticPool: + self.logger.info("Pool recreating") + return self.__class__( + creator=self._creator, + recycle=self._recycle, + reset_on_return=self._reset_on_return, + pre_ping=self._pre_ping, + echo=self.echo, + logging_name=self._orig_logging_name, + _dispatch=self.dispatch, + dialect=self._dialect, + ) + + def _transfer_from(self, other_static_pool: StaticPool) -> None: + # used by the test suite to make a new engine / pool without + # losing the state of an existing SQLite :memory: connection + def creator(rec: ConnectionPoolEntry) -> DBAPIConnection: + conn = other_static_pool.connection.dbapi_connection + assert conn is not None + return conn + + self._invoke_creator = creator + + def _create_connection(self) -> ConnectionPoolEntry: + raise NotImplementedError() + + def _do_return_conn(self, record: ConnectionPoolEntry) -> None: + pass + + def _do_get(self) -> ConnectionPoolEntry: + rec = self.connection + if rec._is_hard_or_soft_invalidated(): + del self.__dict__["connection"] + rec = self.connection + + return rec + + +class AssertionPool(Pool): + """A :class:`_pool.Pool` that allows at most one checked out connection at + any given time. + + This will raise an exception if more than one connection is checked out + at a time. Useful for debugging code that is using more connections + than desired. + + The :class:`.AssertionPool` class **is compatible** with asyncio and + :func:`_asyncio.create_async_engine`. 
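+
+    For illustration, a minimal sketch; the second checkout raises, which is
+    the debugging behavior this pool provides::
+
+        from sqlalchemy import create_engine
+        from sqlalchemy.pool import AssertionPool
+
+        engine = create_engine("sqlite://", poolclass=AssertionPool)
+
+        c1 = engine.connect()
+        c2 = engine.connect()  # raises AssertionError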
+ + """ + + _conn: Optional[ConnectionPoolEntry] + _checkout_traceback: Optional[List[str]] + + def __init__(self, *args: Any, **kw: Any): + self._conn = None + self._checked_out = False + self._store_traceback = kw.pop("store_traceback", True) + self._checkout_traceback = None + Pool.__init__(self, *args, **kw) + + def status(self) -> str: + return "AssertionPool" + + def _do_return_conn(self, record: ConnectionPoolEntry) -> None: + if not self._checked_out: + raise AssertionError("connection is not checked out") + self._checked_out = False + assert record is self._conn + + def dispose(self) -> None: + self._checked_out = False + if self._conn: + self._conn.close() + + def recreate(self) -> AssertionPool: + self.logger.info("Pool recreating") + return self.__class__( + self._creator, + echo=self.echo, + pre_ping=self._pre_ping, + recycle=self._recycle, + reset_on_return=self._reset_on_return, + logging_name=self._orig_logging_name, + _dispatch=self.dispatch, + dialect=self._dialect, + ) + + def _do_get(self) -> ConnectionPoolEntry: + if self._checked_out: + if self._checkout_traceback: + suffix = " at:\n%s" % "".join( + chop_traceback(self._checkout_traceback) + ) + else: + suffix = "" + raise AssertionError("connection is already checked out" + suffix) + + if not self._conn: + self._conn = self._create_connection() + + self._checked_out = True + if self._store_traceback: + self._checkout_traceback = traceback.format_stack() + return self._conn diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/py.typed b/.venv/lib/python3.12/site-packages/sqlalchemy/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/schema.py b/.venv/lib/python3.12/site-packages/sqlalchemy/schema.py new file mode 100644 index 00000000..9edca4e5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/schema.py @@ -0,0 +1,70 @@ +# schema.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Compatibility namespace for sqlalchemy.sql.schema and related. 
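+
+For example, long-standing names remain importable from this namespace::
+
+    from sqlalchemy.schema import Column, MetaData, Table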
+ +""" + +from __future__ import annotations + +from .sql.base import SchemaVisitor as SchemaVisitor +from .sql.ddl import _CreateDropBase as _CreateDropBase +from .sql.ddl import _DropView as _DropView +from .sql.ddl import AddConstraint as AddConstraint +from .sql.ddl import BaseDDLElement as BaseDDLElement +from .sql.ddl import CreateColumn as CreateColumn +from .sql.ddl import CreateIndex as CreateIndex +from .sql.ddl import CreateSchema as CreateSchema +from .sql.ddl import CreateSequence as CreateSequence +from .sql.ddl import CreateTable as CreateTable +from .sql.ddl import DDL as DDL +from .sql.ddl import DDLElement as DDLElement +from .sql.ddl import DropColumnComment as DropColumnComment +from .sql.ddl import DropConstraint as DropConstraint +from .sql.ddl import DropConstraintComment as DropConstraintComment +from .sql.ddl import DropIndex as DropIndex +from .sql.ddl import DropSchema as DropSchema +from .sql.ddl import DropSequence as DropSequence +from .sql.ddl import DropTable as DropTable +from .sql.ddl import DropTableComment as DropTableComment +from .sql.ddl import ExecutableDDLElement as ExecutableDDLElement +from .sql.ddl import InvokeDDLBase as InvokeDDLBase +from .sql.ddl import SetColumnComment as SetColumnComment +from .sql.ddl import SetConstraintComment as SetConstraintComment +from .sql.ddl import SetTableComment as SetTableComment +from .sql.ddl import sort_tables as sort_tables +from .sql.ddl import ( + sort_tables_and_constraints as sort_tables_and_constraints, +) +from .sql.naming import conv as conv +from .sql.schema import _get_table_key as _get_table_key +from .sql.schema import BLANK_SCHEMA as BLANK_SCHEMA +from .sql.schema import CheckConstraint as CheckConstraint +from .sql.schema import Column as Column +from .sql.schema import ( + ColumnCollectionConstraint as ColumnCollectionConstraint, +) +from .sql.schema import ColumnCollectionMixin as ColumnCollectionMixin +from .sql.schema import ColumnDefault as ColumnDefault +from .sql.schema import Computed as Computed +from .sql.schema import Constraint as Constraint +from .sql.schema import DefaultClause as DefaultClause +from .sql.schema import DefaultGenerator as DefaultGenerator +from .sql.schema import FetchedValue as FetchedValue +from .sql.schema import ForeignKey as ForeignKey +from .sql.schema import ForeignKeyConstraint as ForeignKeyConstraint +from .sql.schema import HasConditionalDDL as HasConditionalDDL +from .sql.schema import Identity as Identity +from .sql.schema import Index as Index +from .sql.schema import insert_sentinel as insert_sentinel +from .sql.schema import MetaData as MetaData +from .sql.schema import PrimaryKeyConstraint as PrimaryKeyConstraint +from .sql.schema import SchemaConst as SchemaConst +from .sql.schema import SchemaItem as SchemaItem +from .sql.schema import Sequence as Sequence +from .sql.schema import Table as Table +from .sql.schema import UniqueConstraint as UniqueConstraint diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/__init__.py new file mode 100644 index 00000000..9e0d2ca2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/__init__.py @@ -0,0 +1,145 @@ +# sql/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from typing import Any +from typing import TYPE_CHECKING + +from ._typing import 
ColumnExpressionArgument as ColumnExpressionArgument +from ._typing import NotNullable as NotNullable +from ._typing import Nullable as Nullable +from .base import Executable as Executable +from .compiler import COLLECT_CARTESIAN_PRODUCTS as COLLECT_CARTESIAN_PRODUCTS +from .compiler import FROM_LINTING as FROM_LINTING +from .compiler import NO_LINTING as NO_LINTING +from .compiler import WARN_LINTING as WARN_LINTING +from .ddl import BaseDDLElement as BaseDDLElement +from .ddl import DDL as DDL +from .ddl import DDLElement as DDLElement +from .ddl import ExecutableDDLElement as ExecutableDDLElement +from .expression import Alias as Alias +from .expression import alias as alias +from .expression import all_ as all_ +from .expression import and_ as and_ +from .expression import any_ as any_ +from .expression import asc as asc +from .expression import between as between +from .expression import bindparam as bindparam +from .expression import case as case +from .expression import cast as cast +from .expression import ClauseElement as ClauseElement +from .expression import collate as collate +from .expression import column as column +from .expression import ColumnCollection as ColumnCollection +from .expression import ColumnElement as ColumnElement +from .expression import CompoundSelect as CompoundSelect +from .expression import cte as cte +from .expression import Delete as Delete +from .expression import delete as delete +from .expression import desc as desc +from .expression import distinct as distinct +from .expression import except_ as except_ +from .expression import except_all as except_all +from .expression import exists as exists +from .expression import extract as extract +from .expression import false as false +from .expression import False_ as False_ +from .expression import FromClause as FromClause +from .expression import func as func +from .expression import funcfilter as funcfilter +from .expression import Insert as Insert +from .expression import insert as insert +from .expression import intersect as intersect +from .expression import intersect_all as intersect_all +from .expression import Join as Join +from .expression import join as join +from .expression import label as label +from .expression import LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT +from .expression import ( + LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY, +) +from .expression import LABEL_STYLE_NONE as LABEL_STYLE_NONE +from .expression import ( + LABEL_STYLE_TABLENAME_PLUS_COL as LABEL_STYLE_TABLENAME_PLUS_COL, +) +from .expression import lambda_stmt as lambda_stmt +from .expression import LambdaElement as LambdaElement +from .expression import lateral as lateral +from .expression import literal as literal +from .expression import literal_column as literal_column +from .expression import modifier as modifier +from .expression import not_ as not_ +from .expression import null as null +from .expression import nulls_first as nulls_first +from .expression import nulls_last as nulls_last +from .expression import nullsfirst as nullsfirst +from .expression import nullslast as nullslast +from .expression import or_ as or_ +from .expression import outerjoin as outerjoin +from .expression import outparam as outparam +from .expression import over as over +from .expression import quoted_name as quoted_name +from .expression import Select as Select +from .expression import select as select +from .expression import Selectable as Selectable +from .expression import SelectLabelStyle as SelectLabelStyle +from 
.expression import SQLColumnExpression as SQLColumnExpression
+from .expression import StatementLambdaElement as StatementLambdaElement
+from .expression import Subquery as Subquery
+from .expression import table as table
+from .expression import TableClause as TableClause
+from .expression import TableSample as TableSample
+from .expression import tablesample as tablesample
+from .expression import text as text
+from .expression import true as true
+from .expression import True_ as True_
+from .expression import try_cast as try_cast
+from .expression import tuple_ as tuple_
+from .expression import type_coerce as type_coerce
+from .expression import union as union
+from .expression import union_all as union_all
+from .expression import Update as Update
+from .expression import update as update
+from .expression import Values as Values
+from .expression import values as values
+from .expression import within_group as within_group
+from .visitors import ClauseVisitor as ClauseVisitor
+
+
+def __go(lcls: Any) -> None:
+    from .. import util as _sa_util
+
+    from . import base
+    from . import coercions
+    from . import elements
+    from . import lambdas
+    from . import selectable
+    from . import schema
+    from . import traversals
+    from . import type_api
+
+    if not TYPE_CHECKING:
+        base.coercions = elements.coercions = coercions
+        base.elements = elements
+        base.type_api = type_api
+        coercions.elements = elements
+        coercions.lambdas = lambdas
+        coercions.schema = schema
+        coercions.selectable = selectable
+
+    from .annotation import _prepare_annotations
+    from .annotation import Annotated
+    from .elements import AnnotatedColumnElement
+    from .elements import ClauseList
+    from .selectable import AnnotatedFromClause
+
+    _prepare_annotations(ColumnElement, AnnotatedColumnElement)
+    _prepare_annotations(FromClause, AnnotatedFromClause)
+    _prepare_annotations(ClauseList, Annotated)
+
+    _sa_util.preloaded.import_prefix("sqlalchemy.sql")
+
+
+__go(locals())
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_dml_constructors.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_dml_constructors.py
new file mode 100644
index 00000000..a7ead521
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_dml_constructors.py
@@ -0,0 +1,140 @@
+# sql/_dml_constructors.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from .dml import Delete
+from .dml import Insert
+from .dml import Update
+
+if TYPE_CHECKING:
+    from ._typing import _DMLTableArgument
+
+
+def insert(table: _DMLTableArgument) -> Insert:
+    """Construct an :class:`_expression.Insert` object.
+ + E.g.:: + + from sqlalchemy import insert + + stmt = ( + insert(user_table). + values(name='username', fullname='Full Username') + ) + + Similar functionality is available via the + :meth:`_expression.TableClause.insert` method on + :class:`_schema.Table`. + + .. seealso:: + + :ref:`tutorial_core_insert` - in the :ref:`unified_tutorial` + + + :param table: :class:`_expression.TableClause` + which is the subject of the + insert. + + :param values: collection of values to be inserted; see + :meth:`_expression.Insert.values` + for a description of allowed formats here. + Can be omitted entirely; a :class:`_expression.Insert` construct + will also dynamically render the VALUES clause at execution time + based on the parameters passed to :meth:`_engine.Connection.execute`. + + :param inline: if True, no attempt will be made to retrieve the + SQL-generated default values to be provided within the statement; + in particular, + this allows SQL expressions to be rendered 'inline' within the + statement without the need to pre-execute them beforehand; for + backends that support "returning", this turns off the "implicit + returning" feature for the statement. + + If both :paramref:`_expression.insert.values` and compile-time bind + parameters are present, the compile-time bind parameters override the + information specified within :paramref:`_expression.insert.values` on a + per-key basis. + + The keys within :paramref:`_expression.Insert.values` can be either + :class:`~sqlalchemy.schema.Column` objects or their string + identifiers. Each key may reference one of: + + * a literal data value (i.e. string, number, etc.); + * a Column object; + * a SELECT statement. + + If a ``SELECT`` statement is specified which references this + ``INSERT`` statement's table, the statement will be correlated + against the ``INSERT`` statement. + + .. seealso:: + + :ref:`tutorial_core_insert` - in the :ref:`unified_tutorial` + + """ + return Insert(table) + + +def update(table: _DMLTableArgument) -> Update: + r"""Construct an :class:`_expression.Update` object. + + E.g.:: + + from sqlalchemy import update + + stmt = ( + update(user_table). + where(user_table.c.id == 5). + values(name='user #5') + ) + + Similar functionality is available via the + :meth:`_expression.TableClause.update` method on + :class:`_schema.Table`. + + :param table: A :class:`_schema.Table` + object representing the database + table to be updated. + + + .. seealso:: + + :ref:`tutorial_core_update_delete` - in the :ref:`unified_tutorial` + + + """ + return Update(table) + + +def delete(table: _DMLTableArgument) -> Delete: + r"""Construct :class:`_expression.Delete` object. + + E.g.:: + + from sqlalchemy import delete + + stmt = ( + delete(user_table). + where(user_table.c.id == 5) + ) + + Similar functionality is available via the + :meth:`_expression.TableClause.delete` method on + :class:`_schema.Table`. + + :param table: The table to delete rows from. + + .. 
seealso::
+
+        :ref:`tutorial_core_update_delete` - in the :ref:`unified_tutorial`
+
+
+    """
+    return Delete(table)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_elements_constructors.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_elements_constructors.py
new file mode 100644
index 00000000..51d8ac39
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_elements_constructors.py
@@ -0,0 +1,1847 @@
+# sql/_elements_constructors.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+import typing
+from typing import Any
+from typing import Callable
+from typing import Mapping
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple as typing_Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import coercions
+from . import roles
+from .base import _NoArg
+from .coercions import _document_text_coercion
+from .elements import BindParameter
+from .elements import BooleanClauseList
+from .elements import Case
+from .elements import Cast
+from .elements import CollationClause
+from .elements import CollectionAggregate
+from .elements import ColumnClause
+from .elements import ColumnElement
+from .elements import Extract
+from .elements import False_
+from .elements import FunctionFilter
+from .elements import Label
+from .elements import Null
+from .elements import Over
+from .elements import TextClause
+from .elements import True_
+from .elements import TryCast
+from .elements import Tuple
+from .elements import TypeCoerce
+from .elements import UnaryExpression
+from .elements import WithinGroup
+from .functions import FunctionElement
+from ..util.typing import Literal
+
+if typing.TYPE_CHECKING:
+    from ._typing import _ByArgument
+    from ._typing import _ColumnExpressionArgument
+    from ._typing import _ColumnExpressionOrLiteralArgument
+    from ._typing import _ColumnExpressionOrStrLabelArgument
+    from ._typing import _TypeEngineArgument
+    from .elements import BinaryExpression
+    from .selectable import FromClause
+    from .type_api import TypeEngine
+
+_T = TypeVar("_T")
+
+
+def all_(expr: _ColumnExpressionArgument[_T]) -> CollectionAggregate[bool]:
+    """Produce an ALL expression.
+
+    For dialects such as that of PostgreSQL, this operator applies
+    to usage of the :class:`_types.ARRAY` datatype, for that of
+    MySQL, it may apply to a subquery. e.g.::
+
+        # renders on PostgreSQL:
+        # '5 = ALL (somearray)'
+        expr = 5 == all_(mytable.c.somearray)
+
+        # renders on MySQL:
+        # '5 = ALL (SELECT value FROM table)'
+        expr = 5 == all_(select(table.c.value))
+
+    Comparison to NULL may work using ``None``::
+
+        None == all_(mytable.c.somearray)
+
+    The any_() / all_() operators also feature a special "operand flipping"
+    behavior such that if any_() / all_() are used on the left side of a
+    comparison using a standalone operator such as ``==``, ``!=``, etc.
+    (not including operator methods such as
+    :meth:`_sql.ColumnOperators.is_`) the rendered expression is flipped::
+
+        # would render '5 = ALL (column)'
+        all_(mytable.c.column) == 5
+
+    Or with ``None``, which note will not perform
+    the usual step of rendering "IS" as is normally the case for NULL::
+
+        # would render 'NULL = ALL(somearray)'
+        all_(mytable.c.somearray) == None
+
+    ..
versionchanged:: 1.4.26 repaired the use of any_() / all_() + comparing to NULL on the right side to be flipped to the left. + + The column-level :meth:`_sql.ColumnElement.all_` method (not to be + confused with :class:`_types.ARRAY` level + :meth:`_types.ARRAY.Comparator.all`) is shorthand for + ``all_(col)``:: + + 5 == mytable.c.somearray.all_() + + .. seealso:: + + :meth:`_sql.ColumnOperators.all_` + + :func:`_expression.any_` + + """ + return CollectionAggregate._create_all(expr) + + +def and_( # type: ignore[empty-body] + initial_clause: Union[Literal[True], _ColumnExpressionArgument[bool]], + *clauses: _ColumnExpressionArgument[bool], +) -> ColumnElement[bool]: + r"""Produce a conjunction of expressions joined by ``AND``. + + E.g.:: + + from sqlalchemy import and_ + + stmt = select(users_table).where( + and_( + users_table.c.name == 'wendy', + users_table.c.enrolled == True + ) + ) + + The :func:`.and_` conjunction is also available using the + Python ``&`` operator (though note that compound expressions + need to be parenthesized in order to function with Python + operator precedence behavior):: + + stmt = select(users_table).where( + (users_table.c.name == 'wendy') & + (users_table.c.enrolled == True) + ) + + The :func:`.and_` operation is also implicit in some cases; + the :meth:`_expression.Select.where` + method for example can be invoked multiple + times against a statement, which will have the effect of each + clause being combined using :func:`.and_`:: + + stmt = select(users_table).\ + where(users_table.c.name == 'wendy').\ + where(users_table.c.enrolled == True) + + The :func:`.and_` construct must be given at least one positional + argument in order to be valid; a :func:`.and_` construct with no + arguments is ambiguous. To produce an "empty" or dynamically + generated :func:`.and_` expression, from a given list of expressions, + a "default" element of :func:`_sql.true` (or just ``True``) should be + specified:: + + from sqlalchemy import true + criteria = and_(true(), *expressions) + + The above expression will compile to SQL as the expression ``true`` + or ``1 = 1``, depending on backend, if no other expressions are + present. If expressions are present, then the :func:`_sql.true` value is + ignored as it does not affect the outcome of an AND expression that + has other elements. + + .. deprecated:: 1.4 The :func:`.and_` element now requires that at + least one argument is passed; creating the :func:`.and_` construct + with no arguments is deprecated, and will emit a deprecation warning + while continuing to produce a blank SQL string. + + .. seealso:: + + :func:`.or_` + + """ + ... + + +if not TYPE_CHECKING: + # handle deprecated case which allows zero-arguments + def and_(*clauses): # noqa: F811 + r"""Produce a conjunction of expressions joined by ``AND``. 
+
+        E.g.::
+
+            from sqlalchemy import and_
+
+            stmt = select(users_table).where(
+                and_(
+                    users_table.c.name == 'wendy',
+                    users_table.c.enrolled == True
+                )
+            )
+
+        The :func:`.and_` conjunction is also available using the
+        Python ``&`` operator (though note that compound expressions
+        need to be parenthesized in order to function with Python
+        operator precedence behavior)::
+
+            stmt = select(users_table).where(
+                (users_table.c.name == 'wendy') &
+                (users_table.c.enrolled == True)
+            )
+
+        The :func:`.and_` operation is also implicit in some cases;
+        the :meth:`_expression.Select.where`
+        method for example can be invoked multiple
+        times against a statement, which will have the effect of each
+        clause being combined using :func:`.and_`::
+
+            stmt = select(users_table).\
+                where(users_table.c.name == 'wendy').\
+                where(users_table.c.enrolled == True)
+
+        The :func:`.and_` construct must be given at least one positional
+        argument in order to be valid; a :func:`.and_` construct with no
+        arguments is ambiguous. To produce an "empty" or dynamically
+        generated :func:`.and_` expression, from a given list of
+        expressions, a "default" element of :func:`_sql.true` (or just
+        ``True``) should be specified::
+
+            from sqlalchemy import true
+            criteria = and_(true(), *expressions)
+
+        The above expression will compile to SQL as the expression ``true``
+        or ``1 = 1``, depending on backend, if no other expressions are
+        present. If expressions are present, then the :func:`_sql.true` value
+        is ignored as it does not affect the outcome of an AND expression that
+        has other elements.
+
+        .. deprecated:: 1.4 The :func:`.and_` element now requires that at
+           least one argument is passed; creating the :func:`.and_` construct
+           with no arguments is deprecated, and will emit a deprecation
+           warning while continuing to produce a blank SQL string.
+
+        .. seealso::
+
+            :func:`.or_`
+
+        """
+        return BooleanClauseList.and_(*clauses)
+
+
+def any_(expr: _ColumnExpressionArgument[_T]) -> CollectionAggregate[bool]:
+    """Produce an ANY expression.
+
+    For dialects such as that of PostgreSQL, this operator applies
+    to usage of the :class:`_types.ARRAY` datatype, for that of
+    MySQL, it may apply to a subquery. e.g.::
+
+        # renders on PostgreSQL:
+        # '5 = ANY (somearray)'
+        expr = 5 == any_(mytable.c.somearray)
+
+        # renders on MySQL:
+        # '5 = ANY (SELECT value FROM table)'
+        expr = 5 == any_(select(table.c.value))
+
+    Comparison to NULL may work using ``None`` or :func:`_sql.null`::
+
+        None == any_(mytable.c.somearray)
+
+    The any_() / all_() operators also feature a special "operand flipping"
+    behavior such that if any_() / all_() are used on the left side of a
+    comparison using a standalone operator such as ``==``, ``!=``, etc.
+    (not including operator methods such as
+    :meth:`_sql.ColumnOperators.is_`) the rendered expression is flipped::
+
+        # would render '5 = ANY (column)'
+        any_(mytable.c.column) == 5
+
+    Or with ``None``, which note will not perform
+    the usual step of rendering "IS" as is normally the case for NULL::
+
+        # would render 'NULL = ANY(somearray)'
+        any_(mytable.c.somearray) == None
+
+    .. versionchanged:: 1.4.26 repaired the use of any_() / all_()
+       comparing to NULL on the right side to be flipped to the left.
+
+    The column-level :meth:`_sql.ColumnElement.any_` method (not to be
+    confused with :class:`_types.ARRAY` level
+    :meth:`_types.ARRAY.Comparator.any`) is shorthand for
+    ``any_(col)``::
+
+        5 == mytable.c.somearray.any_()
+
+    .. seealso::
+
+        :meth:`_sql.ColumnOperators.any_`
+
+        :func:`_expression.all_`
+
+    """
+    return CollectionAggregate._create_any(expr)
+
+
+def asc(
+    column: _ColumnExpressionOrStrLabelArgument[_T],
+) -> UnaryExpression[_T]:
+    """Produce an ascending ``ORDER BY`` clause element.
+
+    e.g.::
+
+        from sqlalchemy import asc
+        stmt = select(users_table).order_by(asc(users_table.c.name))
+
+    will produce SQL as::
+
+        SELECT id, name FROM user ORDER BY name ASC
+
+    The :func:`.asc` function is a standalone version of the
+    :meth:`_expression.ColumnElement.asc`
+    method available on all SQL expressions,
+    e.g.::
+
+
+        stmt = select(users_table).order_by(users_table.c.name.asc())
+
+    :param column: A :class:`_expression.ColumnElement` (e.g.
+     scalar SQL expression)
+     with which to apply the :func:`.asc` operation.
+
+    .. seealso::
+
+        :func:`.desc`
+
+        :func:`.nulls_first`
+
+        :func:`.nulls_last`
+
+        :meth:`_expression.Select.order_by`
+
+    """
+    return UnaryExpression._create_asc(column)
+
+
+def collate(
+    expression: _ColumnExpressionArgument[str], collation: str
+) -> BinaryExpression[str]:
+    """Return the clause ``expression COLLATE collation``.
+
+    e.g.::
+
+        collate(mycolumn, 'utf8_bin')
+
+    produces::
+
+        mycolumn COLLATE utf8_bin
+
+    The collation expression is also quoted if it is a case sensitive
+    identifier, e.g. contains uppercase characters.
+
+    .. versionchanged:: 1.2 quoting is automatically applied to COLLATE
+       expressions if they are case sensitive.
+
+    """
+    return CollationClause._create_collation_expression(expression, collation)
+
+
+def between(
+    expr: _ColumnExpressionOrLiteralArgument[_T],
+    lower_bound: Any,
+    upper_bound: Any,
+    symmetric: bool = False,
+) -> BinaryExpression[bool]:
+    """Produce a ``BETWEEN`` predicate clause.
+
+    E.g.::
+
+        from sqlalchemy import between
+        stmt = select(users_table).where(between(users_table.c.id, 5, 7))
+
+    Would produce SQL resembling::
+
+        SELECT id, name FROM user WHERE id BETWEEN :id_1 AND :id_2
+
+    The :func:`.between` function is a standalone version of the
+    :meth:`_expression.ColumnElement.between` method available on all
+    SQL expressions, as in::
+
+        stmt = select(users_table).where(users_table.c.id.between(5, 7))
+
+    All arguments passed to :func:`.between`, including the left side
+    column expression, are coerced from Python scalar values if the
+    value is not a :class:`_expression.ColumnElement` subclass.
+    For example,
+    three fixed values can be compared as in::
+
+        print(between(5, 3, 7))
+
+    Which would produce::
+
+        :param_1 BETWEEN :param_2 AND :param_3
+
+    :param expr: a column expression, typically a
+     :class:`_expression.ColumnElement`
+     instance or alternatively a Python scalar expression to be coerced
+     into a column expression, serving as the left side of the ``BETWEEN``
+     expression.
+
+    :param lower_bound: a column or Python scalar expression serving as the
+     lower bound of the right side of the ``BETWEEN`` expression.
+
+    :param upper_bound: a column or Python scalar expression serving as the
+     upper bound of the right side of the ``BETWEEN`` expression.
+
+    :param symmetric: if True, will render " BETWEEN SYMMETRIC ". Note
+     that not all databases support this syntax.
+
+    .. seealso::
+
+        :meth:`_expression.ColumnElement.between`
+
+    """
+    col_expr = coercions.expect(roles.ExpressionElementRole, expr)
+    return col_expr.between(lower_bound, upper_bound, symmetric=symmetric)
+
+
+def outparam(
+    key: str, type_: Optional[TypeEngine[_T]] = None
+) -> BindParameter[_T]:
+    """Create an 'OUT' parameter for usage in functions (stored procedures),
+    for databases which support them.
+
+    The ``outparam`` can be used like a regular function parameter.
+    The "output" value will be available from the
+    :class:`~sqlalchemy.engine.CursorResult` object via its ``out_parameters``
+    attribute, which returns a dictionary containing the values.
+
+    """
+    return BindParameter(key, None, type_=type_, unique=False, isoutparam=True)
+
+
+@overload
+def not_(clause: BinaryExpression[_T]) -> BinaryExpression[_T]: ...
+
+
+@overload
+def not_(clause: _ColumnExpressionArgument[_T]) -> ColumnElement[_T]: ...
+
+
+def not_(clause: _ColumnExpressionArgument[_T]) -> ColumnElement[_T]:
+    """Return a negation of the given clause, i.e. ``NOT(clause)``.
+
+    The ``~`` operator is also overloaded on all
+    :class:`_expression.ColumnElement` subclasses to produce the
+    same result.
+
+    """
+
+    return coercions.expect(roles.ExpressionElementRole, clause).__invert__()
+
+
+def bindparam(
+    key: Optional[str],
+    value: Any = _NoArg.NO_ARG,
+    type_: Optional[_TypeEngineArgument[_T]] = None,
+    unique: bool = False,
+    required: Union[bool, Literal[_NoArg.NO_ARG]] = _NoArg.NO_ARG,
+    quote: Optional[bool] = None,
+    callable_: Optional[Callable[[], Any]] = None,
+    expanding: bool = False,
+    isoutparam: bool = False,
+    literal_execute: bool = False,
+) -> BindParameter[_T]:
+    r"""Produce a "bound expression".
+
+    The return value is an instance of :class:`.BindParameter`; this
+    is a :class:`_expression.ColumnElement`
+    subclass which represents a so-called
+    "placeholder" value in a SQL expression, the value of which is
+    supplied at the point at which the statement is executed against a
+    database connection.
+
+    In SQLAlchemy, the :func:`.bindparam` construct has
+    the ability to carry along the actual value that will be ultimately
+    used at expression time. In this way, it serves not just as
+    a "placeholder" for eventual population, but also as a means of
+    representing so-called "unsafe" values which should not be rendered
+    directly in a SQL statement, but rather should be passed along
+    to the :term:`DBAPI` as values which need to be correctly escaped
+    and potentially handled for type-safety.
+ + When using :func:`.bindparam` explicitly, the use case is typically + one of traditional deferment of parameters; the :func:`.bindparam` + construct accepts a name which can then be referred to at execution + time:: + + from sqlalchemy import bindparam + + stmt = select(users_table).where( + users_table.c.name == bindparam("username") + ) + + The above statement, when rendered, will produce SQL similar to:: + + SELECT id, name FROM user WHERE name = :username + + In order to populate the value of ``:username`` above, the value + would typically be applied at execution time to a method + like :meth:`_engine.Connection.execute`:: + + result = connection.execute(stmt, {"username": "wendy"}) + + Explicit use of :func:`.bindparam` is also common when producing + UPDATE or DELETE statements that are to be invoked multiple times, + where the WHERE criterion of the statement is to change on each + invocation, such as:: + + stmt = ( + users_table.update() + .where(user_table.c.name == bindparam("username")) + .values(fullname=bindparam("fullname")) + ) + + connection.execute( + stmt, + [ + {"username": "wendy", "fullname": "Wendy Smith"}, + {"username": "jack", "fullname": "Jack Jones"}, + ], + ) + + SQLAlchemy's Core expression system makes wide use of + :func:`.bindparam` in an implicit sense. It is typical that Python + literal values passed to virtually all SQL expression functions are + coerced into fixed :func:`.bindparam` constructs. For example, given + a comparison operation such as:: + + expr = users_table.c.name == 'Wendy' + + The above expression will produce a :class:`.BinaryExpression` + construct, where the left side is the :class:`_schema.Column` object + representing the ``name`` column, and the right side is a + :class:`.BindParameter` representing the literal value:: + + print(repr(expr.right)) + BindParameter('%(4327771088 name)s', 'Wendy', type_=String()) + + The expression above will render SQL such as:: + + user.name = :name_1 + + Where the ``:name_1`` parameter name is an anonymous name. The + actual string ``Wendy`` is not in the rendered string, but is carried + along where it is later used within statement execution. If we + invoke a statement like the following:: + + stmt = select(users_table).where(users_table.c.name == 'Wendy') + result = connection.execute(stmt) + + We would see SQL logging output as:: + + SELECT "user".id, "user".name + FROM "user" + WHERE "user".name = %(name_1)s + {'name_1': 'Wendy'} + + Above, we see that ``Wendy`` is passed as a parameter to the database, + while the placeholder ``:name_1`` is rendered in the appropriate form + for the target database, in this case the PostgreSQL database. + + Similarly, :func:`.bindparam` is invoked automatically when working + with :term:`CRUD` statements as far as the "VALUES" portion is + concerned. The :func:`_expression.insert` construct produces an + ``INSERT`` expression which will, at statement execution time, generate + bound placeholders based on the arguments passed, as in:: + + stmt = users_table.insert() + result = connection.execute(stmt, {"name": "Wendy"}) + + The above will produce SQL output as:: + + INSERT INTO "user" (name) VALUES (%(name)s) + {'name': 'Wendy'} + + The :class:`_expression.Insert` construct, at + compilation/execution time, rendered a single :func:`.bindparam` + mirroring the column name ``name`` as a result of the single ``name`` + parameter we passed to the :meth:`_engine.Connection.execute` method. + + :param key: + the key (e.g. the name) for this bind param. 
+      Will be used in the generated
+      SQL statement for dialects that use named parameters. This
+      value may be modified when part of a compilation operation,
+      if other :class:`BindParameter` objects exist with the same
+      key, or if its length is too long and truncation is
+      required.
+
+      If omitted, an "anonymous" name is generated for the bound parameter;
+      when given a value to bind, the end result is equivalent to calling
+      upon the :func:`.literal` function with a value to bind, particularly
+      if the :paramref:`.bindparam.unique` parameter is also provided.
+
+    :param value:
+      Initial value for this bind param. Will be used at statement
+      execution time as the value for this parameter passed to the
+      DBAPI, if no other value is indicated to the statement execution
+      method for this particular parameter name. Defaults to ``None``.
+
+    :param callable\_:
+      A callable function that takes the place of "value". The function
+      will be called at statement execution time to determine the
+      ultimate value. Used for scenarios where the actual bind
+      value cannot be determined at the point at which the clause
+      construct is created, but embedded bind values are still desirable.
+
+    :param type\_:
+      A :class:`.TypeEngine` class or instance representing an optional
+      datatype for this :func:`.bindparam`. If not passed, a type
+      may be determined automatically for the bind, based on the given
+      value; for example, trivial Python types such as ``str``,
+      ``int``, ``bool``
+      may result in the :class:`.String`, :class:`.Integer` or
+      :class:`.Boolean` types being automatically selected.
+
+      The type of a :func:`.bindparam` is significant especially in that
+      the type will apply pre-processing to the value before it is
+      passed to the database. For example, a :func:`.bindparam` which
+      refers to a datetime value, and is specified as holding the
+      :class:`.DateTime` type, may apply conversion needed to the
+      value (such as stringification on SQLite) before passing the value
+      to the database.
+
+    :param unique:
+      if True, the key name of this :class:`.BindParameter` will be
+      modified if another :class:`.BindParameter` of the same name
+      already has been located within the containing
+      expression. This flag is used generally by the internals
+      when producing so-called "anonymous" bound expressions, it
+      isn't generally applicable to explicitly-named :func:`.bindparam`
+      constructs.
+
+    :param required:
+      If ``True``, a value is required at execution time. If not passed,
+      it defaults to ``True`` if neither :paramref:`.bindparam.value`
+      nor :paramref:`.bindparam.callable` were passed. If either of these
+      parameters are present, then :paramref:`.bindparam.required`
+      defaults to ``False``.
+
+    :param quote:
+      True if this parameter name requires quoting and is not
+      currently known as a SQLAlchemy reserved word; this currently
+      only applies to the Oracle backend, where bound names must
+      sometimes be quoted.
+
+    :param isoutparam:
+      if True, the parameter should be treated like a stored procedure
+      "OUT" parameter. This applies to backends such as Oracle which
+      support OUT parameters.
+
+    :param expanding:
+      if True, this parameter will be treated as an "expanding" parameter
+      at execution time; the parameter value is expected to be a sequence,
+      rather than a scalar value, and the string SQL statement will
+      be transformed on a per-execution basis to accommodate the sequence
+      with a variable number of parameter slots passed to the DBAPI.
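+
+      For example, a minimal sketch of an expanding IN clause (the
+      ``users_table`` name here is hypothetical)::
+
+          stmt = select(users_table).where(
+              users_table.c.id.in_(bindparam("ids", expanding=True))
+          )
+
+          connection.execute(stmt, {"ids": [1, 2, 3]})
+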
+ This is to allow statement caching to be used in conjunction with + an IN clause. + + .. seealso:: + + :meth:`.ColumnOperators.in_` + + :ref:`baked_in` - with baked queries + + .. note:: The "expanding" feature does not support "executemany"- + style parameter sets. + + .. versionadded:: 1.2 + + .. versionchanged:: 1.3 the "expanding" bound parameter feature now + supports empty lists. + + :param literal_execute: + if True, the bound parameter will be rendered in the compile phase + with a special "POSTCOMPILE" token, and the SQLAlchemy compiler will + render the final value of the parameter into the SQL statement at + statement execution time, omitting the value from the parameter + dictionary / list passed to DBAPI ``cursor.execute()``. This + produces a similar effect as that of using the ``literal_binds``, + compilation flag, however takes place as the statement is sent to + the DBAPI ``cursor.execute()`` method, rather than when the statement + is compiled. The primary use of this + capability is for rendering LIMIT / OFFSET clauses for database + drivers that can't accommodate for bound parameters in these + contexts, while allowing SQL constructs to be cacheable at the + compilation level. + + .. versionadded:: 1.4 Added "post compile" bound parameters + + .. seealso:: + + :ref:`change_4808`. + + .. seealso:: + + :ref:`tutorial_sending_parameters` - in the + :ref:`unified_tutorial` + + + """ + return BindParameter( + key, + value, + type_, + unique, + required, + quote, + callable_, + expanding, + isoutparam, + literal_execute, + ) + + +def case( + *whens: Union[ + typing_Tuple[_ColumnExpressionArgument[bool], Any], Mapping[Any, Any] + ], + value: Optional[Any] = None, + else_: Optional[Any] = None, +) -> Case[Any]: + r"""Produce a ``CASE`` expression. + + The ``CASE`` construct in SQL is a conditional object that + acts somewhat analogously to an "if/then" construct in other + languages. It returns an instance of :class:`.Case`. + + :func:`.case` in its usual form is passed a series of "when" + constructs, that is, a list of conditions and results as tuples:: + + from sqlalchemy import case + + stmt = select(users_table).\ + where( + case( + (users_table.c.name == 'wendy', 'W'), + (users_table.c.name == 'jack', 'J'), + else_='E' + ) + ) + + The above statement will produce SQL resembling:: + + SELECT id, name FROM user + WHERE CASE + WHEN (name = :name_1) THEN :param_1 + WHEN (name = :name_2) THEN :param_2 + ELSE :param_3 + END + + When simple equality expressions of several values against a single + parent column are needed, :func:`.case` also has a "shorthand" format + used via the + :paramref:`.case.value` parameter, which is passed a column + expression to be compared. In this form, the :paramref:`.case.whens` + parameter is passed as a dictionary containing expressions to be + compared against keyed to result expressions. The statement below is + equivalent to the preceding statement:: + + stmt = select(users_table).\ + where( + case( + {"wendy": "W", "jack": "J"}, + value=users_table.c.name, + else_='E' + ) + ) + + The values which are accepted as result values in + :paramref:`.case.whens` as well as with :paramref:`.case.else_` are + coerced from Python literals into :func:`.bindparam` constructs. + SQL expressions, e.g. :class:`_expression.ColumnElement` constructs, + are accepted + as well. 
To coerce a literal string expression into a constant
+    expression rendered inline, use the :func:`_expression.literal_column`
+    construct,
+    as in::
+
+        from sqlalchemy import case, literal_column
+
+        case(
+            (
+                orderline.c.qty > 100,
+                literal_column("'greaterthan100'")
+            ),
+            (
+                orderline.c.qty > 10,
+                literal_column("'greaterthan10'")
+            ),
+            else_=literal_column("'lessthan10'")
+        )
+
+    The above will render the given constants without using bound
+    parameters for the result values (but still for the comparison
+    values), as in::
+
+        CASE
+            WHEN (orderline.qty > :qty_1) THEN 'greaterthan100'
+            WHEN (orderline.qty > :qty_2) THEN 'greaterthan10'
+            ELSE 'lessthan10'
+        END
+
+    :param \*whens: The criteria to be compared against,
+     :paramref:`.case.whens` accepts two different forms, based on
+     whether or not :paramref:`.case.value` is used.
+
+     .. versionchanged:: 1.4 the :func:`_sql.case`
+        function now accepts the series of WHEN conditions positionally
+
+     In the first form, it accepts multiple 2-tuples passed as positional
+     arguments; each 2-tuple consists of ``(<sql expression>, <value>)``,
+     where the SQL expression is a boolean expression and "value" is a
+     resulting value, e.g.::
+
+        case(
+            (users_table.c.name == 'wendy', 'W'),
+            (users_table.c.name == 'jack', 'J')
+        )
+
+     In the second form, it accepts a Python dictionary of comparison
+     values mapped to a resulting value; this form requires
+     :paramref:`.case.value` to be present, and values will be compared
+     using the ``==`` operator, e.g.::
+
+        case(
+            {"wendy": "W", "jack": "J"},
+            value=users_table.c.name
+        )
+
+    :param value: An optional SQL expression which will be used as a
+     fixed "comparison point" for candidate values within a dictionary
+     passed to :paramref:`.case.whens`.
+
+    :param else\_: An optional SQL expression which will be the evaluated
+     result of the ``CASE`` construct if all expressions within
+     :paramref:`.case.whens` evaluate to false. When omitted, most
+     databases will produce a result of NULL if none of the "when"
+     expressions evaluate to true.
+
+
+    """
+    return Case(*whens, value=value, else_=else_)
+
+
+def cast(
+    expression: _ColumnExpressionOrLiteralArgument[Any],
+    type_: _TypeEngineArgument[_T],
+) -> Cast[_T]:
+    r"""Produce a ``CAST`` expression.
+
+    :func:`.cast` returns an instance of :class:`.Cast`.
+
+    E.g.::
+
+        from sqlalchemy import cast, Numeric
+
+        stmt = select(cast(product_table.c.unit_price, Numeric(10, 4)))
+
+    The above statement will produce SQL resembling::
+
+        SELECT CAST(unit_price AS NUMERIC(10, 4)) FROM product
+
+    The :func:`.cast` function performs two distinct functions when
+    used. The first is that it renders the ``CAST`` expression within
+    the resulting SQL string. The second is that it associates the given
+    type (e.g. :class:`.TypeEngine` class or instance) with the column
+    expression on the Python side, which means the expression will take
+    on the expression operator behavior associated with that type,
+    as well as the bound-value handling and result-row-handling behavior
+    of the type.
+
+    An alternative to :func:`.cast` is the :func:`.type_coerce` function.
+    This function performs the second task of associating an expression
+    with a specific type, but does not render the ``CAST`` expression
+    in SQL.
+
+    :param expression: A SQL expression, such as a
+     :class:`_expression.ColumnElement`
+     expression or a Python string which will be coerced into a bound
+     literal value.
+ + :param type\_: A :class:`.TypeEngine` class or instance indicating + the type to which the ``CAST`` should apply. + + .. seealso:: + + :ref:`tutorial_casts` + + :func:`.try_cast` - an alternative to CAST that results in + NULLs when the cast fails, instead of raising an error. + Only supported by some dialects. + + :func:`.type_coerce` - an alternative to CAST that coerces the type + on the Python side only, which is often sufficient to generate the + correct SQL and data coercion. + + + """ + return Cast(expression, type_) + + +def try_cast( + expression: _ColumnExpressionOrLiteralArgument[Any], + type_: _TypeEngineArgument[_T], +) -> TryCast[_T]: + """Produce a ``TRY_CAST`` expression for backends which support it; + this is a ``CAST`` which returns NULL for un-castable conversions. + + In SQLAlchemy, this construct is supported **only** by the SQL Server + dialect, and will raise a :class:`.CompileError` if used on other + included backends. However, third party backends may also support + this construct. + + .. tip:: As :func:`_sql.try_cast` originates from the SQL Server dialect, + it's importable both from ``sqlalchemy.`` as well as from + ``sqlalchemy.dialects.mssql``. + + :func:`_sql.try_cast` returns an instance of :class:`.TryCast` and + generally behaves similarly to the :class:`.Cast` construct; + at the SQL level, the difference between ``CAST`` and ``TRY_CAST`` + is that ``TRY_CAST`` returns NULL for an un-castable expression, + such as attempting to cast a string ``"hi"`` to an integer value. + + E.g.:: + + from sqlalchemy import select, try_cast, Numeric + + stmt = select( + try_cast(product_table.c.unit_price, Numeric(10, 4)) + ) + + The above would render on Microsoft SQL Server as:: + + SELECT TRY_CAST (product_table.unit_price AS NUMERIC(10, 4)) + FROM product_table + + .. versionadded:: 2.0.14 :func:`.try_cast` has been + generalized from the SQL Server dialect into a general use + construct that may be supported by additional dialects. + + """ + return TryCast(expression, type_) + + +def column( + text: str, + type_: Optional[_TypeEngineArgument[_T]] = None, + is_literal: bool = False, + _selectable: Optional[FromClause] = None, +) -> ColumnClause[_T]: + """Produce a :class:`.ColumnClause` object. + + The :class:`.ColumnClause` is a lightweight analogue to the + :class:`_schema.Column` class. The :func:`_expression.column` + function can + be invoked with just a name alone, as in:: + + from sqlalchemy import column + + id, name = column("id"), column("name") + stmt = select(id, name).select_from("user") + + The above statement would produce SQL like:: + + SELECT id, name FROM user + + Once constructed, :func:`_expression.column` + may be used like any other SQL + expression element such as within :func:`_expression.select` + constructs:: + + from sqlalchemy.sql import column + + id, name = column("id"), column("name") + stmt = select(id, name).select_from("user") + + The text handled by :func:`_expression.column` + is assumed to be handled + like the name of a database column; if the string contains mixed case, + special characters, or matches a known reserved word on the target + backend, the column expression will render using the quoting + behavior determined by the backend. To produce a textual SQL + expression that is rendered exactly without any quoting, + use :func:`_expression.literal_column` instead, + or pass ``True`` as the + value of :paramref:`_expression.column.is_literal`. 
Additionally,
+    full SQL
+    statements are best handled using the :func:`_expression.text`
+    construct.
+
+    :func:`_expression.column` can be used in a table-like
+    fashion by combining it with the :func:`.table` function
+    (which is the lightweight analogue to :class:`_schema.Table`
+    ) to produce
+    a working table construct with minimal boilerplate::
+
+        from sqlalchemy import table, column, select
+
+        user = table("user",
+                column("id"),
+                column("name"),
+                column("description"),
+        )
+
+        stmt = select(user.c.description).where(user.c.name == 'wendy')
+
+    A :func:`_expression.column` / :func:`.table`
+    construct like that illustrated
+    above can be created in an
+    ad-hoc fashion and is not associated with any
+    :class:`_schema.MetaData`, DDL, or events, unlike its
+    :class:`_schema.Table` counterpart.
+
+    :param text: the text of the element.
+
+    :param type: :class:`_types.TypeEngine` object which can associate
+     this :class:`.ColumnClause` with a type.
+
+    :param is_literal: if True, the :class:`.ColumnClause` is assumed to
+     be an exact expression that will be delivered to the output with no
+     quoting rules applied regardless of case sensitive settings. the
+     :func:`_expression.literal_column()` function essentially invokes
+     :func:`_expression.column` while passing ``is_literal=True``.
+
+    .. seealso::
+
+        :class:`_schema.Column`
+
+        :func:`_expression.literal_column`
+
+        :func:`.table`
+
+        :func:`_expression.text`
+
+        :ref:`tutorial_select_arbitrary_text`
+
+    """
+    return ColumnClause(text, type_, is_literal, _selectable)
+
+
+def desc(
+    column: _ColumnExpressionOrStrLabelArgument[_T],
+) -> UnaryExpression[_T]:
+    """Produce a descending ``ORDER BY`` clause element.
+
+    e.g.::
+
+        from sqlalchemy import desc
+
+        stmt = select(users_table).order_by(desc(users_table.c.name))
+
+    will produce SQL as::
+
+        SELECT id, name FROM user ORDER BY name DESC
+
+    The :func:`.desc` function is a standalone version of the
+    :meth:`_expression.ColumnElement.desc`
+    method available on all SQL expressions,
+    e.g.::
+
+
+        stmt = select(users_table).order_by(users_table.c.name.desc())
+
+    :param column: A :class:`_expression.ColumnElement` (e.g.
+     scalar SQL expression)
+     with which to apply the :func:`.desc` operation.
+
+    .. seealso::
+
+        :func:`.asc`
+
+        :func:`.nulls_first`
+
+        :func:`.nulls_last`
+
+        :meth:`_expression.Select.order_by`
+
+    """
+    return UnaryExpression._create_desc(column)
+
+
+def distinct(expr: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]:
+    """Produce a column-expression-level unary ``DISTINCT`` clause.
+
+    This applies the ``DISTINCT`` keyword to an **individual column
+    expression** (e.g. not the whole statement), and renders **specifically
+    in that column position**; this is used for containment within
+    an aggregate function, as in::
+
+        from sqlalchemy import distinct, func
+        stmt = select(users_table.c.id, func.count(distinct(users_table.c.name)))
+
+    The above would produce a statement resembling::
+
+        SELECT user.id, count(DISTINCT user.name) FROM user
+
+    .. tip:: The :func:`_sql.distinct` function does **not** apply DISTINCT
+       to the full SELECT statement, instead applying a DISTINCT modifier
+       to **individual column expressions**. For general ``SELECT DISTINCT``
+       support, use the
+       :meth:`_sql.Select.distinct` method on :class:`_sql.Select`.
+
+    The :func:`.distinct` function is also available as a column-level
+    method, e.g.
:meth:`_expression.ColumnElement.distinct`, as in:: + + stmt = select(func.count(users_table.c.name.distinct())) + + The :func:`.distinct` operator is different from the + :meth:`_expression.Select.distinct` method of + :class:`_expression.Select`, + which produces a ``SELECT`` statement + with ``DISTINCT`` applied to the result set as a whole, + e.g. a ``SELECT DISTINCT`` expression. See that method for further + information. + + .. seealso:: + + :meth:`_expression.ColumnElement.distinct` + + :meth:`_expression.Select.distinct` + + :data:`.func` + + """ # noqa: E501 + return UnaryExpression._create_distinct(expr) + + +def bitwise_not(expr: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: + """Produce a unary bitwise NOT clause, typically via the ``~`` operator. + + Not to be confused with boolean negation :func:`_sql.not_`. + + .. versionadded:: 2.0.2 + + .. seealso:: + + :ref:`operators_bitwise` + + + """ + + return UnaryExpression._create_bitwise_not(expr) + + +def extract(field: str, expr: _ColumnExpressionArgument[Any]) -> Extract: + """Return a :class:`.Extract` construct. + + This is typically available as :func:`.extract` + as well as ``func.extract`` from the + :data:`.func` namespace. + + :param field: The field to extract. + + :param expr: A column or Python scalar expression serving as the + right side of the ``EXTRACT`` expression. + + E.g.:: + + from sqlalchemy import extract + from sqlalchemy import table, column + + logged_table = table("user", + column("id"), + column("date_created"), + ) + + stmt = select(logged_table.c.id).where( + extract("YEAR", logged_table.c.date_created) == 2021 + ) + + In the above example, the statement is used to select ids from the + database where the ``YEAR`` component matches a specific value. + + Similarly, one can also select an extracted component:: + + stmt = select( + extract("YEAR", logged_table.c.date_created) + ).where(logged_table.c.id == 1) + + The implementation of ``EXTRACT`` may vary across database backends. + Users are reminded to consult their database documentation. + """ + return Extract(field, expr) + + +def false() -> False_: + """Return a :class:`.False_` construct. + + E.g.: + + .. sourcecode:: pycon+sql + + >>> from sqlalchemy import false + >>> print(select(t.c.x).where(false())) + {printsql}SELECT x FROM t WHERE false + + A backend which does not support true/false constants will render as + an expression against 1 or 0: + + .. sourcecode:: pycon+sql + + >>> print(select(t.c.x).where(false())) + {printsql}SELECT x FROM t WHERE 0 = 1 + + The :func:`.true` and :func:`.false` constants also feature + "short circuit" operation within an :func:`.and_` or :func:`.or_` + conjunction: + + .. sourcecode:: pycon+sql + + >>> print(select(t.c.x).where(or_(t.c.x > 5, true()))) + {printsql}SELECT x FROM t WHERE true{stop} + + >>> print(select(t.c.x).where(and_(t.c.x > 5, false()))) + {printsql}SELECT x FROM t WHERE false{stop} + + .. seealso:: + + :func:`.true` + + """ + + return False_._instance() + + +def funcfilter( + func: FunctionElement[_T], *criterion: _ColumnExpressionArgument[bool] +) -> FunctionFilter[_T]: + """Produce a :class:`.FunctionFilter` object against a function. + + Used against aggregate and window functions, + for database backends that support the "FILTER" clause. + + E.g.:: + + from sqlalchemy import funcfilter + funcfilter(func.count(1), MyClass.name == 'some name') + + Would produce "COUNT(1) FILTER (WHERE myclass.name = 'some name')". 
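+
+    A minimal sketch of the equivalent method form (reusing the
+    hypothetical ``MyClass`` from above)::
+
+        from sqlalchemy import func
+
+        func.count(1).filter(MyClass.name == 'some name')
+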
+ + This function is also available from the :data:`~.expression.func` + construct itself via the :meth:`.FunctionElement.filter` method. + + .. seealso:: + + :ref:`tutorial_functions_within_group` - in the + :ref:`unified_tutorial` + + :meth:`.FunctionElement.filter` + + """ + return FunctionFilter(func, *criterion) + + +def label( + name: str, + element: _ColumnExpressionArgument[_T], + type_: Optional[_TypeEngineArgument[_T]] = None, +) -> Label[_T]: + """Return a :class:`Label` object for the + given :class:`_expression.ColumnElement`. + + A label changes the name of an element in the columns clause of a + ``SELECT`` statement, typically via the ``AS`` SQL keyword. + + This functionality is more conveniently available via the + :meth:`_expression.ColumnElement.label` method on + :class:`_expression.ColumnElement`. + + :param name: label name + + :param obj: a :class:`_expression.ColumnElement`. + + """ + return Label(name, element, type_) + + +def null() -> Null: + """Return a constant :class:`.Null` construct.""" + + return Null._instance() + + +def nulls_first(column: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: + """Produce the ``NULLS FIRST`` modifier for an ``ORDER BY`` expression. + + :func:`.nulls_first` is intended to modify the expression produced + by :func:`.asc` or :func:`.desc`, and indicates how NULL values + should be handled when they are encountered during ordering:: + + + from sqlalchemy import desc, nulls_first + + stmt = select(users_table).order_by( + nulls_first(desc(users_table.c.name))) + + The SQL expression from the above would resemble:: + + SELECT id, name FROM user ORDER BY name DESC NULLS FIRST + + Like :func:`.asc` and :func:`.desc`, :func:`.nulls_first` is typically + invoked from the column expression itself using + :meth:`_expression.ColumnElement.nulls_first`, + rather than as its standalone + function version, as in:: + + stmt = select(users_table).order_by( + users_table.c.name.desc().nulls_first()) + + .. versionchanged:: 1.4 :func:`.nulls_first` is renamed from + :func:`.nullsfirst` in previous releases. + The previous name remains available for backwards compatibility. + + .. seealso:: + + :func:`.asc` + + :func:`.desc` + + :func:`.nulls_last` + + :meth:`_expression.Select.order_by` + + """ + return UnaryExpression._create_nulls_first(column) + + +def nulls_last(column: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: + """Produce the ``NULLS LAST`` modifier for an ``ORDER BY`` expression. + + :func:`.nulls_last` is intended to modify the expression produced + by :func:`.asc` or :func:`.desc`, and indicates how NULL values + should be handled when they are encountered during ordering:: + + + from sqlalchemy import desc, nulls_last + + stmt = select(users_table).order_by( + nulls_last(desc(users_table.c.name))) + + The SQL expression from the above would resemble:: + + SELECT id, name FROM user ORDER BY name DESC NULLS LAST + + Like :func:`.asc` and :func:`.desc`, :func:`.nulls_last` is typically + invoked from the column expression itself using + :meth:`_expression.ColumnElement.nulls_last`, + rather than as its standalone + function version, as in:: + + stmt = select(users_table).order_by( + users_table.c.name.desc().nulls_last()) + + .. versionchanged:: 1.4 :func:`.nulls_last` is renamed from + :func:`.nullslast` in previous releases. + The previous name remains available for backwards compatibility. + + .. 
seealso:: + + :func:`.asc` + + :func:`.desc` + + :func:`.nulls_first` + + :meth:`_expression.Select.order_by` + + """ + return UnaryExpression._create_nulls_last(column) + + +def or_( # type: ignore[empty-body] + initial_clause: Union[Literal[False], _ColumnExpressionArgument[bool]], + *clauses: _ColumnExpressionArgument[bool], +) -> ColumnElement[bool]: + """Produce a conjunction of expressions joined by ``OR``. + + E.g.:: + + from sqlalchemy import or_ + + stmt = select(users_table).where( + or_( + users_table.c.name == 'wendy', + users_table.c.name == 'jack' + ) + ) + + The :func:`.or_` conjunction is also available using the + Python ``|`` operator (though note that compound expressions + need to be parenthesized in order to function with Python + operator precedence behavior):: + + stmt = select(users_table).where( + (users_table.c.name == 'wendy') | + (users_table.c.name == 'jack') + ) + + The :func:`.or_` construct must be given at least one positional + argument in order to be valid; a :func:`.or_` construct with no + arguments is ambiguous. To produce an "empty" or dynamically + generated :func:`.or_` expression, from a given list of expressions, + a "default" element of :func:`_sql.false` (or just ``False``) should be + specified:: + + from sqlalchemy import false + or_criteria = or_(false(), *expressions) + + The above expression will compile to SQL as the expression ``false`` + or ``0 = 1``, depending on backend, if no other expressions are + present. If expressions are present, then the :func:`_sql.false` value is + ignored as it does not affect the outcome of an OR expression which + has other elements. + + .. deprecated:: 1.4 The :func:`.or_` element now requires that at + least one argument is passed; creating the :func:`.or_` construct + with no arguments is deprecated, and will emit a deprecation warning + while continuing to produce a blank SQL string. + + .. seealso:: + + :func:`.and_` + + """ + ... + + +if not TYPE_CHECKING: + # handle deprecated case which allows zero-arguments + def or_(*clauses): # noqa: F811 + """Produce a conjunction of expressions joined by ``OR``. + + E.g.:: + + from sqlalchemy import or_ + + stmt = select(users_table).where( + or_( + users_table.c.name == 'wendy', + users_table.c.name == 'jack' + ) + ) + + The :func:`.or_` conjunction is also available using the + Python ``|`` operator (though note that compound expressions + need to be parenthesized in order to function with Python + operator precedence behavior):: + + stmt = select(users_table).where( + (users_table.c.name == 'wendy') | + (users_table.c.name == 'jack') + ) + + The :func:`.or_` construct must be given at least one positional + argument in order to be valid; a :func:`.or_` construct with no + arguments is ambiguous. To produce an "empty" or dynamically + generated :func:`.or_` expression, from a given list of expressions, + a "default" element of :func:`_sql.false` (or just ``False``) should be + specified:: + + from sqlalchemy import false + or_criteria = or_(false(), *expressions) + + The above expression will compile to SQL as the expression ``false`` + or ``0 = 1``, depending on backend, if no other expressions are + present. If expressions are present, then the :func:`_sql.false` value + is ignored as it does not affect the outcome of an OR expression which + has other elements. + + .. 
deprecated:: 1.4 The :func:`.or_` element now requires that at + least one argument is passed; creating the :func:`.or_` construct + with no arguments is deprecated, and will emit a deprecation warning + while continuing to produce a blank SQL string. + + .. seealso:: + + :func:`.and_` + + """ + return BooleanClauseList.or_(*clauses) + + +def over( + element: FunctionElement[_T], + partition_by: Optional[_ByArgument] = None, + order_by: Optional[_ByArgument] = None, + range_: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, + rows: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, +) -> Over[_T]: + r"""Produce an :class:`.Over` object against a function. + + Used against aggregate or so-called "window" functions, + for database backends that support window functions. + + :func:`_expression.over` is usually called using + the :meth:`.FunctionElement.over` method, e.g.:: + + func.row_number().over(order_by=mytable.c.some_column) + + Would produce:: + + ROW_NUMBER() OVER(ORDER BY some_column) + + Ranges are also possible using the :paramref:`.expression.over.range_` + and :paramref:`.expression.over.rows` parameters. These + mutually-exclusive parameters each accept a 2-tuple, which contains + a combination of integers and None:: + + func.row_number().over( + order_by=my_table.c.some_column, range_=(None, 0)) + + The above would produce:: + + ROW_NUMBER() OVER(ORDER BY some_column + RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) + + A value of ``None`` indicates "unbounded", a + value of zero indicates "current row", and negative / positive + integers indicate "preceding" and "following": + + * RANGE BETWEEN 5 PRECEDING AND 10 FOLLOWING:: + + func.row_number().over(order_by='x', range_=(-5, 10)) + + * ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW:: + + func.row_number().over(order_by='x', rows=(None, 0)) + + * RANGE BETWEEN 2 PRECEDING AND UNBOUNDED FOLLOWING:: + + func.row_number().over(order_by='x', range_=(-2, None)) + + * RANGE BETWEEN 1 FOLLOWING AND 3 FOLLOWING:: + + func.row_number().over(order_by='x', range_=(1, 3)) + + :param element: a :class:`.FunctionElement`, :class:`.WithinGroup`, + or other compatible construct. + :param partition_by: a column element or string, or a list + of such, that will be used as the PARTITION BY clause + of the OVER construct. + :param order_by: a column element or string, or a list + of such, that will be used as the ORDER BY clause + of the OVER construct. + :param range\_: optional range clause for the window. This is a + tuple value which can contain integer values or ``None``, + and will render a RANGE BETWEEN PRECEDING / FOLLOWING clause. + + :param rows: optional rows clause for the window. This is a tuple + value which can contain integer values or None, and will render + a ROWS BETWEEN PRECEDING / FOLLOWING clause. + + This function is also available from the :data:`~.expression.func` + construct itself via the :meth:`.FunctionElement.over` method. + + .. seealso:: + + :ref:`tutorial_window_functions` - in the :ref:`unified_tutorial` + + :data:`.expression.func` + + :func:`_expression.within_group` + + """ + return Over(element, partition_by, order_by, range_, rows) + + +@_document_text_coercion("text", ":func:`.text`", ":paramref:`.text.text`") +def text(text: str) -> TextClause: + r"""Construct a new :class:`_expression.TextClause` clause, + representing + a textual SQL string directly. 
+ + E.g.:: + + from sqlalchemy import text + + t = text("SELECT * FROM users") + result = connection.execute(t) + + The advantages :func:`_expression.text` + provides over a plain string are + backend-neutral support for bind parameters, per-statement + execution options, as well as + bind parameter and result-column typing behavior, allowing + SQLAlchemy type constructs to play a role when executing + a statement that is specified literally. The construct can also + be provided with a ``.c`` collection of column elements, allowing + it to be embedded in other SQL expression constructs as a subquery. + + Bind parameters are specified by name, using the format ``:name``. + E.g.:: + + t = text("SELECT * FROM users WHERE id=:user_id") + result = connection.execute(t, {"user_id": 12}) + + For SQL statements where a colon is required verbatim, as within + an inline string, use a backslash to escape:: + + t = text(r"SELECT * FROM users WHERE name='\:username'") + + The :class:`_expression.TextClause` + construct includes methods which can + provide information about the bound parameters as well as the column + values which would be returned from the textual statement, assuming + it's an executable SELECT type of statement. The + :meth:`_expression.TextClause.bindparams` + method is used to provide bound + parameter detail, and :meth:`_expression.TextClause.columns` + method allows + specification of return columns including names and types:: + + t = text("SELECT * FROM users WHERE id=:user_id").\ + bindparams(user_id=7).\ + columns(id=Integer, name=String) + + for id, name in connection.execute(t): + print(id, name) + + The :func:`_expression.text` construct is used in cases when + a literal string SQL fragment is specified as part of a larger query, + such as for the WHERE clause of a SELECT statement:: + + s = select(users.c.id, users.c.name).where(text("id=:user_id")) + result = connection.execute(s, {"user_id": 12}) + + :func:`_expression.text` is also used for the construction + of a full, standalone statement using plain text. + As such, SQLAlchemy refers + to it as an :class:`.Executable` object and may be used + like any other statement passed to an ``.execute()`` method. + + :param text: + the text of the SQL statement to be created. Use ``:`` + to specify bind parameters; they will be compiled to their + engine-specific format. + + .. seealso:: + + :ref:`tutorial_select_arbitrary_text` + + """ + return TextClause(text) + + +def true() -> True_: + """Return a constant :class:`.True_` construct. + + E.g.: + + .. sourcecode:: pycon+sql + + >>> from sqlalchemy import true + >>> print(select(t.c.x).where(true())) + {printsql}SELECT x FROM t WHERE true + + A backend which does not support true/false constants will render as + an expression against 1 or 0: + + .. sourcecode:: pycon+sql + + >>> print(select(t.c.x).where(true())) + {printsql}SELECT x FROM t WHERE 1 = 1 + + The :func:`.true` and :func:`.false` constants also feature + "short circuit" operation within an :func:`.and_` or :func:`.or_` + conjunction: + + .. sourcecode:: pycon+sql + + >>> print(select(t.c.x).where(or_(t.c.x > 5, true()))) + {printsql}SELECT x FROM t WHERE true{stop} + + >>> print(select(t.c.x).where(and_(t.c.x > 5, false()))) + {printsql}SELECT x FROM t WHERE false{stop} + + .. seealso:: + + :func:`.false` + + """ + + return True_._instance() + + +def tuple_( + *clauses: _ColumnExpressionArgument[Any], + types: Optional[Sequence[_TypeEngineArgument[Any]]] = None, +) -> Tuple: + """Return a :class:`.Tuple`. 
+ + Main usage is to produce a composite IN construct using + :meth:`.ColumnOperators.in_` :: + + from sqlalchemy import tuple_ + + tuple_(table.c.col1, table.c.col2).in_( + [(1, 2), (5, 12), (10, 19)] + ) + + .. versionchanged:: 1.3.6 Added support for SQLite IN tuples. + + .. warning:: + + The composite IN construct is not supported by all backends, and is + currently known to work on PostgreSQL, MySQL, and SQLite. + Unsupported backends will raise a subclass of + :class:`~sqlalchemy.exc.DBAPIError` when such an expression is + invoked. + + """ + return Tuple(*clauses, types=types) + + +def type_coerce( + expression: _ColumnExpressionOrLiteralArgument[Any], + type_: _TypeEngineArgument[_T], +) -> TypeCoerce[_T]: + r"""Associate a SQL expression with a particular type, without rendering + ``CAST``. + + E.g.:: + + from sqlalchemy import type_coerce + + stmt = select(type_coerce(log_table.date_string, StringDateTime())) + + The above construct will produce a :class:`.TypeCoerce` object, which + does not modify the rendering in any way on the SQL side, with the + possible exception of a generated label if used in a columns clause + context: + + .. sourcecode:: sql + + SELECT date_string AS date_string FROM log + + When result rows are fetched, the ``StringDateTime`` type processor + will be applied to result rows on behalf of the ``date_string`` column. + + .. note:: the :func:`.type_coerce` construct does not render any + SQL syntax of its own, including that it does not imply + parenthesization. Please use :meth:`.TypeCoerce.self_group` + if explicit parenthesization is required. + + In order to provide a named label for the expression, use + :meth:`_expression.ColumnElement.label`:: + + stmt = select( + type_coerce(log_table.date_string, StringDateTime()).label('date') + ) + + + A type that features bound-value handling will also have that behavior + take effect when literal values or :func:`.bindparam` constructs are + passed to :func:`.type_coerce` as targets. + For example, if a type implements the + :meth:`.TypeEngine.bind_expression` + method or :meth:`.TypeEngine.bind_processor` method or equivalent, + these functions will take effect at statement compilation/execution + time when a literal value is passed, as in:: + + # bound-value handling of MyStringType will be applied to the + # literal value "some string" + stmt = select(type_coerce("some string", MyStringType)) + + When using :func:`.type_coerce` with composed expressions, note that + **parenthesis are not applied**. If :func:`.type_coerce` is being + used in an operator context where the parenthesis normally present from + CAST are necessary, use the :meth:`.TypeCoerce.self_group` method: + + .. sourcecode:: pycon+sql + + >>> some_integer = column("someint", Integer) + >>> some_string = column("somestr", String) + >>> expr = type_coerce(some_integer + 5, String) + some_string + >>> print(expr) + {printsql}someint + :someint_1 || somestr{stop} + >>> expr = type_coerce(some_integer + 5, String).self_group() + some_string + >>> print(expr) + {printsql}(someint + :someint_1) || somestr{stop} + + :param expression: A SQL expression, such as a + :class:`_expression.ColumnElement` + expression or a Python string which will be coerced into a bound + literal value. + + :param type\_: A :class:`.TypeEngine` class or instance indicating + the type to which the expression is coerced. + + .. 
seealso:: + + :ref:`tutorial_casts` + + :func:`.cast` + + """ # noqa + return TypeCoerce(expression, type_) + + +def within_group( + element: FunctionElement[_T], *order_by: _ColumnExpressionArgument[Any] +) -> WithinGroup[_T]: + r"""Produce a :class:`.WithinGroup` object against a function. + + Used against so-called "ordered set aggregate" and "hypothetical + set aggregate" functions, including :class:`.percentile_cont`, + :class:`.rank`, :class:`.dense_rank`, etc. + + :func:`_expression.within_group` is usually called using + the :meth:`.FunctionElement.within_group` method, e.g.:: + + from sqlalchemy import within_group + stmt = select( + department.c.id, + func.percentile_cont(0.5).within_group( + department.c.salary.desc() + ) + ) + + The above statement would produce SQL similar to + ``SELECT department.id, percentile_cont(0.5) + WITHIN GROUP (ORDER BY department.salary DESC)``. + + :param element: a :class:`.FunctionElement` construct, typically + generated by :data:`~.expression.func`. + :param \*order_by: one or more column elements that will be used + as the ORDER BY clause of the WITHIN GROUP construct. + + .. seealso:: + + :ref:`tutorial_functions_within_group` - in the + :ref:`unified_tutorial` + + :data:`.expression.func` + + :func:`_expression.over` + + """ + return WithinGroup(element, *order_by) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_orm_types.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_orm_types.py new file mode 100644 index 00000000..bccb533c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_orm_types.py @@ -0,0 +1,20 @@ +# sql/_orm_types.py +# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""ORM types that need to present specifically for **documentation only** of +the Executable.execution_options() method, which includes options that +are meaningful to the ORM. + +""" + + +from __future__ import annotations + +from ..util.typing import Literal + +SynchronizeSessionArgument = Literal[False, "auto", "evaluate", "fetch"] +DMLStrategyArgument = Literal["bulk", "raw", "orm", "auto"] diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_py_util.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_py_util.py new file mode 100644 index 00000000..df372bf5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_py_util.py @@ -0,0 +1,75 @@ +# sql/_py_util.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import typing +from typing import Any +from typing import Dict +from typing import Tuple +from typing import Union + +from ..util.typing import Literal + +if typing.TYPE_CHECKING: + from .cache_key import CacheConst + + +class prefix_anon_map(Dict[str, str]): + """A map that creates new keys for missing key access. + + Considers keys of the form "<ident> <name>" to produce + new symbols "<name>_<index>", where "index" is an incrementing integer + corresponding to <name>. + + Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which + is otherwise usually used for this type of operation.
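    For example, a brief sketch of the key scheme (the integer idents here
    are arbitrary values chosen for illustration)::

        amap = prefix_anon_map()
        amap["140551 param"]   # -> "param_1"
        amap["140552 param"]   # -> "param_2"; same derived name, next index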
+ + """ + + def __missing__(self, key: str) -> str: + (ident, derived) = key.split(" ", 1) + anonymous_counter = self.get(derived, 1) + self[derived] = anonymous_counter + 1 # type: ignore + value = f"{derived}_{anonymous_counter}" + self[key] = value + return value + + +class cache_anon_map( + Dict[Union[int, "Literal[CacheConst.NO_CACHE]"], Union[Literal[True], str]] +): + """A map that creates new keys for missing key access. + + Produces an incrementing sequence given a series of unique keys. + + This is similar to the compiler prefix_anon_map class although simpler. + + Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which + is otherwise usually used for this type of operation. + + """ + + _index = 0 + + def get_anon(self, object_: Any) -> Tuple[str, bool]: + idself = id(object_) + if idself in self: + s_val = self[idself] + assert s_val is not True + return s_val, True + else: + # inline of __missing__ + self[idself] = id_ = str(self._index) + self._index += 1 + + return id_, False + + def __missing__(self, key: int) -> str: + self[key] = val = str(self._index) + self._index += 1 + return val diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_selectable_constructors.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_selectable_constructors.py new file mode 100644 index 00000000..c2b5008c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_selectable_constructors.py @@ -0,0 +1,635 @@ +# sql/_selectable_constructors.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +from typing import Any +from typing import Optional +from typing import overload +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import coercions +from . import roles +from ._typing import _ColumnsClauseArgument +from ._typing import _no_kw +from .elements import ColumnClause +from .selectable import Alias +from .selectable import CompoundSelect +from .selectable import Exists +from .selectable import FromClause +from .selectable import Join +from .selectable import Lateral +from .selectable import LateralFromClause +from .selectable import NamedFromClause +from .selectable import Select +from .selectable import TableClause +from .selectable import TableSample +from .selectable import Values + +if TYPE_CHECKING: + from ._typing import _FromClauseArgument + from ._typing import _OnClauseArgument + from ._typing import _SelectStatementForCompoundArgument + from ._typing import _T0 + from ._typing import _T1 + from ._typing import _T2 + from ._typing import _T3 + from ._typing import _T4 + from ._typing import _T5 + from ._typing import _T6 + from ._typing import _T7 + from ._typing import _T8 + from ._typing import _T9 + from ._typing import _TypedColumnClauseArgument as _TCCA + from .functions import Function + from .selectable import CTE + from .selectable import HasCTE + from .selectable import ScalarSelect + from .selectable import SelectBase + + +_T = TypeVar("_T", bound=Any) + + +def alias( + selectable: FromClause, name: Optional[str] = None, flat: bool = False +) -> NamedFromClause: + """Return a named alias of the given :class:`.FromClause`. + + For :class:`.Table` and :class:`.Join` objects, the return type is the + :class:`_expression.Alias` object. 
Other kinds of :class:`.NamedFromClause` + objects may be returned for other kinds of :class:`.FromClause` objects. + + The named alias represents any :class:`_expression.FromClause` with an + alternate name assigned within SQL, typically using the ``AS`` clause when + generated, e.g. ``SELECT * FROM table AS aliasname``. + + Equivalent functionality is available via the + :meth:`_expression.FromClause.alias` + method available on all :class:`_expression.FromClause` objects. + + :param selectable: any :class:`_expression.FromClause` subclass, + such as a table, select statement, etc. + + :param name: string name to be assigned as the alias. + If ``None``, a name will be deterministically generated at compile + time. Deterministic means the name is guaranteed to be unique against + other constructs used in the same statement, and will also be the same + name for each successive compilation of the same statement object. + + :param flat: Will be passed through to if the given selectable + is an instance of :class:`_expression.Join` - see + :meth:`_expression.Join.alias` for details. + + """ + return Alias._factory(selectable, name=name, flat=flat) + + +def cte( + selectable: HasCTE, name: Optional[str] = None, recursive: bool = False +) -> CTE: + r"""Return a new :class:`_expression.CTE`, + or Common Table Expression instance. + + Please see :meth:`_expression.HasCTE.cte` for detail on CTE usage. + + """ + return coercions.expect(roles.HasCTERole, selectable).cte( + name=name, recursive=recursive + ) + + +def except_( + *selects: _SelectStatementForCompoundArgument, +) -> CompoundSelect: + r"""Return an ``EXCEPT`` of multiple selectables. + + The returned object is an instance of + :class:`_expression.CompoundSelect`. + + :param \*selects: + a list of :class:`_expression.Select` instances. + + """ + return CompoundSelect._create_except(*selects) + + +def except_all( + *selects: _SelectStatementForCompoundArgument, +) -> CompoundSelect: + r"""Return an ``EXCEPT ALL`` of multiple selectables. + + The returned object is an instance of + :class:`_expression.CompoundSelect`. + + :param \*selects: + a list of :class:`_expression.Select` instances. + + """ + return CompoundSelect._create_except_all(*selects) + + +def exists( + __argument: Optional[ + Union[_ColumnsClauseArgument[Any], SelectBase, ScalarSelect[Any]] + ] = None, +) -> Exists: + """Construct a new :class:`_expression.Exists` construct. + + The :func:`_sql.exists` can be invoked by itself to produce an + :class:`_sql.Exists` construct, which will accept simple WHERE + criteria:: + + exists_criteria = exists().where(table1.c.col1 == table2.c.col2) + + However, for greater flexibility in constructing the SELECT, an + existing :class:`_sql.Select` construct may be converted to an + :class:`_sql.Exists`, most conveniently by making use of the + :meth:`_sql.SelectBase.exists` method:: + + exists_criteria = ( + select(table2.c.col2). + where(table1.c.col1 == table2.c.col2). + exists() + ) + + The EXISTS criteria is then used inside of an enclosing SELECT:: + + stmt = select(table1.c.col1).where(exists_criteria) + + The above statement will then be of the form:: + + SELECT col1 FROM table1 WHERE EXISTS + (SELECT table2.col2 FROM table2 WHERE table2.col2 = table1.col1) + + .. seealso:: + + :ref:`tutorial_exists` - in the :term:`2.0 style` tutorial. + + :meth:`_sql.SelectBase.exists` - method to transform a ``SELECT`` to an + ``EXISTS`` clause. 
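    The construct may also be negated with the Python ``~`` operator; a
    sketch continuing the ``exists_criteria`` example above::

        stmt = select(table1.c.col1).where(~exists_criteria)

    which renders the subquery with ``NOT EXISTS``.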
+ + """ # noqa: E501 + + return Exists(__argument) + + +def intersect( + *selects: _SelectStatementForCompoundArgument, +) -> CompoundSelect: + r"""Return an ``INTERSECT`` of multiple selectables. + + The returned object is an instance of + :class:`_expression.CompoundSelect`. + + :param \*selects: + a list of :class:`_expression.Select` instances. + + """ + return CompoundSelect._create_intersect(*selects) + + +def intersect_all( + *selects: _SelectStatementForCompoundArgument, +) -> CompoundSelect: + r"""Return an ``INTERSECT ALL`` of multiple selectables. + + The returned object is an instance of + :class:`_expression.CompoundSelect`. + + :param \*selects: + a list of :class:`_expression.Select` instances. + + + """ + return CompoundSelect._create_intersect_all(*selects) + + +def join( + left: _FromClauseArgument, + right: _FromClauseArgument, + onclause: Optional[_OnClauseArgument] = None, + isouter: bool = False, + full: bool = False, +) -> Join: + """Produce a :class:`_expression.Join` object, given two + :class:`_expression.FromClause` + expressions. + + E.g.:: + + j = join(user_table, address_table, + user_table.c.id == address_table.c.user_id) + stmt = select(user_table).select_from(j) + + would emit SQL along the lines of:: + + SELECT user.id, user.name FROM user + JOIN address ON user.id = address.user_id + + Similar functionality is available given any + :class:`_expression.FromClause` object (e.g. such as a + :class:`_schema.Table`) using + the :meth:`_expression.FromClause.join` method. + + :param left: The left side of the join. + + :param right: the right side of the join; this is any + :class:`_expression.FromClause` object such as a + :class:`_schema.Table` object, and + may also be a selectable-compatible object such as an ORM-mapped + class. + + :param onclause: a SQL expression representing the ON clause of the + join. If left at ``None``, :meth:`_expression.FromClause.join` + will attempt to + join the two tables based on a foreign key relationship. + + :param isouter: if True, render a LEFT OUTER JOIN, instead of JOIN. + + :param full: if True, render a FULL OUTER JOIN, instead of JOIN. + + .. seealso:: + + :meth:`_expression.FromClause.join` - method form, + based on a given left side. + + :class:`_expression.Join` - the type of object produced. + + """ + + return Join(left, right, onclause, isouter, full) + + +def lateral( + selectable: Union[SelectBase, _FromClauseArgument], + name: Optional[str] = None, +) -> LateralFromClause: + """Return a :class:`_expression.Lateral` object. + + :class:`_expression.Lateral` is an :class:`_expression.Alias` + subclass that represents + a subquery with the LATERAL keyword applied to it. + + The special behavior of a LATERAL subquery is that it appears in the + FROM clause of an enclosing SELECT, but may correlate to other + FROM clauses of that SELECT. It is a special case of subquery + only supported by a small number of backends, currently more recent + PostgreSQL versions. + + .. seealso:: + + :ref:`tutorial_lateral_correlation` - overview of usage. + + """ + return Lateral._factory(selectable, name=name) + + +def outerjoin( + left: _FromClauseArgument, + right: _FromClauseArgument, + onclause: Optional[_OnClauseArgument] = None, + full: bool = False, +) -> Join: + """Return an ``OUTER JOIN`` clause element. + + The returned object is an instance of :class:`_expression.Join`. + + Similar functionality is also available via the + :meth:`_expression.FromClause.outerjoin` method on any + :class:`_expression.FromClause`. 
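    For illustration, a minimal sketch reusing the hypothetical
    ``user_table`` and ``address_table`` names from :func:`_expression.join`
    above::

        from sqlalchemy import outerjoin, select

        j = outerjoin(
            user_table,
            address_table,
            user_table.c.id == address_table.c.user_id,
        )
        stmt = select(user_table.c.name).select_from(j)

    which would emit SQL along the lines of::

        SELECT user.name FROM user
        LEFT OUTER JOIN address ON user.id = address.user_id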
+ + :param left: The left side of the join. + + :param right: The right side of the join. + + :param onclause: Optional criterion for the ``ON`` clause, is + derived from foreign key relationships established between + left and right otherwise. + + To chain joins together, use the :meth:`_expression.FromClause.join` + or + :meth:`_expression.FromClause.outerjoin` methods on the resulting + :class:`_expression.Join` object. + + """ + return Join(left, right, onclause, isouter=True, full=full) + + +# START OVERLOADED FUNCTIONS select Select 1-10 + +# code within this block is **programmatically, +# statically generated** by tools/generate_tuple_map_overloads.py + + +@overload +def select(__ent0: _TCCA[_T0]) -> Select[Tuple[_T0]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] +) -> Select[Tuple[_T0, _T1]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] +) -> Select[Tuple[_T0, _T1, _T2]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], +) -> Select[Tuple[_T0, _T1, _T2, _T3]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + __ent7: _TCCA[_T7], +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + __ent7: _TCCA[_T7], + __ent8: _TCCA[_T8], +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + __ent7: _TCCA[_T7], + __ent8: _TCCA[_T8], + __ent9: _TCCA[_T9], +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8, _T9]]: ... + + +# END OVERLOADED FUNCTIONS select + + +@overload +def select( + *entities: _ColumnsClauseArgument[Any], **__kw: Any +) -> Select[Any]: ... + + +def select(*entities: _ColumnsClauseArgument[Any], **__kw: Any) -> Select[Any]: + r"""Construct a new :class:`_expression.Select`. + + + .. versionadded:: 1.4 - The :func:`_sql.select` function now accepts + column arguments positionally. The top-level :func:`_sql.select` + function will automatically use the 1.x or 2.x style API based on + the incoming arguments; using :func:`_sql.select` from the + ``sqlalchemy.future`` module will enforce that only the 2.x style + constructor is used. + + Similar functionality is also available via the + :meth:`_expression.FromClause.select` method on any + :class:`_expression.FromClause`. + + .. 
seealso:: + + :ref:`tutorial_selecting_data` - in the :ref:`unified_tutorial` + + :param \*entities: + Entities to SELECT from. For Core usage, this is typically a series + of :class:`_expression.ColumnElement` and / or + :class:`_expression.FromClause` + objects which will form the columns clause of the resulting + statement. For those objects that are instances of + :class:`_expression.FromClause` (typically :class:`_schema.Table` + or :class:`_expression.Alias` + objects), the :attr:`_expression.FromClause.c` + collection is extracted + to form a collection of :class:`_expression.ColumnElement` objects. + + This parameter will also accept :class:`_expression.TextClause` + constructs as + given, as well as ORM-mapped classes. + + """ + # the keyword args are a necessary element in order for the typing + # to work out w/ the varargs vs. having named "keyword" arguments that + # aren't always present. + if __kw: + raise _no_kw() + return Select(*entities) + + +def table(name: str, *columns: ColumnClause[Any], **kw: Any) -> TableClause: + """Produce a new :class:`_expression.TableClause`. + + The object returned is an instance of + :class:`_expression.TableClause`, which + represents the "syntactical" portion of the schema-level + :class:`_schema.Table` object. + It may be used to construct lightweight table constructs. + + :param name: Name of the table. + + :param columns: A collection of :func:`_expression.column` constructs. + + :param schema: The schema name for this table. + + .. versionadded:: 1.3.18 :func:`_expression.table` can now + accept a ``schema`` argument. + """ + + return TableClause(name, *columns, **kw) + + +def tablesample( + selectable: _FromClauseArgument, + sampling: Union[float, Function[Any]], + name: Optional[str] = None, + seed: Optional[roles.ExpressionElementRole[Any]] = None, +) -> TableSample: + """Return a :class:`_expression.TableSample` object. + + :class:`_expression.TableSample` is an :class:`_expression.Alias` + subclass that represents + a table with the TABLESAMPLE clause applied to it. + :func:`_expression.tablesample` + is also available from the :class:`_expression.FromClause` + class via the + :meth:`_expression.FromClause.tablesample` method. + + The TABLESAMPLE clause allows selecting a randomly selected approximate + percentage of rows from a table. It supports multiple sampling methods, + most commonly BERNOULLI and SYSTEM. + + e.g.:: + + from sqlalchemy import func + + selectable = people.tablesample( + func.bernoulli(1), + name='alias', + seed=func.random()) + stmt = select(selectable.c.people_id) + + Assuming ``people`` with a column ``people_id``, the above + statement would render as:: + + SELECT alias.people_id FROM + people AS alias TABLESAMPLE bernoulli(:bernoulli_1) + REPEATABLE (random()) + + :param sampling: a ``float`` percentage between 0 and 100 or + :class:`_functions.Function`. + + :param name: optional alias name + + :param seed: any real-valued SQL expression. When specified, the + REPEATABLE sub-clause is also rendered. + + """ + return TableSample._factory(selectable, sampling, name=name, seed=seed) + + +def union( + *selects: _SelectStatementForCompoundArgument, +) -> CompoundSelect: + r"""Return a ``UNION`` of multiple selectables. + + The returned object is an instance of + :class:`_expression.CompoundSelect`. + + A similar :func:`union()` method is available on all + :class:`_expression.FromClause` subclasses. + + :param \*selects: + a list of :class:`_expression.Select` instances. 
+ + :param \**kwargs: + available keyword arguments are the same as those of + :func:`select`. + + """ + return CompoundSelect._create_union(*selects) + + +def union_all( + *selects: _SelectStatementForCompoundArgument, +) -> CompoundSelect: + r"""Return a ``UNION ALL`` of multiple selectables. + + The returned object is an instance of + :class:`_expression.CompoundSelect`. + + A similar :func:`union_all()` method is available on all + :class:`_expression.FromClause` subclasses. + + :param \*selects: + a list of :class:`_expression.Select` instances. + + """ + return CompoundSelect._create_union_all(*selects) + + +def values( + *columns: ColumnClause[Any], + name: Optional[str] = None, + literal_binds: bool = False, +) -> Values: + r"""Construct a :class:`_expression.Values` construct. + + The column expressions and the actual data for + :class:`_expression.Values` are given in two separate steps. The + constructor receives the column expressions typically as + :func:`_expression.column` constructs, + and the data is then passed via the + :meth:`_expression.Values.data` method as a list, + which can be called multiple + times to add more data, e.g.:: + + from sqlalchemy import column + from sqlalchemy import values + + value_expr = values( + column('id', Integer), + column('name', String), + name="my_values" + ).data( + [(1, 'name1'), (2, 'name2'), (3, 'name3')] + ) + + :param \*columns: column expressions, typically composed using + :func:`_expression.column` objects. + + :param name: the name for this VALUES construct. If omitted, the + VALUES construct will be unnamed in a SQL expression. Different + backends may have different requirements here. + + :param literal_binds: Defaults to False. Whether or not to render + the data values inline in the SQL output, rather than using bound + parameters. + + """ + return Values(*columns, literal_binds=literal_binds, name=name) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_typing.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_typing.py new file mode 100644 index 00000000..73ed8899 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_typing.py @@ -0,0 +1,460 @@ +# sql/_typing.py +# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import operator +from typing import Any +from typing import Callable +from typing import Dict +from typing import Generic +from typing import Iterable +from typing import Mapping +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import roles +from .. import exc +from .. 
import util +from ..inspection import Inspectable +from ..util.typing import Literal +from ..util.typing import Protocol +from ..util.typing import TypeAlias + +if TYPE_CHECKING: + from datetime import date + from datetime import datetime + from datetime import time + from datetime import timedelta + from decimal import Decimal + from uuid import UUID + + from .base import Executable + from .compiler import Compiled + from .compiler import DDLCompiler + from .compiler import SQLCompiler + from .dml import UpdateBase + from .dml import ValuesBase + from .elements import ClauseElement + from .elements import ColumnElement + from .elements import KeyedColumnElement + from .elements import quoted_name + from .elements import SQLCoreOperations + from .elements import TextClause + from .lambdas import LambdaElement + from .roles import FromClauseRole + from .schema import Column + from .selectable import Alias + from .selectable import CTE + from .selectable import FromClause + from .selectable import Join + from .selectable import NamedFromClause + from .selectable import ReturnsRows + from .selectable import Select + from .selectable import Selectable + from .selectable import SelectBase + from .selectable import Subquery + from .selectable import TableClause + from .sqltypes import TableValueType + from .sqltypes import TupleType + from .type_api import TypeEngine + from ..engine import Dialect + from ..util.typing import TypeGuard + +_T = TypeVar("_T", bound=Any) +_T_co = TypeVar("_T_co", bound=Any, covariant=True) + + +_CE = TypeVar("_CE", bound="ColumnElement[Any]") + +_CLE = TypeVar("_CLE", bound="ClauseElement") + + +class _HasClauseElement(Protocol, Generic[_T_co]): + """indicates a class that has a __clause_element__() method""" + + def __clause_element__(self) -> roles.ExpressionElementRole[_T_co]: ... + + +class _CoreAdapterProto(Protocol): + """protocol for the ClauseAdapter/ColumnAdapter.traverse() method.""" + + def __call__(self, obj: _CE) -> _CE: ... + + +class _HasDialect(Protocol): + """protocol for Engine/Connection-like objects that have dialect + attribute. + """ + + @property + def dialect(self) -> Dialect: ... + + +# match column types that are not ORM entities +_NOT_ENTITY = TypeVar( + "_NOT_ENTITY", + int, + str, + bool, + "datetime", + "date", + "time", + "timedelta", + "UUID", + float, + "Decimal", +) + +_StarOrOne = Literal["*", 1] + +_MAYBE_ENTITY = TypeVar( + "_MAYBE_ENTITY", + roles.ColumnsClauseRole, + _StarOrOne, + Type[Any], + Inspectable[_HasClauseElement[Any]], + _HasClauseElement[Any], +) + + +# convention: +# XYZArgument - something that the end user is passing to a public API method +# XYZElement - the internal representation that we use for the thing. +# the coercions system is responsible for converting from XYZArgument to +# XYZElement. + +_TextCoercedExpressionArgument = Union[ + str, + "TextClause", + "ColumnElement[_T]", + _HasClauseElement[_T], + roles.ExpressionElementRole[_T], +] + +_ColumnsClauseArgument = Union[ + roles.TypedColumnsClauseRole[_T], + roles.ColumnsClauseRole, + "SQLCoreOperations[_T]", + _StarOrOne, + Type[_T], + Inspectable[_HasClauseElement[_T]], + _HasClauseElement[_T], +] +"""open-ended SELECT columns clause argument. + +Includes column expressions, tables, ORM mapped entities, a few literal values. + +This type is used for lists of columns / entities to be returned in result +sets; select(...), insert().returning(...), etc. 
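As a sketch, each of the following is acceptable in this role
(``user_table`` stands for an assumed :class:`_schema.Table`)::

    select(user_table)        # FROM element; its .c collection is expanded
    select(user_table.c.id)   # a single column expression
    select("*")               # the literal star
    select(1)                 # the literal one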
+ + +""" + +_TypedColumnClauseArgument = Union[ + roles.TypedColumnsClauseRole[_T], + "SQLCoreOperations[_T]", + Type[_T], +] + +_TP = TypeVar("_TP", bound=Tuple[Any, ...]) + +_T0 = TypeVar("_T0", bound=Any) +_T1 = TypeVar("_T1", bound=Any) +_T2 = TypeVar("_T2", bound=Any) +_T3 = TypeVar("_T3", bound=Any) +_T4 = TypeVar("_T4", bound=Any) +_T5 = TypeVar("_T5", bound=Any) +_T6 = TypeVar("_T6", bound=Any) +_T7 = TypeVar("_T7", bound=Any) +_T8 = TypeVar("_T8", bound=Any) +_T9 = TypeVar("_T9", bound=Any) + + +_ColumnExpressionArgument = Union[ + "ColumnElement[_T]", + _HasClauseElement[_T], + "SQLCoreOperations[_T]", + roles.ExpressionElementRole[_T], + roles.TypedColumnsClauseRole[_T], + Callable[[], "ColumnElement[_T]"], + "LambdaElement", +] +"See docs in public alias ColumnExpressionArgument." + +ColumnExpressionArgument: TypeAlias = _ColumnExpressionArgument[_T] +"""Narrower "column expression" argument. + +This type is used for all the other "column" kinds of expressions that +typically represent a single SQL column expression, not a set of columns the +way a table or ORM entity does. + +This includes ColumnElement, or ORM-mapped attributes that will have a +``__clause_element__()`` method, it also has the ExpressionElementRole +overall which brings in the TextClause object also. + +.. versionadded:: 2.0.13 + +""" + +_ColumnExpressionOrLiteralArgument = Union[Any, _ColumnExpressionArgument[_T]] + +_ColumnExpressionOrStrLabelArgument = Union[str, _ColumnExpressionArgument[_T]] + +_ByArgument = Union[ + Iterable[_ColumnExpressionOrStrLabelArgument[Any]], + _ColumnExpressionOrStrLabelArgument[Any], +] +"""Used for keyword-based ``order_by`` and ``partition_by`` parameters.""" + + +_InfoType = Dict[Any, Any] +"""the .info dictionary accepted and used throughout Core /ORM""" + +_FromClauseArgument = Union[ + roles.FromClauseRole, + Type[Any], + Inspectable[_HasClauseElement[Any]], + _HasClauseElement[Any], +] +"""A FROM clause, like we would send to select().select_from(). + +Also accommodates ORM entities and related constructs. + +""" + +_JoinTargetArgument = Union[_FromClauseArgument, roles.JoinTargetRole] +"""target for join() builds on _FromClauseArgument to include additional +join target roles such as those which come from the ORM. + +""" + +_OnClauseArgument = Union[_ColumnExpressionArgument[Any], roles.OnClauseRole] +"""target for an ON clause, includes additional roles such as those which +come from the ORM. + +""" + +_SelectStatementForCompoundArgument = Union[ + "SelectBase", roles.CompoundElementRole +] +"""SELECT statement acceptable by ``union()`` and other SQL set operations""" + +_DMLColumnArgument = Union[ + str, + _HasClauseElement[Any], + roles.DMLColumnRole, + "SQLCoreOperations[Any]", +] +"""A DML column expression. This is a "key" inside of insert().values(), +update().values(), and related. + +These are usually strings or SQL table columns. + +There's also edge cases like JSON expression assignment, which we would want +the DMLColumnRole to be able to accommodate. + +""" + +_DMLKey = TypeVar("_DMLKey", bound=_DMLColumnArgument) +_DMLColumnKeyMapping = Mapping[_DMLKey, Any] + + +_DDLColumnArgument = Union[str, "Column[Any]", roles.DDLConstraintColumnRole] +"""DDL column. + +used for :class:`.PrimaryKeyConstraint`, :class:`.UniqueConstraint`, etc. 
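e.g., both spellings below are acceptable (a sketch; ``user_table`` is an
assumed :class:`_schema.Table`)::

    UniqueConstraint("name")
    UniqueConstraint(user_table.c.name)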
+ +""" + +_DMLTableArgument = Union[ + "TableClause", + "Join", + "Alias", + "CTE", + Type[Any], + Inspectable[_HasClauseElement[Any]], + _HasClauseElement[Any], +] + +_PropagateAttrsType = util.immutabledict[str, Any] + +_TypeEngineArgument = Union[Type["TypeEngine[_T]"], "TypeEngine[_T]"] + +_EquivalentColumnMap = Dict["ColumnElement[Any]", Set["ColumnElement[Any]"]] + +_LimitOffsetType = Union[int, _ColumnExpressionArgument[int], None] + +_AutoIncrementType = Union[bool, Literal["auto", "ignore_fk"]] + +if TYPE_CHECKING: + + def is_sql_compiler(c: Compiled) -> TypeGuard[SQLCompiler]: ... + + def is_ddl_compiler(c: Compiled) -> TypeGuard[DDLCompiler]: ... + + def is_named_from_clause( + t: FromClauseRole, + ) -> TypeGuard[NamedFromClause]: ... + + def is_column_element( + c: ClauseElement, + ) -> TypeGuard[ColumnElement[Any]]: ... + + def is_keyed_column_element( + c: ClauseElement, + ) -> TypeGuard[KeyedColumnElement[Any]]: ... + + def is_text_clause(c: ClauseElement) -> TypeGuard[TextClause]: ... + + def is_from_clause(c: ClauseElement) -> TypeGuard[FromClause]: ... + + def is_tuple_type(t: TypeEngine[Any]) -> TypeGuard[TupleType]: ... + + def is_table_value_type( + t: TypeEngine[Any], + ) -> TypeGuard[TableValueType]: ... + + def is_selectable(t: Any) -> TypeGuard[Selectable]: ... + + def is_select_base( + t: Union[Executable, ReturnsRows] + ) -> TypeGuard[SelectBase]: ... + + def is_select_statement( + t: Union[Executable, ReturnsRows] + ) -> TypeGuard[Select[Any]]: ... + + def is_table(t: FromClause) -> TypeGuard[TableClause]: ... + + def is_subquery(t: FromClause) -> TypeGuard[Subquery]: ... + + def is_dml(c: ClauseElement) -> TypeGuard[UpdateBase]: ... + +else: + is_sql_compiler = operator.attrgetter("is_sql") + is_ddl_compiler = operator.attrgetter("is_ddl") + is_named_from_clause = operator.attrgetter("named_with_column") + is_column_element = operator.attrgetter("_is_column_element") + is_keyed_column_element = operator.attrgetter("_is_keyed_column_element") + is_text_clause = operator.attrgetter("_is_text_clause") + is_from_clause = operator.attrgetter("_is_from_clause") + is_tuple_type = operator.attrgetter("_is_tuple_type") + is_table_value_type = operator.attrgetter("_is_table_value") + is_selectable = operator.attrgetter("is_selectable") + is_select_base = operator.attrgetter("_is_select_base") + is_select_statement = operator.attrgetter("_is_select_statement") + is_table = operator.attrgetter("_is_table") + is_subquery = operator.attrgetter("_is_subquery") + is_dml = operator.attrgetter("is_dml") + + +def has_schema_attr(t: FromClauseRole) -> TypeGuard[TableClause]: + return hasattr(t, "schema") + + +def is_quoted_name(s: str) -> TypeGuard[quoted_name]: + return hasattr(s, "quote") + + +def is_has_clause_element(s: object) -> TypeGuard[_HasClauseElement[Any]]: + return hasattr(s, "__clause_element__") + + +def is_insert_update(c: ClauseElement) -> TypeGuard[ValuesBase]: + return c.is_dml and (c.is_insert or c.is_update) # type: ignore + + +def _no_kw() -> exc.ArgumentError: + return exc.ArgumentError( + "Additional keyword arguments are not accepted by this " + "function/method. The presence of **kw is for pep-484 typing purposes" + ) + + +def _unexpected_kw(methname: str, kw: Dict[str, Any]) -> NoReturn: + k = list(kw)[0] + raise TypeError(f"{methname} got an unexpected keyword argument '{k}'") + + +@overload +def Nullable( + val: "SQLCoreOperations[_T]", +) -> "SQLCoreOperations[Optional[_T]]": ... 
+ + +@overload +def Nullable( + val: roles.ExpressionElementRole[_T], +) -> roles.ExpressionElementRole[Optional[_T]]: ... + + +@overload +def Nullable(val: Type[_T]) -> Type[Optional[_T]]: ... + + +def Nullable( + val: _TypedColumnClauseArgument[_T], +) -> _TypedColumnClauseArgument[Optional[_T]]: + """Types a column or ORM class as nullable. + + This can be used in select and other contexts to express that the value of + a column can be null, for example due to an outer join:: + + stmt1 = select(A, Nullable(B)).outerjoin(A.bs) + stmt2 = select(A.data, Nullable(B.data)).outerjoin(A.bs) + + At runtime this method returns the input unchanged. + + .. versionadded:: 2.0.20 + """ + return val + + +@overload +def NotNullable( + val: "SQLCoreOperations[Optional[_T]]", +) -> "SQLCoreOperations[_T]": ... + + +@overload +def NotNullable( + val: roles.ExpressionElementRole[Optional[_T]], +) -> roles.ExpressionElementRole[_T]: ... + + +@overload +def NotNullable(val: Type[Optional[_T]]) -> Type[_T]: ... + + +@overload +def NotNullable(val: Optional[Type[_T]]) -> Type[_T]: ... + + +def NotNullable( + val: Union[_TypedColumnClauseArgument[Optional[_T]], Optional[Type[_T]]], +) -> _TypedColumnClauseArgument[_T]: + """Types a column or ORM class as not nullable. + + This can be used in select and other contexts to express that the value of + a column cannot be null, for example due to a where condition on a + nullable column:: + + stmt = select(NotNullable(A.value)).where(A.value.is_not(None)) + + At runtime this method returns the input unchanged. + + .. versionadded:: 2.0.20 + """ + return val # type: ignore diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/annotation.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/annotation.py new file mode 100644 index 00000000..db382b87 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/annotation.py @@ -0,0 +1,585 @@ +# sql/annotation.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""The :class:`.Annotated` class and related routines; creates hash-equivalent +copies of SQL constructs which contain context-specific markers and +associations. + +Note that the :class:`.Annotated` concept as implemented in this module is not +related in any way to the pep-593 concept of "Annotated". + + +""" + +from __future__ import annotations + +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import FrozenSet +from typing import Mapping +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar + +from . import operators +from .cache_key import HasCacheKey +from .visitors import anon_map +from .visitors import ExternallyTraversible +from .visitors import InternalTraversal +from .. 
import util +from ..util.typing import Literal +from ..util.typing import Self + +if TYPE_CHECKING: + from .base import _EntityNamespace + from .visitors import _TraverseInternalsType + +_AnnotationDict = Mapping[str, Any] + +EMPTY_ANNOTATIONS: util.immutabledict[str, Any] = util.EMPTY_DICT + + +class SupportsAnnotations(ExternallyTraversible): + __slots__ = () + + _annotations: util.immutabledict[str, Any] = EMPTY_ANNOTATIONS + + proxy_set: util.generic_fn_descriptor[FrozenSet[Any]] + + _is_immutable: bool + + def _annotate(self, values: _AnnotationDict) -> Self: + raise NotImplementedError() + + @overload + def _deannotate( + self, + values: Literal[None] = ..., + clone: bool = ..., + ) -> Self: ... + + @overload + def _deannotate( + self, + values: Sequence[str] = ..., + clone: bool = ..., + ) -> SupportsAnnotations: ... + + def _deannotate( + self, + values: Optional[Sequence[str]] = None, + clone: bool = False, + ) -> SupportsAnnotations: + raise NotImplementedError() + + @util.memoized_property + def _annotations_cache_key(self) -> Tuple[Any, ...]: + anon_map_ = anon_map() + + return self._gen_annotations_cache_key(anon_map_) + + def _gen_annotations_cache_key( + self, anon_map: anon_map + ) -> Tuple[Any, ...]: + return ( + "_annotations", + tuple( + ( + key, + ( + value._gen_cache_key(anon_map, []) + if isinstance(value, HasCacheKey) + else value + ), + ) + for key, value in [ + (key, self._annotations[key]) + for key in sorted(self._annotations) + ] + ), + ) + + +class SupportsWrappingAnnotations(SupportsAnnotations): + __slots__ = () + + _constructor: Callable[..., SupportsWrappingAnnotations] + + if TYPE_CHECKING: + + @util.ro_non_memoized_property + def entity_namespace(self) -> _EntityNamespace: ... + + def _annotate(self, values: _AnnotationDict) -> Self: + """return a copy of this ClauseElement with annotations + updated by the given dictionary. + + """ + return Annotated._as_annotated_instance(self, values) # type: ignore + + def _with_annotations(self, values: _AnnotationDict) -> Self: + """return a copy of this ClauseElement with annotations + replaced by the given dictionary. + + """ + return Annotated._as_annotated_instance(self, values) # type: ignore + + @overload + def _deannotate( + self, + values: Literal[None] = ..., + clone: bool = ..., + ) -> Self: ... + + @overload + def _deannotate( + self, + values: Sequence[str] = ..., + clone: bool = ..., + ) -> SupportsAnnotations: ... + + def _deannotate( + self, + values: Optional[Sequence[str]] = None, + clone: bool = False, + ) -> SupportsAnnotations: + """return a copy of this :class:`_expression.ClauseElement` + with annotations + removed. + + :param values: optional tuple of individual values + to remove. + + """ + if clone: + s = self._clone() + return s + else: + return self + + +class SupportsCloneAnnotations(SupportsWrappingAnnotations): + # SupportsCloneAnnotations extends from SupportsWrappingAnnotations + # to support the structure of having the base ClauseElement + # be a subclass of SupportsWrappingAnnotations. Any ClauseElement + # subclass that wants to extend from SupportsCloneAnnotations + # will inherently also be subclassing SupportsWrappingAnnotations, so + # make that specific here. 
+ + if not typing.TYPE_CHECKING: + __slots__ = () + + _clone_annotations_traverse_internals: _TraverseInternalsType = [ + ("_annotations", InternalTraversal.dp_annotations_key) + ] + + def _annotate(self, values: _AnnotationDict) -> Self: + """return a copy of this ClauseElement with annotations + updated by the given dictionary. + + """ + new = self._clone() + new._annotations = new._annotations.union(values) + new.__dict__.pop("_annotations_cache_key", None) + new.__dict__.pop("_generate_cache_key", None) + return new + + def _with_annotations(self, values: _AnnotationDict) -> Self: + """return a copy of this ClauseElement with annotations + replaced by the given dictionary. + + """ + new = self._clone() + new._annotations = util.immutabledict(values) + new.__dict__.pop("_annotations_cache_key", None) + new.__dict__.pop("_generate_cache_key", None) + return new + + @overload + def _deannotate( + self, + values: Literal[None] = ..., + clone: bool = ..., + ) -> Self: ... + + @overload + def _deannotate( + self, + values: Sequence[str] = ..., + clone: bool = ..., + ) -> SupportsAnnotations: ... + + def _deannotate( + self, + values: Optional[Sequence[str]] = None, + clone: bool = False, + ) -> SupportsAnnotations: + """return a copy of this :class:`_expression.ClauseElement` + with annotations + removed. + + :param values: optional tuple of individual values + to remove. + + """ + if clone or self._annotations: + # clone is used when we are also copying + # the expression for a deep deannotation + new = self._clone() + new._annotations = util.immutabledict() + new.__dict__.pop("_annotations_cache_key", None) + return new + else: + return self + + +class Annotated(SupportsAnnotations): + """clones a SupportsAnnotations and applies an 'annotations' dictionary. + + Unlike regular clones, this clone also mimics __hash__() and + __eq__() of the original element so that it takes its place + in hashed collections. + + A reference to the original element is maintained, for the important + reason of keeping its hash value current. When GC'ed, the + hash value may be reused, causing conflicts. + + .. note:: The rationale for Annotated producing a brand new class, + rather than placing the functionality directly within ClauseElement, + is **performance**. The __hash__() method is absent on plain + ClauseElement which leads to significantly reduced function call + overhead, as the use of sets and dictionaries against ClauseElement + objects is prevalent, but most are not "annotated". 
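    An illustrative sketch of the resulting behavior (internal API; ``expr``
    stands for any column expression, ``some_entity`` for an arbitrary
    value)::

        annotated = expr._annotate({"entity": some_entity})
        assert hash(annotated) == hash(expr)   # takes expr's place in sets/dicts
        annotated._annotations["entity"]       # -> some_entity
        annotated._deannotate()                # -> the original expr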
+ + """ + + _is_column_operators = False + + @classmethod + def _as_annotated_instance( + cls, element: SupportsWrappingAnnotations, values: _AnnotationDict + ) -> Annotated: + try: + cls = annotated_classes[element.__class__] + except KeyError: + cls = _new_annotation_type(element.__class__, cls) + return cls(element, values) + + _annotations: util.immutabledict[str, Any] + __element: SupportsWrappingAnnotations + _hash: int + + def __new__(cls: Type[Self], *args: Any) -> Self: + return object.__new__(cls) + + def __init__( + self, element: SupportsWrappingAnnotations, values: _AnnotationDict + ): + self.__dict__ = element.__dict__.copy() + self.__dict__.pop("_annotations_cache_key", None) + self.__dict__.pop("_generate_cache_key", None) + self.__element = element + self._annotations = util.immutabledict(values) + self._hash = hash(element) + + def _annotate(self, values: _AnnotationDict) -> Self: + _values = self._annotations.union(values) + new = self._with_annotations(_values) + return new + + def _with_annotations(self, values: _AnnotationDict) -> Self: + clone = self.__class__.__new__(self.__class__) + clone.__dict__ = self.__dict__.copy() + clone.__dict__.pop("_annotations_cache_key", None) + clone.__dict__.pop("_generate_cache_key", None) + clone._annotations = util.immutabledict(values) + return clone + + @overload + def _deannotate( + self, + values: Literal[None] = ..., + clone: bool = ..., + ) -> Self: ... + + @overload + def _deannotate( + self, + values: Sequence[str] = ..., + clone: bool = ..., + ) -> Annotated: ... + + def _deannotate( + self, + values: Optional[Sequence[str]] = None, + clone: bool = True, + ) -> SupportsAnnotations: + if values is None: + return self.__element + else: + return self._with_annotations( + util.immutabledict( + { + key: value + for key, value in self._annotations.items() + if key not in values + } + ) + ) + + if not typing.TYPE_CHECKING: + # manually proxy some methods that need extra attention + def _compiler_dispatch(self, visitor: Any, **kw: Any) -> Any: + return self.__element.__class__._compiler_dispatch( + self, visitor, **kw + ) + + @property + def _constructor(self): + return self.__element._constructor + + def _clone(self, **kw: Any) -> Self: + clone = self.__element._clone(**kw) + if clone is self.__element: + # detect immutable, don't change anything + return self + else: + # update the clone with any changes that have occurred + # to this object's __dict__. + clone.__dict__.update(self.__dict__) + return self.__class__(clone, self._annotations) + + def __reduce__(self) -> Tuple[Type[Annotated], Tuple[Any, ...]]: + return self.__class__, (self.__element, self._annotations) + + def __hash__(self) -> int: + return self._hash + + def __eq__(self, other: Any) -> bool: + if self._is_column_operators: + return self.__element.__class__.__eq__(self, other) + else: + return hash(other) == hash(self) + + @util.ro_non_memoized_property + def entity_namespace(self) -> _EntityNamespace: + if "entity_namespace" in self._annotations: + return cast( + SupportsWrappingAnnotations, + self._annotations["entity_namespace"], + ).entity_namespace + else: + return self.__element.entity_namespace + + +# hard-generate Annotated subclasses. this technique +# is used instead of on-the-fly types (i.e. type.__new__()) +# so that the resulting objects are pickleable; additionally, other +# decisions can be made up front about the type of object being annotated +# just once per class rather than per-instance. 
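# mapping of element type -> generated "Annotated<classname>" proxy type;
# populated lazily by _new_annotation_type() when a type is first annotated,
# or up front via _prepare_annotations().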
+annotated_classes: Dict[Type[SupportsWrappingAnnotations], Type[Annotated]] = ( + {} +) + +_SA = TypeVar("_SA", bound="SupportsAnnotations") + + +def _safe_annotate(to_annotate: _SA, annotations: _AnnotationDict) -> _SA: + try: + _annotate = to_annotate._annotate + except AttributeError: + # skip objects that don't actually have an `_annotate` + # attribute, namely QueryableAttribute inside of a join + # condition + return to_annotate + else: + return _annotate(annotations) + + +def _deep_annotate( + element: _SA, + annotations: _AnnotationDict, + exclude: Optional[Sequence[SupportsAnnotations]] = None, + *, + detect_subquery_cols: bool = False, + ind_cols_on_fromclause: bool = False, + annotate_callable: Optional[ + Callable[[SupportsAnnotations, _AnnotationDict], SupportsAnnotations] + ] = None, +) -> _SA: + """Deep copy the given ClauseElement, annotating each element + with the given annotations dictionary. + + Elements within the exclude collection will be cloned but not annotated. + + """ + + # annotated objects hack the __hash__() method so if we want to + # uniquely process them we have to use id() + + cloned_ids: Dict[int, SupportsAnnotations] = {} + + def clone(elem: SupportsAnnotations, **kw: Any) -> SupportsAnnotations: + # ind_cols_on_fromclause means make sure an AnnotatedFromClause + # has its own .c collection independent of that which its proxying. + # this is used specifically by orm.LoaderCriteriaOption to break + # a reference cycle that it's otherwise prone to building, + # see test_relationship_criteria-> + # test_loader_criteria_subquery_w_same_entity. logic here was + # changed for #8796 and made explicit; previously it occurred + # by accident + + kw["detect_subquery_cols"] = detect_subquery_cols + id_ = id(elem) + + if id_ in cloned_ids: + return cloned_ids[id_] + + if ( + exclude + and hasattr(elem, "proxy_set") + and elem.proxy_set.intersection(exclude) + ): + newelem = elem._clone(clone=clone, **kw) + elif annotations != elem._annotations: + if detect_subquery_cols and elem._is_immutable: + to_annotate = elem._clone(clone=clone, **kw) + else: + to_annotate = elem + if annotate_callable: + newelem = annotate_callable(to_annotate, annotations) + else: + newelem = _safe_annotate(to_annotate, annotations) + else: + newelem = elem + + newelem._copy_internals( + clone=clone, ind_cols_on_fromclause=ind_cols_on_fromclause + ) + + cloned_ids[id_] = newelem + return newelem + + if element is not None: + element = cast(_SA, clone(element)) + clone = None # type: ignore # remove gc cycles + return element + + +@overload +def _deep_deannotate( + element: Literal[None], values: Optional[Sequence[str]] = None +) -> Literal[None]: ... + + +@overload +def _deep_deannotate( + element: _SA, values: Optional[Sequence[str]] = None +) -> _SA: ... 
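+
+
+# note: the overloads above encode that _deep_deannotate(None) returns
+# None, while a non-None element comes back typed as the same kind of
+# element that was passed in.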
+ + +def _deep_deannotate( + element: Optional[_SA], values: Optional[Sequence[str]] = None +) -> Optional[_SA]: + """Deep copy the given element, removing annotations.""" + + cloned: Dict[Any, SupportsAnnotations] = {} + + def clone(elem: SupportsAnnotations, **kw: Any) -> SupportsAnnotations: + key: Any + if values: + key = id(elem) + else: + key = elem + + if key not in cloned: + newelem = elem._deannotate(values=values, clone=True) + newelem._copy_internals(clone=clone) + cloned[key] = newelem + return newelem + else: + return cloned[key] + + if element is not None: + element = cast(_SA, clone(element)) + clone = None # type: ignore # remove gc cycles + return element + + +def _shallow_annotate(element: _SA, annotations: _AnnotationDict) -> _SA: + """Annotate the given ClauseElement and copy its internals so that + internal objects refer to the new annotated object. + + Basically used to apply a "don't traverse" annotation to a + selectable, without digging throughout the whole + structure wasting time. + """ + element = element._annotate(annotations) + element._copy_internals() + return element + + +def _new_annotation_type( + cls: Type[SupportsWrappingAnnotations], base_cls: Type[Annotated] +) -> Type[Annotated]: + """Generates a new class that subclasses Annotated and proxies a given + element type. + + """ + if issubclass(cls, Annotated): + return cls + elif cls in annotated_classes: + return annotated_classes[cls] + + for super_ in cls.__mro__: + # check if an Annotated subclass more specific than + # the given base_cls is already registered, such + # as AnnotatedColumnElement. + if super_ in annotated_classes: + base_cls = annotated_classes[super_] + break + + annotated_classes[cls] = anno_cls = cast( + Type[Annotated], + type("Annotated%s" % cls.__name__, (base_cls, cls), {}), + ) + globals()["Annotated%s" % cls.__name__] = anno_cls + + if "_traverse_internals" in cls.__dict__: + anno_cls._traverse_internals = list(cls._traverse_internals) + [ + ("_annotations", InternalTraversal.dp_annotations_key) + ] + elif cls.__dict__.get("inherit_cache", False): + anno_cls._traverse_internals = list(cls._traverse_internals) + [ + ("_annotations", InternalTraversal.dp_annotations_key) + ] + + # some classes include this even if they have traverse_internals + # e.g. BindParameter, add it if present. + if cls.__dict__.get("inherit_cache", False): + anno_cls.inherit_cache = True # type: ignore + elif "inherit_cache" in cls.__dict__: + anno_cls.inherit_cache = cls.__dict__["inherit_cache"] # type: ignore + + anno_cls._is_column_operators = issubclass(cls, operators.ColumnOperators) + + return anno_cls + + +def _prepare_annotations( + target_hierarchy: Type[SupportsWrappingAnnotations], + base_cls: Type[Annotated], +) -> None: + for cls in util.walk_subclasses(target_hierarchy): + _new_annotation_type(cls, base_cls) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/base.py new file mode 100644 index 00000000..e4a7256b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/base.py @@ -0,0 +1,2185 @@ +# sql/base.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +"""Foundational utilities common to many sql modules. 
+ +""" + + +from __future__ import annotations + +import collections +from enum import Enum +import itertools +from itertools import zip_longest +import operator +import re +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import FrozenSet +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Mapping +from typing import MutableMapping +from typing import NamedTuple +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import roles +from . import visitors +from .cache_key import HasCacheKey # noqa +from .cache_key import MemoizedHasCacheKey # noqa +from .traversals import HasCopyInternals # noqa +from .visitors import ClauseVisitor +from .visitors import ExtendedInternalTraversal +from .visitors import ExternallyTraversible +from .visitors import InternalTraversal +from .. import event +from .. import exc +from .. import util +from ..util import HasMemoized as HasMemoized +from ..util import hybridmethod +from ..util import typing as compat_typing +from ..util.typing import Protocol +from ..util.typing import Self +from ..util.typing import TypeGuard + +if TYPE_CHECKING: + from . import coercions + from . import elements + from . import type_api + from ._orm_types import DMLStrategyArgument + from ._orm_types import SynchronizeSessionArgument + from ._typing import _CLE + from .elements import BindParameter + from .elements import ClauseList + from .elements import ColumnClause # noqa + from .elements import ColumnElement + from .elements import NamedColumn + from .elements import SQLCoreOperations + from .elements import TextClause + from .schema import Column + from .schema import DefaultGenerator + from .selectable import _JoinTargetElement + from .selectable import _SelectIterable + from .selectable import FromClause + from ..engine import Connection + from ..engine import CursorResult + from ..engine.interfaces import _CoreMultiExecuteParams + from ..engine.interfaces import _ExecuteOptions + from ..engine.interfaces import _ImmutableExecuteOptions + from ..engine.interfaces import CacheStats + from ..engine.interfaces import Compiled + from ..engine.interfaces import CompiledCacheType + from ..engine.interfaces import CoreExecuteOptionsParameter + from ..engine.interfaces import Dialect + from ..engine.interfaces import IsolationLevel + from ..engine.interfaces import SchemaTranslateMapType + from ..event import dispatcher + +if not TYPE_CHECKING: + coercions = None # noqa + elements = None # noqa + type_api = None # noqa + + +class _NoArg(Enum): + NO_ARG = 0 + + def __repr__(self): + return f"_NoArg.{self.name}" + + +NO_ARG = _NoArg.NO_ARG + + +class _NoneName(Enum): + NONE_NAME = 0 + """indicate a 'deferred' name that was ultimately the value None.""" + + +_NONE_NAME = _NoneName.NONE_NAME + +_T = TypeVar("_T", bound=Any) + +_Fn = TypeVar("_Fn", bound=Callable[..., Any]) + +_AmbiguousTableNameMap = MutableMapping[str, str] + + +class _DefaultDescriptionTuple(NamedTuple): + arg: Any + is_scalar: Optional[bool] + is_callable: Optional[bool] + is_sentinel: Optional[bool] + + @classmethod + def _from_column_default( + cls, default: Optional[DefaultGenerator] + ) -> _DefaultDescriptionTuple: + return ( + 
_DefaultDescriptionTuple( + default.arg, # type: ignore + default.is_scalar, + default.is_callable, + default.is_sentinel, + ) + if default + and ( + default.has_arg + or (not default.for_update and default.is_sentinel) + ) + else _DefaultDescriptionTuple(None, None, None, None) + ) + + +_never_select_column = operator.attrgetter("_omit_from_statements") + + +class _EntityNamespace(Protocol): + def __getattr__(self, key: str) -> SQLCoreOperations[Any]: ... + + +class _HasEntityNamespace(Protocol): + @util.ro_non_memoized_property + def entity_namespace(self) -> _EntityNamespace: ... + + +def _is_has_entity_namespace(element: Any) -> TypeGuard[_HasEntityNamespace]: + return hasattr(element, "entity_namespace") + + +# Remove when https://github.com/python/mypy/issues/14640 will be fixed +_Self = TypeVar("_Self", bound=Any) + + +class Immutable: + """mark a ClauseElement as 'immutable' when expressions are cloned. + + "immutable" objects refers to the "mutability" of an object in the + context of SQL DQL and DML generation. Such as, in DQL, one can + compose a SELECT or subquery of varied forms, but one cannot modify + the structure of a specific table or column within DQL. + :class:`.Immutable` is mostly intended to follow this concept, and as + such the primary "immutable" objects are :class:`.ColumnClause`, + :class:`.Column`, :class:`.TableClause`, :class:`.Table`. + + """ + + __slots__ = () + + _is_immutable = True + + def unique_params(self, *optionaldict, **kwargs): + raise NotImplementedError("Immutable objects do not support copying") + + def params(self, *optionaldict, **kwargs): + raise NotImplementedError("Immutable objects do not support copying") + + def _clone(self: _Self, **kw: Any) -> _Self: + return self + + def _copy_internals( + self, *, omit_attrs: Iterable[str] = (), **kw: Any + ) -> None: + pass + + +class SingletonConstant(Immutable): + """Represent SQL constants like NULL, TRUE, FALSE""" + + _is_singleton_constant = True + + _singleton: SingletonConstant + + def __new__(cls: _T, *arg: Any, **kw: Any) -> _T: + return cast(_T, cls._singleton) + + @util.non_memoized_property + def proxy_set(self) -> FrozenSet[ColumnElement[Any]]: + raise NotImplementedError() + + @classmethod + def _create_singleton(cls): + obj = object.__new__(cls) + obj.__init__() # type: ignore + + # for a long time this was an empty frozenset, meaning + # a SingletonConstant would never be a "corresponding column" in + # a statement. This referred to #6259. However, in #7154 we see + # that we do in fact need "correspondence" to work when matching cols + # in result sets, so the non-correspondence was moved to a more + # specific level when we are actually adapting expressions for SQL + # render only. + obj.proxy_set = frozenset([obj]) + cls._singleton = obj + + +def _from_objects( + *elements: Union[ + ColumnElement[Any], FromClause, TextClause, _JoinTargetElement + ] +) -> Iterator[FromClause]: + return itertools.chain.from_iterable( + [element._from_objects for element in elements] + ) + + +def _select_iterables( + elements: Iterable[roles.ColumnsClauseRole], +) -> _SelectIterable: + """expand tables into individual columns in the + given list of column expressions. + + """ + return itertools.chain.from_iterable( + [c._select_iterable for c in elements] + ) + + +_SelfGenerativeType = TypeVar("_SelfGenerativeType", bound="_GenerativeType") + + +class _GenerativeType(compat_typing.Protocol): + def _generate(self) -> Self: ... 
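+
+
+# ILLUSTRATIVE SKETCH, not part of SQLAlchemy's API: the copy-on-write
+# chaining that _GenerativeType formalizes and that the @_generative
+# decorator below automates.  Each chaining method operates on a shallow
+# copy produced by _generate(), so the original object is never mutated.
+# All names in this demo are hypothetical.
+class _GenerativePatternDemo:
+    def __init__(self) -> None:
+        self._opts: Tuple[str, ...] = ()
+
+    def _generate(self) -> _GenerativePatternDemo:
+        new = self.__class__.__new__(self.__class__)
+        new.__dict__ = self.__dict__.copy()
+        return new
+
+    def with_opt(self, opt: str) -> _GenerativePatternDemo:
+        new = self._generate()
+        new._opts += (opt,)
+        return new  # callers chain on the copy; ``self`` is unchanged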
+
+
+def _generative(fn: _Fn) -> _Fn:
+    """non-caching _generative() decorator.
+
+    This is basically the legacy decorator that copies the object and
+    runs a method on the new copy.
+
+    """
+
+    @util.decorator
+    def _generative(
+        fn: _Fn, self: _SelfGenerativeType, *args: Any, **kw: Any
+    ) -> _SelfGenerativeType:
+        """Mark a method as generative."""
+
+        self = self._generate()
+        x = fn(self, *args, **kw)
+        assert x is self, "generative methods must return self"
+        return self
+
+    decorated = _generative(fn)
+    decorated.non_generative = fn  # type: ignore
+    return decorated
+
+
+def _exclusive_against(*names: str, **kw: Any) -> Callable[[_Fn], _Fn]:
+    msgs = kw.pop("msgs", {})
+
+    defaults = kw.pop("defaults", {})
+
+    getters = [
+        (name, operator.attrgetter(name), defaults.get(name, None))
+        for name in names
+    ]
+
+    @util.decorator
+    def check(fn, *args, **kw):
+        # make pylance happy by not including "self" in the argument
+        # list
+        self = args[0]
+        args = args[1:]
+        for name, getter, default_ in getters:
+            if getter(self) is not default_:
+                msg = msgs.get(
+                    name,
+                    "Method %s() has already been invoked on this %s construct"
+                    % (fn.__name__, self.__class__),
+                )
+                raise exc.InvalidRequestError(msg)
+        return fn(self, *args, **kw)
+
+    return check
+
+
+def _clone(element, **kw):
+    return element._clone(**kw)
+
+
+def _expand_cloned(
+    elements: Iterable[_CLE],
+) -> Iterable[_CLE]:
+    """expand the given set of ClauseElements to be the set of all 'cloned'
+    predecessors.
+
+    """
+    # TODO: cython candidate
+    return itertools.chain(*[x._cloned_set for x in elements])
+
+
+def _de_clone(
+    elements: Iterable[_CLE],
+) -> Iterable[_CLE]:
+    for x in elements:
+        while x._is_clone_of is not None:
+            x = x._is_clone_of
+        yield x
+
+
+def _cloned_intersection(a: Iterable[_CLE], b: Iterable[_CLE]) -> Set[_CLE]:
+    """return the intersection of sets a and b, counting
+    any overlap between 'cloned' predecessors.
+
+    The returned set is in terms of the entities present within 'a'.
+
+    """
+    all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
+    return {elem for elem in a if all_overlap.intersection(elem._cloned_set)}
+
+
+def _cloned_difference(a: Iterable[_CLE], b: Iterable[_CLE]) -> Set[_CLE]:
+    all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
+    return {
+        elem for elem in a if not all_overlap.intersection(elem._cloned_set)
+    }
+
+
+class _DialectArgView(MutableMapping[str, Any]):
+    """A dictionary view of dialect-level arguments in the form
+    <dialect>_<argument_name>.
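+
+    E.g., continuing the hypothetical ``mydialect_length`` argument used
+    in the :meth:`.DialectKWArgs.argument_for` docstring below::
+
+        some_index = Index('a', 'b', mydialect_length=5)
+        some_index.dialect_kwargs["mydialect_length"]   # -> 5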
+
+    """
+
+    def __init__(self, obj):
+        self.obj = obj
+
+    def _key(self, key):
+        try:
+            dialect, value_key = key.split("_", 1)
+        except ValueError as err:
+            raise KeyError(key) from err
+        else:
+            return dialect, value_key
+
+    def __getitem__(self, key):
+        dialect, value_key = self._key(key)
+
+        try:
+            opt = self.obj.dialect_options[dialect]
+        except exc.NoSuchModuleError as err:
+            raise KeyError(key) from err
+        else:
+            return opt[value_key]
+
+    def __setitem__(self, key, value):
+        try:
+            dialect, value_key = self._key(key)
+        except KeyError as err:
+            raise exc.ArgumentError(
+                "Keys must be of the form <dialect>_<argument_name>"
+            ) from err
+        else:
+            self.obj.dialect_options[dialect][value_key] = value
+
+    def __delitem__(self, key):
+        dialect, value_key = self._key(key)
+        del self.obj.dialect_options[dialect][value_key]
+
+    def __len__(self):
+        return sum(
+            len(args._non_defaults)
+            for args in self.obj.dialect_options.values()
+        )
+
+    def __iter__(self):
+        return (
+            "%s_%s" % (dialect_name, value_name)
+            for dialect_name in self.obj.dialect_options
+            for value_name in self.obj.dialect_options[
+                dialect_name
+            ]._non_defaults
+        )
+
+
+class _DialectArgDict(MutableMapping[str, Any]):
+    """A dictionary view of dialect-level arguments for a specific
+    dialect.
+
+    Maintains a separate collection of user-specified arguments
+    and dialect-specified default arguments.
+
+    """
+
+    def __init__(self):
+        self._non_defaults = {}
+        self._defaults = {}
+
+    def __len__(self):
+        return len(set(self._non_defaults).union(self._defaults))
+
+    def __iter__(self):
+        return iter(set(self._non_defaults).union(self._defaults))
+
+    def __getitem__(self, key):
+        if key in self._non_defaults:
+            return self._non_defaults[key]
+        else:
+            return self._defaults[key]
+
+    def __setitem__(self, key, value):
+        self._non_defaults[key] = value
+
+    def __delitem__(self, key):
+        del self._non_defaults[key]
+
+
+@util.preload_module("sqlalchemy.dialects")
+def _kw_reg_for_dialect(dialect_name):
+    dialect_cls = util.preloaded.dialects.registry.load(dialect_name)
+    if dialect_cls.construct_arguments is None:
+        return None
+    return dict(dialect_cls.construct_arguments)
+
+
+class DialectKWArgs:
+    """Establish the ability for a class to have dialect-specific arguments
+    with defaults and constructor validation.
+
+    The :class:`.DialectKWArgs` interacts with the
+    :attr:`.DefaultDialect.construct_arguments` present on a dialect.
+
+    .. seealso::
+
+        :attr:`.DefaultDialect.construct_arguments`
+
+    """
+
+    __slots__ = ()
+
+    _dialect_kwargs_traverse_internals = [
+        ("dialect_options", InternalTraversal.dp_dialect_options)
+    ]
+
+    @classmethod
+    def argument_for(cls, dialect_name, argument_name, default):
+        """Add a new kind of dialect-specific keyword argument for this class.
+
+        E.g.::
+
+            Index.argument_for("mydialect", "length", None)
+
+            some_index = Index('a', 'b', mydialect_length=5)
+
+        The :meth:`.DialectKWArgs.argument_for` method is a per-argument
+        way of adding extra arguments to the
+        :attr:`.DefaultDialect.construct_arguments` dictionary. This
+        dictionary provides a list of argument names accepted by various
+        schema-level constructs on behalf of a dialect.
+
+        New dialects should typically specify this dictionary all at once as a
+        data member of the dialect class.  The use case for ad-hoc addition of
+        argument names is typically for end-user code that is also using
+        a custom compilation scheme which consumes the additional arguments.
+
+        :param dialect_name: name of a dialect.  The dialect must be
+         locatable, else a :class:`.NoSuchModuleError` is raised.  The
+         dialect must also include an existing
+         :attr:`.DefaultDialect.construct_arguments` collection, indicating
+         that it participates in the keyword-argument validation and default
+         system, else :class:`.ArgumentError` is raised.  If the dialect does
+         not include this collection, then any keyword argument can already
+         be specified on behalf of this dialect.  All dialects packaged
+         within SQLAlchemy include this collection, however for third party
+         dialects, support may vary.
+
+        :param argument_name: name of the parameter.
+
+        :param default: default value of the parameter.
+
+        """
+
+        construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name]
+        if construct_arg_dictionary is None:
+            raise exc.ArgumentError(
+                "Dialect '%s' does not have keyword-argument "
+                "validation and defaults enabled" % dialect_name
+            )
+        if cls not in construct_arg_dictionary:
+            construct_arg_dictionary[cls] = {}
+        construct_arg_dictionary[cls][argument_name] = default
+
+    @util.memoized_property
+    def dialect_kwargs(self):
+        """A collection of keyword arguments specified as dialect-specific
+        options to this construct.
+
+        The arguments are present here in their original ``<dialect>_<kwarg>``
+        format.  Only arguments that were actually passed are included;
+        unlike the :attr:`.DialectKWArgs.dialect_options` collection, which
+        contains all options known by this dialect including defaults.
+
+        The collection is also writable; keys are accepted of the
+        form ``<dialect>_<kwarg>`` where the value will be assembled
+        into the list of options.
+
+        .. seealso::
+
+            :attr:`.DialectKWArgs.dialect_options` - nested dictionary form
+
+        """
+        return _DialectArgView(self)
+
+    @property
+    def kwargs(self):
+        """A synonym for :attr:`.DialectKWArgs.dialect_kwargs`."""
+        return self.dialect_kwargs
+
+    _kw_registry = util.PopulateDict(_kw_reg_for_dialect)
+
+    def _kw_reg_for_dialect_cls(self, dialect_name):
+        construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name]
+        d = _DialectArgDict()
+
+        if construct_arg_dictionary is None:
+            d._defaults.update({"*": None})
+        else:
+            for cls in reversed(self.__class__.__mro__):
+                if cls in construct_arg_dictionary:
+                    d._defaults.update(construct_arg_dictionary[cls])
+        return d
+
+    @util.memoized_property
+    def dialect_options(self):
+        """A collection of keyword arguments specified as dialect-specific
+        options to this construct.
+
+        This is a two-level nested registry, keyed to ``<dialect_name>``
+        and ``<argument_name>``.  For example, the ``postgresql_where``
+        argument would be locatable as::
+
+            arg = my_object.dialect_options['postgresql']['where']
+
+        .. versionadded:: 0.9.2
+
+        .. seealso::
+
+            :attr:`.DialectKWArgs.dialect_kwargs` - flat dictionary form
+
+        """
+
+        return util.PopulateDict(
+            util.portable_instancemethod(self._kw_reg_for_dialect_cls)
+        )
+
+    def _validate_dialect_kwargs(self, kwargs: Dict[str, Any]) -> None:
+        # validate remaining kwargs that they all specify DB prefixes
+
+        if not kwargs:
+            return
+
+        for k in kwargs:
+            m = re.match("^(.+?)_(.+)$", k)
+            if not m:
+                raise TypeError(
+                    "Additional arguments should be "
+                    "named <dialectname>_<argument>, got '%s'" % k
+                )
+            dialect_name, arg_name = m.group(1, 2)
+
+            try:
+                construct_arg_dictionary = self.dialect_options[dialect_name]
+            except exc.NoSuchModuleError:
+                util.warn(
+                    "Can't validate argument %r; can't "
+                    "locate any SQLAlchemy dialect named %r"
+                    % (k, dialect_name)
+                )
+                self.dialect_options[dialect_name] = d = _DialectArgDict()
+                d._defaults.update({"*": None})
+                d._non_defaults[arg_name] = kwargs[k]
+            else:
+                if (
+                    "*" not in construct_arg_dictionary
+                    and arg_name not in construct_arg_dictionary
+                ):
+                    raise exc.ArgumentError(
+                        "Argument %r is not accepted by "
+                        "dialect %r on behalf of %r"
+                        % (k, dialect_name, self.__class__)
+                    )
+                else:
+                    construct_arg_dictionary[arg_name] = kwargs[k]
+
+
+class CompileState:
+    """Produces additional object state necessary for a statement to be
+    compiled.
+
+    The :class:`.CompileState` class is at the base of classes that assemble
+    state for a particular statement object that is then used by the
+    compiler.  This process is essentially an extension of the process that
+    the SQLCompiler.visit_XYZ() method takes, however there is an emphasis
+    on converting raw user intent into more organized structures rather than
+    producing string output.  The top-level :class:`.CompileState` for the
+    statement being executed is also accessible when the execution context
+    works with invoking the statement and collecting results.
+
+    The production of :class:`.CompileState` is specific to the compiler, such
+    as within the :meth:`.SQLCompiler.visit_insert`,
+    :meth:`.SQLCompiler.visit_select` etc. methods.  These methods are also
+    responsible for associating the :class:`.CompileState` with the
+    :class:`.SQLCompiler` itself, if the statement is the "toplevel" statement,
+    i.e. the outermost SQL statement that's actually being executed.
+    There can be other :class:`.CompileState` objects that are not the
+    toplevel, such as when a SELECT subquery or CTE-nested
+    INSERT/UPDATE/DELETE is generated.
+
+    .. versionadded:: 1.4
+
+    """
+
+    __slots__ = ("statement", "_ambiguous_table_name_map")
+
+    plugins: Dict[Tuple[str, str], Type[CompileState]] = {}
+
+    _ambiguous_table_name_map: Optional[_AmbiguousTableNameMap]
+
+    @classmethod
+    def create_for_statement(cls, statement, compiler, **kw):
+        # factory construction.
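+        # a CompileState subclass is resolved from the
+        # (plugin_name, visit_name) ``plugins`` registry below;
+        # statements without a plugin fall back to the "default"
+        # entry for their visit name.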
+ + if statement._propagate_attrs: + plugin_name = statement._propagate_attrs.get( + "compile_state_plugin", "default" + ) + klass = cls.plugins.get( + (plugin_name, statement._effective_plugin_target), None + ) + if klass is None: + klass = cls.plugins[ + ("default", statement._effective_plugin_target) + ] + + else: + klass = cls.plugins[ + ("default", statement._effective_plugin_target) + ] + + if klass is cls: + return cls(statement, compiler, **kw) + else: + return klass.create_for_statement(statement, compiler, **kw) + + def __init__(self, statement, compiler, **kw): + self.statement = statement + + @classmethod + def get_plugin_class( + cls, statement: Executable + ) -> Optional[Type[CompileState]]: + plugin_name = statement._propagate_attrs.get( + "compile_state_plugin", None + ) + + if plugin_name: + key = (plugin_name, statement._effective_plugin_target) + if key in cls.plugins: + return cls.plugins[key] + + # there's no case where we call upon get_plugin_class() and want + # to get None back, there should always be a default. return that + # if there was no plugin-specific class (e.g. Insert with "orm" + # plugin) + try: + return cls.plugins[("default", statement._effective_plugin_target)] + except KeyError: + return None + + @classmethod + def _get_plugin_class_for_plugin( + cls, statement: Executable, plugin_name: str + ) -> Optional[Type[CompileState]]: + try: + return cls.plugins[ + (plugin_name, statement._effective_plugin_target) + ] + except KeyError: + return None + + @classmethod + def plugin_for( + cls, plugin_name: str, visit_name: str + ) -> Callable[[_Fn], _Fn]: + def decorate(cls_to_decorate): + cls.plugins[(plugin_name, visit_name)] = cls_to_decorate + return cls_to_decorate + + return decorate + + +class Generative(HasMemoized): + """Provide a method-chaining pattern in conjunction with the + @_generative decorator.""" + + def _generate(self) -> Self: + skip = self._memoized_keys + cls = self.__class__ + s = cls.__new__(cls) + if skip: + # ensure this iteration remains atomic + s.__dict__ = { + k: v for k, v in self.__dict__.copy().items() if k not in skip + } + else: + s.__dict__ = self.__dict__.copy() + return s + + +class InPlaceGenerative(HasMemoized): + """Provide a method-chaining pattern in conjunction with the + @_generative decorator that mutates in place.""" + + __slots__ = () + + def _generate(self): + skip = self._memoized_keys + # note __dict__ needs to be in __slots__ if this is used + for k in skip: + self.__dict__.pop(k, None) + return self + + +class HasCompileState(Generative): + """A class that has a :class:`.CompileState` associated with it.""" + + _compile_state_plugin: Optional[Type[CompileState]] = None + + _attributes: util.immutabledict[str, Any] = util.EMPTY_DICT + + _compile_state_factory = CompileState.create_for_statement + + +class _MetaOptions(type): + """metaclass for the Options class. + + This metaclass is actually necessary despite the availability of the + ``__init_subclass__()`` hook as this type also provides custom class-level + behavior for the ``__add__()`` method. + + """ + + _cache_attrs: Tuple[str, ...] + + def __add__(self, other): + o1 = self() + + if set(other).difference(self._cache_attrs): + raise TypeError( + "dictionary contains attributes not covered by " + "Options class %s: %r" + % (self, set(other).difference(self._cache_attrs)) + ) + + o1.__dict__.update(other) + return o1 + + if TYPE_CHECKING: + + def __getattr__(self, key: str) -> Any: ... + + def __setattr__(self, key: str, value: Any) -> None: ... 
+ + def __delattr__(self, key: str) -> None: ... + + +class Options(metaclass=_MetaOptions): + """A cacheable option dictionary with defaults.""" + + __slots__ = () + + _cache_attrs: Tuple[str, ...] + + def __init_subclass__(cls) -> None: + dict_ = cls.__dict__ + cls._cache_attrs = tuple( + sorted( + d + for d in dict_ + if not d.startswith("__") + and d not in ("_cache_key_traversal",) + ) + ) + super().__init_subclass__() + + def __init__(self, **kw): + self.__dict__.update(kw) + + def __add__(self, other): + o1 = self.__class__.__new__(self.__class__) + o1.__dict__.update(self.__dict__) + + if set(other).difference(self._cache_attrs): + raise TypeError( + "dictionary contains attributes not covered by " + "Options class %s: %r" + % (self, set(other).difference(self._cache_attrs)) + ) + + o1.__dict__.update(other) + return o1 + + def __eq__(self, other): + # TODO: very inefficient. This is used only in test suites + # right now. + for a, b in zip_longest(self._cache_attrs, other._cache_attrs): + if getattr(self, a) != getattr(other, b): + return False + return True + + def __repr__(self): + # TODO: fairly inefficient, used only in debugging right now. + + return "%s(%s)" % ( + self.__class__.__name__, + ", ".join( + "%s=%r" % (k, self.__dict__[k]) + for k in self._cache_attrs + if k in self.__dict__ + ), + ) + + @classmethod + def isinstance(cls, klass: Type[Any]) -> bool: + return issubclass(cls, klass) + + @hybridmethod + def add_to_element(self, name, value): + return self + {name: getattr(self, name) + value} + + @hybridmethod + def _state_dict_inst(self) -> Mapping[str, Any]: + return self.__dict__ + + _state_dict_const: util.immutabledict[str, Any] = util.EMPTY_DICT + + @_state_dict_inst.classlevel + def _state_dict(cls) -> Mapping[str, Any]: + return cls._state_dict_const + + @classmethod + def safe_merge(cls, other): + d = other._state_dict() + + # only support a merge with another object of our class + # and which does not have attrs that we don't. otherwise + # we risk having state that might not be part of our cache + # key strategy + + if ( + cls is not other.__class__ + and other._cache_attrs + and set(other._cache_attrs).difference(cls._cache_attrs) + ): + raise TypeError( + "other element %r is not empty, is not of type %s, " + "and contains attributes not covered here %r" + % ( + other, + cls, + set(other._cache_attrs).difference(cls._cache_attrs), + ) + ) + return cls + d + + @classmethod + def from_execution_options( + cls, key, attrs, exec_options, statement_exec_options + ): + """process Options argument in terms of execution options. 
+ + + e.g.:: + + ( + load_options, + execution_options, + ) = QueryContext.default_load_options.from_execution_options( + "_sa_orm_load_options", + { + "populate_existing", + "autoflush", + "yield_per" + }, + execution_options, + statement._execution_options, + ) + + get back the Options and refresh "_sa_orm_load_options" in the + exec options dict w/ the Options as well + + """ + + # common case is that no options we are looking for are + # in either dictionary, so cancel for that first + check_argnames = attrs.intersection( + set(exec_options).union(statement_exec_options) + ) + + existing_options = exec_options.get(key, cls) + + if check_argnames: + result = {} + for argname in check_argnames: + local = "_" + argname + if argname in exec_options: + result[local] = exec_options[argname] + elif argname in statement_exec_options: + result[local] = statement_exec_options[argname] + + new_options = existing_options + result + exec_options = util.immutabledict().merge_with( + exec_options, {key: new_options} + ) + return new_options, exec_options + + else: + return existing_options, exec_options + + if TYPE_CHECKING: + + def __getattr__(self, key: str) -> Any: ... + + def __setattr__(self, key: str, value: Any) -> None: ... + + def __delattr__(self, key: str) -> None: ... + + +class CacheableOptions(Options, HasCacheKey): + __slots__ = () + + @hybridmethod + def _gen_cache_key_inst(self, anon_map, bindparams): + return HasCacheKey._gen_cache_key(self, anon_map, bindparams) + + @_gen_cache_key_inst.classlevel + def _gen_cache_key(cls, anon_map, bindparams): + return (cls, ()) + + @hybridmethod + def _generate_cache_key(self): + return HasCacheKey._generate_cache_key_for_object(self) + + +class ExecutableOption(HasCopyInternals): + __slots__ = () + + _annotations = util.EMPTY_DICT + + __visit_name__ = "executable_option" + + _is_has_cache_key = False + + _is_core = True + + def _clone(self, **kw): + """Create a shallow copy of this ExecutableOption.""" + c = self.__class__.__new__(self.__class__) + c.__dict__ = dict(self.__dict__) # type: ignore + return c + + +class Executable(roles.StatementRole): + """Mark a :class:`_expression.ClauseElement` as supporting execution. + + :class:`.Executable` is a superclass for all "statement" types + of objects, including :func:`select`, :func:`delete`, :func:`update`, + :func:`insert`, :func:`text`. + + """ + + supports_execution: bool = True + _execution_options: _ImmutableExecuteOptions = util.EMPTY_DICT + _is_default_generator = False + _with_options: Tuple[ExecutableOption, ...] = () + _with_context_options: Tuple[ + Tuple[Callable[[CompileState], None], Any], ... + ] = () + _compile_options: Optional[Union[Type[CacheableOptions], CacheableOptions]] + + _executable_traverse_internals = [ + ("_with_options", InternalTraversal.dp_executable_options), + ( + "_with_context_options", + ExtendedInternalTraversal.dp_with_context_options, + ), + ("_propagate_attrs", ExtendedInternalTraversal.dp_propagate_attrs), + ] + + is_select = False + is_from_statement = False + is_update = False + is_insert = False + is_text = False + is_delete = False + is_dml = False + + if TYPE_CHECKING: + __visit_name__: str + + def _compile_w_cache( + self, + dialect: Dialect, + *, + compiled_cache: Optional[CompiledCacheType], + column_keys: List[str], + for_executemany: bool = False, + schema_translate_map: Optional[SchemaTranslateMapType] = None, + **kw: Any, + ) -> Tuple[ + Compiled, Optional[Sequence[BindParameter[Any]]], CacheStats + ]: ... 
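+
+        # typing-only declarations: the concrete implementations of
+        # these compilation/execution hooks live on ClauseElement and
+        # related subclasses; they are declared here so that Executable
+        # is typed as something that can be compiled and executed.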
+ + def _execute_on_connection( + self, + connection: Connection, + distilled_params: _CoreMultiExecuteParams, + execution_options: CoreExecuteOptionsParameter, + ) -> CursorResult[Any]: ... + + def _execute_on_scalar( + self, + connection: Connection, + distilled_params: _CoreMultiExecuteParams, + execution_options: CoreExecuteOptionsParameter, + ) -> Any: ... + + @util.ro_non_memoized_property + def _all_selected_columns(self): + raise NotImplementedError() + + @property + def _effective_plugin_target(self) -> str: + return self.__visit_name__ + + @_generative + def options(self, *options: ExecutableOption) -> Self: + """Apply options to this statement. + + In the general sense, options are any kind of Python object + that can be interpreted by the SQL compiler for the statement. + These options can be consumed by specific dialects or specific kinds + of compilers. + + The most commonly known kind of option are the ORM level options + that apply "eager load" and other loading behaviors to an ORM + query. However, options can theoretically be used for many other + purposes. + + For background on specific kinds of options for specific kinds of + statements, refer to the documentation for those option objects. + + .. versionchanged:: 1.4 - added :meth:`.Executable.options` to + Core statement objects towards the goal of allowing unified + Core / ORM querying capabilities. + + .. seealso:: + + :ref:`loading_columns` - refers to options specific to the usage + of ORM queries + + :ref:`relationship_loader_options` - refers to options specific + to the usage of ORM queries + + """ + self._with_options += tuple( + coercions.expect(roles.ExecutableOptionRole, opt) + for opt in options + ) + return self + + @_generative + def _set_compile_options(self, compile_options: CacheableOptions) -> Self: + """Assign the compile options to a new value. + + :param compile_options: appropriate CacheableOptions structure + + """ + + self._compile_options = compile_options + return self + + @_generative + def _update_compile_options(self, options: CacheableOptions) -> Self: + """update the _compile_options with new keys.""" + + assert self._compile_options is not None + self._compile_options += options + return self + + @_generative + def _add_context_option( + self, + callable_: Callable[[CompileState], None], + cache_args: Any, + ) -> Self: + """Add a context option to this statement. + + These are callable functions that will + be given the CompileState object upon compilation. + + A second argument cache_args is required, which will be combined with + the ``__code__`` identity of the function itself in order to produce a + cache key. + + """ + self._with_context_options += ((callable_, cache_args),) + return self + + @overload + def execution_options( + self, + *, + compiled_cache: Optional[CompiledCacheType] = ..., + logging_token: str = ..., + isolation_level: IsolationLevel = ..., + no_parameters: bool = False, + stream_results: bool = False, + max_row_buffer: int = ..., + yield_per: int = ..., + insertmanyvalues_page_size: int = ..., + schema_translate_map: Optional[SchemaTranslateMapType] = ..., + populate_existing: bool = False, + autoflush: bool = False, + synchronize_session: SynchronizeSessionArgument = ..., + dml_strategy: DMLStrategyArgument = ..., + render_nulls: bool = ..., + is_delete_using: bool = ..., + is_update_from: bool = ..., + preserve_rowcount: bool = False, + **opt: Any, + ) -> Self: ... + + @overload + def execution_options(self, **opt: Any) -> Self: ... 
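+
+    # the first overload above enumerates well-known execution options
+    # for typing and IDE completion; the catch-all ``**opt`` overload
+    # keeps arbitrary user-defined option names accepted.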
+ + @_generative + def execution_options(self, **kw: Any) -> Self: + """Set non-SQL options for the statement which take effect during + execution. + + Execution options can be set at many scopes, including per-statement, + per-connection, or per execution, using methods such as + :meth:`_engine.Connection.execution_options` and parameters which + accept a dictionary of options such as + :paramref:`_engine.Connection.execute.execution_options` and + :paramref:`_orm.Session.execute.execution_options`. + + The primary characteristic of an execution option, as opposed to + other kinds of options such as ORM loader options, is that + **execution options never affect the compiled SQL of a query, only + things that affect how the SQL statement itself is invoked or how + results are fetched**. That is, execution options are not part of + what's accommodated by SQL compilation nor are they considered part of + the cached state of a statement. + + The :meth:`_sql.Executable.execution_options` method is + :term:`generative`, as + is the case for the method as applied to the :class:`_engine.Engine` + and :class:`_orm.Query` objects, which means when the method is called, + a copy of the object is returned, which applies the given parameters to + that new copy, but leaves the original unchanged:: + + statement = select(table.c.x, table.c.y) + new_statement = statement.execution_options(my_option=True) + + An exception to this behavior is the :class:`_engine.Connection` + object, where the :meth:`_engine.Connection.execution_options` method + is explicitly **not** generative. + + The kinds of options that may be passed to + :meth:`_sql.Executable.execution_options` and other related methods and + parameter dictionaries include parameters that are explicitly consumed + by SQLAlchemy Core or ORM, as well as arbitrary keyword arguments not + defined by SQLAlchemy, which means the methods and/or parameter + dictionaries may be used for user-defined parameters that interact with + custom code, which may access the parameters using methods such as + :meth:`_sql.Executable.get_execution_options` and + :meth:`_engine.Connection.get_execution_options`, or within selected + event hooks using a dedicated ``execution_options`` event parameter + such as + :paramref:`_events.ConnectionEvents.before_execute.execution_options` + or :attr:`_orm.ORMExecuteState.execution_options`, e.g.:: + + from sqlalchemy import event + + @event.listens_for(some_engine, "before_execute") + def _process_opt(conn, statement, multiparams, params, execution_options): + "run a SQL function before invoking a statement" + + if execution_options.get("do_special_thing", False): + conn.exec_driver_sql("run_special_function()") + + Within the scope of options that are explicitly recognized by + SQLAlchemy, most apply to specific classes of objects and not others. + The most common execution options include: + + * :paramref:`_engine.Connection.execution_options.isolation_level` - + sets the isolation level for a connection or a class of connections + via an :class:`_engine.Engine`. This option is accepted only + by :class:`_engine.Connection` or :class:`_engine.Engine`. 
+
+        * :paramref:`_engine.Connection.execution_options.stream_results` -
+          indicates results should be fetched using a server side cursor;
+          this option is accepted by :class:`_engine.Connection`, by the
+          :paramref:`_engine.Connection.execute.execution_options` parameter
+          on :meth:`_engine.Connection.execute`, and additionally by
+          :meth:`_sql.Executable.execution_options` on a SQL statement object,
+          as well as by ORM constructs like :meth:`_orm.Session.execute`.
+
+        * :paramref:`_engine.Connection.execution_options.compiled_cache` -
+          indicates a dictionary that will serve as the
+          :ref:`SQL compilation cache <sql_caching>`
+          for a :class:`_engine.Connection` or :class:`_engine.Engine`, as
+          well as for ORM methods like :meth:`_orm.Session.execute`.
+          Can be passed as ``None`` to disable caching for statements.
+          This option is not accepted by
+          :meth:`_sql.Executable.execution_options` as it is inadvisable to
+          carry along a compilation cache within a statement object.
+
+        * :paramref:`_engine.Connection.execution_options.schema_translate_map`
+          - a mapping of schema names used by the
+          :ref:`Schema Translate Map <schema_translating>` feature, accepted
+          by :class:`_engine.Connection`, :class:`_engine.Engine`,
+          :class:`_sql.Executable`, as well as by ORM constructs
+          like :meth:`_orm.Session.execute`.
+
+        .. seealso::
+
+            :meth:`_engine.Connection.execution_options`
+
+            :paramref:`_engine.Connection.execute.execution_options`
+
+            :paramref:`_orm.Session.execute.execution_options`
+
+            :ref:`orm_queryguide_execution_options` - documentation on all
+            ORM-specific execution options
+
+        """  # noqa: E501
+        if "isolation_level" in kw:
+            raise exc.ArgumentError(
+                "'isolation_level' execution option may only be specified "
+                "on Connection.execution_options(), or "
+                "per-engine using the isolation_level "
+                "argument to create_engine()."
+            )
+        if "compiled_cache" in kw:
+            raise exc.ArgumentError(
+                "'compiled_cache' execution option may only be specified "
+                "on Connection.execution_options(), not per statement."
+            )
+        self._execution_options = self._execution_options.union(kw)
+        return self
+
+    def get_execution_options(self) -> _ExecuteOptions:
+        """Get the non-SQL options which will take effect during execution.
+
+        .. versionadded:: 1.3
+
+        .. seealso::
+
+            :meth:`.Executable.execution_options`
+        """
+        return self._execution_options
+
+
+class SchemaEventTarget(event.EventTarget):
+    """Base class for elements that are the targets of :class:`.DDLEvents`
+    events.
+
+    This includes :class:`.SchemaItem` as well as :class:`.SchemaType`.
+ + """ + + dispatch: dispatcher[SchemaEventTarget] + + def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None: + """Associate with this SchemaEvent's parent object.""" + + def _set_parent_with_dispatch( + self, parent: SchemaEventTarget, **kw: Any + ) -> None: + self.dispatch.before_parent_attach(self, parent) + self._set_parent(parent, **kw) + self.dispatch.after_parent_attach(self, parent) + + +class SchemaVisitor(ClauseVisitor): + """Define the visiting for ``SchemaItem`` objects.""" + + __traverse_options__ = {"schema_visitor": True} + + +class _SentinelDefaultCharacterization(Enum): + NONE = "none" + UNKNOWN = "unknown" + CLIENTSIDE = "clientside" + SENTINEL_DEFAULT = "sentinel_default" + SERVERSIDE = "serverside" + IDENTITY = "identity" + SEQUENCE = "sequence" + + +class _SentinelColumnCharacterization(NamedTuple): + columns: Optional[Sequence[Column[Any]]] = None + is_explicit: bool = False + is_autoinc: bool = False + default_characterization: _SentinelDefaultCharacterization = ( + _SentinelDefaultCharacterization.NONE + ) + + +_COLKEY = TypeVar("_COLKEY", Union[None, str], str) + +_COL_co = TypeVar("_COL_co", bound="ColumnElement[Any]", covariant=True) +_COL = TypeVar("_COL", bound="ColumnElement[Any]") + + +class _ColumnMetrics(Generic[_COL_co]): + __slots__ = ("column",) + + column: _COL_co + + def __init__( + self, collection: ColumnCollection[Any, _COL_co], col: _COL_co + ): + self.column = col + + # proxy_index being non-empty means it was initialized. + # so we need to update it + pi = collection._proxy_index + if pi: + for eps_col in col._expanded_proxy_set: + pi[eps_col].add(self) + + def get_expanded_proxy_set(self): + return self.column._expanded_proxy_set + + def dispose(self, collection): + pi = collection._proxy_index + if not pi: + return + for col in self.column._expanded_proxy_set: + colset = pi.get(col, None) + if colset: + colset.discard(self) + if colset is not None and not colset: + del pi[col] + + def embedded( + self, + target_set: Union[ + Set[ColumnElement[Any]], FrozenSet[ColumnElement[Any]] + ], + ) -> bool: + expanded_proxy_set = self.column._expanded_proxy_set + for t in target_set.difference(expanded_proxy_set): + if not expanded_proxy_set.intersection(_expand_cloned([t])): + return False + return True + + +class ColumnCollection(Generic[_COLKEY, _COL_co]): + """Collection of :class:`_expression.ColumnElement` instances, + typically for + :class:`_sql.FromClause` objects. + + The :class:`_sql.ColumnCollection` object is most commonly available + as the :attr:`_schema.Table.c` or :attr:`_schema.Table.columns` collection + on the :class:`_schema.Table` object, introduced at + :ref:`metadata_tables_and_columns`. + + The :class:`_expression.ColumnCollection` has both mapping- and sequence- + like behaviors. A :class:`_expression.ColumnCollection` usually stores + :class:`_schema.Column` objects, which are then accessible both via mapping + style access as well as attribute access style. 
+ + To access :class:`_schema.Column` objects using ordinary attribute-style + access, specify the name like any other object attribute, such as below + a column named ``employee_name`` is accessed:: + + >>> employee_table.c.employee_name + + To access columns that have names with special characters or spaces, + index-style access is used, such as below which illustrates a column named + ``employee ' payment`` is accessed:: + + >>> employee_table.c["employee ' payment"] + + As the :class:`_sql.ColumnCollection` object provides a Python dictionary + interface, common dictionary method names like + :meth:`_sql.ColumnCollection.keys`, :meth:`_sql.ColumnCollection.values`, + and :meth:`_sql.ColumnCollection.items` are available, which means that + database columns that are keyed under these names also need to use indexed + access:: + + >>> employee_table.c["values"] + + + The name for which a :class:`_schema.Column` would be present is normally + that of the :paramref:`_schema.Column.key` parameter. In some contexts, + such as a :class:`_sql.Select` object that uses a label style set + using the :meth:`_sql.Select.set_label_style` method, a column of a certain + key may instead be represented under a particular label name such + as ``tablename_columnname``:: + + >>> from sqlalchemy import select, column, table + >>> from sqlalchemy import LABEL_STYLE_TABLENAME_PLUS_COL + >>> t = table("t", column("c")) + >>> stmt = select(t).set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL) + >>> subq = stmt.subquery() + >>> subq.c.t_c + + + :class:`.ColumnCollection` also indexes the columns in order and allows + them to be accessible by their integer position:: + + >>> cc[0] + Column('x', Integer(), table=None) + >>> cc[1] + Column('y', Integer(), table=None) + + .. versionadded:: 1.4 :class:`_expression.ColumnCollection` + allows integer-based + index access to the collection. + + Iterating the collection yields the column expressions in order:: + + >>> list(cc) + [Column('x', Integer(), table=None), + Column('y', Integer(), table=None)] + + The base :class:`_expression.ColumnCollection` object can store + duplicates, which can + mean either two columns with the same key, in which case the column + returned by key access is **arbitrary**:: + + >>> x1, x2 = Column('x', Integer), Column('x', Integer) + >>> cc = ColumnCollection(columns=[(x1.name, x1), (x2.name, x2)]) + >>> list(cc) + [Column('x', Integer(), table=None), + Column('x', Integer(), table=None)] + >>> cc['x'] is x1 + False + >>> cc['x'] is x2 + True + + Or it can also mean the same column multiple times. These cases are + supported as :class:`_expression.ColumnCollection` + is used to represent the columns in + a SELECT statement which may include duplicates. + + A special subclass :class:`.DedupeColumnCollection` exists which instead + maintains SQLAlchemy's older behavior of not allowing duplicates; this + collection is used for schema level objects like :class:`_schema.Table` + and + :class:`.PrimaryKeyConstraint` where this deduping is helpful. The + :class:`.DedupeColumnCollection` class also has additional mutation methods + as the schema constructs have more use cases that require removal and + replacement of columns. + + .. versionchanged:: 1.4 :class:`_expression.ColumnCollection` + now stores duplicate + column keys as well as the same column in multiple positions. 
The + :class:`.DedupeColumnCollection` class is added to maintain the + former behavior in those cases where deduplication as well as + additional replace/remove operations are needed. + + + """ + + __slots__ = "_collection", "_index", "_colset", "_proxy_index" + + _collection: List[Tuple[_COLKEY, _COL_co, _ColumnMetrics[_COL_co]]] + _index: Dict[Union[None, str, int], Tuple[_COLKEY, _COL_co]] + _proxy_index: Dict[ColumnElement[Any], Set[_ColumnMetrics[_COL_co]]] + _colset: Set[_COL_co] + + def __init__( + self, columns: Optional[Iterable[Tuple[_COLKEY, _COL_co]]] = None + ): + object.__setattr__(self, "_colset", set()) + object.__setattr__(self, "_index", {}) + object.__setattr__( + self, "_proxy_index", collections.defaultdict(util.OrderedSet) + ) + object.__setattr__(self, "_collection", []) + if columns: + self._initial_populate(columns) + + @util.preload_module("sqlalchemy.sql.elements") + def __clause_element__(self) -> ClauseList: + elements = util.preloaded.sql_elements + + return elements.ClauseList( + _literal_as_text_role=roles.ColumnsClauseRole, + group=False, + *self._all_columns, + ) + + def _initial_populate( + self, iter_: Iterable[Tuple[_COLKEY, _COL_co]] + ) -> None: + self._populate_separate_keys(iter_) + + @property + def _all_columns(self) -> List[_COL_co]: + return [col for (_, col, _) in self._collection] + + def keys(self) -> List[_COLKEY]: + """Return a sequence of string key names for all columns in this + collection.""" + return [k for (k, _, _) in self._collection] + + def values(self) -> List[_COL_co]: + """Return a sequence of :class:`_sql.ColumnClause` or + :class:`_schema.Column` objects for all columns in this + collection.""" + return [col for (_, col, _) in self._collection] + + def items(self) -> List[Tuple[_COLKEY, _COL_co]]: + """Return a sequence of (key, column) tuples for all columns in this + collection each consisting of a string key name and a + :class:`_sql.ColumnClause` or + :class:`_schema.Column` object. + """ + + return [(k, col) for (k, col, _) in self._collection] + + def __bool__(self) -> bool: + return bool(self._collection) + + def __len__(self) -> int: + return len(self._collection) + + def __iter__(self) -> Iterator[_COL_co]: + # turn to a list first to maintain over a course of changes + return iter([col for _, col, _ in self._collection]) + + @overload + def __getitem__(self, key: Union[str, int]) -> _COL_co: ... + + @overload + def __getitem__( + self, key: Tuple[Union[str, int], ...] + ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: ... + + @overload + def __getitem__( + self, key: slice + ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: ... 
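+
+    # per the overloads above: a single string or integer key returns
+    # one column; a tuple of keys or a slice returns a read-only
+    # ColumnCollection of the selected columns.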
+ + def __getitem__( + self, key: Union[str, int, slice, Tuple[Union[str, int], ...]] + ) -> Union[ReadOnlyColumnCollection[_COLKEY, _COL_co], _COL_co]: + try: + if isinstance(key, (tuple, slice)): + if isinstance(key, slice): + cols = ( + (sub_key, col) + for (sub_key, col, _) in self._collection[key] + ) + else: + cols = (self._index[sub_key] for sub_key in key) + + return ColumnCollection(cols).as_readonly() + else: + return self._index[key][1] + except KeyError as err: + if isinstance(err.args[0], int): + raise IndexError(err.args[0]) from err + else: + raise + + def __getattr__(self, key: str) -> _COL_co: + try: + return self._index[key][1] + except KeyError as err: + raise AttributeError(key) from err + + def __contains__(self, key: str) -> bool: + if key not in self._index: + if not isinstance(key, str): + raise exc.ArgumentError( + "__contains__ requires a string argument" + ) + return False + else: + return True + + def compare(self, other: ColumnCollection[Any, Any]) -> bool: + """Compare this :class:`_expression.ColumnCollection` to another + based on the names of the keys""" + + for l, r in zip_longest(self, other): + if l is not r: + return False + else: + return True + + def __eq__(self, other: Any) -> bool: + return self.compare(other) + + @overload + def get(self, key: str, default: None = None) -> Optional[_COL_co]: ... + + @overload + def get(self, key: str, default: _COL) -> Union[_COL_co, _COL]: ... + + def get( + self, key: str, default: Optional[_COL] = None + ) -> Optional[Union[_COL_co, _COL]]: + """Get a :class:`_sql.ColumnClause` or :class:`_schema.Column` object + based on a string key name from this + :class:`_expression.ColumnCollection`.""" + + if key in self._index: + return self._index[key][1] + else: + return default + + def __str__(self) -> str: + return "%s(%s)" % ( + self.__class__.__name__, + ", ".join(str(c) for c in self), + ) + + def __setitem__(self, key: str, value: Any) -> NoReturn: + raise NotImplementedError() + + def __delitem__(self, key: str) -> NoReturn: + raise NotImplementedError() + + def __setattr__(self, key: str, obj: Any) -> NoReturn: + raise NotImplementedError() + + def clear(self) -> NoReturn: + """Dictionary clear() is not implemented for + :class:`_sql.ColumnCollection`.""" + raise NotImplementedError() + + def remove(self, column: Any) -> None: + raise NotImplementedError() + + def update(self, iter_: Any) -> NoReturn: + """Dictionary update() is not implemented for + :class:`_sql.ColumnCollection`.""" + raise NotImplementedError() + + # https://github.com/python/mypy/issues/4266 + __hash__ = None # type: ignore + + def _populate_separate_keys( + self, iter_: Iterable[Tuple[_COLKEY, _COL_co]] + ) -> None: + """populate from an iterator of (key, column)""" + + self._collection[:] = collection = [ + (k, c, _ColumnMetrics(self, c)) for k, c in iter_ + ] + self._colset.update(c._deannotate() for _, c, _ in collection) + self._index.update( + {idx: (k, c) for idx, (k, c, _) in enumerate(collection)} + ) + self._index.update({k: (k, col) for k, col, _ in reversed(collection)}) + + def add( + self, column: ColumnElement[Any], key: Optional[_COLKEY] = None + ) -> None: + """Add a column to this :class:`_sql.ColumnCollection`. + + .. note:: + + This method is **not normally used by user-facing code**, as the + :class:`_sql.ColumnCollection` is usually part of an existing + object such as a :class:`_schema.Table`. 
To add a + :class:`_schema.Column` to an existing :class:`_schema.Table` + object, use the :meth:`_schema.Table.append_column` method. + + """ + colkey: _COLKEY + + if key is None: + colkey = column.key # type: ignore + else: + colkey = key + + l = len(self._collection) + + # don't really know how this part is supposed to work w/ the + # covariant thing + + _column = cast(_COL_co, column) + + self._collection.append( + (colkey, _column, _ColumnMetrics(self, _column)) + ) + self._colset.add(_column._deannotate()) + self._index[l] = (colkey, _column) + if colkey not in self._index: + self._index[colkey] = (colkey, _column) + + def __getstate__(self) -> Dict[str, Any]: + return { + "_collection": [(k, c) for k, c, _ in self._collection], + "_index": self._index, + } + + def __setstate__(self, state: Dict[str, Any]) -> None: + object.__setattr__(self, "_index", state["_index"]) + object.__setattr__( + self, "_proxy_index", collections.defaultdict(util.OrderedSet) + ) + object.__setattr__( + self, + "_collection", + [ + (k, c, _ColumnMetrics(self, c)) + for (k, c) in state["_collection"] + ], + ) + object.__setattr__( + self, "_colset", {col for k, col, _ in self._collection} + ) + + def contains_column(self, col: ColumnElement[Any]) -> bool: + """Checks if a column object exists in this collection""" + if col not in self._colset: + if isinstance(col, str): + raise exc.ArgumentError( + "contains_column cannot be used with string arguments. " + "Use ``col_name in table.c`` instead." + ) + return False + else: + return True + + def as_readonly(self) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: + """Return a "read only" form of this + :class:`_sql.ColumnCollection`.""" + + return ReadOnlyColumnCollection(self) + + def _init_proxy_index(self): + """populate the "proxy index", if empty. + + proxy index is added in 2.0 to provide more efficient operation + for the corresponding_column() method. + + For reasons of both time to construct new .c collections as well as + memory conservation for large numbers of large .c collections, the + proxy_index is only filled if corresponding_column() is called. once + filled it stays that way, and new _ColumnMetrics objects created after + that point will populate it with new data. Note this case would be + unusual, if not nonexistent, as it means a .c collection is being + mutated after corresponding_column() were used, however it is tested in + test/base/test_utils.py. + + """ + pi = self._proxy_index + if pi: + return + + for _, _, metrics in self._collection: + eps = metrics.column._expanded_proxy_set + + for eps_col in eps: + pi[eps_col].add(metrics) + + def corresponding_column( + self, column: _COL, require_embedded: bool = False + ) -> Optional[Union[_COL, _COL_co]]: + """Given a :class:`_expression.ColumnElement`, return the exported + :class:`_expression.ColumnElement` object from this + :class:`_expression.ColumnCollection` + which corresponds to that original :class:`_expression.ColumnElement` + via a common + ancestor column. + + :param column: the target :class:`_expression.ColumnElement` + to be matched. + + :param require_embedded: only return corresponding columns for + the given :class:`_expression.ColumnElement`, if the given + :class:`_expression.ColumnElement` + is actually present within a sub-element + of this :class:`_expression.Selectable`. + Normally the column will match if + it merely shares a common ancestor with one of the exported + columns of this :class:`_expression.Selectable`. + + .. 
seealso::
+
+            :meth:`_expression.Selectable.corresponding_column`
+            - invokes this method
+            against the collection returned by
+            :attr:`_expression.Selectable.exported_columns`.
+
+        .. versionchanged:: 1.4 the implementation for ``corresponding_column``
+           was moved onto the :class:`_expression.ColumnCollection` itself.
+
+        """
+        # TODO: cython candidate
+
+        # don't dig around if the column is locally present
+        if column in self._colset:
+            return column
+
+        selected_intersection, selected_metrics = None, None
+        target_set = column.proxy_set
+
+        pi = self._proxy_index
+        if not pi:
+            self._init_proxy_index()
+
+        for current_metrics in (
+            mm for ts in target_set if ts in pi for mm in pi[ts]
+        ):
+            if not require_embedded or current_metrics.embedded(target_set):
+                if selected_metrics is None:
+                    # no corresponding column yet, pick this one.
+                    selected_metrics = current_metrics
+                    continue
+
+                current_intersection = target_set.intersection(
+                    current_metrics.column._expanded_proxy_set
+                )
+                if selected_intersection is None:
+                    selected_intersection = target_set.intersection(
+                        selected_metrics.column._expanded_proxy_set
+                    )
+
+                if len(current_intersection) > len(selected_intersection):
+                    # 'current' has a larger field of correspondence than
+                    # 'selected'. i.e. selectable.c.a1_x->a1.c.x->table.c.x
+                    # matches a1.c.x->table.c.x better than
+                    # selectable.c.x->table.c.x does.
+
+                    selected_metrics = current_metrics
+                    selected_intersection = current_intersection
+                elif current_intersection == selected_intersection:
+                    # they have the same field of correspondence. see
+                    # which proxy_set has fewer columns in it, which
+                    # indicates a closer relationship with the root
+                    # column. Also take into account the "weight"
+                    # attribute which CompoundSelect() uses to give
+                    # higher precedence to columns based on vertical
+                    # position in the compound statement, and discard
+                    # columns that have no reference to the target
+                    # column (also occurs with CompoundSelect)
+
+                    selected_col_distance = sum(
+                        [
+                            sc._annotations.get("weight", 1)
+                            for sc in (
+                                selected_metrics.column._uncached_proxy_list()
+                            )
+                            if sc.shares_lineage(column)
+                        ],
+                    )
+                    current_col_distance = sum(
+                        [
+                            sc._annotations.get("weight", 1)
+                            for sc in (
+                                current_metrics.column._uncached_proxy_list()
+                            )
+                            if sc.shares_lineage(column)
+                        ],
+                    )
+                    if current_col_distance < selected_col_distance:
+                        selected_metrics = current_metrics
+                        selected_intersection = current_intersection
+
+        return selected_metrics.column if selected_metrics else None
+
+
+_NAMEDCOL = TypeVar("_NAMEDCOL", bound="NamedColumn[Any]")
+
+
+class DedupeColumnCollection(ColumnCollection[str, _NAMEDCOL]):
+    """A :class:`_expression.ColumnCollection`
+    that maintains deduplicating behavior.
+
+    This is used by schema-level objects such as :class:`_schema.Table` and
+    :class:`.PrimaryKeyConstraint`. The collection includes more
+    sophisticated mutator methods as well to suit schema objects which
+    require mutable column collections.
+
+    .. versionadded:: 1.4
+
+    """
+
+    def add(  # type: ignore[override]
+        self, column: _NAMEDCOL, key: Optional[str] = None
+    ) -> None:
+        if key is not None and column.key != key:
+            raise exc.ArgumentError(
+                "DedupeColumnCollection requires columns to be under "
+                "the same key as their .key"
+            )
+        key = column.key
+
+        if key is None:
+            raise exc.ArgumentError(
+                "Can't add unnamed column to column collection"
+            )
+
+        if key in self._index:
+            existing = self._index[key][1]
+
+            if existing is column:
+                return
+
+            self.replace(column)
+
+            # pop out memoized proxy_set as this
+            # operation may very well be occurring
+            # in a _make_proxy operation
+            util.memoized_property.reset(column, "proxy_set")
+        else:
+            self._append_new_column(key, column)
+
+    def _append_new_column(self, key: str, named_column: _NAMEDCOL) -> None:
+        l = len(self._collection)
+        self._collection.append(
+            (key, named_column, _ColumnMetrics(self, named_column))
+        )
+        self._colset.add(named_column._deannotate())
+        self._index[l] = (key, named_column)
+        self._index[key] = (key, named_column)
+
+    def _populate_separate_keys(
+        self, iter_: Iterable[Tuple[str, _NAMEDCOL]]
+    ) -> None:
+        """populate from an iterator of (key, column)"""
+        cols = list(iter_)
+
+        replace_col = []
+        for k, col in cols:
+            if col.key != k:
+                raise exc.ArgumentError(
+                    "DedupeColumnCollection requires columns to be under "
+                    "the same key as their .key"
+                )
+            if col.name in self._index and col.key != col.name:
+                replace_col.append(col)
+            elif col.key in self._index:
+                replace_col.append(col)
+            else:
+                self._index[k] = (k, col)
+                self._collection.append((k, col, _ColumnMetrics(self, col)))
+        self._colset.update(c._deannotate() for (k, c, _) in self._collection)
+
+        self._index.update(
+            (idx, (k, c)) for idx, (k, c, _) in enumerate(self._collection)
+        )
+        for col in replace_col:
+            self.replace(col)
+
+    def extend(self, iter_: Iterable[_NAMEDCOL]) -> None:
+        self._populate_separate_keys((col.key, col) for col in iter_)
+
+    def remove(self, column: _NAMEDCOL) -> None:
+        if column not in self._colset:
+            raise ValueError(
+                "Can't remove column %r; column is not in this collection"
+                % column
+            )
+        del self._index[column.key]
+        self._colset.remove(column)
+        self._collection[:] = [
+            (k, c, metrics)
+            for (k, c, metrics) in self._collection
+            if c is not column
+        ]
+        for metrics in self._proxy_index.get(column, ()):
+            metrics.dispose(self)
+
+        self._index.update(
+            {idx: (k, col) for idx, (k, col, _) in enumerate(self._collection)}
+        )
+        # delete higher index
+        del self._index[len(self._collection)]
+
+    def replace(
+        self,
+        column: _NAMEDCOL,
+        extra_remove: Optional[Iterable[_NAMEDCOL]] = None,
+    ) -> None:
+        """add the given column to this collection, removing unaliased
+        versions of this column as well as existing columns with the
+        same key.
+
+        e.g.::
+
+            t = Table('sometable', metadata, Column('col1', Integer))
+            t.columns.replace(Column('col1', Integer, key='columnone'))
+
+        will remove the original 'col1' from the collection, and add
+        the new column under the name 'columnone'.
+
+        Used by schema.Column to override columns during table reflection.
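+
+        :param extra_remove: an optional iterable of additional columns
+         to be removed from the collection at the same time; the
+         implementation below treats these the same way as columns
+         matched by name or key.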
+ + """ + + if extra_remove: + remove_col = set(extra_remove) + else: + remove_col = set() + # remove up to two columns based on matches of name as well as key + if column.name in self._index and column.key != column.name: + other = self._index[column.name][1] + if other.name == other.key: + remove_col.add(other) + + if column.key in self._index: + remove_col.add(self._index[column.key][1]) + + if not remove_col: + self._append_new_column(column.key, column) + return + new_cols: List[Tuple[str, _NAMEDCOL, _ColumnMetrics[_NAMEDCOL]]] = [] + replaced = False + for k, col, metrics in self._collection: + if col in remove_col: + if not replaced: + replaced = True + new_cols.append( + (column.key, column, _ColumnMetrics(self, column)) + ) + else: + new_cols.append((k, col, metrics)) + + if remove_col: + self._colset.difference_update(remove_col) + + for rc in remove_col: + for metrics in self._proxy_index.get(rc, ()): + metrics.dispose(self) + + if not replaced: + new_cols.append((column.key, column, _ColumnMetrics(self, column))) + + self._colset.add(column._deannotate()) + self._collection[:] = new_cols + + self._index.clear() + + self._index.update( + {idx: (k, col) for idx, (k, col, _) in enumerate(self._collection)} + ) + self._index.update({k: (k, col) for (k, col, _) in self._collection}) + + +class ReadOnlyColumnCollection( + util.ReadOnlyContainer, ColumnCollection[_COLKEY, _COL_co] +): + __slots__ = ("_parent",) + + def __init__(self, collection): + object.__setattr__(self, "_parent", collection) + object.__setattr__(self, "_colset", collection._colset) + object.__setattr__(self, "_index", collection._index) + object.__setattr__(self, "_collection", collection._collection) + object.__setattr__(self, "_proxy_index", collection._proxy_index) + + def __getstate__(self): + return {"_parent": self._parent} + + def __setstate__(self, state): + parent = state["_parent"] + self.__init__(parent) # type: ignore + + def add(self, column: Any, key: Any = ...) -> Any: + self._readonly() + + def extend(self, elements: Any) -> NoReturn: + self._readonly() + + def remove(self, item: Any) -> NoReturn: + self._readonly() + + +class ColumnSet(util.OrderedSet["ColumnClause[Any]"]): + def contains_column(self, col): + return col in self + + def extend(self, cols): + for col in cols: + self.add(col) + + def __eq__(self, other): + l = [] + for c in other: + for local in self: + if c.shares_lineage(local): + l.append(c == local) + return elements.and_(*l) + + def __hash__(self): # type: ignore[override] + return hash(tuple(x for x in self)) + + +def _entity_namespace( + entity: Union[_HasEntityNamespace, ExternallyTraversible] +) -> _EntityNamespace: + """Return the nearest .entity_namespace for the given entity. + + If not immediately available, does an iterate to find a sub-element + that has one, if any. + + """ + try: + return cast(_HasEntityNamespace, entity).entity_namespace + except AttributeError: + for elem in visitors.iterate(cast(ExternallyTraversible, entity)): + if _is_has_entity_namespace(elem): + return elem.entity_namespace + else: + raise + + +def _entity_namespace_key( + entity: Union[_HasEntityNamespace, ExternallyTraversible], + key: str, + default: Union[SQLCoreOperations[Any], _NoArg] = NO_ARG, +) -> SQLCoreOperations[Any]: + """Return an entry from an entity_namespace. + + + Raises :class:`_exc.InvalidRequestError` rather than attribute error + on not found. 
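+
+    If ``default`` is provided, it is returned for a missing attribute
+    rather than raising.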
+ + """ + + try: + ns = _entity_namespace(entity) + if default is not NO_ARG: + return getattr(ns, key, default) + else: + return getattr(ns, key) # type: ignore + except AttributeError as err: + raise exc.InvalidRequestError( + 'Entity namespace for "%s" has no property "%s"' % (entity, key) + ) from err diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/cache_key.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/cache_key.py new file mode 100644 index 00000000..1172d3c9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/cache_key.py @@ -0,0 +1,1057 @@ +# sql/cache_key.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import enum +from itertools import zip_longest +import typing +from typing import Any +from typing import Callable +from typing import Dict +from typing import Iterable +from typing import Iterator +from typing import List +from typing import MutableMapping +from typing import NamedTuple +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Union + +from .visitors import anon_map +from .visitors import HasTraversalDispatch +from .visitors import HasTraverseInternals +from .visitors import InternalTraversal +from .visitors import prefix_anon_map +from .. import util +from ..inspection import inspect +from ..util import HasMemoized +from ..util.typing import Literal +from ..util.typing import Protocol + +if typing.TYPE_CHECKING: + from .elements import BindParameter + from .elements import ClauseElement + from .elements import ColumnElement + from .visitors import _TraverseInternalsType + from ..engine.interfaces import _CoreSingleExecuteParams + + +class _CacheKeyTraversalDispatchType(Protocol): + def __call__( + s, self: HasCacheKey, visitor: _CacheKeyTraversal + ) -> _CacheKeyTraversalDispatchTypeReturn: ... + + +class CacheConst(enum.Enum): + NO_CACHE = 0 + + +NO_CACHE = CacheConst.NO_CACHE + + +_CacheKeyTraversalType = Union[ + "_TraverseInternalsType", Literal[CacheConst.NO_CACHE], Literal[None] +] + + +class CacheTraverseTarget(enum.Enum): + CACHE_IN_PLACE = 0 + CALL_GEN_CACHE_KEY = 1 + STATIC_CACHE_KEY = 2 + PROPAGATE_ATTRS = 3 + ANON_NAME = 4 + + +( + CACHE_IN_PLACE, + CALL_GEN_CACHE_KEY, + STATIC_CACHE_KEY, + PROPAGATE_ATTRS, + ANON_NAME, +) = tuple(CacheTraverseTarget) + +_CacheKeyTraversalDispatchTypeReturn = Sequence[ + Tuple[ + str, + Any, + Union[ + Callable[..., Tuple[Any, ...]], + CacheTraverseTarget, + InternalTraversal, + ], + ] +] + + +class HasCacheKey: + """Mixin for objects which can produce a cache key. + + This class is usually in a hierarchy that starts with the + :class:`.HasTraverseInternals` base, but this is optional. Currently, + the class should be able to work on its own without including + :class:`.HasTraverseInternals`. + + .. seealso:: + + :class:`.CacheKey` + + :ref:`sql_caching` + + """ + + __slots__ = () + + _cache_key_traversal: _CacheKeyTraversalType = NO_CACHE + + _is_has_cache_key = True + + _hierarchy_supports_caching = True + """private attribute which may be set to False to prevent the + inherit_cache warning from being emitted for a hierarchy of subclasses. + + Currently applies to the :class:`.ExecutableDDLElement` hierarchy which + does not implement caching. 
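+
+    As ``_generate_cache_attrs()`` below shows, the warning is emitted only
+    when ``inherit_cache`` is left unset and this flag is still ``True``.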
+
+    """
+
+    inherit_cache: Optional[bool] = None
+    """Indicate if this :class:`.HasCacheKey` instance should make use of the
+    cache key generation scheme used by its immediate superclass.
+
+    The attribute defaults to ``None``, which indicates that a construct has
+    not yet taken into account whether or not it's appropriate for it to
+    participate in caching; this is functionally equivalent to setting the
+    value to ``False``, except that a warning is also emitted.
+
+    This flag can be set to ``True`` on a particular class, if the SQL that
+    corresponds to the object does not change based on attributes which
+    are local to this class, and not its superclass.
+
+    .. seealso::
+
+        :ref:`compilerext_caching` - General guidelines for setting the
+        :attr:`.HasCacheKey.inherit_cache` attribute for third-party or user
+        defined SQL constructs.
+
+    """
+
+    __slots__ = ()
+
+    _generated_cache_key_traversal: Any
+
+    @classmethod
+    def _generate_cache_attrs(
+        cls,
+    ) -> Union[_CacheKeyTraversalDispatchType, Literal[CacheConst.NO_CACHE]]:
+        """generate cache key dispatcher for a new class.
+
+        This sets the _generated_cache_key_traversal attribute once called
+        so should only be called once per class.
+
+        """
+        inherit_cache = cls.__dict__.get("inherit_cache", None)
+        inherit = bool(inherit_cache)
+
+        if inherit:
+            _cache_key_traversal = getattr(cls, "_cache_key_traversal", None)
+            if _cache_key_traversal is None:
+                try:
+                    assert issubclass(cls, HasTraverseInternals)
+                    _cache_key_traversal = cls._traverse_internals
+                except AttributeError:
+                    cls._generated_cache_key_traversal = NO_CACHE
+                    return NO_CACHE
+
+            assert _cache_key_traversal is not NO_CACHE, (
+                f"class {cls} has _cache_key_traversal=NO_CACHE, "
+                "which conflicts with inherit_cache=True"
+            )
+
+            # TODO: wouldn't we instead get this from our superclass?
+            # also, our superclass may not have this yet, but in any case,
+            # we'd generate for the superclass that has it. this is a little
+            # more complicated, so for the moment this is a little less
+            # efficient on startup but simpler.
+            return _cache_key_traversal_visitor.generate_dispatch(
+                cls,
+                _cache_key_traversal,
+                "_generated_cache_key_traversal",
+            )
+        else:
+            _cache_key_traversal = cls.__dict__.get(
+                "_cache_key_traversal", None
+            )
+            if _cache_key_traversal is None:
+                _cache_key_traversal = cls.__dict__.get(
+                    "_traverse_internals", None
+                )
+                if _cache_key_traversal is None:
+                    cls._generated_cache_key_traversal = NO_CACHE
+                    if (
+                        inherit_cache is None
+                        and cls._hierarchy_supports_caching
+                    ):
+                        util.warn(
+                            "Class %s will not make use of SQL compilation "
+                            "caching as it does not set the 'inherit_cache' "
+                            "attribute to ``True``. This can have "
+                            "significant performance implications including "
+                            "some performance degradations in comparison to "
+                            "prior SQLAlchemy versions. Set this attribute "
+                            "to True if this object can make use of the cache "
+                            "key generated by the superclass. Alternatively, "
+                            "this attribute may be set to False which will "
+                            "disable this warning." % (cls.__name__),
+                            code="cprf",
+                        )
+                    return NO_CACHE
+
+            return _cache_key_traversal_visitor.generate_dispatch(
+                cls,
+                _cache_key_traversal,
+                "_generated_cache_key_traversal",
+            )
+
+    @util.preload_module("sqlalchemy.sql.elements")
+    def _gen_cache_key(
+        self, anon_map: anon_map, bindparams: List[BindParameter[Any]]
+    ) -> Optional[Tuple[Any, ...]]:
+        """return an optional cache key.
+ + The cache key is a tuple which can contain any series of + objects that are hashable and also identifies + this object uniquely within the presence of a larger SQL expression + or statement, for the purposes of caching the resulting query. + + The cache key should be based on the SQL compiled structure that would + ultimately be produced. That is, two structures that are composed in + exactly the same way should produce the same cache key; any difference + in the structures that would affect the SQL string or the type handlers + should result in a different cache key. + + If a structure cannot produce a useful cache key, the NO_CACHE + symbol should be added to the anon_map and the method should + return None. + + """ + + cls = self.__class__ + + id_, found = anon_map.get_anon(self) + if found: + return (id_, cls) + + dispatcher: Union[ + Literal[CacheConst.NO_CACHE], + _CacheKeyTraversalDispatchType, + ] + + try: + dispatcher = cls.__dict__["_generated_cache_key_traversal"] + except KeyError: + # traversals.py -> _preconfigure_traversals() + # may be used to run these ahead of time, but + # is not enabled right now. + # this block will generate any remaining dispatchers. + dispatcher = cls._generate_cache_attrs() + + if dispatcher is NO_CACHE: + anon_map[NO_CACHE] = True + return None + + result: Tuple[Any, ...] = (id_, cls) + + # inline of _cache_key_traversal_visitor.run_generated_dispatch() + + for attrname, obj, meth in dispatcher( + self, _cache_key_traversal_visitor + ): + if obj is not None: + # TODO: see if C code can help here as Python lacks an + # efficient switch construct + + if meth is STATIC_CACHE_KEY: + sck = obj._static_cache_key + if sck is NO_CACHE: + anon_map[NO_CACHE] = True + return None + result += (attrname, sck) + elif meth is ANON_NAME: + elements = util.preloaded.sql_elements + if isinstance(obj, elements._anonymous_label): + obj = obj.apply_map(anon_map) # type: ignore + result += (attrname, obj) + elif meth is CALL_GEN_CACHE_KEY: + result += ( + attrname, + obj._gen_cache_key(anon_map, bindparams), + ) + + # remaining cache functions are against + # Python tuples, dicts, lists, etc. so we can skip + # if they are empty + elif obj: + if meth is CACHE_IN_PLACE: + result += (attrname, obj) + elif meth is PROPAGATE_ATTRS: + result += ( + attrname, + obj["compile_state_plugin"], + ( + obj["plugin_subject"]._gen_cache_key( + anon_map, bindparams + ) + if obj["plugin_subject"] + else None + ), + ) + elif meth is InternalTraversal.dp_annotations_key: + # obj is here is the _annotations dict. Table uses + # a memoized version of it. however in other cases, + # we generate it given anon_map as we may be from a + # Join, Aliased, etc. + # see #8790 + + if self._gen_static_annotations_cache_key: # type: ignore # noqa: E501 + result += self._annotations_cache_key # type: ignore # noqa: E501 + else: + result += self._gen_annotations_cache_key(anon_map) # type: ignore # noqa: E501 + + elif ( + meth is InternalTraversal.dp_clauseelement_list + or meth is InternalTraversal.dp_clauseelement_tuple + or meth + is InternalTraversal.dp_memoized_select_entities + ): + result += ( + attrname, + tuple( + [ + elem._gen_cache_key(anon_map, bindparams) + for elem in obj + ] + ), + ) + else: + result += meth( # type: ignore + attrname, obj, self, anon_map, bindparams + ) + return result + + def _generate_cache_key(self) -> Optional[CacheKey]: + """return a cache key. 
+ + The cache key is a tuple which can contain any series of + objects that are hashable and also identifies + this object uniquely within the presence of a larger SQL expression + or statement, for the purposes of caching the resulting query. + + The cache key should be based on the SQL compiled structure that would + ultimately be produced. That is, two structures that are composed in + exactly the same way should produce the same cache key; any difference + in the structures that would affect the SQL string or the type handlers + should result in a different cache key. + + The cache key returned by this method is an instance of + :class:`.CacheKey`, which consists of a tuple representing the + cache key, as well as a list of :class:`.BindParameter` objects + which are extracted from the expression. While two expressions + that produce identical cache key tuples will themselves generate + identical SQL strings, the list of :class:`.BindParameter` objects + indicates the bound values which may have different values in + each one; these bound parameters must be consulted in order to + execute the statement with the correct parameters. + + a :class:`_expression.ClauseElement` structure that does not implement + a :meth:`._gen_cache_key` method and does not implement a + :attr:`.traverse_internals` attribute will not be cacheable; when + such an element is embedded into a larger structure, this method + will return None, indicating no cache key is available. + + """ + + bindparams: List[BindParameter[Any]] = [] + + _anon_map = anon_map() + key = self._gen_cache_key(_anon_map, bindparams) + if NO_CACHE in _anon_map: + return None + else: + assert key is not None + return CacheKey(key, bindparams) + + @classmethod + def _generate_cache_key_for_object( + cls, obj: HasCacheKey + ) -> Optional[CacheKey]: + bindparams: List[BindParameter[Any]] = [] + + _anon_map = anon_map() + key = obj._gen_cache_key(_anon_map, bindparams) + if NO_CACHE in _anon_map: + return None + else: + assert key is not None + return CacheKey(key, bindparams) + + +class HasCacheKeyTraverse(HasTraverseInternals, HasCacheKey): + pass + + +class MemoizedHasCacheKey(HasCacheKey, HasMemoized): + __slots__ = () + + @HasMemoized.memoized_instancemethod + def _generate_cache_key(self) -> Optional[CacheKey]: + return HasCacheKey._generate_cache_key(self) + + +class SlotsMemoizedHasCacheKey(HasCacheKey, util.MemoizedSlots): + __slots__ = () + + def _memoized_method__generate_cache_key(self) -> Optional[CacheKey]: + return HasCacheKey._generate_cache_key(self) + + +class CacheKey(NamedTuple): + """The key used to identify a SQL statement construct in the + SQL compilation cache. + + .. seealso:: + + :ref:`sql_caching` + + """ + + key: Tuple[Any, ...] + bindparams: Sequence[BindParameter[Any]] + + # can't set __hash__ attribute because it interferes + # with namedtuple + # can't use "if not TYPE_CHECKING" because mypy rejects it + # inside of a NamedTuple + def __hash__(self) -> Optional[int]: # type: ignore + """CacheKey itself is not hashable - hash the .key portion""" + return None + + def to_offline_string( + self, + statement_cache: MutableMapping[Any, str], + statement: ClauseElement, + parameters: _CoreSingleExecuteParams, + ) -> str: + """Generate an "offline string" form of this :class:`.CacheKey` + + The "offline string" is basically the string SQL for the + statement plus a repr of the bound parameter values in series. 
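+        As the implementation below shows, the result is the ``repr()`` of
+        a ``(sql_string, parameter_tuple)`` pair, for example roughly
+        ``"('SELECT a FROM t WHERE a = ?', (5,))"`` (illustrative only).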
+ Whereas the :class:`.CacheKey` object is dependent on in-memory + identities in order to work as a cache key, the "offline" version + is suitable for a cache that will work for other processes as well. + + The given ``statement_cache`` is a dictionary-like object where the + string form of the statement itself will be cached. This dictionary + should be in a longer lived scope in order to reduce the time spent + stringifying statements. + + + """ + if self.key not in statement_cache: + statement_cache[self.key] = sql_str = str(statement) + else: + sql_str = statement_cache[self.key] + + if not self.bindparams: + param_tuple = tuple(parameters[key] for key in sorted(parameters)) + else: + param_tuple = tuple( + parameters.get(bindparam.key, bindparam.value) + for bindparam in self.bindparams + ) + + return repr((sql_str, param_tuple)) + + def __eq__(self, other: Any) -> bool: + return bool(self.key == other.key) + + def __ne__(self, other: Any) -> bool: + return not (self.key == other.key) + + @classmethod + def _diff_tuples(cls, left: CacheKey, right: CacheKey) -> str: + ck1 = CacheKey(left, []) + ck2 = CacheKey(right, []) + return ck1._diff(ck2) + + def _whats_different(self, other: CacheKey) -> Iterator[str]: + k1 = self.key + k2 = other.key + + stack: List[int] = [] + pickup_index = 0 + while True: + s1, s2 = k1, k2 + for idx in stack: + s1 = s1[idx] + s2 = s2[idx] + + for idx, (e1, e2) in enumerate(zip_longest(s1, s2)): + if idx < pickup_index: + continue + if e1 != e2: + if isinstance(e1, tuple) and isinstance(e2, tuple): + stack.append(idx) + break + else: + yield "key%s[%d]: %s != %s" % ( + "".join("[%d]" % id_ for id_ in stack), + idx, + e1, + e2, + ) + else: + pickup_index = stack.pop(-1) + break + + def _diff(self, other: CacheKey) -> str: + return ", ".join(self._whats_different(other)) + + def __str__(self) -> str: + stack: List[Union[Tuple[Any, ...], HasCacheKey]] = [self.key] + + output = [] + sentinel = object() + indent = -1 + while stack: + elem = stack.pop(0) + if elem is sentinel: + output.append((" " * (indent * 2)) + "),") + indent -= 1 + elif isinstance(elem, tuple): + if not elem: + output.append((" " * ((indent + 1) * 2)) + "()") + else: + indent += 1 + stack = list(elem) + [sentinel] + stack + output.append((" " * (indent * 2)) + "(") + else: + if isinstance(elem, HasCacheKey): + repr_ = "<%s object at %s>" % ( + type(elem).__name__, + hex(id(elem)), + ) + else: + repr_ = repr(elem) + output.append((" " * (indent * 2)) + " " + repr_ + ", ") + + return "CacheKey(key=%s)" % ("\n".join(output),) + + def _generate_param_dict(self) -> Dict[str, Any]: + """used for testing""" + + _anon_map = prefix_anon_map() + return {b.key % _anon_map: b.effective_value for b in self.bindparams} + + @util.preload_module("sqlalchemy.sql.elements") + def _apply_params_to_element( + self, original_cache_key: CacheKey, target_element: ColumnElement[Any] + ) -> ColumnElement[Any]: + if target_element._is_immutable or original_cache_key is self: + return target_element + + elements = util.preloaded.sql_elements + return elements._OverrideBinds( + target_element, self.bindparams, original_cache_key.bindparams + ) + + +def _ad_hoc_cache_key_from_args( + tokens: Tuple[Any, ...], + traverse_args: Iterable[Tuple[str, InternalTraversal]], + args: Iterable[Any], +) -> Tuple[Any, ...]: + """a quick cache key generator used by reflection.flexi_cache.""" + bindparams: List[BindParameter[Any]] = [] + + _anon_map = anon_map() + + tup = tokens + + for (attrname, sym), arg in zip(traverse_args, args): + 
key = sym.name + visit_key = key.replace("dp_", "visit_") + + if arg is None: + tup += (attrname, None) + continue + + meth = getattr(_cache_key_traversal_visitor, visit_key) + if meth is CACHE_IN_PLACE: + tup += (attrname, arg) + elif meth in ( + CALL_GEN_CACHE_KEY, + STATIC_CACHE_KEY, + ANON_NAME, + PROPAGATE_ATTRS, + ): + raise NotImplementedError( + f"Haven't implemented symbol {meth} for ad-hoc key from args" + ) + else: + tup += meth(attrname, arg, None, _anon_map, bindparams) + return tup + + +class _CacheKeyTraversal(HasTraversalDispatch): + # very common elements are inlined into the main _get_cache_key() method + # to produce a dramatic savings in Python function call overhead + + visit_has_cache_key = visit_clauseelement = CALL_GEN_CACHE_KEY + visit_clauseelement_list = InternalTraversal.dp_clauseelement_list + visit_annotations_key = InternalTraversal.dp_annotations_key + visit_clauseelement_tuple = InternalTraversal.dp_clauseelement_tuple + visit_memoized_select_entities = ( + InternalTraversal.dp_memoized_select_entities + ) + + visit_string = visit_boolean = visit_operator = visit_plain_obj = ( + CACHE_IN_PLACE + ) + visit_statement_hint_list = CACHE_IN_PLACE + visit_type = STATIC_CACHE_KEY + visit_anon_name = ANON_NAME + + visit_propagate_attrs = PROPAGATE_ATTRS + + def visit_with_context_options( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return tuple((fn.__code__, c_key) for fn, c_key in obj) + + def visit_inspectable( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return (attrname, inspect(obj)._gen_cache_key(anon_map, bindparams)) + + def visit_string_list( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return tuple(obj) + + def visit_multi( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return ( + attrname, + ( + obj._gen_cache_key(anon_map, bindparams) + if isinstance(obj, HasCacheKey) + else obj + ), + ) + + def visit_multi_list( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return ( + attrname, + tuple( + ( + elem._gen_cache_key(anon_map, bindparams) + if isinstance(elem, HasCacheKey) + else elem + ) + for elem in obj + ), + ) + + def visit_has_cache_key_tuples( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + return ( + attrname, + tuple( + tuple( + elem._gen_cache_key(anon_map, bindparams) + for elem in tup_elem + ) + for tup_elem in obj + ), + ) + + def visit_has_cache_key_list( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + return ( + attrname, + tuple(elem._gen_cache_key(anon_map, bindparams) for elem in obj), + ) + + def visit_executable_options( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + return ( + attrname, + tuple( + elem._gen_cache_key(anon_map, bindparams) + for elem in obj + if elem._is_has_cache_key + ), + ) + + def visit_inspectable_list( 
+ self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return self.visit_has_cache_key_list( + attrname, [inspect(o) for o in obj], parent, anon_map, bindparams + ) + + def visit_clauseelement_tuples( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return self.visit_has_cache_key_tuples( + attrname, obj, parent, anon_map, bindparams + ) + + def visit_fromclause_ordered_set( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + return ( + attrname, + tuple([elem._gen_cache_key(anon_map, bindparams) for elem in obj]), + ) + + def visit_clauseelement_unordered_set( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + cache_keys = [ + elem._gen_cache_key(anon_map, bindparams) for elem in obj + ] + return ( + attrname, + tuple( + sorted(cache_keys) + ), # cache keys all start with (id_, class) + ) + + def visit_named_ddl_element( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return (attrname, obj.name) + + def visit_prefix_sequence( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + + return ( + attrname, + tuple( + [ + (clause._gen_cache_key(anon_map, bindparams), strval) + for clause, strval in obj + ] + ), + ) + + def visit_setup_join_tuple( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return tuple( + ( + target._gen_cache_key(anon_map, bindparams), + ( + onclause._gen_cache_key(anon_map, bindparams) + if onclause is not None + else None + ), + ( + from_._gen_cache_key(anon_map, bindparams) + if from_ is not None + else None + ), + tuple([(key, flags[key]) for key in sorted(flags)]), + ) + for (target, onclause, from_, flags) in obj + ) + + def visit_table_hint_list( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + + return ( + attrname, + tuple( + [ + ( + clause._gen_cache_key(anon_map, bindparams), + dialect_name, + text, + ) + for (clause, dialect_name), text in obj.items() + ] + ), + ) + + def visit_plain_dict( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return (attrname, tuple([(key, obj[key]) for key in sorted(obj)])) + + def visit_dialect_options( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return ( + attrname, + tuple( + ( + dialect_name, + tuple( + [ + (key, obj[dialect_name][key]) + for key in sorted(obj[dialect_name]) + ] + ), + ) + for dialect_name in sorted(obj) + ), + ) + + def visit_string_clauseelement_dict( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return ( + attrname, + tuple( + (key, obj[key]._gen_cache_key(anon_map, bindparams)) + for key in sorted(obj) + ), + ) + + def 
visit_string_multi_dict( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return ( + attrname, + tuple( + ( + key, + ( + value._gen_cache_key(anon_map, bindparams) + if isinstance(value, HasCacheKey) + else value + ), + ) + for key, value in [(key, obj[key]) for key in sorted(obj)] + ), + ) + + def visit_fromclause_canonical_column_collection( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + # inlining into the internals of ColumnCollection + return ( + attrname, + tuple( + col._gen_cache_key(anon_map, bindparams) + for k, col, _ in obj._collection + ), + ) + + def visit_unknown_structure( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + anon_map[NO_CACHE] = True + return () + + def visit_dml_ordered_values( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return ( + attrname, + tuple( + ( + ( + key._gen_cache_key(anon_map, bindparams) + if hasattr(key, "__clause_element__") + else key + ), + value._gen_cache_key(anon_map, bindparams), + ) + for key, value in obj + ), + ) + + def visit_dml_values( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + # in py37 we can assume two dictionaries created in the same + # insert ordering will retain that sorting + return ( + attrname, + tuple( + ( + ( + k._gen_cache_key(anon_map, bindparams) + if hasattr(k, "__clause_element__") + else k + ), + obj[k]._gen_cache_key(anon_map, bindparams), + ) + for k in obj + ), + ) + + def visit_dml_multi_values( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + # multivalues are simply not cacheable right now + anon_map[NO_CACHE] = True + return () + + +_cache_key_traversal_visitor = _CacheKeyTraversal() diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/coercions.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/coercions.py new file mode 100644 index 00000000..0c998c66 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/coercions.py @@ -0,0 +1,1405 @@ +# sql/coercions.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +from __future__ import annotations + +import collections.abc as collections_abc +import numbers +import re +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import Iterable +from typing import Iterator +from typing import List +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import operators +from . import roles +from . import visitors +from ._typing import is_from_clause +from .base import ExecutableOption +from .base import Options +from .cache_key import HasCacheKey +from .visitors import Visitable +from .. import exc +from .. 
import inspection +from .. import util +from ..util.typing import Literal + +if typing.TYPE_CHECKING: + # elements lambdas schema selectable are set by __init__ + from . import elements + from . import lambdas + from . import schema + from . import selectable + from ._typing import _ColumnExpressionArgument + from ._typing import _ColumnsClauseArgument + from ._typing import _DDLColumnArgument + from ._typing import _DMLTableArgument + from ._typing import _FromClauseArgument + from .dml import _DMLTableElement + from .elements import BindParameter + from .elements import ClauseElement + from .elements import ColumnClause + from .elements import ColumnElement + from .elements import DQLDMLClauseElement + from .elements import NamedColumn + from .elements import SQLCoreOperations + from .schema import Column + from .selectable import _ColumnsClauseElement + from .selectable import _JoinTargetProtocol + from .selectable import FromClause + from .selectable import HasCTE + from .selectable import SelectBase + from .selectable import Subquery + from .visitors import _TraverseCallableType + +_SR = TypeVar("_SR", bound=roles.SQLRole) +_F = TypeVar("_F", bound=Callable[..., Any]) +_StringOnlyR = TypeVar("_StringOnlyR", bound=roles.StringRole) +_T = TypeVar("_T", bound=Any) + + +def _is_literal(element): + """Return whether or not the element is a "literal" in the context + of a SQL expression construct. + + """ + + return not isinstance( + element, + (Visitable, schema.SchemaEventTarget), + ) and not hasattr(element, "__clause_element__") + + +def _deep_is_literal(element): + """Return whether or not the element is a "literal" in the context + of a SQL expression construct. + + does a deeper more esoteric check than _is_literal. is used + for lambda elements that have to distinguish values that would + be bound vs. not without any context. + + """ + + if isinstance(element, collections_abc.Sequence) and not isinstance( + element, str + ): + for elem in element: + if not _deep_is_literal(elem): + return False + else: + return True + + return ( + not isinstance( + element, + ( + Visitable, + schema.SchemaEventTarget, + HasCacheKey, + Options, + util.langhelpers.symbol, + ), + ) + and not hasattr(element, "__clause_element__") + and ( + not isinstance(element, type) + or not issubclass(element, HasCacheKey) + ) + ) + + +def _document_text_coercion( + paramname: str, meth_rst: str, param_rst: str +) -> Callable[[_F], _F]: + return util.add_parameter_text( + paramname, + ( + ".. warning:: " + "The %s argument to %s can be passed as a Python string argument, " + "which will be treated " + "as **trusted SQL text** and rendered as given. **DO NOT PASS " + "UNTRUSTED INPUT TO THIS PARAMETER**." + ) + % (param_rst, meth_rst), + ) + + +def _expression_collection_was_a_list( + attrname: str, + fnname: str, + args: Union[Sequence[_T], Sequence[Sequence[_T]]], +) -> Sequence[_T]: + if args and isinstance(args[0], (list, set, dict)) and len(args) == 1: + if isinstance(args[0], list): + raise exc.ArgumentError( + f'The "{attrname}" argument to {fnname}(), when ' + "referring to a sequence " + "of items, is now passed as a series of positional " + "elements, rather than as a list. " + ) + return cast("Sequence[_T]", args[0]) + + return cast("Sequence[_T]", args) + + +@overload +def expect( + role: Type[roles.TruncatedLabelRole], + element: Any, + **kw: Any, +) -> str: ... + + +@overload +def expect( + role: Type[roles.DMLColumnRole], + element: Any, + *, + as_key: Literal[True] = ..., + **kw: Any, +) -> str: ... 
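+
+
+# A rough usage sketch of these overloads (``some_table`` below stands in
+# for any existing Table object; the results follow from the role
+# implementations later in this module):
+#
+#     coercions.expect(roles.ExpressionElementRole, 5)
+#     # -> a BindParameter wrapping 5, via ExpressionElementImpl
+#
+#     coercions.expect(roles.FromClauseRole, some_table)
+#     # -> some_table unchanged, since Table already satisfies the role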
+ + +@overload +def expect( + role: Type[roles.LiteralValueRole], + element: Any, + **kw: Any, +) -> BindParameter[Any]: ... + + +@overload +def expect( + role: Type[roles.DDLReferredColumnRole], + element: Any, + **kw: Any, +) -> Column[Any]: ... + + +@overload +def expect( + role: Type[roles.DDLConstraintColumnRole], + element: Any, + **kw: Any, +) -> Union[Column[Any], str]: ... + + +@overload +def expect( + role: Type[roles.StatementOptionRole], + element: Any, + **kw: Any, +) -> DQLDMLClauseElement: ... + + +@overload +def expect( + role: Type[roles.LabeledColumnExprRole[Any]], + element: _ColumnExpressionArgument[_T], + **kw: Any, +) -> NamedColumn[_T]: ... + + +@overload +def expect( + role: Union[ + Type[roles.ExpressionElementRole[Any]], + Type[roles.LimitOffsetRole], + Type[roles.WhereHavingRole], + ], + element: _ColumnExpressionArgument[_T], + **kw: Any, +) -> ColumnElement[_T]: ... + + +@overload +def expect( + role: Union[ + Type[roles.ExpressionElementRole[Any]], + Type[roles.LimitOffsetRole], + Type[roles.WhereHavingRole], + Type[roles.OnClauseRole], + Type[roles.ColumnArgumentRole], + ], + element: Any, + **kw: Any, +) -> ColumnElement[Any]: ... + + +@overload +def expect( + role: Type[roles.DMLTableRole], + element: _DMLTableArgument, + **kw: Any, +) -> _DMLTableElement: ... + + +@overload +def expect( + role: Type[roles.HasCTERole], + element: HasCTE, + **kw: Any, +) -> HasCTE: ... + + +@overload +def expect( + role: Type[roles.SelectStatementRole], + element: SelectBase, + **kw: Any, +) -> SelectBase: ... + + +@overload +def expect( + role: Type[roles.FromClauseRole], + element: _FromClauseArgument, + **kw: Any, +) -> FromClause: ... + + +@overload +def expect( + role: Type[roles.FromClauseRole], + element: SelectBase, + *, + explicit_subquery: Literal[True] = ..., + **kw: Any, +) -> Subquery: ... + + +@overload +def expect( + role: Type[roles.ColumnsClauseRole], + element: _ColumnsClauseArgument[Any], + **kw: Any, +) -> _ColumnsClauseElement: ... + + +@overload +def expect( + role: Type[roles.JoinTargetRole], + element: _JoinTargetProtocol, + **kw: Any, +) -> _JoinTargetProtocol: ... + + +# catchall for not-yet-implemented overloads +@overload +def expect( + role: Type[_SR], + element: Any, + **kw: Any, +) -> Any: ... + + +def expect( + role: Type[_SR], + element: Any, + *, + apply_propagate_attrs: Optional[ClauseElement] = None, + argname: Optional[str] = None, + post_inspect: bool = False, + disable_inspection: bool = False, + **kw: Any, +) -> Any: + if ( + role.allows_lambda + # note callable() will not invoke a __getattr__() method, whereas + # hasattr(obj, "__call__") will. by keeping the callable() check here + # we prevent most needless calls to hasattr() and therefore + # __getattr__(), which is present on ColumnElement. 
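+        # the hasattr(element, "__code__") check below further limits
+        # lambda handling to plain functions/lambdas, as arbitrary
+        # callable objects do not define __code__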
+ and callable(element) + and hasattr(element, "__code__") + ): + return lambdas.LambdaElement( + element, + role, + lambdas.LambdaOptions(**kw), + apply_propagate_attrs=apply_propagate_attrs, + ) + + # major case is that we are given a ClauseElement already, skip more + # elaborate logic up front if possible + impl = _impl_lookup[role] + + original_element = element + + if not isinstance( + element, + ( + elements.CompilerElement, + schema.SchemaItem, + schema.FetchedValue, + lambdas.PyWrapper, + ), + ): + resolved = None + + if impl._resolve_literal_only: + resolved = impl._literal_coercion(element, **kw) + else: + original_element = element + + is_clause_element = False + + # this is a special performance optimization for ORM + # joins used by JoinTargetImpl that we don't go through the + # work of creating __clause_element__() when we only need the + # original QueryableAttribute, as the former will do clause + # adaption and all that which is just thrown away here. + if ( + impl._skip_clauseelement_for_target_match + and isinstance(element, role) + and hasattr(element, "__clause_element__") + ): + is_clause_element = True + else: + while hasattr(element, "__clause_element__"): + is_clause_element = True + + if not getattr(element, "is_clause_element", False): + element = element.__clause_element__() + else: + break + + if not is_clause_element: + if impl._use_inspection and not disable_inspection: + insp = inspection.inspect(element, raiseerr=False) + if insp is not None: + if post_inspect: + insp._post_inspect + try: + resolved = insp.__clause_element__() + except AttributeError: + impl._raise_for_expected(original_element, argname) + + if resolved is None: + resolved = impl._literal_coercion( + element, argname=argname, **kw + ) + else: + resolved = element + elif isinstance(element, lambdas.PyWrapper): + resolved = element._sa__py_wrapper_literal(**kw) + else: + resolved = element + + if apply_propagate_attrs is not None: + if typing.TYPE_CHECKING: + assert isinstance(resolved, (SQLCoreOperations, ClauseElement)) + + if not apply_propagate_attrs._propagate_attrs and getattr( + resolved, "_propagate_attrs", None + ): + apply_propagate_attrs._propagate_attrs = resolved._propagate_attrs + + if impl._role_class in resolved.__class__.__mro__: + if impl._post_coercion: + resolved = impl._post_coercion( + resolved, + argname=argname, + original_element=original_element, + **kw, + ) + return resolved + else: + return impl._implicit_coercions( + original_element, resolved, argname=argname, **kw + ) + + +def expect_as_key( + role: Type[roles.DMLColumnRole], element: Any, **kw: Any +) -> str: + kw.pop("as_key", None) + return expect(role, element, as_key=True, **kw) + + +def expect_col_expression_collection( + role: Type[roles.DDLConstraintColumnRole], + expressions: Iterable[_DDLColumnArgument], +) -> Iterator[ + Tuple[ + Union[str, Column[Any]], + Optional[ColumnClause[Any]], + Optional[str], + Optional[Union[Column[Any], str]], + ] +]: + for expr in expressions: + strname = None + column = None + + resolved: Union[Column[Any], str] = expect(role, expr) + if isinstance(resolved, str): + assert isinstance(expr, str) + strname = resolved = expr + else: + cols: List[Column[Any]] = [] + col_append: _TraverseCallableType[Column[Any]] = cols.append + visitors.traverse(resolved, {}, {"column": col_append}) + if cols: + column = cols[0] + add_element = column if column is not None else strname + + yield resolved, column, strname, add_element + + +class RoleImpl: + __slots__ = ("_role_class", 
"name", "_use_inspection") + + def _literal_coercion(self, element, **kw): + raise NotImplementedError() + + _post_coercion: Any = None + _resolve_literal_only = False + _skip_clauseelement_for_target_match = False + + def __init__(self, role_class): + self._role_class = role_class + self.name = role_class._role_name + self._use_inspection = issubclass(role_class, roles.UsesInspection) + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + self._raise_for_expected(element, argname, resolved) + + def _raise_for_expected( + self, + element: Any, + argname: Optional[str] = None, + resolved: Optional[Any] = None, + *, + advice: Optional[str] = None, + code: Optional[str] = None, + err: Optional[Exception] = None, + **kw: Any, + ) -> NoReturn: + if resolved is not None and resolved is not element: + got = "%r object resolved from %r object" % (resolved, element) + else: + got = repr(element) + + if argname: + msg = "%s expected for argument %r; got %s." % ( + self.name, + argname, + got, + ) + else: + msg = "%s expected, got %s." % (self.name, got) + + if advice: + msg += " " + advice + + raise exc.ArgumentError(msg, code=code) from err + + +class _Deannotate: + __slots__ = () + + def _post_coercion(self, resolved, **kw): + from .util import _deep_deannotate + + return _deep_deannotate(resolved) + + +class _StringOnly: + __slots__ = () + + _resolve_literal_only = True + + +class _ReturnsStringKey(RoleImpl): + __slots__ = () + + def _implicit_coercions(self, element, resolved, argname=None, **kw): + if isinstance(element, str): + return element + else: + self._raise_for_expected(element, argname, resolved) + + def _literal_coercion(self, element, **kw): + return element + + +class _ColumnCoercions(RoleImpl): + __slots__ = () + + def _warn_for_scalar_subquery_coercion(self): + util.warn( + "implicitly coercing SELECT object to scalar subquery; " + "please use the .scalar_subquery() method to produce a scalar " + "subquery.", + ) + + def _implicit_coercions(self, element, resolved, argname=None, **kw): + original_element = element + if not getattr(resolved, "is_clause_element", False): + self._raise_for_expected(original_element, argname, resolved) + elif resolved._is_select_base: + self._warn_for_scalar_subquery_coercion() + return resolved.scalar_subquery() + elif resolved._is_from_clause and isinstance( + resolved, selectable.Subquery + ): + self._warn_for_scalar_subquery_coercion() + return resolved.element.scalar_subquery() + elif self._role_class.allows_lambda and resolved._is_lambda_element: + return resolved + else: + self._raise_for_expected(original_element, argname, resolved) + + +def _no_text_coercion( + element: Any, + argname: Optional[str] = None, + exc_cls: Type[exc.SQLAlchemyError] = exc.ArgumentError, + extra: Optional[str] = None, + err: Optional[Exception] = None, +) -> NoReturn: + raise exc_cls( + "%(extra)sTextual SQL expression %(expr)r %(argname)sshould be " + "explicitly declared as text(%(expr)r)" + % { + "expr": util.ellipses_string(element), + "argname": "for argument %s" % (argname,) if argname else "", + "extra": "%s " % extra if extra else "", + } + ) from err + + +class _NoTextCoercion(RoleImpl): + __slots__ = () + + def _literal_coercion(self, element, *, argname=None, **kw): + if isinstance(element, str) and issubclass( + elements.TextClause, self._role_class + ): + _no_text_coercion(element, argname) + else: + self._raise_for_expected(element, argname) + + +class _CoerceLiterals(RoleImpl): 
+ __slots__ = () + _coerce_consts = False + _coerce_star = False + _coerce_numerics = False + + def _text_coercion(self, element, argname=None): + return _no_text_coercion(element, argname) + + def _literal_coercion(self, element, *, argname=None, **kw): + if isinstance(element, str): + if self._coerce_star and element == "*": + return elements.ColumnClause("*", is_literal=True) + else: + return self._text_coercion(element, argname, **kw) + + if self._coerce_consts: + if element is None: + return elements.Null() + elif element is False: + return elements.False_() + elif element is True: + return elements.True_() + + if self._coerce_numerics and isinstance(element, (numbers.Number)): + return elements.ColumnClause(str(element), is_literal=True) + + self._raise_for_expected(element, argname) + + +class LiteralValueImpl(RoleImpl): + _resolve_literal_only = True + + def _implicit_coercions( + self, + element, + resolved, + argname=None, + *, + type_=None, + literal_execute=False, + **kw, + ): + if not _is_literal(resolved): + self._raise_for_expected( + element, resolved=resolved, argname=argname, **kw + ) + + return elements.BindParameter( + None, + element, + type_=type_, + unique=True, + literal_execute=literal_execute, + ) + + def _literal_coercion(self, element, **kw): + return element + + +class _SelectIsNotFrom(RoleImpl): + __slots__ = () + + def _raise_for_expected( + self, + element: Any, + argname: Optional[str] = None, + resolved: Optional[Any] = None, + *, + advice: Optional[str] = None, + code: Optional[str] = None, + err: Optional[Exception] = None, + **kw: Any, + ) -> NoReturn: + if ( + not advice + and isinstance(element, roles.SelectStatementRole) + or isinstance(resolved, roles.SelectStatementRole) + ): + advice = ( + "To create a " + "FROM clause from a %s object, use the .subquery() method." 
+ % (resolved.__class__ if resolved is not None else element,) + ) + code = "89ve" + else: + code = None + + super()._raise_for_expected( + element, + argname=argname, + resolved=resolved, + advice=advice, + code=code, + err=err, + **kw, + ) + # never reached + assert False + + +class HasCacheKeyImpl(RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if isinstance(element, HasCacheKey): + return element + else: + self._raise_for_expected(element, argname, resolved) + + def _literal_coercion(self, element, **kw): + return element + + +class ExecutableOptionImpl(RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if isinstance(element, ExecutableOption): + return element + else: + self._raise_for_expected(element, argname, resolved) + + def _literal_coercion(self, element, **kw): + return element + + +class ExpressionElementImpl(_ColumnCoercions, RoleImpl): + __slots__ = () + + def _literal_coercion( + self, element, *, name=None, type_=None, is_crud=False, **kw + ): + if ( + element is None + and not is_crud + and (type_ is None or not type_.should_evaluate_none) + ): + # TODO: there's no test coverage now for the + # "should_evaluate_none" part of this, as outside of "crud" this + # codepath is not normally used except in some special cases + return elements.Null() + else: + try: + return elements.BindParameter( + name, element, type_, unique=True, _is_crud=is_crud + ) + except exc.ArgumentError as err: + self._raise_for_expected(element, err=err) + + def _raise_for_expected(self, element, argname=None, resolved=None, **kw): + # select uses implicit coercion with warning instead of raising + if isinstance(element, selectable.Values): + advice = ( + "To create a column expression from a VALUES clause, " + "use the .scalar_values() method." + ) + elif isinstance(element, roles.AnonymizedFromClauseRole): + advice = ( + "To create a column expression from a FROM clause row " + "as a whole, use the .table_valued() method." 
+ ) + else: + advice = None + + return super()._raise_for_expected( + element, argname=argname, resolved=resolved, advice=advice, **kw + ) + + +class BinaryElementImpl(ExpressionElementImpl, RoleImpl): + __slots__ = () + + def _literal_coercion( # type: ignore[override] + self, + element, + *, + expr, + operator, + bindparam_type=None, + argname=None, + **kw, + ): + try: + return expr._bind_param(operator, element, type_=bindparam_type) + except exc.ArgumentError as err: + self._raise_for_expected(element, err=err) + + def _post_coercion(self, resolved, *, expr, bindparam_type=None, **kw): + if resolved.type._isnull and not expr.type._isnull: + resolved = resolved._with_binary_element_type( + bindparam_type if bindparam_type is not None else expr.type + ) + return resolved + + +class InElementImpl(RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if resolved._is_from_clause: + if ( + isinstance(resolved, selectable.Alias) + and resolved.element._is_select_base + ): + self._warn_for_implicit_coercion(resolved) + return self._post_coercion(resolved.element, **kw) + else: + self._warn_for_implicit_coercion(resolved) + return self._post_coercion(resolved.select(), **kw) + else: + self._raise_for_expected(element, argname, resolved) + + def _warn_for_implicit_coercion(self, elem): + util.warn( + "Coercing %s object into a select() for use in IN(); " + "please pass a select() construct explicitly" + % (elem.__class__.__name__) + ) + + def _literal_coercion( # type: ignore[override] + self, element, *, expr, operator, **kw + ): + if util.is_non_string_iterable(element): + non_literal_expressions: Dict[ + Optional[operators.ColumnOperators], + operators.ColumnOperators, + ] = {} + element = list(element) + for o in element: + if not _is_literal(o): + if not isinstance(o, operators.ColumnOperators): + self._raise_for_expected(element, **kw) + + else: + non_literal_expressions[o] = o + elif o is None: + non_literal_expressions[o] = elements.Null() + + if non_literal_expressions: + return elements.ClauseList( + *[ + ( + non_literal_expressions[o] + if o in non_literal_expressions + else expr._bind_param(operator, o) + ) + for o in element + ] + ) + else: + return expr._bind_param(operator, element, expanding=True) + + else: + self._raise_for_expected(element, **kw) + + def _post_coercion(self, element, *, expr, operator, **kw): + if element._is_select_base: + # for IN, we are doing scalar_subquery() coercion without + # a warning + return element.scalar_subquery() + elif isinstance(element, elements.ClauseList): + assert not len(element.clauses) == 0 + return element.self_group(against=operator) + + elif isinstance(element, elements.BindParameter): + element = element._clone(maintain_key=True) + element.expanding = True + element.expand_op = operator + + return element + elif isinstance(element, selectable.Values): + return element.scalar_values() + else: + return element + + +class OnClauseImpl(_ColumnCoercions, RoleImpl): + __slots__ = () + + _coerce_consts = True + + def _literal_coercion(self, element, **kw): + self._raise_for_expected(element) + + def _post_coercion(self, resolved, *, original_element=None, **kw): + # this is a hack right now as we want to use coercion on an + # ORM InstrumentedAttribute, but we want to return the object + # itself if it is one, not its clause element. 
+ # ORM context _join and _legacy_join() would need to be improved + # to look for annotations in a clause element form. + if isinstance(original_element, roles.JoinTargetRole): + return original_element + return resolved + + +class WhereHavingImpl(_CoerceLiterals, _ColumnCoercions, RoleImpl): + __slots__ = () + + _coerce_consts = True + + def _text_coercion(self, element, argname=None): + return _no_text_coercion(element, argname) + + +class StatementOptionImpl(_CoerceLiterals, RoleImpl): + __slots__ = () + + _coerce_consts = True + + def _text_coercion(self, element, argname=None): + return elements.TextClause(element) + + +class ColumnArgumentImpl(_NoTextCoercion, RoleImpl): + __slots__ = () + + +class ColumnArgumentOrKeyImpl(_ReturnsStringKey, RoleImpl): + __slots__ = () + + +class StrAsPlainColumnImpl(_CoerceLiterals, RoleImpl): + __slots__ = () + + def _text_coercion(self, element, argname=None): + return elements.ColumnClause(element) + + +class ByOfImpl(_CoerceLiterals, _ColumnCoercions, RoleImpl, roles.ByOfRole): + __slots__ = () + + _coerce_consts = True + + def _text_coercion(self, element, argname=None): + return elements._textual_label_reference(element) + + +class OrderByImpl(ByOfImpl, RoleImpl): + __slots__ = () + + def _post_coercion(self, resolved, **kw): + if ( + isinstance(resolved, self._role_class) + and resolved._order_by_label_element is not None + ): + return elements._label_reference(resolved) + else: + return resolved + + +class GroupByImpl(ByOfImpl, RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if is_from_clause(resolved): + return elements.ClauseList(*resolved.c) + else: + return resolved + + +class DMLColumnImpl(_ReturnsStringKey, RoleImpl): + __slots__ = () + + def _post_coercion(self, element, *, as_key=False, **kw): + if as_key: + return element.key + else: + return element + + +class ConstExprImpl(RoleImpl): + __slots__ = () + + def _literal_coercion(self, element, *, argname=None, **kw): + if element is None: + return elements.Null() + elif element is False: + return elements.False_() + elif element is True: + return elements.True_() + else: + self._raise_for_expected(element, argname) + + +class TruncatedLabelImpl(_StringOnly, RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if isinstance(element, str): + return resolved + else: + self._raise_for_expected(element, argname, resolved) + + def _literal_coercion(self, element, **kw): + """coerce the given value to :class:`._truncated_label`. + + Existing :class:`._truncated_label` and + :class:`._anonymous_label` objects are passed + unchanged. + """ + + if isinstance(element, elements._truncated_label): + return element + else: + return elements._truncated_label(element) + + +class DDLExpressionImpl(_Deannotate, _CoerceLiterals, RoleImpl): + __slots__ = () + + _coerce_consts = True + + def _text_coercion(self, element, argname=None): + # see #5754 for why we can't easily deprecate this coercion. + # essentially expressions like postgresql_where would have to be + # text() as they come back from reflection and we don't want to + # have text() elements wired into the inspection dictionaries. 
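+        # illustrative sketch: an index such as
+        #   Index("ix_a", tbl.c.x, postgresql_where="x > 5")
+        # arrives here with the string "x > 5", which is wrapped as a
+        # TextClause by the line below.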
+ return elements.TextClause(element) + + +class DDLConstraintColumnImpl(_Deannotate, _ReturnsStringKey, RoleImpl): + __slots__ = () + + +class DDLReferredColumnImpl(DDLConstraintColumnImpl): + __slots__ = () + + +class LimitOffsetImpl(RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if resolved is None: + return None + else: + self._raise_for_expected(element, argname, resolved) + + def _literal_coercion( # type: ignore[override] + self, element, *, name, type_, **kw + ): + if element is None: + return None + else: + value = util.asint(element) + return selectable._OffsetLimitParam( + name, value, type_=type_, unique=True + ) + + +class LabeledColumnExprImpl(ExpressionElementImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if isinstance(resolved, roles.ExpressionElementRole): + return resolved.label(None) + else: + new = super()._implicit_coercions( + element, resolved, argname=argname, **kw + ) + if isinstance(new, roles.ExpressionElementRole): + return new.label(None) + else: + self._raise_for_expected(element, argname, resolved) + + +class ColumnsClauseImpl(_SelectIsNotFrom, _CoerceLiterals, RoleImpl): + __slots__ = () + + _coerce_consts = True + _coerce_numerics = True + _coerce_star = True + + _guess_straight_column = re.compile(r"^\w\S*$", re.I) + + def _raise_for_expected( + self, element, argname=None, resolved=None, *, advice=None, **kw + ): + if not advice and isinstance(element, list): + advice = ( + f"Did you mean to say select(" + f"{', '.join(repr(e) for e in element)})?" + ) + + return super()._raise_for_expected( + element, argname=argname, resolved=resolved, advice=advice, **kw + ) + + def _text_coercion(self, element, argname=None): + element = str(element) + + guess_is_literal = not self._guess_straight_column.match(element) + raise exc.ArgumentError( + "Textual column expression %(column)r %(argname)sshould be " + "explicitly declared with text(%(column)r), " + "or use %(literal_column)s(%(column)r) " + "for more specificity" + % { + "column": util.ellipses_string(element), + "argname": "for argument %s" % (argname,) if argname else "", + "literal_column": ( + "literal_column" if guess_is_literal else "column" + ), + } + ) + + +class ReturnsRowsImpl(RoleImpl): + __slots__ = () + + +class StatementImpl(_CoerceLiterals, RoleImpl): + __slots__ = () + + def _post_coercion( + self, resolved, *, original_element, argname=None, **kw + ): + if resolved is not original_element and not isinstance( + original_element, str + ): + # use same method as Connection uses; this will later raise + # ObjectNotExecutableError + try: + original_element._execute_on_connection + except AttributeError: + util.warn_deprecated( + "Object %r should not be used directly in a SQL statement " + "context, such as passing to methods such as " + "session.execute(). This usage will be disallowed in a " + "future release. " + "Please use Core select() / update() / delete() etc. " + "with Session.execute() and other statement execution " + "methods." 
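+                    # illustrative sketch: e.g. passing a Table object
+                    # directly, as in session.execute(user_table), emits
+                    # this warning; session.execute(select(user_table))
+                    # is the supported form.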
% original_element, + "1.4", + ) + + return resolved + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if resolved._is_lambda_element: + return resolved + else: + return super()._implicit_coercions( + element, resolved, argname=argname, **kw + ) + + +class SelectStatementImpl(_NoTextCoercion, RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if resolved._is_text_clause: + return resolved.columns() + else: + self._raise_for_expected(element, argname, resolved) + + +class HasCTEImpl(ReturnsRowsImpl): + __slots__ = () + + +class IsCTEImpl(RoleImpl): + __slots__ = () + + +class JoinTargetImpl(RoleImpl): + __slots__ = () + + _skip_clauseelement_for_target_match = True + + def _literal_coercion(self, element, *, argname=None, **kw): + self._raise_for_expected(element, argname) + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + *, + legacy: bool = False, + **kw: Any, + ) -> Any: + if isinstance(element, roles.JoinTargetRole): + # note that this codepath no longer occurs as of + # #6550, unless JoinTargetImpl._skip_clauseelement_for_target_match + # were set to False. + return element + elif legacy and resolved._is_select_base: + util.warn_deprecated( + "Implicit coercion of SELECT and textual SELECT " + "constructs into FROM clauses is deprecated; please call " + ".subquery() on any Core select or ORM Query object in " + "order to produce a subquery object.", + version="1.4", + ) + # TODO: doing _implicit_subquery here causes tests to fail, + # how was this working before? probably that ORM + # join logic treated it as a select and subquery would happen + # in _ORMJoin->Join + return resolved + else: + self._raise_for_expected(element, argname, resolved) + + +class FromClauseImpl(_SelectIsNotFrom, _NoTextCoercion, RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + *, + explicit_subquery: bool = False, + allow_select: bool = True, + **kw: Any, + ) -> Any: + if resolved._is_select_base: + if explicit_subquery: + return resolved.subquery() + elif allow_select: + util.warn_deprecated( + "Implicit coercion of SELECT and textual SELECT " + "constructs into FROM clauses is deprecated; please call " + ".subquery() on any Core select or ORM Query object in " + "order to produce a subquery object.", + version="1.4", + ) + return resolved._implicit_subquery + elif resolved._is_text_clause: + return resolved + else: + self._raise_for_expected(element, argname, resolved) + + def _post_coercion(self, element, *, deannotate=False, **kw): + if deannotate: + return element._deannotate() + else: + return element + + +class StrictFromClauseImpl(FromClauseImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + *, + allow_select: bool = False, + **kw: Any, + ) -> Any: + if resolved._is_select_base and allow_select: + util.warn_deprecated( + "Implicit coercion of SELECT and textual SELECT constructs " + "into FROM clauses is deprecated; please call .subquery() " + "on any Core select or ORM Query object in order to produce a " + "subquery object.", + version="1.4", + ) + return resolved._implicit_subquery + else: + self._raise_for_expected(element, argname, resolved) + + +class AnonymizedFromClauseImpl(StrictFromClauseImpl): + 
__slots__ = () + + def _post_coercion(self, element, *, flat=False, name=None, **kw): + assert name is None + + return element._anonymous_fromclause(flat=flat) + + +class DMLTableImpl(_SelectIsNotFrom, _NoTextCoercion, RoleImpl): + __slots__ = () + + def _post_coercion(self, element, **kw): + if "dml_table" in element._annotations: + return element._annotations["dml_table"] + else: + return element + + +class DMLSelectImpl(_NoTextCoercion, RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if resolved._is_from_clause: + if ( + isinstance(resolved, selectable.Alias) + and resolved.element._is_select_base + ): + return resolved.element + else: + return resolved.select() + else: + self._raise_for_expected(element, argname, resolved) + + +class CompoundElementImpl(_NoTextCoercion, RoleImpl): + __slots__ = () + + def _raise_for_expected(self, element, argname=None, resolved=None, **kw): + if isinstance(element, roles.FromClauseRole): + if element._is_subquery: + advice = ( + "Use the plain select() object without " + "calling .subquery() or .alias()." + ) + else: + advice = ( + "To SELECT from any FROM clause, use the .select() method." + ) + else: + advice = None + return super()._raise_for_expected( + element, argname=argname, resolved=resolved, advice=advice, **kw + ) + + +_impl_lookup = {} + + +for name in dir(roles): + cls = getattr(roles, name) + if name.endswith("Role"): + name = name.replace("Role", "Impl") + if name in globals(): + impl = globals()[name](cls) + _impl_lookup[cls] = impl + +if not TYPE_CHECKING: + ee_impl = _impl_lookup[roles.ExpressionElementRole] + + for py_type in (int, bool, str, float): + _impl_lookup[roles.ExpressionElementRole[py_type]] = ee_impl diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/compiler.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/compiler.py new file mode 100644 index 00000000..634e5ce1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/compiler.py @@ -0,0 +1,7818 @@ +# sql/compiler.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +"""Base SQL and DDL compiler implementations. + +Classes provided include: + +:class:`.compiler.SQLCompiler` - renders SQL +strings + +:class:`.compiler.DDLCompiler` - renders DDL +(data definition language) strings + +:class:`.compiler.GenericTypeCompiler` - renders +type specification strings. + +To generate user-defined SQL strings, see +:doc:`/ext/compiler`. 
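+
+A minimal sketch of the compilation entrypoint (the rendered string is
+approximate and dialect-dependent)::
+
+    from sqlalchemy import column, select
+
+    stmt = select(column("x")).where(column("x") == 5)
+    print(stmt.compile())
+    # SELECT x WHERE x = :x_1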
+ +""" +from __future__ import annotations + +import collections +import collections.abc as collections_abc +import contextlib +from enum import IntEnum +import functools +import itertools +import operator +import re +from time import perf_counter +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import ClassVar +from typing import Dict +from typing import FrozenSet +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Mapping +from typing import MutableMapping +from typing import NamedTuple +from typing import NoReturn +from typing import Optional +from typing import Pattern +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union + +from . import base +from . import coercions +from . import crud +from . import elements +from . import functions +from . import operators +from . import roles +from . import schema +from . import selectable +from . import sqltypes +from . import util as sql_util +from ._typing import is_column_element +from ._typing import is_dml +from .base import _de_clone +from .base import _from_objects +from .base import _NONE_NAME +from .base import _SentinelDefaultCharacterization +from .base import Executable +from .base import NO_ARG +from .elements import ClauseElement +from .elements import quoted_name +from .schema import Column +from .sqltypes import TupleType +from .type_api import TypeEngine +from .visitors import prefix_anon_map +from .visitors import Visitable +from .. import exc +from .. import util +from ..util import FastIntFlag +from ..util.typing import Literal +from ..util.typing import Protocol +from ..util.typing import TypedDict + +if typing.TYPE_CHECKING: + from .annotation import _AnnotationDict + from .base import _AmbiguousTableNameMap + from .base import CompileState + from .cache_key import CacheKey + from .ddl import ExecutableDDLElement + from .dml import Insert + from .dml import UpdateBase + from .dml import ValuesBase + from .elements import _truncated_label + from .elements import BindParameter + from .elements import ColumnClause + from .elements import ColumnElement + from .elements import Label + from .functions import Function + from .schema import Table + from .selectable import AliasedReturnsRows + from .selectable import CompoundSelectState + from .selectable import CTE + from .selectable import FromClause + from .selectable import NamedFromClause + from .selectable import ReturnsRows + from .selectable import Select + from .selectable import SelectState + from .type_api import _BindProcessorType + from ..engine.cursor import CursorResultMetaData + from ..engine.interfaces import _CoreSingleExecuteParams + from ..engine.interfaces import _DBAPIAnyExecuteParams + from ..engine.interfaces import _DBAPIMultiExecuteParams + from ..engine.interfaces import _DBAPISingleExecuteParams + from ..engine.interfaces import _ExecuteOptions + from ..engine.interfaces import _GenericSetInputSizesType + from ..engine.interfaces import _MutableCoreSingleExecuteParams + from ..engine.interfaces import Dialect + from ..engine.interfaces import SchemaTranslateMapType + +_FromHintsType = Dict["FromClause", str] + +RESERVED_WORDS = { + "all", + "analyse", + "analyze", + "and", + "any", + "array", + "as", + "asc", + "asymmetric", + "authorization", + "between", + "binary", + "both", + "case", + "cast", + "check", + "collate", + "column", + 
"constraint", + "create", + "cross", + "current_date", + "current_role", + "current_time", + "current_timestamp", + "current_user", + "default", + "deferrable", + "desc", + "distinct", + "do", + "else", + "end", + "except", + "false", + "for", + "foreign", + "freeze", + "from", + "full", + "grant", + "group", + "having", + "ilike", + "in", + "initially", + "inner", + "intersect", + "into", + "is", + "isnull", + "join", + "leading", + "left", + "like", + "limit", + "localtime", + "localtimestamp", + "natural", + "new", + "not", + "notnull", + "null", + "off", + "offset", + "old", + "on", + "only", + "or", + "order", + "outer", + "overlaps", + "placing", + "primary", + "references", + "right", + "select", + "session_user", + "set", + "similar", + "some", + "symmetric", + "table", + "then", + "to", + "trailing", + "true", + "union", + "unique", + "user", + "using", + "verbose", + "when", + "where", +} + +LEGAL_CHARACTERS = re.compile(r"^[A-Z0-9_$]+$", re.I) +LEGAL_CHARACTERS_PLUS_SPACE = re.compile(r"^[A-Z0-9_ $]+$", re.I) +ILLEGAL_INITIAL_CHARACTERS = {str(x) for x in range(0, 10)}.union(["$"]) + +FK_ON_DELETE = re.compile( + r"^(?:RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT)$", re.I +) +FK_ON_UPDATE = re.compile( + r"^(?:RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT)$", re.I +) +FK_INITIALLY = re.compile(r"^(?:DEFERRED|IMMEDIATE)$", re.I) +BIND_PARAMS = re.compile(r"(? ", + operators.ge: " >= ", + operators.eq: " = ", + operators.is_distinct_from: " IS DISTINCT FROM ", + operators.is_not_distinct_from: " IS NOT DISTINCT FROM ", + operators.concat_op: " || ", + operators.match_op: " MATCH ", + operators.not_match_op: " NOT MATCH ", + operators.in_op: " IN ", + operators.not_in_op: " NOT IN ", + operators.comma_op: ", ", + operators.from_: " FROM ", + operators.as_: " AS ", + operators.is_: " IS ", + operators.is_not: " IS NOT ", + operators.collate: " COLLATE ", + # unary + operators.exists: "EXISTS ", + operators.distinct_op: "DISTINCT ", + operators.inv: "NOT ", + operators.any_op: "ANY ", + operators.all_op: "ALL ", + # modifiers + operators.desc_op: " DESC", + operators.asc_op: " ASC", + operators.nulls_first_op: " NULLS FIRST", + operators.nulls_last_op: " NULLS LAST", + # bitwise + operators.bitwise_xor_op: " ^ ", + operators.bitwise_or_op: " | ", + operators.bitwise_and_op: " & ", + operators.bitwise_not_op: "~", + operators.bitwise_lshift_op: " << ", + operators.bitwise_rshift_op: " >> ", +} + +FUNCTIONS: Dict[Type[Function[Any]], str] = { + functions.coalesce: "coalesce", + functions.current_date: "CURRENT_DATE", + functions.current_time: "CURRENT_TIME", + functions.current_timestamp: "CURRENT_TIMESTAMP", + functions.current_user: "CURRENT_USER", + functions.localtime: "LOCALTIME", + functions.localtimestamp: "LOCALTIMESTAMP", + functions.random: "random", + functions.sysdate: "sysdate", + functions.session_user: "SESSION_USER", + functions.user: "USER", + functions.cube: "CUBE", + functions.rollup: "ROLLUP", + functions.grouping_sets: "GROUPING SETS", +} + + +EXTRACT_MAP = { + "month": "month", + "day": "day", + "year": "year", + "second": "second", + "hour": "hour", + "doy": "doy", + "minute": "minute", + "quarter": "quarter", + "dow": "dow", + "week": "week", + "epoch": "epoch", + "milliseconds": "milliseconds", + "microseconds": "microseconds", + "timezone_hour": "timezone_hour", + "timezone_minute": "timezone_minute", +} + +COMPOUND_KEYWORDS = { + selectable._CompoundSelectKeyword.UNION: "UNION", + selectable._CompoundSelectKeyword.UNION_ALL: "UNION ALL", + 
selectable._CompoundSelectKeyword.EXCEPT: "EXCEPT", + selectable._CompoundSelectKeyword.EXCEPT_ALL: "EXCEPT ALL", + selectable._CompoundSelectKeyword.INTERSECT: "INTERSECT", + selectable._CompoundSelectKeyword.INTERSECT_ALL: "INTERSECT ALL", +} + + +class ResultColumnsEntry(NamedTuple): + """Tracks a column expression that is expected to be represented + in the result rows for this statement. + + This normally refers to the columns clause of a SELECT statement + but may also refer to a RETURNING clause, as well as for dialect-specific + emulations. + + """ + + keyname: str + """string name that's expected in cursor.description""" + + name: str + """column name, may be labeled""" + + objects: Tuple[Any, ...] + """sequence of objects that should be able to locate this column + in a RowMapping. This is typically string names and aliases + as well as Column objects. + + """ + + type: TypeEngine[Any] + """Datatype to be associated with this column. This is where + the "result processing" logic directly links the compiled statement + to the rows that come back from the cursor. + + """ + + +class _ResultMapAppender(Protocol): + def __call__( + self, + keyname: str, + name: str, + objects: Sequence[Any], + type_: TypeEngine[Any], + ) -> None: ... + + +# integer indexes into ResultColumnsEntry used by cursor.py. +# some profiling showed integer access faster than named tuple +RM_RENDERED_NAME: Literal[0] = 0 +RM_NAME: Literal[1] = 1 +RM_OBJECTS: Literal[2] = 2 +RM_TYPE: Literal[3] = 3 + + +class _BaseCompilerStackEntry(TypedDict): + asfrom_froms: Set[FromClause] + correlate_froms: Set[FromClause] + selectable: ReturnsRows + + +class _CompilerStackEntry(_BaseCompilerStackEntry, total=False): + compile_state: CompileState + need_result_map_for_nested: bool + need_result_map_for_compound: bool + select_0: ReturnsRows + insert_from_select: Select[Any] + + +class ExpandedState(NamedTuple): + """represents state to use when producing "expanded" and + "post compile" bound parameters for a statement. + + "expanded" parameters are parameters that are generated at + statement execution time to suit a number of parameters passed, the most + prominent example being the individual elements inside of an IN expression. + + "post compile" parameters are parameters where the SQL literal value + will be rendered into the SQL statement at execution time, rather than + being passed as separate parameters to the driver. + + To create an :class:`.ExpandedState` instance, use the + :meth:`.SQLCompiler.construct_expanded_state` method on any + :class:`.SQLCompiler` instance. + + """ + + statement: str + """String SQL statement with parameters fully expanded""" + + parameters: _CoreSingleExecuteParams + """Parameter dictionary with parameters fully expanded. + + For a statement that uses named parameters, this dictionary will map + exactly to the names in the statement. For a statement that uses + positional parameters, the :attr:`.ExpandedState.positional_parameters` + will yield a tuple with the positional parameter set. 
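+
+    For example, a sketch (``t`` stands in for a Table; key names are
+    illustrative)::
+
+        compiled = select(t).where(t.c.x.in_([5, 6, 7])).compile()
+        state = compiled.construct_expanded_state()
+        # state.parameters may look like
+        # {"x_1_1": 5, "x_1_2": 6, "x_1_3": 7}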
+
+    """
+
+    processors: Mapping[str, _BindProcessorType[Any]]
+    """mapping of bound value processors"""
+
+    positiontup: Optional[Sequence[str]]
+    """Sequence of string names indicating the order of positional
+    parameters"""
+
+    parameter_expansion: Mapping[str, List[str]]
+    """Mapping representing the intermediary link from original parameter
+    name to list of "expanded" parameter names, for those parameters that
+    were expanded."""
+
+    @property
+    def positional_parameters(self) -> Tuple[Any, ...]:
+        """Tuple of positional parameters, for statements that were compiled
+        using a positional paramstyle.
+
+        """
+        if self.positiontup is None:
+            raise exc.InvalidRequestError(
+                "statement does not use a positional paramstyle"
+            )
+        return tuple(self.parameters[key] for key in self.positiontup)
+
+    @property
+    def additional_parameters(self) -> _CoreSingleExecuteParams:
+        """synonym for :attr:`.ExpandedState.parameters`."""
+        return self.parameters
+
+
+class _InsertManyValues(NamedTuple):
+    """represents state to use for executing an "insertmanyvalues" statement.
+
+    The primary consumers of this object are the
+    :meth:`.SQLCompiler._deliver_insertmanyvalues_batches` and
+    :meth:`.DefaultDialect._deliver_insertmanyvalues_batches` methods.
+
+    .. versionadded:: 2.0
+
+    """
+
+    is_default_expr: bool
+    """if True, the statement is of the form
+    ``INSERT INTO TABLE DEFAULT VALUES``, and can't be rewritten as a "batch"
+
+    """
+
+    single_values_expr: str
+    """The rendered "values" clause of the INSERT statement.
+
+    This is typically the parenthesized section e.g. "(?, ?, ?)" or similar.
+    The insertmanyvalues logic uses this string as a search and replace
+    target.
+
+    """
+
+    insert_crud_params: List[crud._CrudParamElementStr]
+    """List of Column / bind names etc. used while rewriting the statement"""
+
+    num_positional_params_counted: int
+    """the number of bound parameters in a single-row statement.
+
+    This count may be larger or smaller than the actual number of columns
+    targeted in the INSERT, as it accounts for SQL expressions
+    in the values list that may have zero or more parameters embedded
+    within them.
+
+    This count is part of what's used to organize rewritten parameter lists
+    when batching.
+
+    """
+
+    sort_by_parameter_order: bool = False
+    """if the ``sort_by_parameter_order`` parameter was used on the
+    insert.
+
+    All of the attributes following this will only be used if this is True.
+
+    """
+
+    includes_upsert_behaviors: bool = False
+    """if True, we have to accommodate upsert behaviors.
+
+    This will in some cases downgrade "insertmanyvalues" that requests
+    deterministic ordering.
+
+    """
+
+    sentinel_columns: Optional[Sequence[Column[Any]]] = None
+    """List of sentinel columns that were located.
+
+    This list is only here if the INSERT asked for
+    sort_by_parameter_order=True,
+    and dialect-appropriate sentinel columns were located.
+
+    .. versionadded:: 2.0.10
+
+    """
+
+    num_sentinel_columns: int = 0
+    """how many sentinel columns are in the above list, if any.
+
+    This is the same as
+    ``len(sentinel_columns) if sentinel_columns is not None else 0``
+
+    """
+
+    sentinel_param_keys: Optional[Sequence[str]] = None
+    """parameter str keys in each param dictionary / tuple
+    that would link to the client side "sentinel" values for that row, which
+    we can use to match up parameter sets to result rows.
+
+    This is only present if sentinel_columns is present and the INSERT
+    statement actually refers to client side values for these sentinel
+    columns.
+
+    .. versionadded:: 2.0.10
+
+    .. versionchanged:: 2.0.29 - the sequence is now string dictionary keys
+       only, used against the "compiled parameters" collection before
+       the parameters were converted by bound parameter processors
+
+    """
+
+    implicit_sentinel: bool = False
+    """if True, we have exactly one sentinel column and it uses a server side
+    value, which currently has to be an incrementing integer value.
+
+    The dialect in question would have asserted that it supports receiving
+    these values back and sorting on that value as a means of guaranteeing
+    correlation with the incoming parameter list.
+
+    .. versionadded:: 2.0.10
+
+    """
+
+    embed_values_counter: bool = False
+    """Whether to embed an incrementing integer counter in each parameter
+    set within the VALUES clause as parameters are batched over.
+
+    This is only used for a specific INSERT..SELECT..VALUES..RETURNING syntax
+    where a subquery is used to produce value tuples.  Current support
+    includes PostgreSQL, Microsoft SQL Server.
+
+    .. versionadded:: 2.0.10
+
+    """
+
+
+class _InsertManyValuesBatch(NamedTuple):
+    """represents an individual batch SQL statement for insertmanyvalues.
+
+    This is passed through the
+    :meth:`.SQLCompiler._deliver_insertmanyvalues_batches` and
+    :meth:`.DefaultDialect._deliver_insertmanyvalues_batches` methods out
+    to the :class:`.Connection` within the
+    :meth:`.Connection._exec_insertmany_context` method.
+
+    .. versionadded:: 2.0.10
+
+    """
+
+    replaced_statement: str
+    replaced_parameters: _DBAPIAnyExecuteParams
+    processed_setinputsizes: Optional[_GenericSetInputSizesType]
+    batch: Sequence[_DBAPISingleExecuteParams]
+    sentinel_values: Sequence[Tuple[Any, ...]]
+    current_batch_size: int
+    batchnum: int
+    total_batches: int
+    rows_sorted: bool
+    is_downgraded: bool
+
+
+class InsertmanyvaluesSentinelOpts(FastIntFlag):
+    """bitflag enum indicating styles of PK defaults
+    which can work as implicit sentinel columns
+
+    """
+
+    NOT_SUPPORTED = 1
+    AUTOINCREMENT = 2
+    IDENTITY = 4
+    SEQUENCE = 8
+
+    ANY_AUTOINCREMENT = AUTOINCREMENT | IDENTITY | SEQUENCE
+    _SUPPORTED_OR_NOT = NOT_SUPPORTED | ANY_AUTOINCREMENT
+
+    USE_INSERT_FROM_SELECT = 16
+    RENDER_SELECT_COL_CASTS = 64
+
+
+class CompilerState(IntEnum):
+    COMPILING = 0
+    """statement is present, compilation phase in progress"""
+
+    STRING_APPLIED = 1
+    """statement is present, string form of the statement has been applied.
+
+    Additional processors by subclasses may still be pending.
+
+    """
+
+    NO_STATEMENT = 2
+    """compiler does not have a statement to compile, is used
+    for method access"""
+
+
+class Linting(IntEnum):
+    """represent preferences for the 'SQL linting' feature.
+
+    this feature currently includes support for flagging cartesian products
+    in SQL statements.
+
+    """
+
+    NO_LINTING = 0
+    "Disable all linting."
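+
+    # Illustrative sketch: these values are typically selected through the
+    # create_engine() parameter ``enable_from_linting``, e.g.
+    #
+    #   engine = create_engine(url, enable_from_linting=True)
+    #
+    # which applies FROM_LINTING, warning when a compiled SELECT contains
+    # a cartesian product between FROM elements.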
+ + COLLECT_CARTESIAN_PRODUCTS = 1 + """Collect data on FROMs and cartesian products and gather into + 'self.from_linter'""" + + WARN_LINTING = 2 + "Emit warnings for linters that find problems" + + FROM_LINTING = COLLECT_CARTESIAN_PRODUCTS | WARN_LINTING + """Warn for cartesian products; combines COLLECT_CARTESIAN_PRODUCTS + and WARN_LINTING""" + + +NO_LINTING, COLLECT_CARTESIAN_PRODUCTS, WARN_LINTING, FROM_LINTING = tuple( + Linting +) + + +class FromLinter(collections.namedtuple("FromLinter", ["froms", "edges"])): + """represents current state for the "cartesian product" detection + feature.""" + + def lint(self, start=None): + froms = self.froms + if not froms: + return None, None + + edges = set(self.edges) + the_rest = set(froms) + + if start is not None: + start_with = start + the_rest.remove(start_with) + else: + start_with = the_rest.pop() + + stack = collections.deque([start_with]) + + while stack and the_rest: + node = stack.popleft() + the_rest.discard(node) + + # comparison of nodes in edges here is based on hash equality, as + # there are "annotated" elements that match the non-annotated ones. + # to remove the need for in-python hash() calls, use native + # containment routines (e.g. "node in edge", "edge.index(node)") + to_remove = {edge for edge in edges if node in edge} + + # appendleft the node in each edge that is not + # the one that matched. + stack.extendleft(edge[not edge.index(node)] for edge in to_remove) + edges.difference_update(to_remove) + + # FROMS left over? boom + if the_rest: + return the_rest, start_with + else: + return None, None + + def warn(self, stmt_type="SELECT"): + the_rest, start_with = self.lint() + + # FROMS left over? boom + if the_rest: + froms = the_rest + if froms: + template = ( + "{stmt_type} statement has a cartesian product between " + "FROM element(s) {froms} and " + 'FROM element "{start}". Apply join condition(s) ' + "between each element to resolve." + ) + froms_str = ", ".join( + f'"{self.froms[from_]}"' for from_ in froms + ) + message = template.format( + stmt_type=stmt_type, + froms=froms_str, + start=self.froms[start_with], + ) + + util.warn(message) + + +class Compiled: + """Represent a compiled SQL or DDL expression. + + The ``__str__`` method of the ``Compiled`` object should produce + the actual text of the statement. ``Compiled`` objects are + specific to their underlying database dialect, and also may + or may not be specific to the columns referenced within a + particular set of bind parameters. In no case should the + ``Compiled`` object be dependent on the actual values of those + bind parameters, even though it may reference those values as + defaults. + """ + + statement: Optional[ClauseElement] = None + "The statement to compile." + string: str = "" + "The string representation of the ``statement``" + + state: CompilerState + """description of the compiler's state""" + + is_sql = False + is_ddl = False + + _cached_metadata: Optional[CursorResultMetaData] = None + + _result_columns: Optional[List[ResultColumnsEntry]] = None + + schema_translate_map: Optional[SchemaTranslateMapType] = None + + execution_options: _ExecuteOptions = util.EMPTY_DICT + """ + Execution options propagated from the statement. In some cases, + sub-elements of the statement can modify these. + """ + + preparer: IdentifierPreparer + + _annotations: _AnnotationDict = util.EMPTY_DICT + + compile_state: Optional[CompileState] = None + """Optional :class:`.CompileState` object that maintains additional + state used by the compiler. 
+ + Major executable objects such as :class:`_expression.Insert`, + :class:`_expression.Update`, :class:`_expression.Delete`, + :class:`_expression.Select` will generate this + state when compiled in order to calculate additional information about the + object. For the top level object that is to be executed, the state can be + stored here where it can also have applicability towards result set + processing. + + .. versionadded:: 1.4 + + """ + + dml_compile_state: Optional[CompileState] = None + """Optional :class:`.CompileState` assigned at the same point that + .isinsert, .isupdate, or .isdelete is assigned. + + This will normally be the same object as .compile_state, with the + exception of cases like the :class:`.ORMFromStatementCompileState` + object. + + .. versionadded:: 1.4.40 + + """ + + cache_key: Optional[CacheKey] = None + """The :class:`.CacheKey` that was generated ahead of creating this + :class:`.Compiled` object. + + This is used for routines that need access to the original + :class:`.CacheKey` instance generated when the :class:`.Compiled` + instance was first cached, typically in order to reconcile + the original list of :class:`.BindParameter` objects with a + per-statement list that's generated on each call. + + """ + + _gen_time: float + """Generation time of this :class:`.Compiled`, used for reporting + cache stats.""" + + def __init__( + self, + dialect: Dialect, + statement: Optional[ClauseElement], + schema_translate_map: Optional[SchemaTranslateMapType] = None, + render_schema_translate: bool = False, + compile_kwargs: Mapping[str, Any] = util.immutabledict(), + ): + """Construct a new :class:`.Compiled` object. + + :param dialect: :class:`.Dialect` to compile against. + + :param statement: :class:`_expression.ClauseElement` to be compiled. + + :param schema_translate_map: dictionary of schema names to be + translated when forming the resultant SQL + + .. seealso:: + + :ref:`schema_translating` + + :param compile_kwargs: additional kwargs that will be + passed to the initial call to :meth:`.Compiled.process`. 
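+
+        For example, a sketch of compiling with a schema translate map
+        (``some_dialect`` stands in for a real dialect object)::
+
+            compiled = stmt.compile(
+                dialect=some_dialect,
+                schema_translate_map={None: "remote_schema"},
+            )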
+ + + """ + self.dialect = dialect + self.preparer = self.dialect.identifier_preparer + if schema_translate_map: + self.schema_translate_map = schema_translate_map + self.preparer = self.preparer._with_schema_translate( + schema_translate_map + ) + + if statement is not None: + self.state = CompilerState.COMPILING + self.statement = statement + self.can_execute = statement.supports_execution + self._annotations = statement._annotations + if self.can_execute: + if TYPE_CHECKING: + assert isinstance(statement, Executable) + self.execution_options = statement._execution_options + self.string = self.process(self.statement, **compile_kwargs) + + if render_schema_translate: + self.string = self.preparer._render_schema_translates( + self.string, schema_translate_map + ) + + self.state = CompilerState.STRING_APPLIED + else: + self.state = CompilerState.NO_STATEMENT + + self._gen_time = perf_counter() + + def __init_subclass__(cls) -> None: + cls._init_compiler_cls() + return super().__init_subclass__() + + @classmethod + def _init_compiler_cls(cls): + pass + + def _execute_on_connection( + self, connection, distilled_params, execution_options + ): + if self.can_execute: + return connection._execute_compiled( + self, distilled_params, execution_options + ) + else: + raise exc.ObjectNotExecutableError(self.statement) + + def visit_unsupported_compilation(self, element, err, **kw): + raise exc.UnsupportedCompilationError(self, type(element)) from err + + @property + def sql_compiler(self): + """Return a Compiled that is capable of processing SQL expressions. + + If this compiler is one, it would likely just return 'self'. + + """ + + raise NotImplementedError() + + def process(self, obj: Visitable, **kwargs: Any) -> str: + return obj._compiler_dispatch(self, **kwargs) + + def __str__(self) -> str: + """Return the string text of the generated SQL or DDL.""" + + if self.state is CompilerState.STRING_APPLIED: + return self.string + else: + return "" + + def construct_params( + self, + params: Optional[_CoreSingleExecuteParams] = None, + extracted_parameters: Optional[Sequence[BindParameter[Any]]] = None, + escape_names: bool = True, + ) -> Optional[_MutableCoreSingleExecuteParams]: + """Return the bind params for this compiled object. + + :param params: a dict of string/object pairs whose values will + override bind values compiled in to the + statement. 
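+
+        A sketch of typical use against a SQL statement compiler
+        (``t`` stands in for a Table)::
+
+            stmt = select(t).where(t.c.x == bindparam("x"))
+            stmt.compile().construct_params({"x": 10})   # -> {"x": 10}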
+ """ + + raise NotImplementedError() + + @property + def params(self): + """Return the bind params for this compiled object.""" + return self.construct_params() + + +class TypeCompiler(util.EnsureKWArg): + """Produces DDL specification for TypeEngine objects.""" + + ensure_kwarg = r"visit_\w+" + + def __init__(self, dialect: Dialect): + self.dialect = dialect + + def process(self, type_: TypeEngine[Any], **kw: Any) -> str: + if ( + type_._variant_mapping + and self.dialect.name in type_._variant_mapping + ): + type_ = type_._variant_mapping[self.dialect.name] + return type_._compiler_dispatch(self, **kw) + + def visit_unsupported_compilation( + self, element: Any, err: Exception, **kw: Any + ) -> NoReturn: + raise exc.UnsupportedCompilationError(self, element) from err + + +# this was a Visitable, but to allow accurate detection of +# column elements this is actually a column element +class _CompileLabel( + roles.BinaryElementRole[Any], elements.CompilerColumnElement +): + """lightweight label object which acts as an expression.Label.""" + + __visit_name__ = "label" + __slots__ = "element", "name", "_alt_names" + + def __init__(self, col, name, alt_names=()): + self.element = col + self.name = name + self._alt_names = (col,) + alt_names + + @property + def proxy_set(self): + return self.element.proxy_set + + @property + def type(self): + return self.element.type + + def self_group(self, **kw): + return self + + +class ilike_case_insensitive( + roles.BinaryElementRole[Any], elements.CompilerColumnElement +): + """produce a wrapping element for a case-insensitive portion of + an ILIKE construct. + + The construct usually renders the ``lower()`` function, but on + PostgreSQL will pass silently with the assumption that "ILIKE" + is being used. + + .. versionadded:: 2.0 + + """ + + __visit_name__ = "ilike_case_insensitive_operand" + __slots__ = "element", "comparator" + + def __init__(self, element): + self.element = element + self.comparator = element.comparator + + @property + def proxy_set(self): + return self.element.proxy_set + + @property + def type(self): + return self.element.type + + def self_group(self, **kw): + return self + + def _with_binary_element_type(self, type_): + return ilike_case_insensitive( + self.element._with_binary_element_type(type_) + ) + + +class SQLCompiler(Compiled): + """Default implementation of :class:`.Compiled`. + + Compiles :class:`_expression.ClauseElement` objects into SQL strings. + + """ + + extract_map = EXTRACT_MAP + + bindname_escape_characters: ClassVar[Mapping[str, str]] = ( + util.immutabledict( + { + "%": "P", + "(": "A", + ")": "Z", + ":": "C", + ".": "_", + "[": "_", + "]": "_", + " ": "_", + } + ) + ) + """A mapping (e.g. dict or similar) containing a lookup of + characters keyed to replacement characters which will be applied to all + 'bind names' used in SQL statements as a form of 'escaping'; the given + characters are replaced entirely with the 'replacement' character when + rendered in the SQL statement, and a similar translation is performed + on the incoming names used in parameter dictionaries passed to methods + like :meth:`_engine.Connection.execute`. + + This allows bound parameter names used in :func:`_sql.bindparam` and + other constructs to have any arbitrary characters present without any + concern for characters that aren't allowed at all on the target database. 
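+
+    For example, as a sketch (the exact escaped name is an internal
+    detail)::
+
+        stmt = select(bindparam("size(cm)"))
+        # the rendered SQL uses an escaped name such as :sizeAcmZ, while
+        # conn.execute(stmt, {"size(cm)": 10}) still accepts the original key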
+
+    Third party dialects can establish their own dictionary here to replace the
+    default mapping, which will ensure that the particular characters in the
+    mapping will never appear in a bound parameter name.
+
+    The dictionary is evaluated at **class creation time**, so cannot be
+    modified at runtime; it must be present on the class when the class
+    is first declared.
+
+    Note that for dialects that have additional bound parameter rules such
+    as additional restrictions on leading characters, the
+    :meth:`_sql.SQLCompiler.bindparam_string` method may need to be augmented.
+    See the cx_Oracle compiler for an example of this.
+
+    .. versionadded:: 2.0.0rc1
+
+    """
+
+    _bind_translate_re: ClassVar[Pattern[str]]
+    _bind_translate_chars: ClassVar[Mapping[str, str]]
+
+    is_sql = True
+
+    compound_keywords = COMPOUND_KEYWORDS
+
+    isdelete: bool = False
+    isinsert: bool = False
+    isupdate: bool = False
+    """class-level defaults which can be set at the instance
+    level to define if this Compiled instance represents
+    INSERT/UPDATE/DELETE
+    """
+
+    postfetch: Optional[List[Column[Any]]]
+    """list of columns that can be post-fetched after INSERT or UPDATE to
+    receive server-updated values"""
+
+    insert_prefetch: Sequence[Column[Any]] = ()
+    """list of columns for which default values should be evaluated before
+    an INSERT takes place"""
+
+    update_prefetch: Sequence[Column[Any]] = ()
+    """list of columns for which onupdate default values should be evaluated
+    before an UPDATE takes place"""
+
+    implicit_returning: Optional[Sequence[ColumnElement[Any]]] = None
+    """list of "implicit" returning columns for a toplevel INSERT or UPDATE
+    statement, used to receive newly generated values of columns.
+
+    .. versionadded:: 2.0 ``implicit_returning`` replaces the previous
+       ``returning`` collection, which was not a generalized RETURNING
+       collection and instead was in fact specific to the "implicit returning"
+       feature.
+
+    """
+
+    isplaintext: bool = False
+
+    binds: Dict[str, BindParameter[Any]]
+    """a dictionary of bind parameter keys to BindParameter instances."""
+
+    bind_names: Dict[BindParameter[Any], str]
+    """a dictionary of BindParameter instances to "compiled" names
+    that are actually present in the generated SQL"""
+
+    stack: List[_CompilerStackEntry]
+    """major statements such as SELECT, INSERT, UPDATE, DELETE are
+    tracked in this stack using an entry format."""
+
+    returning_precedes_values: bool = False
+    """set to True classwide to generate RETURNING
+    clauses before the VALUES or WHERE clause (i.e. MSSQL)
+    """
+
+    render_table_with_column_in_update_from: bool = False
+    """set to True classwide to indicate the SET clause
+    in a multi-table UPDATE statement should qualify
+    columns with the table name (i.e. MySQL only)
+    """
+
+    ansi_bind_rules: bool = False
+    """SQL 92 doesn't allow bind parameters to be used
+    in the columns clause of a SELECT, nor does it allow
+    ambiguous expressions like "? = ?".  A compiler
+    subclass can set this flag to True if the target
+    driver/DB enforces this
+    """
+
+    bindtemplate: str
+    """template to render bound parameters based on paramstyle."""
+
+    compilation_bindtemplate: str
+    """template used by compiler to render parameters before positional
+    paramstyle application"""
+
+    _numeric_binds_identifier_char: str
+    """Character that's used as the identifier of a numerical bind param.
+    For example if this char is set to ``$``, numerical binds will be rendered
+    in the form ``$1, $2, $3``.
+    """
+
+    _result_columns: List[ResultColumnsEntry]
+    """relates label names in the final SQL to a tuple of local
+    column/label name, ColumnElement object (if any) and
+    TypeEngine. CursorResult uses this for type processing and
+    column targeting"""
+
+    _textual_ordered_columns: bool = False
+    """tell the result object that the column names as rendered are important,
+    but they are also "ordered" vs. what is in the compiled object here.
+
+    As of 1.4.42 this condition is only present when the statement is a
+    TextualSelect, e.g. text("....").columns(...), where it is required
+    that the columns are considered positionally and not by name.
+
+    """
+
+    _ad_hoc_textual: bool = False
+    """tell the result that we encountered text() or '*' constructs in the
+    middle of the result columns, but we also have compiled columns, so
+    if the number of columns in cursor.description does not match how many
+    expressions we have, that means we can't rely on positional at all and
+    should match on name.
+
+    """
+
+    _ordered_columns: bool = True
+    """
+    if False, means we can't be sure the list of entries
+    in _result_columns is actually the rendered order.  Usually
+    True unless using an unordered TextualSelect.
+    """
+
+    _loose_column_name_matching: bool = False
+    """tell the result object that the SQL statement is textual and wants to
+    match up to Column objects, and may be using the ._tq_label in the SELECT
+    rather than the base name.
+
+    """
+
+    _numeric_binds: bool = False
+    """
+    True if paramstyle is "numeric".  This paramstyle is trickier than
+    all the others.
+
+    """
+
+    _render_postcompile: bool = False
+    """
+    whether to render out POSTCOMPILE params during the compile phase.
+
+    This attribute is used only for end-user invocation of stmt.compile();
+    it's never used for actual statement execution, where instead the
+    dialect internals access and render the internal postcompile structure
+    directly.
+
+    """
+
+    _post_compile_expanded_state: Optional[ExpandedState] = None
+    """When render_postcompile is used, the ``ExpandedState`` used to create
+    the "expanded" SQL is assigned here, and then used by the ``.params``
+    accessor and ``.construct_params()`` methods for their return values.
+
+    .. versionadded:: 2.0.0rc1
+
+    """
+
+    _pre_expanded_string: Optional[str] = None
+    """Stores the original string SQL before 'post_compile' is applied,
+    for cases where 'post_compile' was used.
+
+    """
+
+    _pre_expanded_positiontup: Optional[List[str]] = None
+
+    _insertmanyvalues: Optional[_InsertManyValues] = None
+
+    _insert_crud_params: Optional[crud._CrudParamSequence] = None
+
+    literal_execute_params: FrozenSet[BindParameter[Any]] = frozenset()
+    """bindparameter objects that are rendered as literal values at statement
+    execution time.
+
+    """
+
+    post_compile_params: FrozenSet[BindParameter[Any]] = frozenset()
+    """bindparameter objects that are rendered as bound parameter placeholders
+    at statement execution time.
+
+    """
+
+    escaped_bind_names: util.immutabledict[str, str] = util.EMPTY_DICT
+    """Late escaping of bound parameter names, which must be converted
+    back to the original name when looking in the parameter dictionary.
+
+    """
+
+    has_out_parameters = False
+    """if True, there are bindparam() objects that have the isoutparam
+    flag set."""
+
+    postfetch_lastrowid = False
+    """if True, and this is an INSERT, use cursor.lastrowid to populate
+    result.inserted_primary_key.
+
""" + + _cache_key_bind_match: Optional[ + Tuple[ + Dict[ + BindParameter[Any], + List[BindParameter[Any]], + ], + Dict[ + str, + BindParameter[Any], + ], + ] + ] = None + """a mapping that will relate the BindParameter object we compile + to those that are part of the extracted collection of parameters + in the cache key, if we were given a cache key. + + """ + + positiontup: Optional[List[str]] = None + """for a compiled construct that uses a positional paramstyle, will be + a sequence of strings, indicating the names of bound parameters in order. + + This is used in order to render bound parameters in their correct order, + and is combined with the :attr:`_sql.Compiled.params` dictionary to + render parameters. + + This sequence always contains the unescaped name of the parameters. + + .. seealso:: + + :ref:`faq_sql_expression_string` - includes a usage example for + debugging use cases. + + """ + _values_bindparam: Optional[List[str]] = None + + _visited_bindparam: Optional[List[str]] = None + + inline: bool = False + + ctes: Optional[MutableMapping[CTE, str]] + + # Detect same CTE references - Dict[(level, name), cte] + # Level is required for supporting nesting + ctes_by_level_name: Dict[Tuple[int, str], CTE] + + # To retrieve key/level in ctes_by_level_name - + # Dict[cte_reference, (level, cte_name, cte_opts)] + level_name_by_cte: Dict[CTE, Tuple[int, str, selectable._CTEOpts]] + + ctes_recursive: bool + + _post_compile_pattern = re.compile(r"__\[POSTCOMPILE_(\S+?)(~~.+?~~)?\]") + _pyformat_pattern = re.compile(r"%\(([^)]+?)\)s") + _positional_pattern = re.compile( + f"{_pyformat_pattern.pattern}|{_post_compile_pattern.pattern}" + ) + + @classmethod + def _init_compiler_cls(cls): + cls._init_bind_translate() + + @classmethod + def _init_bind_translate(cls): + reg = re.escape("".join(cls.bindname_escape_characters)) + cls._bind_translate_re = re.compile(f"[{reg}]") + cls._bind_translate_chars = cls.bindname_escape_characters + + def __init__( + self, + dialect: Dialect, + statement: Optional[ClauseElement], + cache_key: Optional[CacheKey] = None, + column_keys: Optional[Sequence[str]] = None, + for_executemany: bool = False, + linting: Linting = NO_LINTING, + _supporting_against: Optional[SQLCompiler] = None, + **kwargs: Any, + ): + """Construct a new :class:`.SQLCompiler` object. + + :param dialect: :class:`.Dialect` to be used + + :param statement: :class:`_expression.ClauseElement` to be compiled + + :param column_keys: a list of column names to be compiled into an + INSERT or UPDATE statement. + + :param for_executemany: whether INSERT / UPDATE statements should + expect that they are to be invoked in an "executemany" style, + which may impact how the statement will be expected to return the + values of defaults and autoincrement / sequences and similar. + Depending on the backend and driver in use, support for retrieving + these values may be disabled which means SQL expressions may + be rendered inline, RETURNING may not be rendered, etc. + + :param kwargs: additional keyword arguments to be consumed by the + superclass. 
+ + """ + self.column_keys = column_keys + + self.cache_key = cache_key + + if cache_key: + cksm = {b.key: b for b in cache_key[1]} + ckbm = {b: [b] for b in cache_key[1]} + self._cache_key_bind_match = (ckbm, cksm) + + # compile INSERT/UPDATE defaults/sequences to expect executemany + # style execution, which may mean no pre-execute of defaults, + # or no RETURNING + self.for_executemany = for_executemany + + self.linting = linting + + # a dictionary of bind parameter keys to BindParameter + # instances. + self.binds = {} + + # a dictionary of BindParameter instances to "compiled" names + # that are actually present in the generated SQL + self.bind_names = util.column_dict() + + # stack which keeps track of nested SELECT statements + self.stack = [] + + self._result_columns = [] + + # true if the paramstyle is positional + self.positional = dialect.positional + if self.positional: + self._numeric_binds = nb = dialect.paramstyle.startswith("numeric") + if nb: + self._numeric_binds_identifier_char = ( + "$" if dialect.paramstyle == "numeric_dollar" else ":" + ) + + self.compilation_bindtemplate = _pyformat_template + else: + self.compilation_bindtemplate = BIND_TEMPLATES[dialect.paramstyle] + + self.ctes = None + + self.label_length = ( + dialect.label_length or dialect.max_identifier_length + ) + + # a map which tracks "anonymous" identifiers that are created on + # the fly here + self.anon_map = prefix_anon_map() + + # a map which tracks "truncated" names based on + # dialect.label_length or dialect.max_identifier_length + self.truncated_names: Dict[Tuple[str, str], str] = {} + self._truncated_counters: Dict[str, int] = {} + + Compiled.__init__(self, dialect, statement, **kwargs) + + if self.isinsert or self.isupdate or self.isdelete: + if TYPE_CHECKING: + assert isinstance(statement, UpdateBase) + + if self.isinsert or self.isupdate: + if TYPE_CHECKING: + assert isinstance(statement, ValuesBase) + if statement._inline: + self.inline = True + elif self.for_executemany and ( + not self.isinsert + or ( + self.dialect.insert_executemany_returning + and statement._return_defaults + ) + ): + self.inline = True + + self.bindtemplate = BIND_TEMPLATES[dialect.paramstyle] + + if _supporting_against: + self.__dict__.update( + { + k: v + for k, v in _supporting_against.__dict__.items() + if k + not in { + "state", + "dialect", + "preparer", + "positional", + "_numeric_binds", + "compilation_bindtemplate", + "bindtemplate", + } + } + ) + + if self.state is CompilerState.STRING_APPLIED: + if self.positional: + if self._numeric_binds: + self._process_numeric() + else: + self._process_positional() + + if self._render_postcompile: + parameters = self.construct_params( + escape_names=False, + _no_postcompile=True, + ) + + self._process_parameters_for_postcompile( + parameters, _populate_self=True + ) + + @property + def insert_single_values_expr(self) -> Optional[str]: + """When an INSERT is compiled with a single set of parameters inside + a VALUES expression, the string is assigned here, where it can be + used for insert batching schemes to rewrite the VALUES expression. + + .. versionadded:: 1.3.8 + + .. versionchanged:: 2.0 This collection is no longer used by + SQLAlchemy's built-in dialects, in favor of the currently + internal ``_insertmanyvalues`` collection that is used only by + :class:`.SQLCompiler`. 
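+
+    # Illustrative sketch: end users generally reach this constructor
+    # through ClauseElement.compile(), e.g.
+    #
+    #   compiled = stmt.compile(
+    #       dialect=postgresql.dialect(),
+    #       compile_kwargs={"render_postcompile": True},
+    #   )
+    #
+    # which renders "expanding" IN parameters out as individual bound
+    # parameters in the compiled string.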
+ + """ + if self._insertmanyvalues is None: + return None + else: + return self._insertmanyvalues.single_values_expr + + @util.ro_memoized_property + def effective_returning(self) -> Optional[Sequence[ColumnElement[Any]]]: + """The effective "returning" columns for INSERT, UPDATE or DELETE. + + This is either the so-called "implicit returning" columns which are + calculated by the compiler on the fly, or those present based on what's + present in ``self.statement._returning`` (expanded into individual + columns using the ``._all_selected_columns`` attribute) i.e. those set + explicitly using the :meth:`.UpdateBase.returning` method. + + .. versionadded:: 2.0 + + """ + if self.implicit_returning: + return self.implicit_returning + elif self.statement is not None and is_dml(self.statement): + return [ + c + for c in self.statement._all_selected_columns + if is_column_element(c) + ] + + else: + return None + + @property + def returning(self): + """backwards compatibility; returns the + effective_returning collection. + + """ + return self.effective_returning + + @property + def current_executable(self): + """Return the current 'executable' that is being compiled. + + This is currently the :class:`_sql.Select`, :class:`_sql.Insert`, + :class:`_sql.Update`, :class:`_sql.Delete`, + :class:`_sql.CompoundSelect` object that is being compiled. + Specifically it's assigned to the ``self.stack`` list of elements. + + When a statement like the above is being compiled, it normally + is also assigned to the ``.statement`` attribute of the + :class:`_sql.Compiler` object. However, all SQL constructs are + ultimately nestable, and this attribute should never be consulted + by a ``visit_`` method, as it is not guaranteed to be assigned + nor guaranteed to correspond to the current statement being compiled. + + .. versionadded:: 1.3.21 + + For compatibility with previous versions, use the following + recipe:: + + statement = getattr(self, "current_executable", False) + if statement is False: + statement = self.stack[-1]["selectable"] + + For versions 1.4 and above, ensure only .current_executable + is used; the format of "self.stack" may change. + + + """ + try: + return self.stack[-1]["selectable"] + except IndexError as ie: + raise IndexError("Compiler does not have a stack entry") from ie + + @property + def prefetch(self): + return list(self.insert_prefetch) + list(self.update_prefetch) + + @util.memoized_property + def _global_attributes(self) -> Dict[Any, Any]: + return {} + + @util.memoized_instancemethod + def _init_cte_state(self) -> MutableMapping[CTE, str]: + """Initialize collections related to CTEs only if + a CTE is located, to save on the overhead of + these collections otherwise. 
+ + """ + # collect CTEs to tack on top of a SELECT + # To store the query to print - Dict[cte, text_query] + ctes: MutableMapping[CTE, str] = util.OrderedDict() + self.ctes = ctes + + # Detect same CTE references - Dict[(level, name), cte] + # Level is required for supporting nesting + self.ctes_by_level_name = {} + + # To retrieve key/level in ctes_by_level_name - + # Dict[cte_reference, (level, cte_name, cte_opts)] + self.level_name_by_cte = {} + + self.ctes_recursive = False + + return ctes + + @contextlib.contextmanager + def _nested_result(self): + """special API to support the use case of 'nested result sets'""" + result_columns, ordered_columns = ( + self._result_columns, + self._ordered_columns, + ) + self._result_columns, self._ordered_columns = [], False + + try: + if self.stack: + entry = self.stack[-1] + entry["need_result_map_for_nested"] = True + else: + entry = None + yield self._result_columns, self._ordered_columns + finally: + if entry: + entry.pop("need_result_map_for_nested") + self._result_columns, self._ordered_columns = ( + result_columns, + ordered_columns, + ) + + def _process_positional(self): + assert not self.positiontup + assert self.state is CompilerState.STRING_APPLIED + assert not self._numeric_binds + + if self.dialect.paramstyle == "format": + placeholder = "%s" + else: + assert self.dialect.paramstyle == "qmark" + placeholder = "?" + + positions = [] + + def find_position(m: re.Match[str]) -> str: + normal_bind = m.group(1) + if normal_bind: + positions.append(normal_bind) + return placeholder + else: + # this a post-compile bind + positions.append(m.group(2)) + return m.group(0) + + self.string = re.sub( + self._positional_pattern, find_position, self.string + ) + + if self.escaped_bind_names: + reverse_escape = {v: k for k, v in self.escaped_bind_names.items()} + assert len(self.escaped_bind_names) == len(reverse_escape) + self.positiontup = [ + reverse_escape.get(name, name) for name in positions + ] + else: + self.positiontup = positions + + if self._insertmanyvalues: + positions = [] + + single_values_expr = re.sub( + self._positional_pattern, + find_position, + self._insertmanyvalues.single_values_expr, + ) + insert_crud_params = [ + ( + v[0], + v[1], + re.sub(self._positional_pattern, find_position, v[2]), + v[3], + ) + for v in self._insertmanyvalues.insert_crud_params + ] + + self._insertmanyvalues = self._insertmanyvalues._replace( + single_values_expr=single_values_expr, + insert_crud_params=insert_crud_params, + ) + + def _process_numeric(self): + assert self._numeric_binds + assert self.state is CompilerState.STRING_APPLIED + + num = 1 + param_pos: Dict[str, str] = {} + order: Iterable[str] + if self._insertmanyvalues and self._values_bindparam is not None: + # bindparams that are not in values are always placed first. + # this avoids the need of changing them when using executemany + # values () () + order = itertools.chain( + ( + name + for name in self.bind_names.values() + if name not in self._values_bindparam + ), + self.bind_names.values(), + ) + else: + order = self.bind_names.values() + + for bind_name in order: + if bind_name in param_pos: + continue + bind = self.binds[bind_name] + if ( + bind in self.post_compile_params + or bind in self.literal_execute_params + ): + # set to None to just mark the in positiontup, it will not + # be replaced below. 
+ param_pos[bind_name] = None # type: ignore + else: + ph = f"{self._numeric_binds_identifier_char}{num}" + num += 1 + param_pos[bind_name] = ph + + self.next_numeric_pos = num + + self.positiontup = list(param_pos) + if self.escaped_bind_names: + len_before = len(param_pos) + param_pos = { + self.escaped_bind_names.get(name, name): pos + for name, pos in param_pos.items() + } + assert len(param_pos) == len_before + + # Can't use format here since % chars are not escaped. + self.string = self._pyformat_pattern.sub( + lambda m: param_pos[m.group(1)], self.string + ) + + if self._insertmanyvalues: + single_values_expr = ( + # format is ok here since single_values_expr includes only + # place-holders + self._insertmanyvalues.single_values_expr + % param_pos + ) + insert_crud_params = [ + (v[0], v[1], "%s", v[3]) + for v in self._insertmanyvalues.insert_crud_params + ] + + self._insertmanyvalues = self._insertmanyvalues._replace( + # This has the numbers (:1, :2) + single_values_expr=single_values_expr, + # The single binds are instead %s so they can be formatted + insert_crud_params=insert_crud_params, + ) + + @util.memoized_property + def _bind_processors( + self, + ) -> MutableMapping[ + str, Union[_BindProcessorType[Any], Sequence[_BindProcessorType[Any]]] + ]: + # mypy is not able to see the two value types as the above Union, + # it just sees "object". don't know how to resolve + return { + key: value # type: ignore + for key, value in ( + ( + self.bind_names[bindparam], + ( + bindparam.type._cached_bind_processor(self.dialect) + if not bindparam.type._is_tuple_type + else tuple( + elem_type._cached_bind_processor(self.dialect) + for elem_type in cast( + TupleType, bindparam.type + ).types + ) + ), + ) + for bindparam in self.bind_names + ) + if value is not None + } + + def is_subquery(self): + return len(self.stack) > 1 + + @property + def sql_compiler(self): + return self + + def construct_expanded_state( + self, + params: Optional[_CoreSingleExecuteParams] = None, + escape_names: bool = True, + ) -> ExpandedState: + """Return a new :class:`.ExpandedState` for a given parameter set. + + For queries that use "expanding" or other late-rendered parameters, + this method will provide for both the finalized SQL string as well + as the parameters that would be used for a particular parameter set. + + .. versionadded:: 2.0.0rc1 + + """ + parameters = self.construct_params( + params, + escape_names=escape_names, + _no_postcompile=True, + ) + return self._process_parameters_for_postcompile( + parameters, + ) + + def construct_params( + self, + params: Optional[_CoreSingleExecuteParams] = None, + extracted_parameters: Optional[Sequence[BindParameter[Any]]] = None, + escape_names: bool = True, + _group_number: Optional[int] = None, + _check: bool = True, + _no_postcompile: bool = False, + ) -> _MutableCoreSingleExecuteParams: + """return a dictionary of bind parameter keys and values""" + + if self._render_postcompile and not _no_postcompile: + assert self._post_compile_expanded_state is not None + if not params: + return dict(self._post_compile_expanded_state.parameters) + else: + raise exc.InvalidRequestError( + "can't construct new parameters when render_postcompile " + "is used; the statement is hard-linked to the original " + "parameters. Use construct_expanded_state to generate a " + "new statement and parameters." 
+                )
+
+        has_escaped_names = escape_names and bool(self.escaped_bind_names)
+
+        if extracted_parameters:
+            # relate the bound parameters collected in the original cache key
+            # to those collected in the incoming cache key. They will not have
+            # matching names but they will line up positionally in the same
+            # way. The parameters present in self.bind_names may be clones of
+            # these original cache key params in the case of DML but the .key
+            # will be guaranteed to match.
+            if self.cache_key is None:
+                raise exc.CompileError(
+                    "This compiled object has no original cache key; "
+                    "can't pass extracted_parameters to construct_params"
+                )
+            else:
+                orig_extracted = self.cache_key[1]
+
+            ckbm_tuple = self._cache_key_bind_match
+            assert ckbm_tuple is not None
+            ckbm, _ = ckbm_tuple
+            resolved_extracted = {
+                bind: extracted
+                for b, extracted in zip(orig_extracted, extracted_parameters)
+                for bind in ckbm[b]
+            }
+        else:
+            resolved_extracted = None
+
+        if params:
+            pd = {}
+            for bindparam, name in self.bind_names.items():
+                escaped_name = (
+                    self.escaped_bind_names.get(name, name)
+                    if has_escaped_names
+                    else name
+                )
+
+                if bindparam.key in params:
+                    pd[escaped_name] = params[bindparam.key]
+                elif name in params:
+                    pd[escaped_name] = params[name]
+
+                elif _check and bindparam.required:
+                    if _group_number:
+                        raise exc.InvalidRequestError(
+                            "A value is required for bind parameter %r, "
+                            "in parameter group %d"
+                            % (bindparam.key, _group_number),
+                            code="cd3x",
+                        )
+                    else:
+                        raise exc.InvalidRequestError(
+                            "A value is required for bind parameter %r"
+                            % bindparam.key,
+                            code="cd3x",
+                        )
+                else:
+                    if resolved_extracted:
+                        value_param = resolved_extracted.get(
+                            bindparam, bindparam
+                        )
+                    else:
+                        value_param = bindparam
+
+                    if bindparam.callable:
+                        pd[escaped_name] = value_param.effective_value
+                    else:
+                        pd[escaped_name] = value_param.value
+            return pd
+        else:
+            pd = {}
+            for bindparam, name in self.bind_names.items():
+                escaped_name = (
+                    self.escaped_bind_names.get(name, name)
+                    if has_escaped_names
+                    else name
+                )
+
+                if _check and bindparam.required:
+                    if _group_number:
+                        raise exc.InvalidRequestError(
+                            "A value is required for bind parameter %r, "
+                            "in parameter group %d"
+                            % (bindparam.key, _group_number),
+                            code="cd3x",
+                        )
+                    else:
+                        raise exc.InvalidRequestError(
+                            "A value is required for bind parameter %r"
+                            % bindparam.key,
+                            code="cd3x",
+                        )
+
+                if resolved_extracted:
+                    value_param = resolved_extracted.get(bindparam, bindparam)
+                else:
+                    value_param = bindparam
+
+                if bindparam.callable:
+                    pd[escaped_name] = value_param.effective_value
+                else:
+                    pd[escaped_name] = value_param.value
+
+            return pd
+
+    @util.memoized_instancemethod
+    def _get_set_input_sizes_lookup(self):
+        dialect = self.dialect
+
+        include_types = dialect.include_set_input_sizes
+        exclude_types = dialect.exclude_set_input_sizes
+
+        dbapi = dialect.dbapi
+
+        def lookup_type(typ):
+            dbtype = typ._unwrapped_dialect_impl(dialect).get_dbapi_type(dbapi)
+
+            if (
+                dbtype is not None
+                and (exclude_types is None or dbtype not in exclude_types)
+                and (include_types is None or dbtype in include_types)
+            ):
+                return dbtype
+            else:
+                return None
+
+        inputsizes = {}
+
+        literal_execute_params = self.literal_execute_params
+
+        for bindparam in self.bind_names:
+            if bindparam in literal_execute_params:
+                continue
+
+            if bindparam.type._is_tuple_type:
+                inputsizes[bindparam] = [
+                    lookup_type(typ)
+                    for typ in cast(TupleType, bindparam.type).types
+                ]
+            else:
+                inputsizes[bindparam] = lookup_type(bindparam.type)
+
+        return
inputsizes
+
+    @property
+    def params(self):
+        """Return the bind param dictionary embedded into this
+        compiled object, for those values that are present.
+
+        .. seealso::
+
+            :ref:`faq_sql_expression_string` - includes a usage example for
+            debugging use cases.
+
+        """
+        return self.construct_params(_check=False)
+
+    def _process_parameters_for_postcompile(
+        self,
+        parameters: _MutableCoreSingleExecuteParams,
+        _populate_self: bool = False,
+    ) -> ExpandedState:
+        """handle special post-compile parameters.
+
+        These include:
+
+        * "expanding" parameters - typically IN tuples that are rendered
+          on a per-parameter basis for an otherwise fixed SQL statement string.
+
+        * literal_binds compiled with the literal_execute flag. Used for
+          things like SQL Server "TOP N" where the driver does not accommodate
+          N as a bound parameter.
+
+        """
+
+        expanded_parameters = {}
+        new_positiontup: Optional[List[str]]
+
+        pre_expanded_string = self._pre_expanded_string
+        if pre_expanded_string is None:
+            pre_expanded_string = self.string
+
+        if self.positional:
+            new_positiontup = []
+
+            pre_expanded_positiontup = self._pre_expanded_positiontup
+            if pre_expanded_positiontup is None:
+                pre_expanded_positiontup = self.positiontup
+
+        else:
+            new_positiontup = pre_expanded_positiontup = None
+
+        processors = self._bind_processors
+        single_processors = cast(
+            "Mapping[str, _BindProcessorType[Any]]", processors
+        )
+        tuple_processors = cast(
+            "Mapping[str, Sequence[_BindProcessorType[Any]]]", processors
+        )
+
+        new_processors: Dict[str, _BindProcessorType[Any]] = {}
+
+        replacement_expressions: Dict[str, Any] = {}
+        to_update_sets: Dict[str, Any] = {}
+
+        # notes:
+        # *unescaped* parameter names in:
+        # self.bind_names, self.binds, self._bind_processors, self.positiontup
+        #
+        # *escaped* parameter names in:
+        # construct_params(), replacement_expressions
+
+        numeric_positiontup: Optional[List[str]] = None
+
+        if self.positional and pre_expanded_positiontup is not None:
+            names: Iterable[str] = pre_expanded_positiontup
+            if self._numeric_binds:
+                numeric_positiontup = []
+        else:
+            names = self.bind_names.values()
+
+        ebn = self.escaped_bind_names
+        for name in names:
+            escaped_name = ebn.get(name, name) if ebn else name
+            parameter = self.binds[name]
+
+            if parameter in self.literal_execute_params:
+                if escaped_name not in replacement_expressions:
+                    replacement_expressions[escaped_name] = (
+                        self.render_literal_bindparam(
+                            parameter,
+                            render_literal_value=parameters.pop(escaped_name),
+                        )
+                    )
+                continue
+
+            if parameter in self.post_compile_params:
+                if escaped_name in replacement_expressions:
+                    to_update = to_update_sets[escaped_name]
+                    values = None
+                else:
+                    # we are removing the parameter from parameters
+                    # because it is a list value, which is not expected by
+                    # TypeEngine objects that would otherwise be asked to
+                    # process it. the single name is being replaced with
+                    # individual numbered parameters for each value in the
+                    # param.
+                    #
+                    # note we are also inserting *escaped* parameter names
+                    # into the given dictionary. default dialect will
+                    # use these param names directly as they will not be
+                    # in the escaped_bind_names dictionary.
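+                    # (illustrative note, added: a parameter "ids" bound to
+                    # [1, 2, 3] is popped here and re-added by the
+                    # _literal_execute_expanding_parameter() call below as
+                    # the individual entries ids_1, ids_2 and ids_3.)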
+ values = parameters.pop(name) + + leep_res = self._literal_execute_expanding_parameter( + escaped_name, parameter, values + ) + (to_update, replacement_expr) = leep_res + + to_update_sets[escaped_name] = to_update + replacement_expressions[escaped_name] = replacement_expr + + if not parameter.literal_execute: + parameters.update(to_update) + if parameter.type._is_tuple_type: + assert values is not None + new_processors.update( + ( + "%s_%s_%s" % (name, i, j), + tuple_processors[name][j - 1], + ) + for i, tuple_element in enumerate(values, 1) + for j, _ in enumerate(tuple_element, 1) + if name in tuple_processors + and tuple_processors[name][j - 1] is not None + ) + else: + new_processors.update( + (key, single_processors[name]) + for key, _ in to_update + if name in single_processors + ) + if numeric_positiontup is not None: + numeric_positiontup.extend( + name for name, _ in to_update + ) + elif new_positiontup is not None: + # to_update has escaped names, but that's ok since + # these are new names, that aren't in the + # escaped_bind_names dict. + new_positiontup.extend(name for name, _ in to_update) + expanded_parameters[name] = [ + expand_key for expand_key, _ in to_update + ] + elif new_positiontup is not None: + new_positiontup.append(name) + + def process_expanding(m): + key = m.group(1) + expr = replacement_expressions[key] + + # if POSTCOMPILE included a bind_expression, render that + # around each element + if m.group(2): + tok = m.group(2).split("~~") + be_left, be_right = tok[1], tok[3] + expr = ", ".join( + "%s%s%s" % (be_left, exp, be_right) + for exp in expr.split(", ") + ) + return expr + + statement = re.sub( + self._post_compile_pattern, process_expanding, pre_expanded_string + ) + + if numeric_positiontup is not None: + assert new_positiontup is not None + param_pos = { + key: f"{self._numeric_binds_identifier_char}{num}" + for num, key in enumerate( + numeric_positiontup, self.next_numeric_pos + ) + } + # Can't use format here since % chars are not escaped. + statement = self._pyformat_pattern.sub( + lambda m: param_pos[m.group(1)], statement + ) + new_positiontup.extend(numeric_positiontup) + + expanded_state = ExpandedState( + statement, + parameters, + new_processors, + new_positiontup, + expanded_parameters, + ) + + if _populate_self: + # this is for the "render_postcompile" flag, which is not + # otherwise used internally and is for end-user debugging and + # special use cases. 
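+            # (added note: once this block runs, self.string holds the fully
+            # expanded statement, and construct_params() will return the
+            # parameter dictionary captured in _post_compile_expanded_state
+            # rather than re-deriving it.)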
+ self._pre_expanded_string = pre_expanded_string + self._pre_expanded_positiontup = pre_expanded_positiontup + self.string = expanded_state.statement + self.positiontup = ( + list(expanded_state.positiontup or ()) + if self.positional + else None + ) + self._post_compile_expanded_state = expanded_state + + return expanded_state + + @util.preload_module("sqlalchemy.engine.cursor") + def _create_result_map(self): + """utility method used for unit tests only.""" + cursor = util.preloaded.engine_cursor + return cursor.CursorResultMetaData._create_description_match_map( + self._result_columns + ) + + # assigned by crud.py for insert/update statements + _get_bind_name_for_col: _BindNameForColProtocol + + @util.memoized_property + def _within_exec_param_key_getter(self) -> Callable[[Any], str]: + getter = self._get_bind_name_for_col + return getter + + @util.memoized_property + @util.preload_module("sqlalchemy.engine.result") + def _inserted_primary_key_from_lastrowid_getter(self): + result = util.preloaded.engine_result + + param_key_getter = self._within_exec_param_key_getter + + assert self.compile_state is not None + statement = self.compile_state.statement + + if TYPE_CHECKING: + assert isinstance(statement, Insert) + + table = statement.table + + getters = [ + (operator.methodcaller("get", param_key_getter(col), None), col) + for col in table.primary_key + ] + + autoinc_getter = None + autoinc_col = table._autoincrement_column + if autoinc_col is not None: + # apply type post processors to the lastrowid + lastrowid_processor = autoinc_col.type._cached_result_processor( + self.dialect, None + ) + autoinc_key = param_key_getter(autoinc_col) + + # if a bind value is present for the autoincrement column + # in the parameters, we need to do the logic dictated by + # #7998; honor a non-None user-passed parameter over lastrowid. + # previously in the 1.4 series we weren't fetching lastrowid + # at all if the key were present in the parameters + if autoinc_key in self.binds: + + def _autoinc_getter(lastrowid, parameters): + param_value = parameters.get(autoinc_key, lastrowid) + if param_value is not None: + # they supplied non-None parameter, use that. + # SQLite at least is observed to return the wrong + # cursor.lastrowid for INSERT..ON CONFLICT so it + # can't be used in all cases + return param_value + else: + # use lastrowid + return lastrowid + + # work around mypy https://github.com/python/mypy/issues/14027 + autoinc_getter = _autoinc_getter + + else: + lastrowid_processor = None + + row_fn = result.result_tuple([col.key for col in table.primary_key]) + + def get(lastrowid, parameters): + """given cursor.lastrowid value and the parameters used for INSERT, + return a "row" that represents the primary key, either by + using the "lastrowid" or by extracting values from the parameters + that were sent along with the INSERT. 
+
+            """
+            if lastrowid_processor is not None:
+                lastrowid = lastrowid_processor(lastrowid)
+
+            if lastrowid is None:
+                return row_fn(getter(parameters) for getter, col in getters)
+            else:
+                return row_fn(
+                    (
+                        (
+                            autoinc_getter(lastrowid, parameters)
+                            if autoinc_getter is not None
+                            else lastrowid
+                        )
+                        if col is autoinc_col
+                        else getter(parameters)
+                    )
+                    for getter, col in getters
+                )
+
+        return get
+
+    @util.memoized_property
+    @util.preload_module("sqlalchemy.engine.result")
+    def _inserted_primary_key_from_returning_getter(self):
+        if typing.TYPE_CHECKING:
+            from ..engine import result
+        else:
+            result = util.preloaded.engine_result
+
+        assert self.compile_state is not None
+        statement = self.compile_state.statement
+
+        if TYPE_CHECKING:
+            assert isinstance(statement, Insert)
+
+        param_key_getter = self._within_exec_param_key_getter
+        table = statement.table
+
+        returning = self.implicit_returning
+        assert returning is not None
+        ret = {col: idx for idx, col in enumerate(returning)}
+
+        getters = cast(
+            "List[Tuple[Callable[[Any], Any], bool]]",
+            [
+                (
+                    (operator.itemgetter(ret[col]), True)
+                    if col in ret
+                    else (
+                        operator.methodcaller(
+                            "get", param_key_getter(col), None
+                        ),
+                        False,
+                    )
+                )
+                for col in table.primary_key
+            ],
+        )
+
+        row_fn = result.result_tuple([col.key for col in table.primary_key])
+
+        def get(row, parameters):
+            return row_fn(
+                getter(row) if use_row else getter(parameters)
+                for getter, use_row in getters
+            )
+
+        return get
+
+    def default_from(self):
+        """Called when a SELECT statement has no froms, and no FROM clause is
+        to be appended.
+
+        Gives Oracle a chance to tack on a ``FROM DUAL`` to the string output.
+
+        """
+        return ""
+
+    def visit_override_binds(self, override_binds, **kw):
+        """SQL compile the nested element of an _OverrideBinds with
+        bindparams swapped out.
+
+        The _OverrideBinds is not normally expected to be compiled; it
+        is meant to be used when an already cached statement is to be used,
+        the compilation was already performed, and only the bound params should
+        be swapped in at execution time.
+
+        However, there are test cases that exercise this object, and
+        additionally the ORM subquery loader is known to feed in expressions
+        which include this construct into new queries (discovered in #11173),
+        so it has to do the right thing at compile time as well.
+
+        """
+
+        # get SQL text first
+        sqltext = override_binds.element._compiler_dispatch(self, **kw)
+
+        # for a test compile that is not for caching, change binds after the
+        # fact. note that we don't try to
+        # swap the bindparam as we compile, because our element may be
+        # elsewhere in the statement already (e.g. a subquery or perhaps a
+        # CTE) and was already visited / compiled. See
+        # test_relationship_criteria.py ->
+        # test_selectinload_local_criteria_subquery
+        for k in override_binds.translate:
+            if k not in self.binds:
+                continue
+            bp = self.binds[k]
+
+            # so this would work, just change the value of bp in place.
+            # but we don't want to mutate things outside.
+ # bp.value = override_binds.translate[bp.key] + # continue + + # instead, need to replace bp with new_bp or otherwise accommodate + # in all internal collections + new_bp = bp._with_value( + override_binds.translate[bp.key], + maintain_key=True, + required=False, + ) + + name = self.bind_names[bp] + self.binds[k] = self.binds[name] = new_bp + self.bind_names[new_bp] = name + self.bind_names.pop(bp, None) + + if bp in self.post_compile_params: + self.post_compile_params |= {new_bp} + if bp in self.literal_execute_params: + self.literal_execute_params |= {new_bp} + + ckbm_tuple = self._cache_key_bind_match + if ckbm_tuple: + ckbm, cksm = ckbm_tuple + for bp in bp._cloned_set: + if bp.key in cksm: + cb = cksm[bp.key] + ckbm[cb].append(new_bp) + + return sqltext + + def visit_grouping(self, grouping, asfrom=False, **kwargs): + return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")" + + def visit_select_statement_grouping(self, grouping, **kwargs): + return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")" + + def visit_label_reference( + self, element, within_columns_clause=False, **kwargs + ): + if self.stack and self.dialect.supports_simple_order_by_label: + try: + compile_state = cast( + "Union[SelectState, CompoundSelectState]", + self.stack[-1]["compile_state"], + ) + except KeyError as ke: + raise exc.CompileError( + "Can't resolve label reference for ORDER BY / " + "GROUP BY / DISTINCT etc." + ) from ke + + ( + with_cols, + only_froms, + only_cols, + ) = compile_state._label_resolve_dict + if within_columns_clause: + resolve_dict = only_froms + else: + resolve_dict = only_cols + + # this can be None in the case that a _label_reference() + # were subject to a replacement operation, in which case + # the replacement of the Label element may have changed + # to something else like a ColumnClause expression. + order_by_elem = element.element._order_by_label_element + + if ( + order_by_elem is not None + and order_by_elem.name in resolve_dict + and order_by_elem.shares_lineage( + resolve_dict[order_by_elem.name] + ) + ): + kwargs["render_label_as_label"] = ( + element.element._order_by_label_element + ) + return self.process( + element.element, + within_columns_clause=within_columns_clause, + **kwargs, + ) + + def visit_textual_label_reference( + self, element, within_columns_clause=False, **kwargs + ): + if not self.stack: + # compiling the element outside of the context of a SELECT + return self.process(element._text_clause) + + try: + compile_state = cast( + "Union[SelectState, CompoundSelectState]", + self.stack[-1]["compile_state"], + ) + except KeyError as ke: + coercions._no_text_coercion( + element.element, + extra=( + "Can't resolve label reference for ORDER BY / " + "GROUP BY / DISTINCT etc." + ), + exc_cls=exc.CompileError, + err=ke, + ) + + with_cols, only_froms, only_cols = compile_state._label_resolve_dict + try: + if within_columns_clause: + col = only_froms[element.element] + else: + col = with_cols[element.element] + except KeyError as err: + coercions._no_text_coercion( + element.element, + extra=( + "Can't resolve label reference for ORDER BY / " + "GROUP BY / DISTINCT etc." 
+ ), + exc_cls=exc.CompileError, + err=err, + ) + else: + kwargs["render_label_as_label"] = col + return self.process( + col, within_columns_clause=within_columns_clause, **kwargs + ) + + def visit_label( + self, + label, + add_to_result_map=None, + within_label_clause=False, + within_columns_clause=False, + render_label_as_label=None, + result_map_targets=(), + **kw, + ): + # only render labels within the columns clause + # or ORDER BY clause of a select. dialect-specific compilers + # can modify this behavior. + render_label_with_as = ( + within_columns_clause and not within_label_clause + ) + render_label_only = render_label_as_label is label + + if render_label_only or render_label_with_as: + if isinstance(label.name, elements._truncated_label): + labelname = self._truncated_identifier("colident", label.name) + else: + labelname = label.name + + if render_label_with_as: + if add_to_result_map is not None: + add_to_result_map( + labelname, + label.name, + (label, labelname) + label._alt_names + result_map_targets, + label.type, + ) + return ( + label.element._compiler_dispatch( + self, + within_columns_clause=True, + within_label_clause=True, + **kw, + ) + + OPERATORS[operators.as_] + + self.preparer.format_label(label, labelname) + ) + elif render_label_only: + return self.preparer.format_label(label, labelname) + else: + return label.element._compiler_dispatch( + self, within_columns_clause=False, **kw + ) + + def _fallback_column_name(self, column): + raise exc.CompileError( + "Cannot compile Column object until its 'name' is assigned." + ) + + def visit_lambda_element(self, element, **kw): + sql_element = element._resolved + return self.process(sql_element, **kw) + + def visit_column( + self, + column: ColumnClause[Any], + add_to_result_map: Optional[_ResultMapAppender] = None, + include_table: bool = True, + result_map_targets: Tuple[Any, ...] = (), + ambiguous_table_name_map: Optional[_AmbiguousTableNameMap] = None, + **kwargs: Any, + ) -> str: + name = orig_name = column.name + if name is None: + name = self._fallback_column_name(column) + + is_literal = column.is_literal + if not is_literal and isinstance(name, elements._truncated_label): + name = self._truncated_identifier("colident", name) + + if add_to_result_map is not None: + targets = (column, name, column.key) + result_map_targets + if column._tq_label: + targets += (column._tq_label,) + + add_to_result_map(name, orig_name, targets, column.type) + + if is_literal: + # note we are not currently accommodating for + # literal_column(quoted_name('ident', True)) here + name = self.escape_literal_column(name) + else: + name = self.preparer.quote(name) + table = column.table + if table is None or not include_table or not table.named_with_column: + return name + else: + effective_schema = self.preparer.schema_for_object(table) + + if effective_schema: + schema_prefix = ( + self.preparer.quote_schema(effective_schema) + "." + ) + else: + schema_prefix = "" + + if TYPE_CHECKING: + assert isinstance(table, NamedFromClause) + tablename = table.name + + if ( + not effective_schema + and ambiguous_table_name_map + and tablename in ambiguous_table_name_map + ): + tablename = ambiguous_table_name_map[tablename] + + if isinstance(tablename, elements._truncated_label): + tablename = self._truncated_identifier("alias", tablename) + + return schema_prefix + self.preparer.quote(tablename) + "." 
+ name + + def visit_collation(self, element, **kw): + return self.preparer.format_collation(element.collation) + + def visit_fromclause(self, fromclause, **kwargs): + return fromclause.name + + def visit_index(self, index, **kwargs): + return index.name + + def visit_typeclause(self, typeclause, **kw): + kw["type_expression"] = typeclause + kw["identifier_preparer"] = self.preparer + return self.dialect.type_compiler_instance.process( + typeclause.type, **kw + ) + + def post_process_text(self, text): + if self.preparer._double_percents: + text = text.replace("%", "%%") + return text + + def escape_literal_column(self, text): + if self.preparer._double_percents: + text = text.replace("%", "%%") + return text + + def visit_textclause(self, textclause, add_to_result_map=None, **kw): + def do_bindparam(m): + name = m.group(1) + if name in textclause._bindparams: + return self.process(textclause._bindparams[name], **kw) + else: + return self.bindparam_string(name, **kw) + + if not self.stack: + self.isplaintext = True + + if add_to_result_map: + # text() object is present in the columns clause of a + # select(). Add a no-name entry to the result map so that + # row[text()] produces a result + add_to_result_map(None, None, (textclause,), sqltypes.NULLTYPE) + + # un-escape any \:params + return BIND_PARAMS_ESC.sub( + lambda m: m.group(1), + BIND_PARAMS.sub( + do_bindparam, self.post_process_text(textclause.text) + ), + ) + + def visit_textual_select( + self, taf, compound_index=None, asfrom=False, **kw + ): + toplevel = not self.stack + entry = self._default_stack_entry if toplevel else self.stack[-1] + + new_entry: _CompilerStackEntry = { + "correlate_froms": set(), + "asfrom_froms": set(), + "selectable": taf, + } + self.stack.append(new_entry) + + if taf._independent_ctes: + self._dispatch_independent_ctes(taf, kw) + + populate_result_map = ( + toplevel + or ( + compound_index == 0 + and entry.get("need_result_map_for_compound", False) + ) + or entry.get("need_result_map_for_nested", False) + ) + + if populate_result_map: + self._ordered_columns = self._textual_ordered_columns = ( + taf.positional + ) + + # enable looser result column matching when the SQL text links to + # Column objects by name only + self._loose_column_name_matching = not taf.positional and bool( + taf.column_args + ) + + for c in taf.column_args: + self.process( + c, + within_columns_clause=True, + add_to_result_map=self._add_to_result_map, + ) + + text = self.process(taf.element, **kw) + if self.ctes: + nesting_level = len(self.stack) if not toplevel else None + text = self._render_cte_clause(nesting_level=nesting_level) + text + + self.stack.pop(-1) + + return text + + def visit_null(self, expr, **kw): + return "NULL" + + def visit_true(self, expr, **kw): + if self.dialect.supports_native_boolean: + return "true" + else: + return "1" + + def visit_false(self, expr, **kw): + if self.dialect.supports_native_boolean: + return "false" + else: + return "0" + + def _generate_delimited_list(self, elements, separator, **kw): + return separator.join( + s + for s in (c._compiler_dispatch(self, **kw) for c in elements) + if s + ) + + def _generate_delimited_and_list(self, clauses, **kw): + lcc, clauses = elements.BooleanClauseList._process_clauses_for_boolean( + operators.and_, + elements.True_._singleton, + elements.False_._singleton, + clauses, + ) + if lcc == 1: + return clauses[0]._compiler_dispatch(self, **kw) + else: + separator = OPERATORS[operators.and_] + return separator.join( + s + for s in (c._compiler_dispatch(self, 
**kw) for c in clauses) + if s + ) + + def visit_tuple(self, clauselist, **kw): + return "(%s)" % self.visit_clauselist(clauselist, **kw) + + def visit_clauselist(self, clauselist, **kw): + sep = clauselist.operator + if sep is None: + sep = " " + else: + sep = OPERATORS[clauselist.operator] + + return self._generate_delimited_list(clauselist.clauses, sep, **kw) + + def visit_expression_clauselist(self, clauselist, **kw): + operator_ = clauselist.operator + + disp = self._get_operator_dispatch( + operator_, "expression_clauselist", None + ) + if disp: + return disp(clauselist, operator_, **kw) + + try: + opstring = OPERATORS[operator_] + except KeyError as err: + raise exc.UnsupportedCompilationError(self, operator_) from err + else: + kw["_in_operator_expression"] = True + return self._generate_delimited_list( + clauselist.clauses, opstring, **kw + ) + + def visit_case(self, clause, **kwargs): + x = "CASE " + if clause.value is not None: + x += clause.value._compiler_dispatch(self, **kwargs) + " " + for cond, result in clause.whens: + x += ( + "WHEN " + + cond._compiler_dispatch(self, **kwargs) + + " THEN " + + result._compiler_dispatch(self, **kwargs) + + " " + ) + if clause.else_ is not None: + x += ( + "ELSE " + clause.else_._compiler_dispatch(self, **kwargs) + " " + ) + x += "END" + return x + + def visit_type_coerce(self, type_coerce, **kw): + return type_coerce.typed_expression._compiler_dispatch(self, **kw) + + def visit_cast(self, cast, **kwargs): + type_clause = cast.typeclause._compiler_dispatch(self, **kwargs) + match = re.match("(.*)( COLLATE .*)", type_clause) + return "CAST(%s AS %s)%s" % ( + cast.clause._compiler_dispatch(self, **kwargs), + match.group(1) if match else type_clause, + match.group(2) if match else "", + ) + + def _format_frame_clause(self, range_, **kw): + return "%s AND %s" % ( + ( + "UNBOUNDED PRECEDING" + if range_[0] is elements.RANGE_UNBOUNDED + else ( + "CURRENT ROW" + if range_[0] is elements.RANGE_CURRENT + else ( + "%s PRECEDING" + % ( + self.process( + elements.literal(abs(range_[0])), **kw + ), + ) + if range_[0] < 0 + else "%s FOLLOWING" + % (self.process(elements.literal(range_[0]), **kw),) + ) + ) + ), + ( + "UNBOUNDED FOLLOWING" + if range_[1] is elements.RANGE_UNBOUNDED + else ( + "CURRENT ROW" + if range_[1] is elements.RANGE_CURRENT + else ( + "%s PRECEDING" + % ( + self.process( + elements.literal(abs(range_[1])), **kw + ), + ) + if range_[1] < 0 + else "%s FOLLOWING" + % (self.process(elements.literal(range_[1]), **kw),) + ) + ) + ), + ) + + def visit_over(self, over, **kwargs): + text = over.element._compiler_dispatch(self, **kwargs) + if over.range_: + range_ = "RANGE BETWEEN %s" % self._format_frame_clause( + over.range_, **kwargs + ) + elif over.rows: + range_ = "ROWS BETWEEN %s" % self._format_frame_clause( + over.rows, **kwargs + ) + else: + range_ = None + + return "%s OVER (%s)" % ( + text, + " ".join( + [ + "%s BY %s" + % (word, clause._compiler_dispatch(self, **kwargs)) + for word, clause in ( + ("PARTITION", over.partition_by), + ("ORDER", over.order_by), + ) + if clause is not None and len(clause) + ] + + ([range_] if range_ else []) + ), + ) + + def visit_withingroup(self, withingroup, **kwargs): + return "%s WITHIN GROUP (ORDER BY %s)" % ( + withingroup.element._compiler_dispatch(self, **kwargs), + withingroup.order_by._compiler_dispatch(self, **kwargs), + ) + + def visit_funcfilter(self, funcfilter, **kwargs): + return "%s FILTER (WHERE %s)" % ( + funcfilter.func._compiler_dispatch(self, **kwargs), + 
funcfilter.criterion._compiler_dispatch(self, **kwargs), + ) + + def visit_extract(self, extract, **kwargs): + field = self.extract_map.get(extract.field, extract.field) + return "EXTRACT(%s FROM %s)" % ( + field, + extract.expr._compiler_dispatch(self, **kwargs), + ) + + def visit_scalar_function_column(self, element, **kw): + compiled_fn = self.visit_function(element.fn, **kw) + compiled_col = self.visit_column(element, **kw) + return "(%s).%s" % (compiled_fn, compiled_col) + + def visit_function( + self, + func: Function[Any], + add_to_result_map: Optional[_ResultMapAppender] = None, + **kwargs: Any, + ) -> str: + if add_to_result_map is not None: + add_to_result_map(func.name, func.name, (func.name,), func.type) + + disp = getattr(self, "visit_%s_func" % func.name.lower(), None) + + text: str + + if disp: + text = disp(func, **kwargs) + else: + name = FUNCTIONS.get(func._deannotate().__class__, None) + if name: + if func._has_args: + name += "%(expr)s" + else: + name = func.name + name = ( + self.preparer.quote(name) + if self.preparer._requires_quotes_illegal_chars(name) + or isinstance(name, elements.quoted_name) + else name + ) + name = name + "%(expr)s" + text = ".".join( + [ + ( + self.preparer.quote(tok) + if self.preparer._requires_quotes_illegal_chars(tok) + or isinstance(name, elements.quoted_name) + else tok + ) + for tok in func.packagenames + ] + + [name] + ) % {"expr": self.function_argspec(func, **kwargs)} + + if func._with_ordinality: + text += " WITH ORDINALITY" + return text + + def visit_next_value_func(self, next_value, **kw): + return self.visit_sequence(next_value.sequence) + + def visit_sequence(self, sequence, **kw): + raise NotImplementedError( + "Dialect '%s' does not support sequence increments." + % self.dialect.name + ) + + def function_argspec(self, func, **kwargs): + return func.clause_expr._compiler_dispatch(self, **kwargs) + + def visit_compound_select( + self, cs, asfrom=False, compound_index=None, **kwargs + ): + toplevel = not self.stack + + compile_state = cs._compile_state_factory(cs, self, **kwargs) + + if toplevel and not self.compile_state: + self.compile_state = compile_state + + compound_stmt = compile_state.statement + + entry = self._default_stack_entry if toplevel else self.stack[-1] + need_result_map = toplevel or ( + not compound_index + and entry.get("need_result_map_for_compound", False) + ) + + # indicates there is already a CompoundSelect in play + if compound_index == 0: + entry["select_0"] = cs + + self.stack.append( + { + "correlate_froms": entry["correlate_froms"], + "asfrom_froms": entry["asfrom_froms"], + "selectable": cs, + "compile_state": compile_state, + "need_result_map_for_compound": need_result_map, + } + ) + + if compound_stmt._independent_ctes: + self._dispatch_independent_ctes(compound_stmt, kwargs) + + keyword = self.compound_keywords[cs.keyword] + + text = (" " + keyword + " ").join( + ( + c._compiler_dispatch( + self, asfrom=asfrom, compound_index=i, **kwargs + ) + for i, c in enumerate(cs.selects) + ) + ) + + kwargs["include_table"] = False + text += self.group_by_clause(cs, **dict(asfrom=asfrom, **kwargs)) + text += self.order_by_clause(cs, **kwargs) + if cs._has_row_limiting_clause: + text += self._row_limit_clause(cs, **kwargs) + + if self.ctes: + nesting_level = len(self.stack) if not toplevel else None + text = ( + self._render_cte_clause( + nesting_level=nesting_level, + include_following_stack=True, + ) + + text + ) + + self.stack.pop(-1) + return text + + def _row_limit_clause(self, cs, **kwargs): + if 
cs._fetch_clause is not None: + return self.fetch_clause(cs, **kwargs) + else: + return self.limit_clause(cs, **kwargs) + + def _get_operator_dispatch(self, operator_, qualifier1, qualifier2): + attrname = "visit_%s_%s%s" % ( + operator_.__name__, + qualifier1, + "_" + qualifier2 if qualifier2 else "", + ) + return getattr(self, attrname, None) + + def visit_unary( + self, unary, add_to_result_map=None, result_map_targets=(), **kw + ): + if add_to_result_map is not None: + result_map_targets += (unary,) + kw["add_to_result_map"] = add_to_result_map + kw["result_map_targets"] = result_map_targets + + if unary.operator: + if unary.modifier: + raise exc.CompileError( + "Unary expression does not support operator " + "and modifier simultaneously" + ) + disp = self._get_operator_dispatch( + unary.operator, "unary", "operator" + ) + if disp: + return disp(unary, unary.operator, **kw) + else: + return self._generate_generic_unary_operator( + unary, OPERATORS[unary.operator], **kw + ) + elif unary.modifier: + disp = self._get_operator_dispatch( + unary.modifier, "unary", "modifier" + ) + if disp: + return disp(unary, unary.modifier, **kw) + else: + return self._generate_generic_unary_modifier( + unary, OPERATORS[unary.modifier], **kw + ) + else: + raise exc.CompileError( + "Unary expression has no operator or modifier" + ) + + def visit_truediv_binary(self, binary, operator, **kw): + if self.dialect.div_is_floordiv: + return ( + self.process(binary.left, **kw) + + " / " + # TODO: would need a fast cast again here, + # unless we want to use an implicit cast like "+ 0.0" + + self.process( + elements.Cast( + binary.right, + ( + binary.right.type + if binary.right.type._type_affinity + is sqltypes.Numeric + else sqltypes.Numeric() + ), + ), + **kw, + ) + ) + else: + return ( + self.process(binary.left, **kw) + + " / " + + self.process(binary.right, **kw) + ) + + def visit_floordiv_binary(self, binary, operator, **kw): + if ( + self.dialect.div_is_floordiv + and binary.right.type._type_affinity is sqltypes.Integer + ): + return ( + self.process(binary.left, **kw) + + " / " + + self.process(binary.right, **kw) + ) + else: + return "FLOOR(%s)" % ( + self.process(binary.left, **kw) + + " / " + + self.process(binary.right, **kw) + ) + + def visit_is_true_unary_operator(self, element, operator, **kw): + if ( + element._is_implicitly_boolean + or self.dialect.supports_native_boolean + ): + return self.process(element.element, **kw) + else: + return "%s = 1" % self.process(element.element, **kw) + + def visit_is_false_unary_operator(self, element, operator, **kw): + if ( + element._is_implicitly_boolean + or self.dialect.supports_native_boolean + ): + return "NOT %s" % self.process(element.element, **kw) + else: + return "%s = 0" % self.process(element.element, **kw) + + def visit_not_match_op_binary(self, binary, operator, **kw): + return "NOT %s" % self.visit_binary( + binary, override_operator=operators.match_op + ) + + def visit_not_in_op_binary(self, binary, operator, **kw): + # The brackets are required in the NOT IN operation because the empty + # case is handled using the form "(col NOT IN (null) OR 1 = 1)". + # The presence of the OR makes the brackets required. 
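+        # (illustrative, assuming an empty expanding parameter: the
+        # visit_empty_set_op_expr() hook emits "NULL) OR (1 = 1", which
+        # combines with the parenthesis added here to produce the
+        # "(col NOT IN (NULL) OR (1 = 1))" form described above.)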
+ return "(%s)" % self._generate_generic_binary( + binary, OPERATORS[operator], **kw + ) + + def visit_empty_set_op_expr(self, type_, expand_op, **kw): + if expand_op is operators.not_in_op: + if len(type_) > 1: + return "(%s)) OR (1 = 1" % ( + ", ".join("NULL" for element in type_) + ) + else: + return "NULL) OR (1 = 1" + elif expand_op is operators.in_op: + if len(type_) > 1: + return "(%s)) AND (1 != 1" % ( + ", ".join("NULL" for element in type_) + ) + else: + return "NULL) AND (1 != 1" + else: + return self.visit_empty_set_expr(type_) + + def visit_empty_set_expr(self, element_types, **kw): + raise NotImplementedError( + "Dialect '%s' does not support empty set expression." + % self.dialect.name + ) + + def _literal_execute_expanding_parameter_literal_binds( + self, parameter, values, bind_expression_template=None + ): + typ_dialect_impl = parameter.type._unwrapped_dialect_impl(self.dialect) + + if not values: + # empty IN expression. note we don't need to use + # bind_expression_template here because there are no + # expressions to render. + + if typ_dialect_impl._is_tuple_type: + replacement_expression = ( + "VALUES " if self.dialect.tuple_in_values else "" + ) + self.visit_empty_set_op_expr( + parameter.type.types, parameter.expand_op + ) + + else: + replacement_expression = self.visit_empty_set_op_expr( + [parameter.type], parameter.expand_op + ) + + elif typ_dialect_impl._is_tuple_type or ( + typ_dialect_impl._isnull + and isinstance(values[0], collections_abc.Sequence) + and not isinstance(values[0], (str, bytes)) + ): + if typ_dialect_impl._has_bind_expression: + raise NotImplementedError( + "bind_expression() on TupleType not supported with " + "literal_binds" + ) + + replacement_expression = ( + "VALUES " if self.dialect.tuple_in_values else "" + ) + ", ".join( + "(%s)" + % ( + ", ".join( + self.render_literal_value(value, param_type) + for value, param_type in zip( + tuple_element, parameter.type.types + ) + ) + ) + for i, tuple_element in enumerate(values) + ) + else: + if bind_expression_template: + post_compile_pattern = self._post_compile_pattern + m = post_compile_pattern.search(bind_expression_template) + assert m and m.group( + 2 + ), "unexpected format for expanding parameter" + + tok = m.group(2).split("~~") + be_left, be_right = tok[1], tok[3] + replacement_expression = ", ".join( + "%s%s%s" + % ( + be_left, + self.render_literal_value(value, parameter.type), + be_right, + ) + for value in values + ) + else: + replacement_expression = ", ".join( + self.render_literal_value(value, parameter.type) + for value in values + ) + + return (), replacement_expression + + def _literal_execute_expanding_parameter(self, name, parameter, values): + if parameter.literal_execute: + return self._literal_execute_expanding_parameter_literal_binds( + parameter, values + ) + + dialect = self.dialect + typ_dialect_impl = parameter.type._unwrapped_dialect_impl(dialect) + + if self._numeric_binds: + bind_template = self.compilation_bindtemplate + else: + bind_template = self.bindtemplate + + if ( + self.dialect._bind_typing_render_casts + and typ_dialect_impl.render_bind_cast + ): + + def _render_bindtemplate(name): + return self.render_bind_cast( + parameter.type, + typ_dialect_impl, + bind_template % {"name": name}, + ) + + else: + + def _render_bindtemplate(name): + return bind_template % {"name": name} + + if not values: + to_update = [] + if typ_dialect_impl._is_tuple_type: + replacement_expression = self.visit_empty_set_op_expr( + parameter.type.types, parameter.expand_op + ) + 
else: + replacement_expression = self.visit_empty_set_op_expr( + [parameter.type], parameter.expand_op + ) + + elif typ_dialect_impl._is_tuple_type or ( + typ_dialect_impl._isnull + and isinstance(values[0], collections_abc.Sequence) + and not isinstance(values[0], (str, bytes)) + ): + assert not typ_dialect_impl._is_array + to_update = [ + ("%s_%s_%s" % (name, i, j), value) + for i, tuple_element in enumerate(values, 1) + for j, value in enumerate(tuple_element, 1) + ] + + replacement_expression = ( + "VALUES " if dialect.tuple_in_values else "" + ) + ", ".join( + "(%s)" + % ( + ", ".join( + _render_bindtemplate( + to_update[i * len(tuple_element) + j][0] + ) + for j, value in enumerate(tuple_element) + ) + ) + for i, tuple_element in enumerate(values) + ) + else: + to_update = [ + ("%s_%s" % (name, i), value) + for i, value in enumerate(values, 1) + ] + replacement_expression = ", ".join( + _render_bindtemplate(key) for key, value in to_update + ) + + return to_update, replacement_expression + + def visit_binary( + self, + binary, + override_operator=None, + eager_grouping=False, + from_linter=None, + lateral_from_linter=None, + **kw, + ): + if from_linter and operators.is_comparison(binary.operator): + if lateral_from_linter is not None: + enclosing_lateral = kw["enclosing_lateral"] + lateral_from_linter.edges.update( + itertools.product( + _de_clone( + binary.left._from_objects + [enclosing_lateral] + ), + _de_clone( + binary.right._from_objects + [enclosing_lateral] + ), + ) + ) + else: + from_linter.edges.update( + itertools.product( + _de_clone(binary.left._from_objects), + _de_clone(binary.right._from_objects), + ) + ) + + # don't allow "? = ?" to render + if ( + self.ansi_bind_rules + and isinstance(binary.left, elements.BindParameter) + and isinstance(binary.right, elements.BindParameter) + ): + kw["literal_execute"] = True + + operator_ = override_operator or binary.operator + disp = self._get_operator_dispatch(operator_, "binary", None) + if disp: + return disp(binary, operator_, **kw) + else: + try: + opstring = OPERATORS[operator_] + except KeyError as err: + raise exc.UnsupportedCompilationError(self, operator_) from err + else: + return self._generate_generic_binary( + binary, + opstring, + from_linter=from_linter, + lateral_from_linter=lateral_from_linter, + **kw, + ) + + def visit_function_as_comparison_op_binary(self, element, operator, **kw): + return self.process(element.sql_function, **kw) + + def visit_mod_binary(self, binary, operator, **kw): + if self.preparer._double_percents: + return ( + self.process(binary.left, **kw) + + " %% " + + self.process(binary.right, **kw) + ) + else: + return ( + self.process(binary.left, **kw) + + " % " + + self.process(binary.right, **kw) + ) + + def visit_custom_op_binary(self, element, operator, **kw): + kw["eager_grouping"] = operator.eager_grouping + return self._generate_generic_binary( + element, + " " + self.escape_literal_column(operator.opstring) + " ", + **kw, + ) + + def visit_custom_op_unary_operator(self, element, operator, **kw): + return self._generate_generic_unary_operator( + element, self.escape_literal_column(operator.opstring) + " ", **kw + ) + + def visit_custom_op_unary_modifier(self, element, operator, **kw): + return self._generate_generic_unary_modifier( + element, " " + self.escape_literal_column(operator.opstring), **kw + ) + + def _generate_generic_binary( + self, binary, opstring, eager_grouping=False, **kw + ): + _in_operator_expression = kw.get("_in_operator_expression", False) + + 
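+        # (added note: _in_operator_expression records whether this binary is
+        # nested inside another operator expression; together with
+        # eager_grouping it parenthesizes the nested text below, e.g. for
+        # custom_op operators that declare eager_grouping=True, as seen in
+        # visit_custom_op_binary above.)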
kw["_in_operator_expression"] = True + kw["_binary_op"] = binary.operator + text = ( + binary.left._compiler_dispatch( + self, eager_grouping=eager_grouping, **kw + ) + + opstring + + binary.right._compiler_dispatch( + self, eager_grouping=eager_grouping, **kw + ) + ) + + if _in_operator_expression and eager_grouping: + text = "(%s)" % text + return text + + def _generate_generic_unary_operator(self, unary, opstring, **kw): + return opstring + unary.element._compiler_dispatch(self, **kw) + + def _generate_generic_unary_modifier(self, unary, opstring, **kw): + return unary.element._compiler_dispatch(self, **kw) + opstring + + @util.memoized_property + def _like_percent_literal(self): + return elements.literal_column("'%'", type_=sqltypes.STRINGTYPE) + + def visit_ilike_case_insensitive_operand(self, element, **kw): + return f"lower({element.element._compiler_dispatch(self, **kw)})" + + def visit_contains_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent.concat(binary.right).concat(percent) + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_not_contains_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent.concat(binary.right).concat(percent) + return self.visit_not_like_op_binary(binary, operator, **kw) + + def visit_icontains_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.left = ilike_case_insensitive(binary.left) + binary.right = percent.concat( + ilike_case_insensitive(binary.right) + ).concat(percent) + return self.visit_ilike_op_binary(binary, operator, **kw) + + def visit_not_icontains_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.left = ilike_case_insensitive(binary.left) + binary.right = percent.concat( + ilike_case_insensitive(binary.right) + ).concat(percent) + return self.visit_not_ilike_op_binary(binary, operator, **kw) + + def visit_startswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent._rconcat(binary.right) + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_not_startswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent._rconcat(binary.right) + return self.visit_not_like_op_binary(binary, operator, **kw) + + def visit_istartswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.left = ilike_case_insensitive(binary.left) + binary.right = percent._rconcat(ilike_case_insensitive(binary.right)) + return self.visit_ilike_op_binary(binary, operator, **kw) + + def visit_not_istartswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.left = ilike_case_insensitive(binary.left) + binary.right = percent._rconcat(ilike_case_insensitive(binary.right)) + return self.visit_not_ilike_op_binary(binary, operator, **kw) + + def visit_endswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent.concat(binary.right) + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_not_endswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = 
self._like_percent_literal + binary.right = percent.concat(binary.right) + return self.visit_not_like_op_binary(binary, operator, **kw) + + def visit_iendswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.left = ilike_case_insensitive(binary.left) + binary.right = percent.concat(ilike_case_insensitive(binary.right)) + return self.visit_ilike_op_binary(binary, operator, **kw) + + def visit_not_iendswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.left = ilike_case_insensitive(binary.left) + binary.right = percent.concat(ilike_case_insensitive(binary.right)) + return self.visit_not_ilike_op_binary(binary, operator, **kw) + + def visit_like_op_binary(self, binary, operator, **kw): + escape = binary.modifiers.get("escape", None) + + return "%s LIKE %s" % ( + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw), + ) + ( + " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE) + if escape is not None + else "" + ) + + def visit_not_like_op_binary(self, binary, operator, **kw): + escape = binary.modifiers.get("escape", None) + return "%s NOT LIKE %s" % ( + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw), + ) + ( + " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE) + if escape is not None + else "" + ) + + def visit_ilike_op_binary(self, binary, operator, **kw): + if operator is operators.ilike_op: + binary = binary._clone() + binary.left = ilike_case_insensitive(binary.left) + binary.right = ilike_case_insensitive(binary.right) + # else we assume ilower() has been applied + + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_not_ilike_op_binary(self, binary, operator, **kw): + if operator is operators.not_ilike_op: + binary = binary._clone() + binary.left = ilike_case_insensitive(binary.left) + binary.right = ilike_case_insensitive(binary.right) + # else we assume ilower() has been applied + + return self.visit_not_like_op_binary(binary, operator, **kw) + + def visit_between_op_binary(self, binary, operator, **kw): + symmetric = binary.modifiers.get("symmetric", False) + return self._generate_generic_binary( + binary, " BETWEEN SYMMETRIC " if symmetric else " BETWEEN ", **kw + ) + + def visit_not_between_op_binary(self, binary, operator, **kw): + symmetric = binary.modifiers.get("symmetric", False) + return self._generate_generic_binary( + binary, + " NOT BETWEEN SYMMETRIC " if symmetric else " NOT BETWEEN ", + **kw, + ) + + def visit_regexp_match_op_binary(self, binary, operator, **kw): + raise exc.CompileError( + "%s dialect does not support regular expressions" + % self.dialect.name + ) + + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): + raise exc.CompileError( + "%s dialect does not support regular expressions" + % self.dialect.name + ) + + def visit_regexp_replace_op_binary(self, binary, operator, **kw): + raise exc.CompileError( + "%s dialect does not support regular expression replacements" + % self.dialect.name + ) + + def visit_bindparam( + self, + bindparam, + within_columns_clause=False, + literal_binds=False, + skip_bind_expression=False, + literal_execute=False, + render_postcompile=False, + **kwargs, + ): + + if not skip_bind_expression: + impl = bindparam.type.dialect_impl(self.dialect) + if impl._has_bind_expression: + bind_expression = impl.bind_expression(bindparam) + wrapped = 
self.process(
+                    bind_expression,
+                    skip_bind_expression=True,
+                    within_columns_clause=within_columns_clause,
+                    literal_binds=literal_binds and not bindparam.expanding,
+                    literal_execute=literal_execute,
+                    render_postcompile=render_postcompile,
+                    **kwargs,
+                )
+                if bindparam.expanding:
+                    # for postcompile w/ expanding, move the "wrapped" part
+                    # of this into the inside
+
+                    m = re.match(
+                        r"^(.*)\(__\[POSTCOMPILE_(\S+?)\]\)(.*)$", wrapped
+                    )
+                    assert m, "unexpected format for expanding parameter"
+                    wrapped = "(__[POSTCOMPILE_%s~~%s~~REPL~~%s~~])" % (
+                        m.group(2),
+                        m.group(1),
+                        m.group(3),
+                    )
+
+                if literal_binds:
+                    ret = self.render_literal_bindparam(
+                        bindparam,
+                        within_columns_clause=True,
+                        bind_expression_template=wrapped,
+                        **kwargs,
+                    )
+                    return "(%s)" % ret
+
+                return wrapped
+
+        if not literal_binds:
+            literal_execute = (
+                literal_execute
+                or bindparam.literal_execute
+                or (within_columns_clause and self.ansi_bind_rules)
+            )
+            post_compile = literal_execute or bindparam.expanding
+        else:
+            post_compile = False
+
+        if literal_binds:
+            ret = self.render_literal_bindparam(
+                bindparam, within_columns_clause=True, **kwargs
+            )
+            if bindparam.expanding:
+                ret = "(%s)" % ret
+            return ret
+
+        name = self._truncate_bindparam(bindparam)
+
+        if name in self.binds:
+            existing = self.binds[name]
+            if existing is not bindparam:
+                if (
+                    (existing.unique or bindparam.unique)
+                    and not existing.proxy_set.intersection(
+                        bindparam.proxy_set
+                    )
+                    and not existing._cloned_set.intersection(
+                        bindparam._cloned_set
+                    )
+                ):
+                    raise exc.CompileError(
+                        "Bind parameter '%s' conflicts with "
+                        "unique bind parameter of the same name" % name
+                    )
+                elif existing.expanding != bindparam.expanding:
+                    raise exc.CompileError(
+                        "Can't reuse bound parameter name '%s' in both "
+                        "'expanding' (e.g. within an IN expression) and "
+                        "non-expanding contexts. If this parameter is to "
+                        "receive a list/array value, set 'expanding=True' on "
+                        "it for expressions that aren't IN, otherwise use "
+                        "a different parameter name." % (name,)
+                    )
+                elif existing._is_crud or bindparam._is_crud:
+                    if existing._is_crud and bindparam._is_crud:
+                        # TODO: this condition is not well understood.
+                        # see tests in test/sql/test_update.py
+                        raise exc.CompileError(
+                            "Encountered unsupported case when compiling an "
+                            "INSERT or UPDATE statement. If this is a "
+                            "multi-table "
+                            "UPDATE statement, please provide string-named "
+                            "arguments to the "
+                            "values() method with distinct names; support for "
+                            "multi-table UPDATE statements that "
+                            "target multiple tables for UPDATE is very "
+                            "limited",
+                        )
+                    else:
+                        raise exc.CompileError(
+                            f"bindparam() name '{bindparam.key}' is reserved "
+                            "for automatic usage in the VALUES or SET "
+                            "clause of this "
+                            "insert/update statement. Please use a "
+                            "name other than column name when using "
+                            "bindparam() "
+                            "with insert() or update() (for example, "
+                            f"'b_{bindparam.key}')."
+                        )
+
+        self.binds[bindparam.key] = self.binds[name] = bindparam
+
+        # if we are given a cache key that we're going to match against,
+        # relate the bindparam here to one that is most likely present
+        # in the "extracted params" portion of the cache key. this is used
+        # to set up a positional mapping that is used to determine the
+        # correct parameters for a subsequent use of this compiled with
+        # a different set of parameter values. here, we accommodate for
+        # parameters that may have been cloned both before and after the
+        # cache key was generated.
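+        # (added sketch: ckbm maps each bindparam captured in the cache key
+        # to the list of equivalent bindparams in this compiled form, so that
+        # construct_params(extracted_parameters=...) can line the two sets up
+        # positionally even though their key names differ.)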
+ ckbm_tuple = self._cache_key_bind_match + + if ckbm_tuple: + ckbm, cksm = ckbm_tuple + for bp in bindparam._cloned_set: + if bp.key in cksm: + cb = cksm[bp.key] + ckbm[cb].append(bindparam) + + if bindparam.isoutparam: + self.has_out_parameters = True + + if post_compile: + if render_postcompile: + self._render_postcompile = True + + if literal_execute: + self.literal_execute_params |= {bindparam} + else: + self.post_compile_params |= {bindparam} + + ret = self.bindparam_string( + name, + post_compile=post_compile, + expanding=bindparam.expanding, + bindparam_type=bindparam.type, + **kwargs, + ) + + if bindparam.expanding: + ret = "(%s)" % ret + + return ret + + def render_bind_cast(self, type_, dbapi_type, sqltext): + raise NotImplementedError() + + def render_literal_bindparam( + self, + bindparam, + render_literal_value=NO_ARG, + bind_expression_template=None, + **kw, + ): + if render_literal_value is not NO_ARG: + value = render_literal_value + else: + if bindparam.value is None and bindparam.callable is None: + op = kw.get("_binary_op", None) + if op and op not in (operators.is_, operators.is_not): + util.warn_limited( + "Bound parameter '%s' rendering literal NULL in a SQL " + "expression; comparisons to NULL should not use " + "operators outside of 'is' or 'is not'", + (bindparam.key,), + ) + return self.process(sqltypes.NULLTYPE, **kw) + value = bindparam.effective_value + + if bindparam.expanding: + leep = self._literal_execute_expanding_parameter_literal_binds + to_update, replacement_expr = leep( + bindparam, + value, + bind_expression_template=bind_expression_template, + ) + return replacement_expr + else: + return self.render_literal_value(value, bindparam.type) + + def render_literal_value(self, value, type_): + """Render the value of a bind parameter as a quoted literal. + + This is used for statement sections that do not accept bind parameters + on the target driver/database. + + This should be implemented by subclasses using the quoting services + of the DBAPI. + + """ + + if value is None and not type_.should_evaluate_none: + # issue #10535 - handle NULL in the compiler without placing + # this onto each type, except for "evaluate None" types + # (e.g. JSON) + return self.process(elements.Null._instance()) + + processor = type_._cached_literal_processor(self.dialect) + if processor: + try: + return processor(value) + except Exception as e: + raise exc.CompileError( + f"Could not render literal value " + f'"{sql_util._repr_single_value(value)}" ' + f"with datatype " + f"{type_}; see parent stack trace for " + "more detail." 
+ ) from e + + else: + raise exc.CompileError( + f"No literal value renderer is available for literal value " + f'"{sql_util._repr_single_value(value)}" ' + f"with datatype {type_}" + ) + + def _truncate_bindparam(self, bindparam): + if bindparam in self.bind_names: + return self.bind_names[bindparam] + + bind_name = bindparam.key + if isinstance(bind_name, elements._truncated_label): + bind_name = self._truncated_identifier("bindparam", bind_name) + + # add to bind_names for translation + self.bind_names[bindparam] = bind_name + + return bind_name + + def _truncated_identifier( + self, ident_class: str, name: _truncated_label + ) -> str: + if (ident_class, name) in self.truncated_names: + return self.truncated_names[(ident_class, name)] + + anonname = name.apply_map(self.anon_map) + + if len(anonname) > self.label_length - 6: + counter = self._truncated_counters.get(ident_class, 1) + truncname = ( + anonname[0 : max(self.label_length - 6, 0)] + + "_" + + hex(counter)[2:] + ) + self._truncated_counters[ident_class] = counter + 1 + else: + truncname = anonname + self.truncated_names[(ident_class, name)] = truncname + return truncname + + def _anonymize(self, name: str) -> str: + return name % self.anon_map + + def bindparam_string( + self, + name: str, + post_compile: bool = False, + expanding: bool = False, + escaped_from: Optional[str] = None, + bindparam_type: Optional[TypeEngine[Any]] = None, + accumulate_bind_names: Optional[Set[str]] = None, + visited_bindparam: Optional[List[str]] = None, + **kw: Any, + ) -> str: + # TODO: accumulate_bind_names is passed by crud.py to gather + # names on a per-value basis, visited_bindparam is passed by + # visit_insert() to collect all parameters in the statement. + # see if this gathering can be simplified somehow + if accumulate_bind_names is not None: + accumulate_bind_names.add(name) + if visited_bindparam is not None: + visited_bindparam.append(name) + + if not escaped_from: + if self._bind_translate_re.search(name): + # not quite the translate use case as we want to + # also get a quick boolean if we even found + # unusual characters in the name + new_name = self._bind_translate_re.sub( + lambda m: self._bind_translate_chars[m.group(0)], + name, + ) + escaped_from = name + name = new_name + + if escaped_from: + self.escaped_bind_names = self.escaped_bind_names.union( + {escaped_from: name} + ) + if post_compile: + ret = "__[POSTCOMPILE_%s]" % name + if expanding: + # for expanding, bound parameters or literal values will be + # rendered per item + return ret + + # otherwise, for non-expanding "literal execute", apply + # bind casts as determined by the datatype + if bindparam_type is not None: + type_impl = bindparam_type._unwrapped_dialect_impl( + self.dialect + ) + if type_impl.render_literal_cast: + ret = self.render_bind_cast(bindparam_type, type_impl, ret) + return ret + elif self.state is CompilerState.COMPILING: + ret = self.compilation_bindtemplate % {"name": name} + else: + ret = self.bindtemplate % {"name": name} + + if ( + bindparam_type is not None + and self.dialect._bind_typing_render_casts + ): + type_impl = bindparam_type._unwrapped_dialect_impl(self.dialect) + if type_impl.render_bind_cast: + ret = self.render_bind_cast(bindparam_type, type_impl, ret) + + return ret + + def _dispatch_independent_ctes(self, stmt, kw): + local_kw = kw.copy() + local_kw.pop("cte_opts", None) + for cte, opt in zip( + stmt._independent_ctes, stmt._independent_ctes_opts + ): + cte._compiler_dispatch(self, cte_opts=opt, **local_kw) + + def visit_cte( 
+ self, + cte: CTE, + asfrom: bool = False, + ashint: bool = False, + fromhints: Optional[_FromHintsType] = None, + visiting_cte: Optional[CTE] = None, + from_linter: Optional[FromLinter] = None, + cte_opts: selectable._CTEOpts = selectable._CTEOpts(False), + **kwargs: Any, + ) -> Optional[str]: + self_ctes = self._init_cte_state() + assert self_ctes is self.ctes + + kwargs["visiting_cte"] = cte + + cte_name = cte.name + + if isinstance(cte_name, elements._truncated_label): + cte_name = self._truncated_identifier("alias", cte_name) + + is_new_cte = True + embedded_in_current_named_cte = False + + _reference_cte = cte._get_reference_cte() + + nesting = cte.nesting or cte_opts.nesting + + # check for CTE already encountered + if _reference_cte in self.level_name_by_cte: + cte_level, _, existing_cte_opts = self.level_name_by_cte[ + _reference_cte + ] + assert _ == cte_name + + cte_level_name = (cte_level, cte_name) + existing_cte = self.ctes_by_level_name[cte_level_name] + + # check if we are receiving it here with a specific + # "nest_here" location; if so, move it to this location + + if cte_opts.nesting: + if existing_cte_opts.nesting: + raise exc.CompileError( + "CTE is stated as 'nest_here' in " + "more than one location" + ) + + old_level_name = (cte_level, cte_name) + cte_level = len(self.stack) if nesting else 1 + cte_level_name = new_level_name = (cte_level, cte_name) + + del self.ctes_by_level_name[old_level_name] + self.ctes_by_level_name[new_level_name] = existing_cte + self.level_name_by_cte[_reference_cte] = new_level_name + ( + cte_opts, + ) + + else: + cte_level = len(self.stack) if nesting else 1 + cte_level_name = (cte_level, cte_name) + + if cte_level_name in self.ctes_by_level_name: + existing_cte = self.ctes_by_level_name[cte_level_name] + else: + existing_cte = None + + if existing_cte is not None: + embedded_in_current_named_cte = visiting_cte is existing_cte + + # we've generated a same-named CTE that we are enclosed in, + # or this is the same CTE. just return the name. + if cte is existing_cte._restates or cte is existing_cte: + is_new_cte = False + elif existing_cte is cte._restates: + # we've generated a same-named CTE that is + # enclosed in us - we take precedence, so + # discard the text for the "inner". 
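
The reconciliation above is what separates a legitimately reused CTE from a name collision. A small sketch of the collision case, with hypothetical table and CTE names; two unrelated CTEs that happen to share a name are expected to fail at compile time with the "Multiple, unrelated CTEs" error raised just below.

    from sqlalchemy import column, exc, select, table

    t = table("t", column("a"))

    c1 = select(t.c.a).cte("vals")
    c2 = select(t.c.a).cte("vals")  # a second, unrelated CTE, same name

    try:
        str(select(c1.c.a, c2.c.a))
    except exc.CompileError as err:
        print(err)
        # Multiple, unrelated CTEs found with the same name: 'vals'

Reusing the same CTE object any number of times in one statement is fine; only distinct constructs sharing a name are rejected.
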
+ del self_ctes[existing_cte] + + existing_cte_reference_cte = existing_cte._get_reference_cte() + + assert existing_cte_reference_cte is _reference_cte + assert existing_cte_reference_cte is existing_cte + + del self.level_name_by_cte[existing_cte_reference_cte] + else: + # if the two CTEs are deep-copy identical, consider them + # the same, **if** they are clones, that is, they came from + # the ORM or other visit method + if ( + cte._is_clone_of is not None + or existing_cte._is_clone_of is not None + ) and cte.compare(existing_cte): + is_new_cte = False + else: + raise exc.CompileError( + "Multiple, unrelated CTEs found with " + "the same name: %r" % cte_name + ) + + if not asfrom and not is_new_cte: + return None + + if cte._cte_alias is not None: + pre_alias_cte = cte._cte_alias + cte_pre_alias_name = cte._cte_alias.name + if isinstance(cte_pre_alias_name, elements._truncated_label): + cte_pre_alias_name = self._truncated_identifier( + "alias", cte_pre_alias_name + ) + else: + pre_alias_cte = cte + cte_pre_alias_name = None + + if is_new_cte: + self.ctes_by_level_name[cte_level_name] = cte + self.level_name_by_cte[_reference_cte] = cte_level_name + ( + cte_opts, + ) + + if pre_alias_cte not in self.ctes: + self.visit_cte(pre_alias_cte, **kwargs) + + if not cte_pre_alias_name and cte not in self_ctes: + if cte.recursive: + self.ctes_recursive = True + text = self.preparer.format_alias(cte, cte_name) + if cte.recursive: + col_source = cte.element + + # TODO: can we get at the .columns_plus_names collection + # that is already (or will be?) generated for the SELECT + # rather than calling twice? + recur_cols = [ + # TODO: proxy_name is not technically safe, + # see test_cte-> + # test_with_recursive_no_name_currently_buggy. not + # clear what should be done with such a case + fallback_label_name or proxy_name + for ( + _, + proxy_name, + fallback_label_name, + c, + repeated, + ) in (col_source._generate_columns_plus_names(True)) + if not repeated + ] + + text += "(%s)" % ( + ", ".join( + self.preparer.format_label_name( + ident, anon_map=self.anon_map + ) + for ident in recur_cols + ) + ) + + assert kwargs.get("subquery", False) is False + + if not self.stack: + # toplevel, this is a stringify of the + # cte directly. just compile the inner + # the way alias() does. 
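
That toplevel branch is why a bare CTE stringifies like its inner SELECT, the same way alias() does, while the WITH clause only appears once the CTE participates in an enclosing statement. A quick sketch with hypothetical names:

    from sqlalchemy import column, select, table

    t = table("t", column("a"))
    c1 = select(t.c.a).cte("c1")

    print(c1)
    # SELECT t.a FROM t

    print(select(c1.c.a))
    # WITH c1 AS (SELECT t.a AS a FROM t) SELECT c1.a FROM c1
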
+ return cte.element._compiler_dispatch( + self, asfrom=asfrom, **kwargs + ) + else: + prefixes = self._generate_prefixes( + cte, cte._prefixes, **kwargs + ) + inner = cte.element._compiler_dispatch( + self, asfrom=True, **kwargs + ) + + text += " AS %s\n(%s)" % (prefixes, inner) + + if cte._suffixes: + text += " " + self._generate_prefixes( + cte, cte._suffixes, **kwargs + ) + + self_ctes[cte] = text + + if asfrom: + if from_linter: + from_linter.froms[cte._de_clone()] = cte_name + + if not is_new_cte and embedded_in_current_named_cte: + return self.preparer.format_alias(cte, cte_name) + + if cte_pre_alias_name: + text = self.preparer.format_alias(cte, cte_pre_alias_name) + if self.preparer._requires_quotes(cte_name): + cte_name = self.preparer.quote(cte_name) + text += self.get_render_as_alias_suffix(cte_name) + return text + else: + return self.preparer.format_alias(cte, cte_name) + + return None + + def visit_table_valued_alias(self, element, **kw): + if element.joins_implicitly: + kw["from_linter"] = None + if element._is_lateral: + return self.visit_lateral(element, **kw) + else: + return self.visit_alias(element, **kw) + + def visit_table_valued_column(self, element, **kw): + return self.visit_column(element, **kw) + + def visit_alias( + self, + alias, + asfrom=False, + ashint=False, + iscrud=False, + fromhints=None, + subquery=False, + lateral=False, + enclosing_alias=None, + from_linter=None, + **kwargs, + ): + if lateral: + if "enclosing_lateral" not in kwargs: + # if lateral is set and enclosing_lateral is not + # present, we assume we are being called directly + # from visit_lateral() and we need to set enclosing_lateral. + assert alias._is_lateral + kwargs["enclosing_lateral"] = alias + + # for lateral objects, we track a second from_linter that is... + # lateral! to the level above us. 
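
For context on this lateral-specific bookkeeping, a usage-level sketch of a LATERAL subquery; all names are hypothetical, and it is rendered against the PostgreSQL dialect since that backend supports LATERAL:

    from sqlalchemy import column, select, table, true
    from sqlalchemy.dialects import postgresql

    t = table("t", column("id"))
    u = table("u", column("t_id"), column("v"))

    # a lateral subquery may refer to columns of the enclosing FROM list
    sub = select(u.c.v).where(u.c.t_id == t.c.id).lateral("s")
    stmt = select(t.c.id, sub.c.v).select_from(t.join(sub, true()))

    print(stmt.compile(dialect=postgresql.dialect()))
    # roughly: SELECT t.id, s.v FROM t JOIN LATERAL
    #          (SELECT u.v FROM u WHERE u.t_id = t.id) AS s ON true
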
+ if ( + from_linter + and "lateral_from_linter" not in kwargs + and "enclosing_lateral" in kwargs + ): + kwargs["lateral_from_linter"] = from_linter + + if enclosing_alias is not None and enclosing_alias.element is alias: + inner = alias.element._compiler_dispatch( + self, + asfrom=asfrom, + ashint=ashint, + iscrud=iscrud, + fromhints=fromhints, + lateral=lateral, + enclosing_alias=alias, + **kwargs, + ) + if subquery and (asfrom or lateral): + inner = "(%s)" % (inner,) + return inner + else: + enclosing_alias = kwargs["enclosing_alias"] = alias + + if asfrom or ashint: + if isinstance(alias.name, elements._truncated_label): + alias_name = self._truncated_identifier("alias", alias.name) + else: + alias_name = alias.name + + if ashint: + return self.preparer.format_alias(alias, alias_name) + elif asfrom: + if from_linter: + from_linter.froms[alias._de_clone()] = alias_name + + inner = alias.element._compiler_dispatch( + self, asfrom=True, lateral=lateral, **kwargs + ) + if subquery: + inner = "(%s)" % (inner,) + + ret = inner + self.get_render_as_alias_suffix( + self.preparer.format_alias(alias, alias_name) + ) + + if alias._supports_derived_columns and alias._render_derived: + ret += "(%s)" % ( + ", ".join( + "%s%s" + % ( + self.preparer.quote(col.name), + ( + " %s" + % self.dialect.type_compiler_instance.process( + col.type, **kwargs + ) + if alias._render_derived_w_types + else "" + ), + ) + for col in alias.c + ) + ) + + if fromhints and alias in fromhints: + ret = self.format_from_hint_text( + ret, alias, fromhints[alias], iscrud + ) + + return ret + else: + # note we cancel the "subquery" flag here as well + return alias.element._compiler_dispatch( + self, lateral=lateral, **kwargs + ) + + def visit_subquery(self, subquery, **kw): + kw["subquery"] = True + return self.visit_alias(subquery, **kw) + + def visit_lateral(self, lateral_, **kw): + kw["lateral"] = True + return "LATERAL %s" % self.visit_alias(lateral_, **kw) + + def visit_tablesample(self, tablesample, asfrom=False, **kw): + text = "%s TABLESAMPLE %s" % ( + self.visit_alias(tablesample, asfrom=True, **kw), + tablesample._get_method()._compiler_dispatch(self, **kw), + ) + + if tablesample.seed is not None: + text += " REPEATABLE (%s)" % ( + tablesample.seed._compiler_dispatch(self, **kw) + ) + + return text + + def _render_values(self, element, **kw): + kw.setdefault("literal_binds", element.literal_binds) + tuples = ", ".join( + self.process( + elements.Tuple( + types=element._column_types, *elem + ).self_group(), + **kw, + ) + for chunk in element._data + for elem in chunk + ) + return f"VALUES {tuples}" + + def visit_values(self, element, asfrom=False, from_linter=None, **kw): + v = self._render_values(element, **kw) + + if element._unnamed: + name = None + elif isinstance(element.name, elements._truncated_label): + name = self._truncated_identifier("values", element.name) + else: + name = element.name + + if element._is_lateral: + lateral = "LATERAL " + else: + lateral = "" + + if asfrom: + if from_linter: + from_linter.froms[element._de_clone()] = ( + name if name is not None else "(unnamed VALUES element)" + ) + + if name: + kw["include_table"] = False + v = "%s(%s)%s (%s)" % ( + lateral, + v, + self.get_render_as_alias_suffix(self.preparer.quote(name)), + ( + ", ".join( + c._compiler_dispatch(self, **kw) + for c in element.columns + ) + ), + ) + else: + v = "%s(%s)" % (lateral, v) + return v + + def visit_scalar_values(self, element, **kw): + return f"({self._render_values(element, **kw)})" + + def 
get_render_as_alias_suffix(self, alias_name_text): + return " AS " + alias_name_text + + def _add_to_result_map( + self, + keyname: str, + name: str, + objects: Tuple[Any, ...], + type_: TypeEngine[Any], + ) -> None: + + # note objects must be non-empty for cursor.py to handle the + # collection properly + assert objects + + if keyname is None or keyname == "*": + self._ordered_columns = False + self._ad_hoc_textual = True + if type_._is_tuple_type: + raise exc.CompileError( + "Most backends don't support SELECTing " + "from a tuple() object. If this is an ORM query, " + "consider using the Bundle object." + ) + self._result_columns.append( + ResultColumnsEntry(keyname, name, objects, type_) + ) + + def _label_returning_column( + self, stmt, column, populate_result_map, column_clause_args=None, **kw + ): + """Render a column with necessary labels inside of a RETURNING clause. + + This method is provided for individual dialects in place of calling + the _label_select_column method directly, so that the two use cases + of RETURNING vs. SELECT can be disambiguated going forward. + + .. versionadded:: 1.4.21 + + """ + return self._label_select_column( + None, + column, + populate_result_map, + False, + {} if column_clause_args is None else column_clause_args, + **kw, + ) + + def _label_select_column( + self, + select, + column, + populate_result_map, + asfrom, + column_clause_args, + name=None, + proxy_name=None, + fallback_label_name=None, + within_columns_clause=True, + column_is_repeated=False, + need_column_expressions=False, + include_table=True, + ): + """produce labeled columns present in a select().""" + impl = column.type.dialect_impl(self.dialect) + + if impl._has_column_expression and ( + need_column_expressions or populate_result_map + ): + col_expr = impl.column_expression(column) + else: + col_expr = column + + if populate_result_map: + # pass an "add_to_result_map" callable into the compilation + # of embedded columns. this collects information about the + # column as it will be fetched in the result and is coordinated + # with cursor.description when the query is executed. + add_to_result_map = self._add_to_result_map + + # if the SELECT statement told us this column is a repeat, + # wrap the callable with one that prevents the addition of the + # targets + if column_is_repeated: + _add_to_result_map = add_to_result_map + + def add_to_result_map(keyname, name, objects, type_): + _add_to_result_map(keyname, name, (keyname,), type_) + + # if we redefined col_expr for type expressions, wrap the + # callable with one that adds the original column to the targets + elif col_expr is not column: + _add_to_result_map = add_to_result_map + + def add_to_result_map(keyname, name, objects, type_): + _add_to_result_map( + keyname, name, (column,) + objects, type_ + ) + + else: + add_to_result_map = None + + # this method is used by some of the dialects for RETURNING, + # which has different inputs. _label_returning_column was added + # as the better target for this now however for 1.4 we will keep + # _label_select_column directly compatible with this use case. 
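
The labeling decisions made by _label_select_column surface in the public label styles. A minimal sketch, assuming a throwaway table:

    from sqlalchemy import LABEL_STYLE_TABLENAME_PLUS_COL, column, select, table

    t = table("t", column("a"), column("b"))

    print(select(t))
    # SELECT t.a, t.b FROM t

    print(select(t).set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL))
    # SELECT t.a AS t_a, t.b AS t_b FROM t
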
+ # these assertions right now set up the current expected inputs + assert within_columns_clause, ( + "_label_select_column is only relevant within " + "the columns clause of a SELECT or RETURNING" + ) + if isinstance(column, elements.Label): + if col_expr is not column: + result_expr = _CompileLabel( + col_expr, column.name, alt_names=(column.element,) + ) + else: + result_expr = col_expr + + elif name: + # here, _columns_plus_names has determined there's an explicit + # label name we need to use. this is the default for + # tablenames_plus_columnnames as well as when columns are being + # deduplicated on name + + assert ( + proxy_name is not None + ), "proxy_name is required if 'name' is passed" + + result_expr = _CompileLabel( + col_expr, + name, + alt_names=( + proxy_name, + # this is a hack to allow legacy result column lookups + # to work as they did before; this goes away in 2.0. + # TODO: this only seems to be tested indirectly + # via test/orm/test_deprecations.py. should be a + # resultset test for this + column._tq_label, + ), + ) + else: + # determine here whether this column should be rendered in + # a labelled context or not, as we were given no required label + # name from the caller. Here we apply heuristics based on the kind + # of SQL expression involved. + + if col_expr is not column: + # type-specific expression wrapping the given column, + # so we render a label + render_with_label = True + elif isinstance(column, elements.ColumnClause): + # table-bound column, we render its name as a label if we are + # inside of a subquery only + render_with_label = ( + asfrom + and not column.is_literal + and column.table is not None + ) + elif isinstance(column, elements.TextClause): + render_with_label = False + elif isinstance(column, elements.UnaryExpression): + render_with_label = column.wraps_column_expression or asfrom + elif ( + # general class of expressions that don't have a SQL-column + # addressable name. includes scalar selects, bind parameters, + # SQL functions, others + not isinstance(column, elements.NamedColumn) + # deeper check that indicates there's no natural "name" to + # this element, which accommodates for custom SQL constructs + # that might have a ".name" attribute (but aren't SQL + # functions) but are not implementing this more recently added + # base class. in theory the "NamedColumn" check should be + # enough, however here we seek to maintain legacy behaviors + # as well. + and column._non_anon_label is None + ): + render_with_label = True + else: + render_with_label = False + + if render_with_label: + if not fallback_label_name: + # used by the RETURNING case right now.
we generate it + # here as 3rd party dialects may be referring to + # _label_select_column method directly instead of the + # just-added _label_returning_column method + assert not column_is_repeated + fallback_label_name = column._anon_name_label + + fallback_label_name = ( + elements._truncated_label(fallback_label_name) + if not isinstance( + fallback_label_name, elements._truncated_label + ) + else fallback_label_name + ) + + result_expr = _CompileLabel( + col_expr, fallback_label_name, alt_names=(proxy_name,) + ) + else: + result_expr = col_expr + + column_clause_args.update( + within_columns_clause=within_columns_clause, + add_to_result_map=add_to_result_map, + include_table=include_table, + ) + return result_expr._compiler_dispatch(self, **column_clause_args) + + def format_from_hint_text(self, sqltext, table, hint, iscrud): + hinttext = self.get_from_hint_text(table, hint) + if hinttext: + sqltext += " " + hinttext + return sqltext + + def get_select_hint_text(self, byfroms): + return None + + def get_from_hint_text(self, table, text): + return None + + def get_crud_hint_text(self, table, text): + return None + + def get_statement_hint_text(self, hint_texts): + return " ".join(hint_texts) + + _default_stack_entry: _CompilerStackEntry + + if not typing.TYPE_CHECKING: + _default_stack_entry = util.immutabledict( + [("correlate_froms", frozenset()), ("asfrom_froms", frozenset())] + ) + + def _display_froms_for_select( + self, select_stmt, asfrom, lateral=False, **kw + ): + # utility method to help external dialects + # get the correct from list for a select. + # specifically the oracle dialect needs this feature + # right now. + toplevel = not self.stack + entry = self._default_stack_entry if toplevel else self.stack[-1] + + compile_state = select_stmt._compile_state_factory(select_stmt, self) + + correlate_froms = entry["correlate_froms"] + asfrom_froms = entry["asfrom_froms"] + + if asfrom and not lateral: + froms = compile_state._get_display_froms( + explicit_correlate_froms=correlate_froms.difference( + asfrom_froms + ), + implicit_correlate_froms=(), + ) + else: + froms = compile_state._get_display_froms( + explicit_correlate_froms=correlate_froms, + implicit_correlate_froms=asfrom_froms, + ) + return froms + + translate_select_structure: Any = None + """if not ``None``, should be a callable which accepts ``(select_stmt, + **kw)`` and returns a select object. this is used for structural changes + mostly to accommodate for LIMIT/OFFSET schemes + + """ + + def visit_select( + self, + select_stmt, + asfrom=False, + insert_into=False, + fromhints=None, + compound_index=None, + select_wraps_for=None, + lateral=False, + from_linter=None, + **kwargs, + ): + assert select_wraps_for is None, ( + "SQLAlchemy 1.4 requires use of " + "the translate_select_structure hook for structural " + "translations of SELECT objects" + ) + + # initial setup of SELECT. the compile_state_factory may now + # be creating a totally different SELECT from the one that was + # passed in. for ORM use this will convert from an ORM-state + # SELECT to a regular "Core" SELECT. other composed operations + # such as computation of joins will be performed. 
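
translate_select_structure is the hook dialects such as SQL Server use to substitute their own LIMIT/OFFSET forms. A sketch that compiles one select against two dialects; the table is hypothetical and the exact rendering can vary by dialect version:

    from sqlalchemy import column, select, table
    from sqlalchemy.dialects import mssql, sqlite

    t = table("t", column("a"))
    stmt = select(t.c.a).order_by(t.c.a).limit(5)

    print(stmt.compile(dialect=sqlite.dialect(),
                       compile_kwargs={"literal_binds": True}))
    # e.g. SELECT t.a FROM t ORDER BY t.a LIMIT 5

    print(stmt.compile(dialect=mssql.dialect(),
                       compile_kwargs={"literal_binds": True}))
    # e.g. SELECT TOP 5 t.a FROM t ORDER BY t.a
    # (the SELECT is restructured via translate_select_structure)
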
+ + kwargs["within_columns_clause"] = False + + compile_state = select_stmt._compile_state_factory( + select_stmt, self, **kwargs + ) + kwargs["ambiguous_table_name_map"] = ( + compile_state._ambiguous_table_name_map + ) + + select_stmt = compile_state.statement + + toplevel = not self.stack + + if toplevel and not self.compile_state: + self.compile_state = compile_state + + is_embedded_select = compound_index is not None or insert_into + + # translate step for Oracle, SQL Server which often need to + # restructure the SELECT to allow for LIMIT/OFFSET and possibly + # other conditions + if self.translate_select_structure: + new_select_stmt = self.translate_select_structure( + select_stmt, asfrom=asfrom, **kwargs + ) + + # if SELECT was restructured, maintain a link to the originals + # and assemble a new compile state + if new_select_stmt is not select_stmt: + compile_state_wraps_for = compile_state + select_wraps_for = select_stmt + select_stmt = new_select_stmt + + compile_state = select_stmt._compile_state_factory( + select_stmt, self, **kwargs + ) + select_stmt = compile_state.statement + + entry = self._default_stack_entry if toplevel else self.stack[-1] + + populate_result_map = need_column_expressions = ( + toplevel + or entry.get("need_result_map_for_compound", False) + or entry.get("need_result_map_for_nested", False) + ) + + # indicates there is a CompoundSelect in play and we are not the + # first select + if compound_index: + populate_result_map = False + + # this was first proposed as part of #3372; however, it is not + # reached in current tests and could possibly be an assertion + # instead. + if not populate_result_map and "add_to_result_map" in kwargs: + del kwargs["add_to_result_map"] + + froms = self._setup_select_stack( + select_stmt, compile_state, entry, asfrom, lateral, compound_index + ) + + column_clause_args = kwargs.copy() + column_clause_args.update( + {"within_label_clause": False, "within_columns_clause": False} + ) + + text = "SELECT " # we're off to a good start ! + + if select_stmt._hints: + hint_text, byfrom = self._setup_select_hints(select_stmt) + if hint_text: + text += hint_text + " " + else: + byfrom = None + + if select_stmt._independent_ctes: + self._dispatch_independent_ctes(select_stmt, kwargs) + + if select_stmt._prefixes: + text += self._generate_prefixes( + select_stmt, select_stmt._prefixes, **kwargs + ) + + text += self.get_select_precolumns(select_stmt, **kwargs) + # the actual list of columns to print in the SELECT column list. 
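
The prefix handling consulted here only emits entries whose dialect name matches the compiling dialect, as the dialect_name filter in _generate_prefixes below shows. A sketch with a hypothetical table:

    from sqlalchemy import column, select, table
    from sqlalchemy.dialects import mysql

    t = table("t", column("a"))
    stmt = select(t.c.a).prefix_with("SQL_NO_CACHE", dialect="mysql")

    print(stmt)
    # SELECT t.a FROM t            (default dialect: prefix filtered out)

    print(stmt.compile(dialect=mysql.dialect()))
    # SELECT SQL_NO_CACHE t.a FROM t
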
+ inner_columns = [ + c + for c in [ + self._label_select_column( + select_stmt, + column, + populate_result_map, + asfrom, + column_clause_args, + name=name, + proxy_name=proxy_name, + fallback_label_name=fallback_label_name, + column_is_repeated=repeated, + need_column_expressions=need_column_expressions, + ) + for ( + name, + proxy_name, + fallback_label_name, + column, + repeated, + ) in compile_state.columns_plus_names + ] + if c is not None + ] + + if populate_result_map and select_wraps_for is not None: + # if this select was generated from translate_select, + # rewrite the targeted columns in the result map + + translate = dict( + zip( + [ + name + for ( + key, + proxy_name, + fallback_label_name, + name, + repeated, + ) in compile_state.columns_plus_names + ], + [ + name + for ( + key, + proxy_name, + fallback_label_name, + name, + repeated, + ) in compile_state_wraps_for.columns_plus_names + ], + ) + ) + + self._result_columns = [ + ResultColumnsEntry( + key, name, tuple(translate.get(o, o) for o in obj), type_ + ) + for key, name, obj, type_ in self._result_columns + ] + + text = self._compose_select_body( + text, + select_stmt, + compile_state, + inner_columns, + froms, + byfrom, + toplevel, + kwargs, + ) + + if select_stmt._statement_hints: + per_dialect = [ + ht + for (dialect_name, ht) in select_stmt._statement_hints + if dialect_name in ("*", self.dialect.name) + ] + if per_dialect: + text += " " + self.get_statement_hint_text(per_dialect) + + # In compound query, CTEs are shared at the compound level + if self.ctes and (not is_embedded_select or toplevel): + nesting_level = len(self.stack) if not toplevel else None + text = self._render_cte_clause(nesting_level=nesting_level) + text + + if select_stmt._suffixes: + text += " " + self._generate_prefixes( + select_stmt, select_stmt._suffixes, **kwargs + ) + + self.stack.pop(-1) + + return text + + def _setup_select_hints( + self, select: Select[Any] + ) -> Tuple[str, _FromHintsType]: + byfrom = { + from_: hinttext + % {"name": from_._compiler_dispatch(self, ashint=True)} + for (from_, dialect), hinttext in select._hints.items() + if dialect in ("*", self.dialect.name) + } + hint_text = self.get_select_hint_text(byfrom) + return hint_text, byfrom + + def _setup_select_stack( + self, select, compile_state, entry, asfrom, lateral, compound_index + ): + correlate_froms = entry["correlate_froms"] + asfrom_froms = entry["asfrom_froms"] + + if compound_index == 0: + entry["select_0"] = select + elif compound_index: + select_0 = entry["select_0"] + numcols = len(select_0._all_selected_columns) + + if len(compile_state.columns_plus_names) != numcols: + raise exc.CompileError( + "All selectables passed to " + "CompoundSelect must have identical numbers of " + "columns; select #%d has %d columns, select " + "#%d has %d" + % ( + 1, + numcols, + compound_index + 1, + len(select._all_selected_columns), + ) + ) + + if asfrom and not lateral: + froms = compile_state._get_display_froms( + explicit_correlate_froms=correlate_froms.difference( + asfrom_froms + ), + implicit_correlate_froms=(), + ) + else: + froms = compile_state._get_display_froms( + explicit_correlate_froms=correlate_froms, + implicit_correlate_froms=asfrom_froms, + ) + + new_correlate_froms = set(_from_objects(*froms)) + all_correlate_froms = new_correlate_froms.union(correlate_froms) + + new_entry: _CompilerStackEntry = { + "asfrom_froms": new_correlate_froms, + "correlate_froms": all_correlate_froms, + "selectable": select, + "compile_state": compile_state, + } + 
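
The column-count check in _setup_select_stack above surfaces as a CompileError for mismatched compound selects. A sketch, assuming a throwaway two-column table; the failure is expected at compile time:

    from sqlalchemy import column, exc, select, table, union

    t = table("t", column("a"), column("b"))

    u = union(select(t.c.a), select(t.c.a, t.c.b))  # 1 column vs. 2
    try:
        str(u)
    except exc.CompileError as err:
        print(err)
        # All selectables passed to CompoundSelect must have identical
        # numbers of columns; select #1 has 1 columns, select #2 has 2
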
self.stack.append(new_entry) + + return froms + + def _compose_select_body( + self, + text, + select, + compile_state, + inner_columns, + froms, + byfrom, + toplevel, + kwargs, + ): + text += ", ".join(inner_columns) + + if self.linting & COLLECT_CARTESIAN_PRODUCTS: + from_linter = FromLinter({}, set()) + warn_linting = self.linting & WARN_LINTING + if toplevel: + self.from_linter = from_linter + else: + from_linter = None + warn_linting = False + + # adjust the whitespace for no inner columns, part of #9440, + # so that a no-col SELECT comes out as "SELECT WHERE..." or + # "SELECT FROM ...". + # while it would be better to have built the SELECT starting string + # without trailing whitespace first, then add whitespace only if inner + # cols were present, this breaks compatibility with various custom + # compilation schemes that are currently being tested. + if not inner_columns: + text = text.rstrip() + + if froms: + text += " \nFROM " + + if select._hints: + text += ", ".join( + [ + f._compiler_dispatch( + self, + asfrom=True, + fromhints=byfrom, + from_linter=from_linter, + **kwargs, + ) + for f in froms + ] + ) + else: + text += ", ".join( + [ + f._compiler_dispatch( + self, + asfrom=True, + from_linter=from_linter, + **kwargs, + ) + for f in froms + ] + ) + else: + text += self.default_from() + + if select._where_criteria: + t = self._generate_delimited_and_list( + select._where_criteria, from_linter=from_linter, **kwargs + ) + if t: + text += " \nWHERE " + t + + if warn_linting: + assert from_linter is not None + from_linter.warn() + + if select._group_by_clauses: + text += self.group_by_clause(select, **kwargs) + + if select._having_criteria: + t = self._generate_delimited_and_list( + select._having_criteria, **kwargs + ) + if t: + text += " \nHAVING " + t + + if select._order_by_clauses: + text += self.order_by_clause(select, **kwargs) + + if select._has_row_limiting_clause: + text += self._row_limit_clause(select, **kwargs) + + if select._for_update_arg is not None: + text += self.for_update_clause(select, **kwargs) + + return text + + def _generate_prefixes(self, stmt, prefixes, **kw): + clause = " ".join( + prefix._compiler_dispatch(self, **kw) + for prefix, dialect_name in prefixes + if dialect_name in (None, "*") or dialect_name == self.dialect.name + ) + if clause: + clause += " " + return clause + + def _render_cte_clause( + self, + nesting_level=None, + include_following_stack=False, + ): + """ + include_following_stack + Also render the nesting CTEs on the next stack. Useful for + SQL structures like UNION or INSERT that can wrap SELECT + statements containing nesting CTEs. 
+ """ + if not self.ctes: + return "" + + ctes: MutableMapping[CTE, str] + + if nesting_level and nesting_level > 1: + ctes = util.OrderedDict() + for cte in list(self.ctes.keys()): + cte_level, cte_name, cte_opts = self.level_name_by_cte[ + cte._get_reference_cte() + ] + nesting = cte.nesting or cte_opts.nesting + is_rendered_level = cte_level == nesting_level or ( + include_following_stack and cte_level == nesting_level + 1 + ) + if not (nesting and is_rendered_level): + continue + + ctes[cte] = self.ctes[cte] + + else: + ctes = self.ctes + + if not ctes: + return "" + ctes_recursive = any([cte.recursive for cte in ctes]) + + cte_text = self.get_cte_preamble(ctes_recursive) + " " + cte_text += ", \n".join([txt for txt in ctes.values()]) + cte_text += "\n " + + if nesting_level and nesting_level > 1: + for cte in list(ctes.keys()): + cte_level, cte_name, cte_opts = self.level_name_by_cte[ + cte._get_reference_cte() + ] + del self.ctes[cte] + del self.ctes_by_level_name[(cte_level, cte_name)] + del self.level_name_by_cte[cte._get_reference_cte()] + + return cte_text + + def get_cte_preamble(self, recursive): + if recursive: + return "WITH RECURSIVE" + else: + return "WITH" + + def get_select_precolumns(self, select, **kw): + """Called when building a ``SELECT`` statement, position is just + before column list. + + """ + if select._distinct_on: + util.warn_deprecated( + "DISTINCT ON is currently supported only by the PostgreSQL " + "dialect. Use of DISTINCT ON for other backends is currently " + "silently ignored, however this usage is deprecated, and will " + "raise CompileError in a future release for all backends " + "that do not support this syntax.", + version="1.4", + ) + return "DISTINCT " if select._distinct else "" + + def group_by_clause(self, select, **kw): + """allow dialects to customize how GROUP BY is rendered.""" + + group_by = self._generate_delimited_list( + select._group_by_clauses, OPERATORS[operators.comma_op], **kw + ) + if group_by: + return " GROUP BY " + group_by + else: + return "" + + def order_by_clause(self, select, **kw): + """allow dialects to customize how ORDER BY is rendered.""" + + order_by = self._generate_delimited_list( + select._order_by_clauses, OPERATORS[operators.comma_op], **kw + ) + + if order_by: + return " ORDER BY " + order_by + else: + return "" + + def for_update_clause(self, select, **kw): + return " FOR UPDATE" + + def returning_clause( + self, + stmt: UpdateBase, + returning_cols: Sequence[ColumnElement[Any]], + *, + populate_result_map: bool, + **kw: Any, + ) -> str: + columns = [ + self._label_returning_column( + stmt, + column, + populate_result_map, + fallback_label_name=fallback_label_name, + column_is_repeated=repeated, + name=name, + proxy_name=proxy_name, + **kw, + ) + for ( + name, + proxy_name, + fallback_label_name, + column, + repeated, + ) in stmt._generate_columns_plus_names( + True, cols=base._select_iterables(returning_cols) + ) + ] + + return "RETURNING " + ", ".join(columns) + + def limit_clause(self, select, **kw): + text = "" + if select._limit_clause is not None: + text += "\n LIMIT " + self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + if select._limit_clause is None: + text += "\n LIMIT -1" + text += " OFFSET " + self.process(select._offset_clause, **kw) + return text + + def fetch_clause( + self, + select, + fetch_clause=None, + require_offset=False, + use_literal_execute_for_simple_int=False, + **kw, + ): + if fetch_clause is None: + fetch_clause = select._fetch_clause + 
fetch_clause_options = select._fetch_clause_options + else: + fetch_clause_options = {"percent": False, "with_ties": False} + + text = "" + + if select._offset_clause is not None: + offset_clause = select._offset_clause + if ( + use_literal_execute_for_simple_int + and select._simple_int_clause(offset_clause) + ): + offset_clause = offset_clause.render_literal_execute() + offset_str = self.process(offset_clause, **kw) + text += "\n OFFSET %s ROWS" % offset_str + elif require_offset: + text += "\n OFFSET 0 ROWS" + + if fetch_clause is not None: + if ( + use_literal_execute_for_simple_int + and select._simple_int_clause(fetch_clause) + ): + fetch_clause = fetch_clause.render_literal_execute() + text += "\n FETCH FIRST %s%s ROWS %s" % ( + self.process(fetch_clause, **kw), + " PERCENT" if fetch_clause_options["percent"] else "", + "WITH TIES" if fetch_clause_options["with_ties"] else "ONLY", + ) + return text + + def visit_table( + self, + table, + asfrom=False, + iscrud=False, + ashint=False, + fromhints=None, + use_schema=True, + from_linter=None, + ambiguous_table_name_map=None, + **kwargs, + ): + if from_linter: + from_linter.froms[table] = table.fullname + + if asfrom or ashint: + effective_schema = self.preparer.schema_for_object(table) + + if use_schema and effective_schema: + ret = ( + self.preparer.quote_schema(effective_schema) + + "." + + self.preparer.quote(table.name) + ) + else: + ret = self.preparer.quote(table.name) + + if ( + not effective_schema + and ambiguous_table_name_map + and table.name in ambiguous_table_name_map + ): + anon_name = self._truncated_identifier( + "alias", ambiguous_table_name_map[table.name] + ) + + ret = ret + self.get_render_as_alias_suffix( + self.preparer.format_alias(None, anon_name) + ) + + if fromhints and table in fromhints: + ret = self.format_from_hint_text( + ret, table, fromhints[table], iscrud + ) + return ret + else: + return "" + + def visit_join(self, join, asfrom=False, from_linter=None, **kwargs): + if from_linter: + from_linter.edges.update( + itertools.product( + _de_clone(join.left._from_objects), + _de_clone(join.right._from_objects), + ) + ) + + if join.full: + join_type = " FULL OUTER JOIN " + elif join.isouter: + join_type = " LEFT OUTER JOIN " + else: + join_type = " JOIN " + return ( + join.left._compiler_dispatch( + self, asfrom=True, from_linter=from_linter, **kwargs + ) + + join_type + + join.right._compiler_dispatch( + self, asfrom=True, from_linter=from_linter, **kwargs + ) + + " ON " + # TODO: likely need asfrom=True here? + + join.onclause._compiler_dispatch( + self, from_linter=from_linter, **kwargs + ) + ) + + def _setup_crud_hints(self, stmt, table_text): + dialect_hints = { + table: hint_text + for (table, dialect), hint_text in stmt._hints.items() + if dialect in ("*", self.dialect.name) + } + if stmt.table in dialect_hints: + table_text = self.format_from_hint_text( + table_text, stmt.table, dialect_hints[stmt.table], True + ) + return dialect_hints, table_text + + # within the realm of "insertmanyvalues sentinel columns", + # these lookups match different kinds of Column() configurations + # to specific backend capabilities. 
they are broken into two + # lookups, one for autoincrement columns and the other for non + # autoincrement columns + _sentinel_col_non_autoinc_lookup = util.immutabledict( + { + _SentinelDefaultCharacterization.CLIENTSIDE: ( + InsertmanyvaluesSentinelOpts._SUPPORTED_OR_NOT + ), + _SentinelDefaultCharacterization.SENTINEL_DEFAULT: ( + InsertmanyvaluesSentinelOpts._SUPPORTED_OR_NOT + ), + _SentinelDefaultCharacterization.NONE: ( + InsertmanyvaluesSentinelOpts._SUPPORTED_OR_NOT + ), + _SentinelDefaultCharacterization.IDENTITY: ( + InsertmanyvaluesSentinelOpts.IDENTITY + ), + _SentinelDefaultCharacterization.SEQUENCE: ( + InsertmanyvaluesSentinelOpts.SEQUENCE + ), + } + ) + _sentinel_col_autoinc_lookup = _sentinel_col_non_autoinc_lookup.union( + { + _SentinelDefaultCharacterization.NONE: ( + InsertmanyvaluesSentinelOpts.AUTOINCREMENT + ), + } + ) + + def _get_sentinel_column_for_table( + self, table: Table + ) -> Optional[Sequence[Column[Any]]]: + """given a :class:`.Table`, return a usable sentinel column or + columns for this dialect if any. + + Return None if no sentinel columns could be identified, or raise an + error if a column was marked as a sentinel explicitly but isn't + compatible with this dialect. + + """ + + sentinel_opts = self.dialect.insertmanyvalues_implicit_sentinel + sentinel_characteristics = table._sentinel_column_characteristics + + sent_cols = sentinel_characteristics.columns + + if sent_cols is None: + return None + + if sentinel_characteristics.is_autoinc: + bitmask = self._sentinel_col_autoinc_lookup.get( + sentinel_characteristics.default_characterization, 0 + ) + else: + bitmask = self._sentinel_col_non_autoinc_lookup.get( + sentinel_characteristics.default_characterization, 0 + ) + + if sentinel_opts & bitmask: + return sent_cols + + if sentinel_characteristics.is_explicit: + # a column was explicitly marked as insert_sentinel=True, + # however it is not compatible with this dialect. they should + # not indicate this column as a sentinel if they need to include + # this dialect. + + # TODO: do we want non-primary key explicit sentinel cols + # that can gracefully degrade for some backends? + # insert_sentinel="degrade" perhaps. not for the initial release. + # I am hoping people are generally not dealing with this sentinel + # business at all. + + # if is_explicit is True, there will be only one sentinel column. + + raise exc.InvalidRequestError( + f"Column {sent_cols[0]} can't be explicitly " + "marked as a sentinel column when using the " + f"{self.dialect.name} dialect, as the " + "particular type of default generation on this column is " + "not currently compatible with this dialect's specific " + f"INSERT..RETURNING syntax which can receive the " + "server-generated value in " + "a deterministic way. To remove this error, remove " + "insert_sentinel=True from primary key autoincrement " + "columns; these columns are automatically used as " + "sentinels for supported dialects in any case." 
+ ) + + return None + + def _deliver_insertmanyvalues_batches( + self, + statement: str, + parameters: _DBAPIMultiExecuteParams, + compiled_parameters: List[_MutableCoreSingleExecuteParams], + generic_setinputsizes: Optional[_GenericSetInputSizesType], + batch_size: int, + sort_by_parameter_order: bool, + schema_translate_map: Optional[SchemaTranslateMapType], + ) -> Iterator[_InsertManyValuesBatch]: + imv = self._insertmanyvalues + assert imv is not None + + if not imv.sentinel_param_keys: + _sentinel_from_params = None + else: + _sentinel_from_params = operator.itemgetter( + *imv.sentinel_param_keys + ) + + lenparams = len(parameters) + if imv.is_default_expr and not self.dialect.supports_default_metavalue: + # backend doesn't support + # INSERT INTO table (pk_col) VALUES (DEFAULT), (DEFAULT), ... + # at the moment this is basically SQL Server due to + # not being able to use DEFAULT for identity column + # just yield out that many single statements! still + # faster than a whole connection.execute() call ;) + # + # note we still are taking advantage of the fact that we know + # we are using RETURNING. The generalized approach of fetching + # cursor.lastrowid etc. still goes through the more heavyweight + # "ExecutionContext per statement" system as it isn't usable + # as a generic "RETURNING" approach + use_row_at_a_time = True + downgraded = False + elif not self.dialect.supports_multivalues_insert or ( + sort_by_parameter_order + and self._result_columns + and (imv.sentinel_columns is None or imv.includes_upsert_behaviors) + ): + # deterministic order was requested and the compiler could + # not organize sentinel columns for this dialect/statement. + # use row at a time + use_row_at_a_time = True + downgraded = True + else: + use_row_at_a_time = False + downgraded = False + + if use_row_at_a_time: + for batchnum, (param, compiled_param) in enumerate( + cast( + "Sequence[Tuple[_DBAPISingleExecuteParams, _MutableCoreSingleExecuteParams]]", # noqa: E501 + zip(parameters, compiled_parameters), + ), + 1, + ): + yield _InsertManyValuesBatch( + statement, + param, + generic_setinputsizes, + [param], + ( + [_sentinel_from_params(compiled_param)] + if _sentinel_from_params + else [] + ), + 1, + batchnum, + lenparams, + sort_by_parameter_order, + downgraded, + ) + return + + if schema_translate_map: + rst = functools.partial( + self.preparer._render_schema_translates, + schema_translate_map=schema_translate_map, + ) + else: + rst = None + + imv_single_values_expr = imv.single_values_expr + if rst: + imv_single_values_expr = rst(imv_single_values_expr) + + executemany_values = f"({imv_single_values_expr})" + statement = statement.replace(executemany_values, "__EXECMANY_TOKEN__") + + # Use optional insertmanyvalues_max_parameters + # to further shrink the batch size so that there are no more than + # insertmanyvalues_max_parameters params. + # Currently used by SQL Server, which limits statements to 2100 bound + # parameters (actually 2099). 
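
Worked numbers for the clamp computed just below, with all figures assumed for illustration: under SQL Server's effective cap of 2099 bound parameters, an INSERT carrying four parameters per VALUES row plus three parameters elsewhere in the statement is limited to 524 rows per batch.

    # assumed figures, mirroring the batch-size computation below
    max_params = 2099                 # dialect.insertmanyvalues_max_parameters
    num_params_per_batch = 4          # bound params per VALUES row
    num_params_outside_of_batch = 3   # e.g. params in RETURNING / WHERE / CTEs
    requested_batch_size = 1000

    batch_size = min(
        requested_batch_size,
        (max_params - num_params_outside_of_batch) // num_params_per_batch,
    )
    print(batch_size)  # 524
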
+ max_params = self.dialect.insertmanyvalues_max_parameters + if max_params: + total_num_of_params = len(self.bind_names) + num_params_per_batch = len(imv.insert_crud_params) + num_params_outside_of_batch = ( + total_num_of_params - num_params_per_batch + ) + batch_size = min( + batch_size, + ( + (max_params - num_params_outside_of_batch) + // num_params_per_batch + ), + ) + + batches = cast("List[Sequence[Any]]", list(parameters)) + compiled_batches = cast( + "List[Sequence[Any]]", list(compiled_parameters) + ) + + processed_setinputsizes: Optional[_GenericSetInputSizesType] = None + batchnum = 1 + total_batches = lenparams // batch_size + ( + 1 if lenparams % batch_size else 0 + ) + + insert_crud_params = imv.insert_crud_params + assert insert_crud_params is not None + + if rst: + insert_crud_params = [ + (col, key, rst(expr), st) + for col, key, expr, st in insert_crud_params + ] + + escaped_bind_names: Mapping[str, str] + expand_pos_lower_index = expand_pos_upper_index = 0 + + if not self.positional: + if self.escaped_bind_names: + escaped_bind_names = self.escaped_bind_names + else: + escaped_bind_names = {} + + all_keys = set(parameters[0]) + + def apply_placeholders(keys, formatted): + for key in keys: + key = escaped_bind_names.get(key, key) + formatted = formatted.replace( + self.bindtemplate % {"name": key}, + self.bindtemplate + % {"name": f"{key}__EXECMANY_INDEX__"}, + ) + return formatted + + if imv.embed_values_counter: + imv_values_counter = ", _IMV_VALUES_COUNTER" + else: + imv_values_counter = "" + formatted_values_clause = f"""({', '.join( + apply_placeholders(bind_keys, formatted) + for _, _, formatted, bind_keys in insert_crud_params + )}{imv_values_counter})""" + + keys_to_replace = all_keys.intersection( + escaped_bind_names.get(key, key) + for _, _, _, bind_keys in insert_crud_params + for key in bind_keys + ) + base_parameters = { + key: parameters[0][key] + for key in all_keys.difference(keys_to_replace) + } + executemany_values_w_comma = "" + else: + formatted_values_clause = "" + keys_to_replace = set() + base_parameters = {} + + if imv.embed_values_counter: + executemany_values_w_comma = ( + f"({imv_single_values_expr}, _IMV_VALUES_COUNTER), " + ) + else: + executemany_values_w_comma = f"({imv_single_values_expr}), " + + all_names_we_will_expand: Set[str] = set() + for elem in imv.insert_crud_params: + all_names_we_will_expand.update(elem[3]) + + # get the start and end position in a particular list + # of parameters where we will be doing the "expanding". 
+ # statements can have params on either side or both sides, + # given RETURNING and CTEs + if all_names_we_will_expand: + positiontup = self.positiontup + assert positiontup is not None + + all_expand_positions = { + idx + for idx, name in enumerate(positiontup) + if name in all_names_we_will_expand + } + expand_pos_lower_index = min(all_expand_positions) + expand_pos_upper_index = max(all_expand_positions) + 1 + assert ( + len(all_expand_positions) + == expand_pos_upper_index - expand_pos_lower_index + ) + + if self._numeric_binds: + escaped = re.escape(self._numeric_binds_identifier_char) + executemany_values_w_comma = re.sub( + rf"{escaped}\d+", "%s", executemany_values_w_comma + ) + + while batches: + batch = batches[0:batch_size] + compiled_batch = compiled_batches[0:batch_size] + + batches[0:batch_size] = [] + compiled_batches[0:batch_size] = [] + + if batches: + current_batch_size = batch_size + else: + current_batch_size = len(batch) + + if generic_setinputsizes: + # if setinputsizes is present, expand this collection to + # suit the batch length as well + # currently this will be mssql+pyodbc for internal dialects + processed_setinputsizes = [ + (new_key, len_, typ) + for new_key, len_, typ in ( + (f"{key}_{index}", len_, typ) + for index in range(current_batch_size) + for key, len_, typ in generic_setinputsizes + ) + ] + + replaced_parameters: Any + if self.positional: + num_ins_params = imv.num_positional_params_counted + + batch_iterator: Iterable[Sequence[Any]] + extra_params_left: Sequence[Any] + extra_params_right: Sequence[Any] + + if num_ins_params == len(batch[0]): + extra_params_left = extra_params_right = () + batch_iterator = batch + else: + extra_params_left = batch[0][:expand_pos_lower_index] + extra_params_right = batch[0][expand_pos_upper_index:] + batch_iterator = ( + b[expand_pos_lower_index:expand_pos_upper_index] + for b in batch + ) + + if imv.embed_values_counter: + expanded_values_string = ( + "".join( + executemany_values_w_comma.replace( + "_IMV_VALUES_COUNTER", str(i) + ) + for i, _ in enumerate(batch) + ) + )[:-2] + else: + expanded_values_string = ( + (executemany_values_w_comma * current_batch_size) + )[:-2] + + if self._numeric_binds and num_ins_params > 0: + # numeric will always number the parameters inside of + # VALUES (and thus order self.positiontup) to be higher + # than non-VALUES parameters, no matter where in the + # statement those non-VALUES parameters appear (this is + # ensured in _process_numeric by numbering first all + # params that are not in _values_bindparam) + # therefore all extra params are always + # on the left side and numbered lower than the VALUES + # parameters + assert not extra_params_right + + start = expand_pos_lower_index + 1 + end = num_ins_params * (current_batch_size) + start + + # need to format here, since statement may contain + # unescaped %, while values_string contains just (%s, %s) + positions = tuple( + f"{self._numeric_binds_identifier_char}{i}" + for i in range(start, end) + ) + expanded_values_string = expanded_values_string % positions + + replaced_statement = statement.replace( + "__EXECMANY_TOKEN__", expanded_values_string + ) + + replaced_parameters = tuple( + itertools.chain.from_iterable(batch_iterator) + ) + + replaced_parameters = ( + extra_params_left + + replaced_parameters + + extra_params_right + ) + + else: + replaced_values_clauses = [] + replaced_parameters = base_parameters.copy() + + for i, param in enumerate(batch): + fmv = formatted_values_clause.replace( + "EXECMANY_INDEX__", str(i) 
+ ) + if imv.embed_values_counter: + fmv = fmv.replace("_IMV_VALUES_COUNTER", str(i)) + + replaced_values_clauses.append(fmv) + replaced_parameters.update( + {f"{key}__{i}": param[key] for key in keys_to_replace} + ) + + replaced_statement = statement.replace( + "__EXECMANY_TOKEN__", + ", ".join(replaced_values_clauses), + ) + + yield _InsertManyValuesBatch( + replaced_statement, + replaced_parameters, + processed_setinputsizes, + batch, + ( + [_sentinel_from_params(cb) for cb in compiled_batch] + if _sentinel_from_params + else [] + ), + current_batch_size, + batchnum, + total_batches, + sort_by_parameter_order, + False, + ) + batchnum += 1 + + def visit_insert( + self, insert_stmt, visited_bindparam=None, visiting_cte=None, **kw + ): + compile_state = insert_stmt._compile_state_factory( + insert_stmt, self, **kw + ) + insert_stmt = compile_state.statement + + if visiting_cte is not None: + kw["visiting_cte"] = visiting_cte + toplevel = False + else: + toplevel = not self.stack + + if toplevel: + self.isinsert = True + if not self.dml_compile_state: + self.dml_compile_state = compile_state + if not self.compile_state: + self.compile_state = compile_state + + self.stack.append( + { + "correlate_froms": set(), + "asfrom_froms": set(), + "selectable": insert_stmt, + } + ) + + counted_bindparam = 0 + + # reset any incoming "visited_bindparam" collection + visited_bindparam = None + + # for positional, insertmanyvalues needs to know how many + # bound parameters are in the VALUES sequence; there's no simple + # rule because default expressions etc. can have zero or more + # params inside them. After multiple attempts to figure this out, + # this very simplistic "count after" works and is + # likely the least amount of callcounts, though looks clumsy + if self.positional and visiting_cte is None: + # if we are inside a CTE, don't count parameters + # here since they won't be for insertmanyvalues. keep + # visited_bindparam at None so no counting happens. + # see #9173 + visited_bindparam = [] + + crud_params_struct = crud._get_crud_params( + self, + insert_stmt, + compile_state, + toplevel, + visited_bindparam=visited_bindparam, + **kw, + ) + + if self.positional and visited_bindparam is not None: + counted_bindparam = len(visited_bindparam) + if self._numeric_binds: + if self._values_bindparam is not None: + self._values_bindparam += visited_bindparam + else: + self._values_bindparam = visited_bindparam + + crud_params_single = crud_params_struct.single_params + + if ( + not crud_params_single + and not self.dialect.supports_default_values + and not self.dialect.supports_default_metavalue + and not self.dialect.supports_empty_insert + ): + raise exc.CompileError( + "The '%s' dialect with current database " + "version settings does not support empty " + "inserts." % self.dialect.name + ) + + if compile_state._has_multi_parameters: + if not self.dialect.supports_multivalues_insert: + raise exc.CompileError( + "The '%s' dialect with current database " + "version settings does not support " + "in-place multirow inserts." % self.dialect.name + ) + elif ( + self.implicit_returning or insert_stmt._returning + ) and insert_stmt._sort_by_parameter_order: + raise exc.CompileError( + "RETURNING cannot be deterministically sorted when " + "using an INSERT which includes multi-row values()."
+ ) + crud_params_single = crud_params_struct.single_params + else: + crud_params_single = crud_params_struct.single_params + + preparer = self.preparer + supports_default_values = self.dialect.supports_default_values + + text = "INSERT " + + if insert_stmt._prefixes: + text += self._generate_prefixes( + insert_stmt, insert_stmt._prefixes, **kw + ) + + text += "INTO " + table_text = preparer.format_table(insert_stmt.table) + + if insert_stmt._hints: + _, table_text = self._setup_crud_hints(insert_stmt, table_text) + + if insert_stmt._independent_ctes: + self._dispatch_independent_ctes(insert_stmt, kw) + + text += table_text + + if crud_params_single or not supports_default_values: + text += " (%s)" % ", ".join( + [expr for _, expr, _, _ in crud_params_single] + ) + + # look for insertmanyvalues attributes that would have been configured + # by crud.py as it scanned through the columns to be part of the + # INSERT + use_insertmanyvalues = crud_params_struct.use_insertmanyvalues + named_sentinel_params: Optional[Sequence[str]] = None + add_sentinel_cols = None + implicit_sentinel = False + + returning_cols = self.implicit_returning or insert_stmt._returning + if returning_cols: + add_sentinel_cols = crud_params_struct.use_sentinel_columns + if add_sentinel_cols is not None: + assert use_insertmanyvalues + + # search for the sentinel column explicitly present + # in the INSERT columns list, and additionally check that + # this column has a bound parameter name set up that's in the + # parameter list. If both of these cases are present, it means + # we will have a client side value for the sentinel in each + # parameter set. + + _params_by_col = { + col: param_names + for col, _, _, param_names in crud_params_single + } + named_sentinel_params = [] + for _add_sentinel_col in add_sentinel_cols: + if _add_sentinel_col not in _params_by_col: + named_sentinel_params = None + break + param_name = self._within_exec_param_key_getter( + _add_sentinel_col + ) + if param_name not in _params_by_col[_add_sentinel_col]: + named_sentinel_params = None + break + named_sentinel_params.append(param_name) + + if named_sentinel_params is None: + # if we are not going to have a client side value for + # the sentinel in the parameter set, that means it's + # an autoincrement, an IDENTITY, or a server-side SQL + # expression like nextval('seqname'). So this is + # an "implicit" sentinel; we will look for it in + # RETURNING + # only, and then sort on it. For this case on PG, + # SQL Server we have to use a special INSERT form + # that guarantees the server side function lines up with + # the entries in the VALUES. + if ( + self.dialect.insertmanyvalues_implicit_sentinel + & InsertmanyvaluesSentinelOpts.ANY_AUTOINCREMENT + ): + implicit_sentinel = True + else: + # here, we are not using a sentinel at all + # and we are likely the SQLite dialect. + # The first add_sentinel_col that we have should not + # be marked as "insert_sentinel=True". if it was, + # an error should have been raised in + # _get_sentinel_column_for_table. + assert not add_sentinel_cols[0]._insert_sentinel, ( + "sentinel selection rules should have prevented " + "us from getting here for this dialect" + ) + + # always put the sentinel columns last. even if they are + # in the returning list already, they will be there twice + # then. 
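
This sentinel bookkeeping backs the public sort_by_parameter_order flag on RETURNING. A hedged sketch; here "users" is a hypothetical Table with an autoincrement primary key and "engine" a hypothetical Engine. With the flag set, rows from an executemany-style INSERT..RETURNING come back in the same order as the parameter dictionaries.

    from sqlalchemy import insert

    # users: hypothetical Table with an autoincrement primary key
    stmt = insert(users).returning(users.c.id, sort_by_parameter_order=True)

    with engine.begin() as conn:  # engine: hypothetical Engine
        result = conn.execute(
            stmt, [{"name": "a"}, {"name": "b"}, {"name": "c"}]
        )
        # ids arrive in the same order as the three parameter dicts above
        ids = result.scalars().all()
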
+ returning_cols = list(returning_cols) + list(add_sentinel_cols) + + returning_clause = self.returning_clause( + insert_stmt, + returning_cols, + populate_result_map=toplevel, + ) + + if self.returning_precedes_values: + text += " " + returning_clause + + else: + returning_clause = None + + if insert_stmt.select is not None: + # placed here by crud.py + select_text = self.process( + self.stack[-1]["insert_from_select"], insert_into=True, **kw + ) + + if self.ctes and self.dialect.cte_follows_insert: + nesting_level = len(self.stack) if not toplevel else None + text += " %s%s" % ( + self._render_cte_clause( + nesting_level=nesting_level, + include_following_stack=True, + ), + select_text, + ) + else: + text += " %s" % select_text + elif not crud_params_single and supports_default_values: + text += " DEFAULT VALUES" + if use_insertmanyvalues: + self._insertmanyvalues = _InsertManyValues( + True, + self.dialect.default_metavalue_token, + cast( + "List[crud._CrudParamElementStr]", crud_params_single + ), + counted_bindparam, + sort_by_parameter_order=( + insert_stmt._sort_by_parameter_order + ), + includes_upsert_behaviors=( + insert_stmt._post_values_clause is not None + ), + sentinel_columns=add_sentinel_cols, + num_sentinel_columns=( + len(add_sentinel_cols) if add_sentinel_cols else 0 + ), + implicit_sentinel=implicit_sentinel, + ) + elif compile_state._has_multi_parameters: + text += " VALUES %s" % ( + ", ".join( + "(%s)" + % (", ".join(value for _, _, value, _ in crud_param_set)) + for crud_param_set in crud_params_struct.all_multi_params + ), + ) + else: + insert_single_values_expr = ", ".join( + [ + value + for _, _, value, _ in cast( + "List[crud._CrudParamElementStr]", + crud_params_single, + ) + ] + ) + + if use_insertmanyvalues: + if ( + implicit_sentinel + and ( + self.dialect.insertmanyvalues_implicit_sentinel + & InsertmanyvaluesSentinelOpts.USE_INSERT_FROM_SELECT + ) + # this is checking if we have + # INSERT INTO table (id) VALUES (DEFAULT). + and not (crud_params_struct.is_default_metavalue_only) + ): + # if we have a sentinel column that is server generated, + # then for selected backends render the VALUES list as a + # subquery. This is the orderable form supported by + # PostgreSQL and SQL Server. + embed_sentinel_value = True + + render_bind_casts = ( + self.dialect.insertmanyvalues_implicit_sentinel + & InsertmanyvaluesSentinelOpts.RENDER_SELECT_COL_CASTS + ) + + colnames = ", ".join( + f"p{i}" for i, _ in enumerate(crud_params_single) + ) + + if render_bind_casts: + # render casts for the SELECT list. For PG, we are + # already rendering bind casts in the parameter list, + # selectively for the more "tricky" types like ARRAY. + # however, even for the "easy" types, if the parameter + # is NULL for every entry, PG gives up and says + # "it must be TEXT", which fails for other easy types + # like ints. So we cast on this side too. 
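
Spelled out with assumed placeholder names, the orderable fragment assembled just below takes the following shape. This is an illustrative reconstruction only, not a compiler API:

    # mirror of the f-strings below, assuming two columns and ":name"-style
    # placeholders in the single-row VALUES body
    colnames = ", ".join(f"p{i}" for i in range(2))   # "p0, p1"
    insert_single_values_expr = ":name, :email"       # assumed VALUES body

    text = (
        f" SELECT {colnames} FROM "
        f"(VALUES ({insert_single_values_expr})) "
        f"AS imp_sen({colnames}, sen_counter) "
        "ORDER BY sen_counter"
    )
    print(text)
    #  SELECT p0, p1 FROM (VALUES (:name, :email))
    #  AS imp_sen(p0, p1, sen_counter) ORDER BY sen_counter
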
+ colnames_w_cast = ", ".join( + self.render_bind_cast( + col.type, + col.type._unwrapped_dialect_impl(self.dialect), + f"p{i}", + ) + for i, (col, *_) in enumerate(crud_params_single) + ) + else: + colnames_w_cast = colnames + + text += ( + f" SELECT {colnames_w_cast} FROM " + f"(VALUES ({insert_single_values_expr})) " + f"AS imp_sen({colnames}, sen_counter) " + "ORDER BY sen_counter" + ) + else: + # otherwise, if no sentinel or backend doesn't support + # orderable subquery form, use a plain VALUES list + embed_sentinel_value = False + text += f" VALUES ({insert_single_values_expr})" + + self._insertmanyvalues = _InsertManyValues( + is_default_expr=False, + single_values_expr=insert_single_values_expr, + insert_crud_params=cast( + "List[crud._CrudParamElementStr]", + crud_params_single, + ), + num_positional_params_counted=counted_bindparam, + sort_by_parameter_order=( + insert_stmt._sort_by_parameter_order + ), + includes_upsert_behaviors=( + insert_stmt._post_values_clause is not None + ), + sentinel_columns=add_sentinel_cols, + num_sentinel_columns=( + len(add_sentinel_cols) if add_sentinel_cols else 0 + ), + sentinel_param_keys=named_sentinel_params, + implicit_sentinel=implicit_sentinel, + embed_values_counter=embed_sentinel_value, + ) + + else: + text += f" VALUES ({insert_single_values_expr})" + + if insert_stmt._post_values_clause is not None: + post_values_clause = self.process( + insert_stmt._post_values_clause, **kw + ) + if post_values_clause: + text += " " + post_values_clause + + if returning_clause and not self.returning_precedes_values: + text += " " + returning_clause + + if self.ctes and not self.dialect.cte_follows_insert: + nesting_level = len(self.stack) if not toplevel else None + text = ( + self._render_cte_clause( + nesting_level=nesting_level, + include_following_stack=True, + ) + + text + ) + + self.stack.pop(-1) + + return text + + def update_limit_clause(self, update_stmt): + """Provide a hook for MySQL to add LIMIT to the UPDATE""" + return None + + def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): + """Provide a hook to override the initial table clause + in an UPDATE statement. + + MySQL overrides this. + + """ + kw["asfrom"] = True + return from_table._compiler_dispatch(self, iscrud=True, **kw) + + def update_from_clause( + self, update_stmt, from_table, extra_froms, from_hints, **kw + ): + """Provide a hook to override the generation of an + UPDATE..FROM clause. + + MySQL and MSSQL override this. 
+ + """ + raise NotImplementedError( + "This backend does not support multiple-table " + "criteria within UPDATE" + ) + + def visit_update(self, update_stmt, visiting_cte=None, **kw): + compile_state = update_stmt._compile_state_factory( + update_stmt, self, **kw + ) + update_stmt = compile_state.statement + + if visiting_cte is not None: + kw["visiting_cte"] = visiting_cte + toplevel = False + else: + toplevel = not self.stack + + if toplevel: + self.isupdate = True + if not self.dml_compile_state: + self.dml_compile_state = compile_state + if not self.compile_state: + self.compile_state = compile_state + + if self.linting & COLLECT_CARTESIAN_PRODUCTS: + from_linter = FromLinter({}, set()) + warn_linting = self.linting & WARN_LINTING + if toplevel: + self.from_linter = from_linter + else: + from_linter = None + warn_linting = False + + extra_froms = compile_state._extra_froms + is_multitable = bool(extra_froms) + + if is_multitable: + # main table might be a JOIN + main_froms = set(_from_objects(update_stmt.table)) + render_extra_froms = [ + f for f in extra_froms if f not in main_froms + ] + correlate_froms = main_froms.union(extra_froms) + else: + render_extra_froms = [] + correlate_froms = {update_stmt.table} + + self.stack.append( + { + "correlate_froms": correlate_froms, + "asfrom_froms": correlate_froms, + "selectable": update_stmt, + } + ) + + text = "UPDATE " + + if update_stmt._prefixes: + text += self._generate_prefixes( + update_stmt, update_stmt._prefixes, **kw + ) + + table_text = self.update_tables_clause( + update_stmt, + update_stmt.table, + render_extra_froms, + from_linter=from_linter, + **kw, + ) + crud_params_struct = crud._get_crud_params( + self, update_stmt, compile_state, toplevel, **kw + ) + crud_params = crud_params_struct.single_params + + if update_stmt._hints: + dialect_hints, table_text = self._setup_crud_hints( + update_stmt, table_text + ) + else: + dialect_hints = None + + if update_stmt._independent_ctes: + self._dispatch_independent_ctes(update_stmt, kw) + + text += table_text + + text += " SET " + text += ", ".join( + expr + "=" + value + for _, expr, value, _ in cast( + "List[Tuple[Any, str, str, Any]]", crud_params + ) + ) + + if self.implicit_returning or update_stmt._returning: + if self.returning_precedes_values: + text += " " + self.returning_clause( + update_stmt, + self.implicit_returning or update_stmt._returning, + populate_result_map=toplevel, + ) + + if extra_froms: + extra_from_text = self.update_from_clause( + update_stmt, + update_stmt.table, + render_extra_froms, + dialect_hints, + from_linter=from_linter, + **kw, + ) + if extra_from_text: + text += " " + extra_from_text + + if update_stmt._where_criteria: + t = self._generate_delimited_and_list( + update_stmt._where_criteria, from_linter=from_linter, **kw + ) + if t: + text += " WHERE " + t + + limit_clause = self.update_limit_clause(update_stmt) + if limit_clause: + text += " " + limit_clause + + if ( + self.implicit_returning or update_stmt._returning + ) and not self.returning_precedes_values: + text += " " + self.returning_clause( + update_stmt, + self.implicit_returning or update_stmt._returning, + populate_result_map=toplevel, + ) + + if self.ctes: + nesting_level = len(self.stack) if not toplevel else None + text = self._render_cte_clause(nesting_level=nesting_level) + text + + if warn_linting: + assert from_linter is not None + from_linter.warn(stmt_type="UPDATE") + + self.stack.pop(-1) + + return text + + def delete_extra_from_clause( + self, update_stmt, from_table, 
extra_froms, from_hints, **kw + ): + """Provide a hook to override the generation of an + DELETE..FROM clause. + + This can be used to implement DELETE..USING for example. + + MySQL and MSSQL override this. + + """ + raise NotImplementedError( + "This backend does not support multiple-table " + "criteria within DELETE" + ) + + def delete_table_clause(self, delete_stmt, from_table, extra_froms, **kw): + return from_table._compiler_dispatch( + self, asfrom=True, iscrud=True, **kw + ) + + def visit_delete(self, delete_stmt, visiting_cte=None, **kw): + compile_state = delete_stmt._compile_state_factory( + delete_stmt, self, **kw + ) + delete_stmt = compile_state.statement + + if visiting_cte is not None: + kw["visiting_cte"] = visiting_cte + toplevel = False + else: + toplevel = not self.stack + + if toplevel: + self.isdelete = True + if not self.dml_compile_state: + self.dml_compile_state = compile_state + if not self.compile_state: + self.compile_state = compile_state + + if self.linting & COLLECT_CARTESIAN_PRODUCTS: + from_linter = FromLinter({}, set()) + warn_linting = self.linting & WARN_LINTING + if toplevel: + self.from_linter = from_linter + else: + from_linter = None + warn_linting = False + + extra_froms = compile_state._extra_froms + + correlate_froms = {delete_stmt.table}.union(extra_froms) + self.stack.append( + { + "correlate_froms": correlate_froms, + "asfrom_froms": correlate_froms, + "selectable": delete_stmt, + } + ) + + text = "DELETE " + + if delete_stmt._prefixes: + text += self._generate_prefixes( + delete_stmt, delete_stmt._prefixes, **kw + ) + + text += "FROM " + + try: + table_text = self.delete_table_clause( + delete_stmt, + delete_stmt.table, + extra_froms, + from_linter=from_linter, + ) + except TypeError: + # anticipate 3rd party dialects that don't include **kw + # TODO: remove in 2.1 + table_text = self.delete_table_clause( + delete_stmt, delete_stmt.table, extra_froms + ) + if from_linter: + _ = self.process(delete_stmt.table, from_linter=from_linter) + + crud._get_crud_params(self, delete_stmt, compile_state, toplevel, **kw) + + if delete_stmt._hints: + dialect_hints, table_text = self._setup_crud_hints( + delete_stmt, table_text + ) + else: + dialect_hints = None + + if delete_stmt._independent_ctes: + self._dispatch_independent_ctes(delete_stmt, kw) + + text += table_text + + if ( + self.implicit_returning or delete_stmt._returning + ) and self.returning_precedes_values: + text += " " + self.returning_clause( + delete_stmt, + self.implicit_returning or delete_stmt._returning, + populate_result_map=toplevel, + ) + + if extra_froms: + extra_from_text = self.delete_extra_from_clause( + delete_stmt, + delete_stmt.table, + extra_froms, + dialect_hints, + from_linter=from_linter, + **kw, + ) + if extra_from_text: + text += " " + extra_from_text + + if delete_stmt._where_criteria: + t = self._generate_delimited_and_list( + delete_stmt._where_criteria, from_linter=from_linter, **kw + ) + if t: + text += " WHERE " + t + + if ( + self.implicit_returning or delete_stmt._returning + ) and not self.returning_precedes_values: + text += " " + self.returning_clause( + delete_stmt, + self.implicit_returning or delete_stmt._returning, + populate_result_map=toplevel, + ) + + if self.ctes: + nesting_level = len(self.stack) if not toplevel else None + text = self._render_cte_clause(nesting_level=nesting_level) + text + + if warn_linting: + assert from_linter is not None + from_linter.warn(stmt_type="DELETE") + + self.stack.pop(-1) + + return text + + def visit_savepoint(self, 
savepoint_stmt, **kw):
+        return "SAVEPOINT %s" % self.preparer.format_savepoint(savepoint_stmt)
+
+    def visit_rollback_to_savepoint(self, savepoint_stmt, **kw):
+        return "ROLLBACK TO SAVEPOINT %s" % self.preparer.format_savepoint(
+            savepoint_stmt
+        )
+
+    def visit_release_savepoint(self, savepoint_stmt, **kw):
+        return "RELEASE SAVEPOINT %s" % self.preparer.format_savepoint(
+            savepoint_stmt
+        )
+
+
+class StrSQLCompiler(SQLCompiler):
+    """A :class:`.SQLCompiler` subclass which allows a small selection
+    of non-standard SQL features to render into a string value.
+
+    The :class:`.StrSQLCompiler` is invoked whenever a Core expression
+    element is directly stringified without calling upon the
+    :meth:`_expression.ClauseElement.compile` method.
+    It can render a limited set
+    of non-standard SQL constructs to assist in basic stringification,
+    however for more substantial custom or dialect-specific SQL constructs,
+    it will be necessary to make use of
+    :meth:`_expression.ClauseElement.compile`
+    directly.
+
+    .. seealso::
+
+        :ref:`faq_sql_expression_string`
+
+    """
+
+    def _fallback_column_name(self, column):
+        return "<name unknown>"
+
+    @util.preload_module("sqlalchemy.engine.url")
+    def visit_unsupported_compilation(self, element, err, **kw):
+        if element.stringify_dialect != "default":
+            url = util.preloaded.engine_url
+            dialect = url.URL.create(element.stringify_dialect).get_dialect()()
+
+            compiler = dialect.statement_compiler(
+                dialect, None, _supporting_against=self
+            )
+            if not isinstance(compiler, StrSQLCompiler):
+                return compiler.process(element, **kw)
+
+        return super().visit_unsupported_compilation(element, err)
+
+    def visit_getitem_binary(self, binary, operator, **kw):
+        return "%s[%s]" % (
+            self.process(binary.left, **kw),
+            self.process(binary.right, **kw),
+        )
+
+    def visit_json_getitem_op_binary(self, binary, operator, **kw):
+        return self.visit_getitem_binary(binary, operator, **kw)
+
+    def visit_json_path_getitem_op_binary(self, binary, operator, **kw):
+        return self.visit_getitem_binary(binary, operator, **kw)
+
+    def visit_sequence(self, sequence, **kw):
+        return (
+            f"<next sequence value: {self.preparer.format_sequence(sequence)}>"
+        )
+
+    def returning_clause(
+        self,
+        stmt: UpdateBase,
+        returning_cols: Sequence[ColumnElement[Any]],
+        *,
+        populate_result_map: bool,
+        **kw: Any,
+    ) -> str:
+        columns = [
+            self._label_select_column(None, c, True, False, {})
+            for c in base._select_iterables(returning_cols)
+        ]
+        return "RETURNING " + ", ".join(columns)
+
+    def update_from_clause(
+        self, update_stmt, from_table, extra_froms, from_hints, **kw
+    ):
+        kw["asfrom"] = True
+        return "FROM " + ", ".join(
+            t._compiler_dispatch(self, fromhints=from_hints, **kw)
+            for t in extra_froms
+        )
+
+    def delete_extra_from_clause(
+        self, update_stmt, from_table, extra_froms, from_hints, **kw
+    ):
+        kw["asfrom"] = True
+        return ", " + ", ".join(
+            t._compiler_dispatch(self, fromhints=from_hints, **kw)
+            for t in extra_froms
+        )
+
+    def visit_empty_set_expr(self, element_types, **kw):
+        return "SELECT 1 WHERE 1!=1"
+
+    def get_from_hint_text(self, table, text):
+        return "[%s]" % text
+
+    def visit_regexp_match_op_binary(self, binary, operator, **kw):
+        return self._generate_generic_binary(binary, " <regexp> ", **kw)
+
+    def visit_not_regexp_match_op_binary(self, binary, operator, **kw):
+        return self._generate_generic_binary(binary, " <not regexp> ", **kw)
+
+    def visit_regexp_replace_op_binary(self, binary, operator, **kw):
+        return "<regexp replace>(%s, %s)" % (
+            binary.left._compiler_dispatch(self, **kw),
+            binary.right._compiler_dispatch(self, **kw),
+        )
+
+    def visit_try_cast(self, 
cast, **kwargs): + return "TRY_CAST(%s AS %s)" % ( + cast.clause._compiler_dispatch(self, **kwargs), + cast.typeclause._compiler_dispatch(self, **kwargs), + ) + + +class DDLCompiler(Compiled): + is_ddl = True + + if TYPE_CHECKING: + + def __init__( + self, + dialect: Dialect, + statement: ExecutableDDLElement, + schema_translate_map: Optional[SchemaTranslateMapType] = ..., + render_schema_translate: bool = ..., + compile_kwargs: Mapping[str, Any] = ..., + ): ... + + @util.memoized_property + def sql_compiler(self): + return self.dialect.statement_compiler( + self.dialect, None, schema_translate_map=self.schema_translate_map + ) + + @util.memoized_property + def type_compiler(self): + return self.dialect.type_compiler_instance + + def construct_params( + self, + params: Optional[_CoreSingleExecuteParams] = None, + extracted_parameters: Optional[Sequence[BindParameter[Any]]] = None, + escape_names: bool = True, + ) -> Optional[_MutableCoreSingleExecuteParams]: + return None + + def visit_ddl(self, ddl, **kwargs): + # table events can substitute table and schema name + context = ddl.context + if isinstance(ddl.target, schema.Table): + context = context.copy() + + preparer = self.preparer + path = preparer.format_table_seq(ddl.target) + if len(path) == 1: + table, sch = path[0], "" + else: + table, sch = path[-1], path[0] + + context.setdefault("table", table) + context.setdefault("schema", sch) + context.setdefault("fullname", preparer.format_table(ddl.target)) + + return self.sql_compiler.post_process_text(ddl.statement % context) + + def visit_create_schema(self, create, **kw): + text = "CREATE SCHEMA " + if create.if_not_exists: + text += "IF NOT EXISTS " + return text + self.preparer.format_schema(create.element) + + def visit_drop_schema(self, drop, **kw): + text = "DROP SCHEMA " + if drop.if_exists: + text += "IF EXISTS " + text += self.preparer.format_schema(drop.element) + if drop.cascade: + text += " CASCADE" + return text + + def visit_create_table(self, create, **kw): + table = create.element + preparer = self.preparer + + text = "\nCREATE " + if table._prefixes: + text += " ".join(table._prefixes) + " " + + text += "TABLE " + if create.if_not_exists: + text += "IF NOT EXISTS " + + text += preparer.format_table(table) + " " + + create_table_suffix = self.create_table_suffix(table) + if create_table_suffix: + text += create_table_suffix + " " + + text += "(" + + separator = "\n" + + # if only one primary key, specify it along with the column + first_pk = False + for create_column in create.columns: + column = create_column.element + try: + processed = self.process( + create_column, first_pk=column.primary_key and not first_pk + ) + if processed is not None: + text += separator + separator = ", \n" + text += "\t" + processed + if column.primary_key: + first_pk = True + except exc.CompileError as ce: + raise exc.CompileError( + "(in table '%s', column '%s'): %s" + % (table.description, column.name, ce.args[0]) + ) from ce + + const = self.create_table_constraints( + table, + _include_foreign_key_constraints=create.include_foreign_key_constraints, # noqa + ) + if const: + text += separator + "\t" + const + + text += "\n)%s\n\n" % self.post_create_table(table) + return text + + def visit_create_column(self, create, first_pk=False, **kw): + column = create.element + + if column.system: + return None + + text = self.get_column_specification(column, first_pk=first_pk) + const = " ".join( + self.process(constraint) for constraint in column.constraints + ) + if const: + text += " " + const + 
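+        # e.g., a sketch: Column("x", Integer, nullable=False) renders
+        # roughly as "x INTEGER NOT NULL" here; exact output is
+        # dialect-dependent.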
+ return text + + def create_table_constraints( + self, table, _include_foreign_key_constraints=None, **kw + ): + # On some DB order is significant: visit PK first, then the + # other constraints (engine.ReflectionTest.testbasic failed on FB2) + constraints = [] + if table.primary_key: + constraints.append(table.primary_key) + + all_fkcs = table.foreign_key_constraints + if _include_foreign_key_constraints is not None: + omit_fkcs = all_fkcs.difference(_include_foreign_key_constraints) + else: + omit_fkcs = set() + + constraints.extend( + [ + c + for c in table._sorted_constraints + if c is not table.primary_key and c not in omit_fkcs + ] + ) + + return ", \n\t".join( + p + for p in ( + self.process(constraint) + for constraint in constraints + if (constraint._should_create_for_compiler(self)) + and ( + not self.dialect.supports_alter + or not getattr(constraint, "use_alter", False) + ) + ) + if p is not None + ) + + def visit_drop_table(self, drop, **kw): + text = "\nDROP TABLE " + if drop.if_exists: + text += "IF EXISTS " + return text + self.preparer.format_table(drop.element) + + def visit_drop_view(self, drop, **kw): + return "\nDROP VIEW " + self.preparer.format_table(drop.element) + + def _verify_index_table(self, index): + if index.table is None: + raise exc.CompileError( + "Index '%s' is not associated with any table." % index.name + ) + + def visit_create_index( + self, create, include_schema=False, include_table_schema=True, **kw + ): + index = create.element + self._verify_index_table(index) + preparer = self.preparer + text = "CREATE " + if index.unique: + text += "UNIQUE " + if index.name is None: + raise exc.CompileError( + "CREATE INDEX requires that the index have a name" + ) + + text += "INDEX " + if create.if_not_exists: + text += "IF NOT EXISTS " + + text += "%s ON %s (%s)" % ( + self._prepared_index_name(index, include_schema=include_schema), + preparer.format_table( + index.table, use_schema=include_table_schema + ), + ", ".join( + self.sql_compiler.process( + expr, include_table=False, literal_binds=True + ) + for expr in index.expressions + ), + ) + return text + + def visit_drop_index(self, drop, **kw): + index = drop.element + + if index.name is None: + raise exc.CompileError( + "DROP INDEX requires that the index have a name" + ) + text = "\nDROP INDEX " + if drop.if_exists: + text += "IF EXISTS " + + return text + self._prepared_index_name(index, include_schema=True) + + def _prepared_index_name(self, index, include_schema=False): + if index.table is not None: + effective_schema = self.preparer.schema_for_object(index.table) + else: + effective_schema = None + if include_schema and effective_schema: + schema_name = self.preparer.quote_schema(effective_schema) + else: + schema_name = None + + index_name = self.preparer.format_index(index) + + if schema_name: + index_name = schema_name + "." 
+ index_name + return index_name + + def visit_add_constraint(self, create, **kw): + return "ALTER TABLE %s ADD %s" % ( + self.preparer.format_table(create.element.table), + self.process(create.element), + ) + + def visit_set_table_comment(self, create, **kw): + return "COMMENT ON TABLE %s IS %s" % ( + self.preparer.format_table(create.element), + self.sql_compiler.render_literal_value( + create.element.comment, sqltypes.String() + ), + ) + + def visit_drop_table_comment(self, drop, **kw): + return "COMMENT ON TABLE %s IS NULL" % self.preparer.format_table( + drop.element + ) + + def visit_set_column_comment(self, create, **kw): + return "COMMENT ON COLUMN %s IS %s" % ( + self.preparer.format_column( + create.element, use_table=True, use_schema=True + ), + self.sql_compiler.render_literal_value( + create.element.comment, sqltypes.String() + ), + ) + + def visit_drop_column_comment(self, drop, **kw): + return "COMMENT ON COLUMN %s IS NULL" % self.preparer.format_column( + drop.element, use_table=True + ) + + def visit_set_constraint_comment(self, create, **kw): + raise exc.UnsupportedCompilationError(self, type(create)) + + def visit_drop_constraint_comment(self, drop, **kw): + raise exc.UnsupportedCompilationError(self, type(drop)) + + def get_identity_options(self, identity_options): + text = [] + if identity_options.increment is not None: + text.append("INCREMENT BY %d" % identity_options.increment) + if identity_options.start is not None: + text.append("START WITH %d" % identity_options.start) + if identity_options.minvalue is not None: + text.append("MINVALUE %d" % identity_options.minvalue) + if identity_options.maxvalue is not None: + text.append("MAXVALUE %d" % identity_options.maxvalue) + if identity_options.nominvalue is not None: + text.append("NO MINVALUE") + if identity_options.nomaxvalue is not None: + text.append("NO MAXVALUE") + if identity_options.cache is not None: + text.append("CACHE %d" % identity_options.cache) + if identity_options.cycle is not None: + text.append("CYCLE" if identity_options.cycle else "NO CYCLE") + return " ".join(text) + + def visit_create_sequence(self, create, prefix=None, **kw): + text = "CREATE SEQUENCE " + if create.if_not_exists: + text += "IF NOT EXISTS " + text += self.preparer.format_sequence(create.element) + + if prefix: + text += prefix + options = self.get_identity_options(create.element) + if options: + text += " " + options + return text + + def visit_drop_sequence(self, drop, **kw): + text = "DROP SEQUENCE " + if drop.if_exists: + text += "IF EXISTS " + return text + self.preparer.format_sequence(drop.element) + + def visit_drop_constraint(self, drop, **kw): + constraint = drop.element + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + else: + formatted_name = None + + if formatted_name is None: + raise exc.CompileError( + "Can't emit DROP CONSTRAINT for constraint %r; " + "it has no name" % drop.element + ) + return "ALTER TABLE %s DROP CONSTRAINT %s%s%s" % ( + self.preparer.format_table(drop.element.table), + "IF EXISTS " if drop.if_exists else "", + formatted_name, + " CASCADE" if drop.cascade else "", + ) + + def get_column_specification(self, column, **kwargs): + colspec = ( + self.preparer.format_column(column) + + " " + + self.dialect.type_compiler_instance.process( + column.type, type_expression=column + ) + ) + default = self.get_column_default_string(column) + if default is not None: + colspec += " DEFAULT " + default + + if column.computed is not None: + colspec += " " + 
self.process(column.computed) + + if ( + column.identity is not None + and self.dialect.supports_identity_columns + ): + colspec += " " + self.process(column.identity) + + if not column.nullable and ( + not column.identity or not self.dialect.supports_identity_columns + ): + colspec += " NOT NULL" + return colspec + + def create_table_suffix(self, table): + return "" + + def post_create_table(self, table): + return "" + + def get_column_default_string(self, column): + if isinstance(column.server_default, schema.DefaultClause): + return self.render_default_string(column.server_default.arg) + else: + return None + + def render_default_string(self, default): + if isinstance(default, str): + return self.sql_compiler.render_literal_value( + default, sqltypes.STRINGTYPE + ) + else: + return self.sql_compiler.process(default, literal_binds=True) + + def visit_table_or_column_check_constraint(self, constraint, **kw): + if constraint.is_column_level: + return self.visit_column_check_constraint(constraint) + else: + return self.visit_check_constraint(constraint) + + def visit_check_constraint(self, constraint, **kw): + text = "" + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name + text += "CHECK (%s)" % self.sql_compiler.process( + constraint.sqltext, include_table=False, literal_binds=True + ) + text += self.define_constraint_deferrability(constraint) + return text + + def visit_column_check_constraint(self, constraint, **kw): + text = "" + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name + text += "CHECK (%s)" % self.sql_compiler.process( + constraint.sqltext, include_table=False, literal_binds=True + ) + text += self.define_constraint_deferrability(constraint) + return text + + def visit_primary_key_constraint(self, constraint, **kw): + if len(constraint) == 0: + return "" + text = "" + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name + text += "PRIMARY KEY " + text += "(%s)" % ", ".join( + self.preparer.quote(c.name) + for c in ( + constraint.columns_autoinc_first + if constraint._implicit_generated + else constraint.columns + ) + ) + text += self.define_constraint_deferrability(constraint) + return text + + def visit_foreign_key_constraint(self, constraint, **kw): + preparer = self.preparer + text = "" + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name + remote_table = list(constraint.elements)[0].column.table + text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % ( + ", ".join( + preparer.quote(f.parent.name) for f in constraint.elements + ), + self.define_constraint_remote_table( + constraint, remote_table, preparer + ), + ", ".join( + preparer.quote(f.column.name) for f in constraint.elements + ), + ) + text += self.define_constraint_match(constraint) + text += self.define_constraint_cascades(constraint) + text += self.define_constraint_deferrability(constraint) + return text + + def define_constraint_remote_table(self, constraint, table, preparer): + """Format the remote table clause of a CREATE CONSTRAINT clause.""" + + return preparer.format_table(table) + + def visit_unique_constraint(self, constraint, **kw): + 
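+        # renders, e.g. (a sketch): "CONSTRAINT uq_xy UNIQUE (x, y)";
+        # an unnamed constraint omits the CONSTRAINT clause, and an
+        # empty one produces no DDL at all.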
if len(constraint) == 0: + return "" + text = "" + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name + text += "UNIQUE %s(%s)" % ( + self.define_unique_constraint_distinct(constraint, **kw), + ", ".join(self.preparer.quote(c.name) for c in constraint), + ) + text += self.define_constraint_deferrability(constraint) + return text + + def define_unique_constraint_distinct(self, constraint, **kw): + return "" + + def define_constraint_cascades(self, constraint): + text = "" + if constraint.ondelete is not None: + text += " ON DELETE %s" % self.preparer.validate_sql_phrase( + constraint.ondelete, FK_ON_DELETE + ) + if constraint.onupdate is not None: + text += " ON UPDATE %s" % self.preparer.validate_sql_phrase( + constraint.onupdate, FK_ON_UPDATE + ) + return text + + def define_constraint_deferrability(self, constraint): + text = "" + if constraint.deferrable is not None: + if constraint.deferrable: + text += " DEFERRABLE" + else: + text += " NOT DEFERRABLE" + if constraint.initially is not None: + text += " INITIALLY %s" % self.preparer.validate_sql_phrase( + constraint.initially, FK_INITIALLY + ) + return text + + def define_constraint_match(self, constraint): + text = "" + if constraint.match is not None: + text += " MATCH %s" % constraint.match + return text + + def visit_computed_column(self, generated, **kw): + text = "GENERATED ALWAYS AS (%s)" % self.sql_compiler.process( + generated.sqltext, include_table=False, literal_binds=True + ) + if generated.persisted is True: + text += " STORED" + elif generated.persisted is False: + text += " VIRTUAL" + return text + + def visit_identity_column(self, identity, **kw): + text = "GENERATED %s AS IDENTITY" % ( + "ALWAYS" if identity.always else "BY DEFAULT", + ) + options = self.get_identity_options(identity) + if options: + text += " (%s)" % options + return text + + +class GenericTypeCompiler(TypeCompiler): + def visit_FLOAT(self, type_, **kw): + return "FLOAT" + + def visit_DOUBLE(self, type_, **kw): + return "DOUBLE" + + def visit_DOUBLE_PRECISION(self, type_, **kw): + return "DOUBLE PRECISION" + + def visit_REAL(self, type_, **kw): + return "REAL" + + def visit_NUMERIC(self, type_, **kw): + if type_.precision is None: + return "NUMERIC" + elif type_.scale is None: + return "NUMERIC(%(precision)s)" % {"precision": type_.precision} + else: + return "NUMERIC(%(precision)s, %(scale)s)" % { + "precision": type_.precision, + "scale": type_.scale, + } + + def visit_DECIMAL(self, type_, **kw): + if type_.precision is None: + return "DECIMAL" + elif type_.scale is None: + return "DECIMAL(%(precision)s)" % {"precision": type_.precision} + else: + return "DECIMAL(%(precision)s, %(scale)s)" % { + "precision": type_.precision, + "scale": type_.scale, + } + + def visit_INTEGER(self, type_, **kw): + return "INTEGER" + + def visit_SMALLINT(self, type_, **kw): + return "SMALLINT" + + def visit_BIGINT(self, type_, **kw): + return "BIGINT" + + def visit_TIMESTAMP(self, type_, **kw): + return "TIMESTAMP" + + def visit_DATETIME(self, type_, **kw): + return "DATETIME" + + def visit_DATE(self, type_, **kw): + return "DATE" + + def visit_TIME(self, type_, **kw): + return "TIME" + + def visit_CLOB(self, type_, **kw): + return "CLOB" + + def visit_NCLOB(self, type_, **kw): + return "NCLOB" + + def _render_string_type(self, type_, name, length_override=None): + text = name + if length_override: + text += "(%d)" % length_override + elif 
type_.length: + text += "(%d)" % type_.length + if type_.collation: + text += ' COLLATE "%s"' % type_.collation + return text + + def visit_CHAR(self, type_, **kw): + return self._render_string_type(type_, "CHAR") + + def visit_NCHAR(self, type_, **kw): + return self._render_string_type(type_, "NCHAR") + + def visit_VARCHAR(self, type_, **kw): + return self._render_string_type(type_, "VARCHAR") + + def visit_NVARCHAR(self, type_, **kw): + return self._render_string_type(type_, "NVARCHAR") + + def visit_TEXT(self, type_, **kw): + return self._render_string_type(type_, "TEXT") + + def visit_UUID(self, type_, **kw): + return "UUID" + + def visit_BLOB(self, type_, **kw): + return "BLOB" + + def visit_BINARY(self, type_, **kw): + return "BINARY" + (type_.length and "(%d)" % type_.length or "") + + def visit_VARBINARY(self, type_, **kw): + return "VARBINARY" + (type_.length and "(%d)" % type_.length or "") + + def visit_BOOLEAN(self, type_, **kw): + return "BOOLEAN" + + def visit_uuid(self, type_, **kw): + if not type_.native_uuid or not self.dialect.supports_native_uuid: + return self._render_string_type(type_, "CHAR", length_override=32) + else: + return self.visit_UUID(type_, **kw) + + def visit_large_binary(self, type_, **kw): + return self.visit_BLOB(type_, **kw) + + def visit_boolean(self, type_, **kw): + return self.visit_BOOLEAN(type_, **kw) + + def visit_time(self, type_, **kw): + return self.visit_TIME(type_, **kw) + + def visit_datetime(self, type_, **kw): + return self.visit_DATETIME(type_, **kw) + + def visit_date(self, type_, **kw): + return self.visit_DATE(type_, **kw) + + def visit_big_integer(self, type_, **kw): + return self.visit_BIGINT(type_, **kw) + + def visit_small_integer(self, type_, **kw): + return self.visit_SMALLINT(type_, **kw) + + def visit_integer(self, type_, **kw): + return self.visit_INTEGER(type_, **kw) + + def visit_real(self, type_, **kw): + return self.visit_REAL(type_, **kw) + + def visit_float(self, type_, **kw): + return self.visit_FLOAT(type_, **kw) + + def visit_double(self, type_, **kw): + return self.visit_DOUBLE(type_, **kw) + + def visit_numeric(self, type_, **kw): + return self.visit_NUMERIC(type_, **kw) + + def visit_string(self, type_, **kw): + return self.visit_VARCHAR(type_, **kw) + + def visit_unicode(self, type_, **kw): + return self.visit_VARCHAR(type_, **kw) + + def visit_text(self, type_, **kw): + return self.visit_TEXT(type_, **kw) + + def visit_unicode_text(self, type_, **kw): + return self.visit_TEXT(type_, **kw) + + def visit_enum(self, type_, **kw): + return self.visit_VARCHAR(type_, **kw) + + def visit_null(self, type_, **kw): + raise exc.CompileError( + "Can't generate DDL for %r; " + "did you forget to specify a " + "type on this Column?" 
% type_ + ) + + def visit_type_decorator(self, type_, **kw): + return self.process(type_.type_engine(self.dialect), **kw) + + def visit_user_defined(self, type_, **kw): + return type_.get_col_spec(**kw) + + +class StrSQLTypeCompiler(GenericTypeCompiler): + def process(self, type_, **kw): + try: + _compiler_dispatch = type_._compiler_dispatch + except AttributeError: + return self._visit_unknown(type_, **kw) + else: + return _compiler_dispatch(self, **kw) + + def __getattr__(self, key): + if key.startswith("visit_"): + return self._visit_unknown + else: + raise AttributeError(key) + + def _visit_unknown(self, type_, **kw): + if type_.__class__.__name__ == type_.__class__.__name__.upper(): + return type_.__class__.__name__ + else: + return repr(type_) + + def visit_null(self, type_, **kw): + return "NULL" + + def visit_user_defined(self, type_, **kw): + try: + get_col_spec = type_.get_col_spec + except AttributeError: + return repr(type_) + else: + return get_col_spec(**kw) + + +class _SchemaForObjectCallable(Protocol): + def __call__(self, __obj: Any) -> str: ... + + +class _BindNameForColProtocol(Protocol): + def __call__(self, col: ColumnClause[Any]) -> str: ... + + +class IdentifierPreparer: + """Handle quoting and case-folding of identifiers based on options.""" + + reserved_words = RESERVED_WORDS + + legal_characters = LEGAL_CHARACTERS + + illegal_initial_characters = ILLEGAL_INITIAL_CHARACTERS + + initial_quote: str + + final_quote: str + + _strings: MutableMapping[str, str] + + schema_for_object: _SchemaForObjectCallable = operator.attrgetter("schema") + """Return the .schema attribute for an object. + + For the default IdentifierPreparer, the schema for an object is always + the value of the ".schema" attribute. if the preparer is replaced + with one that has a non-empty schema_translate_map, the value of the + ".schema" attribute is rendered a symbol that will be converted to a + real schema name from the mapping post-compile. + + """ + + _includes_none_schema_translate: bool = False + + def __init__( + self, + dialect, + initial_quote='"', + final_quote=None, + escape_quote='"', + quote_case_sensitive_collations=True, + omit_schema=False, + ): + """Construct a new ``IdentifierPreparer`` object. + + initial_quote + Character that begins a delimited identifier. + + final_quote + Character that ends a delimited identifier. Defaults to + `initial_quote`. + + omit_schema + Prevent prepending schema name. Useful for databases that do + not support schemae. 
+ """ + + self.dialect = dialect + self.initial_quote = initial_quote + self.final_quote = final_quote or self.initial_quote + self.escape_quote = escape_quote + self.escape_to_quote = self.escape_quote * 2 + self.omit_schema = omit_schema + self.quote_case_sensitive_collations = quote_case_sensitive_collations + self._strings = {} + self._double_percents = self.dialect.paramstyle in ( + "format", + "pyformat", + ) + + def _with_schema_translate(self, schema_translate_map): + prep = self.__class__.__new__(self.__class__) + prep.__dict__.update(self.__dict__) + + includes_none = None in schema_translate_map + + def symbol_getter(obj): + name = obj.schema + if obj._use_schema_map and (name is not None or includes_none): + if name is not None and ("[" in name or "]" in name): + raise exc.CompileError( + "Square bracket characters ([]) not supported " + "in schema translate name '%s'" % name + ) + return quoted_name( + "__[SCHEMA_%s]" % (name or "_none"), quote=False + ) + else: + return obj.schema + + prep.schema_for_object = symbol_getter + prep._includes_none_schema_translate = includes_none + return prep + + def _render_schema_translates(self, statement, schema_translate_map): + d = schema_translate_map + if None in d: + if not self._includes_none_schema_translate: + raise exc.InvalidRequestError( + "schema translate map which previously did not have " + "`None` present as a key now has `None` present; compiled " + "statement may lack adequate placeholders. Please use " + "consistent keys in successive " + "schema_translate_map dictionaries." + ) + + d["_none"] = d[None] + + def replace(m): + name = m.group(2) + if name in d: + effective_schema = d[name] + else: + if name in (None, "_none"): + raise exc.InvalidRequestError( + "schema translate map which previously had `None` " + "present as a key now no longer has it present; don't " + "know how to apply schema for compiled statement. " + "Please use consistent keys in successive " + "schema_translate_map dictionaries." + ) + effective_schema = name + + if not effective_schema: + effective_schema = self.dialect.default_schema_name + if not effective_schema: + # TODO: no coverage here + raise exc.CompileError( + "Dialect has no default schema name; can't " + "use None as dynamic schema target." + ) + return self.quote_schema(effective_schema) + + return re.sub(r"(__\[SCHEMA_([^\]]+)\])", replace, statement) + + def _escape_identifier(self, value: str) -> str: + """Escape an identifier. + + Subclasses should override this to provide database-dependent + escaping behavior. + """ + + value = value.replace(self.escape_quote, self.escape_to_quote) + if self._double_percents: + value = value.replace("%", "%%") + return value + + def _unescape_identifier(self, value: str) -> str: + """Canonicalize an escaped identifier. + + Subclasses should override this to provide database-dependent + unescaping behavior that reverses _escape_identifier. + """ + + return value.replace(self.escape_to_quote, self.escape_quote) + + def validate_sql_phrase(self, element, reg): + """keyword sequence filter. + + a filter for elements that are intended to represent keyword sequences, + such as "INITIALLY", "INITIALLY DEFERRED", etc. no special characters + should be present. + + .. versionadded:: 1.3 + + """ + + if element is not None and not reg.match(element): + raise exc.CompileError( + "Unexpected SQL phrase: %r (matching against %r)" + % (element, reg.pattern) + ) + return element + + def quote_identifier(self, value: str) -> str: + """Quote an identifier. 
+ + Subclasses should override this to provide database-dependent + quoting behavior. + """ + + return ( + self.initial_quote + + self._escape_identifier(value) + + self.final_quote + ) + + def _requires_quotes(self, value: str) -> bool: + """Return True if the given identifier requires quoting.""" + lc_value = value.lower() + return ( + lc_value in self.reserved_words + or value[0] in self.illegal_initial_characters + or not self.legal_characters.match(str(value)) + or (lc_value != value) + ) + + def _requires_quotes_illegal_chars(self, value): + """Return True if the given identifier requires quoting, but + not taking case convention into account.""" + return not self.legal_characters.match(str(value)) + + def quote_schema(self, schema: str, force: Any = None) -> str: + """Conditionally quote a schema name. + + + The name is quoted if it is a reserved word, contains quote-necessary + characters, or is an instance of :class:`.quoted_name` which includes + ``quote`` set to ``True``. + + Subclasses can override this to provide database-dependent + quoting behavior for schema names. + + :param schema: string schema name + :param force: unused + + .. deprecated:: 0.9 + + The :paramref:`.IdentifierPreparer.quote_schema.force` + parameter is deprecated and will be removed in a future + release. This flag has no effect on the behavior of the + :meth:`.IdentifierPreparer.quote` method; please refer to + :class:`.quoted_name`. + + """ + if force is not None: + # not using the util.deprecated_params() decorator in this + # case because of the additional function call overhead on this + # very performance-critical spot. + util.warn_deprecated( + "The IdentifierPreparer.quote_schema.force parameter is " + "deprecated and will be removed in a future release. This " + "flag has no effect on the behavior of the " + "IdentifierPreparer.quote method; please refer to " + "quoted_name().", + # deprecated 0.9. warning from 1.3 + version="0.9", + ) + + return self.quote(schema) + + def quote(self, ident: str, force: Any = None) -> str: + """Conditionally quote an identifier. + + The identifier is quoted if it is a reserved word, contains + quote-necessary characters, or is an instance of + :class:`.quoted_name` which includes ``quote`` set to ``True``. + + Subclasses can override this to provide database-dependent + quoting behavior for identifier names. + + :param ident: string identifier + :param force: unused + + .. deprecated:: 0.9 + + The :paramref:`.IdentifierPreparer.quote.force` + parameter is deprecated and will be removed in a future + release. This flag has no effect on the behavior of the + :meth:`.IdentifierPreparer.quote` method; please refer to + :class:`.quoted_name`. + + """ + if force is not None: + # not using the util.deprecated_params() decorator in this + # case because of the additional function call overhead on this + # very performance-critical spot. + util.warn_deprecated( + "The IdentifierPreparer.quote.force parameter is " + "deprecated and will be removed in a future release. This " + "flag has no effect on the behavior of the " + "IdentifierPreparer.quote method; please refer to " + "quoted_name().", + # deprecated 0.9. 
warning from 1.3 + version="0.9", + ) + + force = getattr(ident, "quote", None) + + if force is None: + if ident in self._strings: + return self._strings[ident] + else: + if self._requires_quotes(ident): + self._strings[ident] = self.quote_identifier(ident) + else: + self._strings[ident] = ident + return self._strings[ident] + elif force: + return self.quote_identifier(ident) + else: + return ident + + def format_collation(self, collation_name): + if self.quote_case_sensitive_collations: + return self.quote(collation_name) + else: + return collation_name + + def format_sequence(self, sequence, use_schema=True): + name = self.quote(sequence.name) + + effective_schema = self.schema_for_object(sequence) + + if ( + not self.omit_schema + and use_schema + and effective_schema is not None + ): + name = self.quote_schema(effective_schema) + "." + name + return name + + def format_label( + self, label: Label[Any], name: Optional[str] = None + ) -> str: + return self.quote(name or label.name) + + def format_alias( + self, alias: Optional[AliasedReturnsRows], name: Optional[str] = None + ) -> str: + if name is None: + assert alias is not None + return self.quote(alias.name) + else: + return self.quote(name) + + def format_savepoint(self, savepoint, name=None): + # Running the savepoint name through quoting is unnecessary + # for all known dialects. This is here to support potential + # third party use cases + ident = name or savepoint.ident + if self._requires_quotes(ident): + ident = self.quote_identifier(ident) + return ident + + @util.preload_module("sqlalchemy.sql.naming") + def format_constraint(self, constraint, _alembic_quote=True): + naming = util.preloaded.sql_naming + + if constraint.name is _NONE_NAME: + name = naming._constraint_name_for_table( + constraint, constraint.table + ) + + if name is None: + return None + else: + name = constraint.name + + if constraint.__visit_name__ == "index": + return self.truncate_and_render_index_name( + name, _alembic_quote=_alembic_quote + ) + else: + return self.truncate_and_render_constraint_name( + name, _alembic_quote=_alembic_quote + ) + + def truncate_and_render_index_name(self, name, _alembic_quote=True): + # calculate these at format time so that ad-hoc changes + # to dialect.max_identifier_length etc. can be reflected + # as IdentifierPreparer is long lived + max_ = ( + self.dialect.max_index_name_length + or self.dialect.max_identifier_length + ) + return self._truncate_and_render_maxlen_name( + name, max_, _alembic_quote + ) + + def truncate_and_render_constraint_name(self, name, _alembic_quote=True): + # calculate these at format time so that ad-hoc changes + # to dialect.max_identifier_length etc. 
can be reflected + # as IdentifierPreparer is long lived + max_ = ( + self.dialect.max_constraint_name_length + or self.dialect.max_identifier_length + ) + return self._truncate_and_render_maxlen_name( + name, max_, _alembic_quote + ) + + def _truncate_and_render_maxlen_name(self, name, max_, _alembic_quote): + if isinstance(name, elements._truncated_label): + if len(name) > max_: + name = name[0 : max_ - 8] + "_" + util.md5_hex(name)[-4:] + else: + self.dialect.validate_identifier(name) + + if not _alembic_quote: + return name + else: + return self.quote(name) + + def format_index(self, index): + return self.format_constraint(index) + + def format_table(self, table, use_schema=True, name=None): + """Prepare a quoted table and schema name.""" + + if name is None: + name = table.name + + result = self.quote(name) + + effective_schema = self.schema_for_object(table) + + if not self.omit_schema and use_schema and effective_schema: + result = self.quote_schema(effective_schema) + "." + result + return result + + def format_schema(self, name): + """Prepare a quoted schema name.""" + + return self.quote(name) + + def format_label_name( + self, + name, + anon_map=None, + ): + """Prepare a quoted column name.""" + + if anon_map is not None and isinstance( + name, elements._truncated_label + ): + name = name.apply_map(anon_map) + + return self.quote(name) + + def format_column( + self, + column, + use_table=False, + name=None, + table_name=None, + use_schema=False, + anon_map=None, + ): + """Prepare a quoted column name.""" + + if name is None: + name = column.name + + if anon_map is not None and isinstance( + name, elements._truncated_label + ): + name = name.apply_map(anon_map) + + if not getattr(column, "is_literal", False): + if use_table: + return ( + self.format_table( + column.table, use_schema=use_schema, name=table_name + ) + + "." + + self.quote(name) + ) + else: + return self.quote(name) + else: + # literal textual elements get stuck into ColumnClause a lot, + # which shouldn't get quoted + + if use_table: + return ( + self.format_table( + column.table, use_schema=use_schema, name=table_name + ) + + "." + + name + ) + else: + return name + + def format_table_seq(self, table, use_schema=True): + """Format table name and schema as a tuple.""" + + # Dialects with more levels in their fully qualified references + # ('database', 'owner', etc.) could override this and return + # a longer sequence. 
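+        # e.g., a sketch with hypothetical names: a schema-qualified table
+        # yields ("myschema", "mytable"); an unqualified one ("mytable",).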
+ + effective_schema = self.schema_for_object(table) + + if not self.omit_schema and use_schema and effective_schema: + return ( + self.quote_schema(effective_schema), + self.format_table(table, use_schema=False), + ) + else: + return (self.format_table(table, use_schema=False),) + + @util.memoized_property + def _r_identifiers(self): + initial, final, escaped_final = ( + re.escape(s) + for s in ( + self.initial_quote, + self.final_quote, + self._escape_identifier(self.final_quote), + ) + ) + r = re.compile( + r"(?:" + r"(?:%(initial)s((?:%(escaped)s|[^%(final)s])+)%(final)s" + r"|([^\.]+))(?=\.|$))+" + % {"initial": initial, "final": final, "escaped": escaped_final} + ) + return r + + def unformat_identifiers(self, identifiers): + """Unpack 'schema.table.column'-like strings into components.""" + + r = self._r_identifiers + return [ + self._unescape_identifier(i) + for i in [a or b for a, b in r.findall(identifiers)] + ] diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/crud.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/crud.py new file mode 100644 index 00000000..d1426658 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/crud.py @@ -0,0 +1,1669 @@ +# sql/crud.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +"""Functions used by compiler.py to determine the parameters rendered +within INSERT and UPDATE statements. + +""" +from __future__ import annotations + +import functools +import operator +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import Iterable +from typing import List +from typing import MutableMapping +from typing import NamedTuple +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from . import coercions +from . import dml +from . import elements +from . import roles +from .base import _DefaultDescriptionTuple +from .dml import isinsert as _compile_state_isinsert +from .elements import ColumnClause +from .schema import default_is_clause_element +from .schema import default_is_sequence +from .selectable import Select +from .selectable import TableClause +from .. import exc +from .. import util +from ..util.typing import Literal + +if TYPE_CHECKING: + from .compiler import _BindNameForColProtocol + from .compiler import SQLCompiler + from .dml import _DMLColumnElement + from .dml import DMLState + from .dml import ValuesBase + from .elements import ColumnElement + from .elements import KeyedColumnElement + from .schema import _SQLExprDefault + from .schema import Column + +REQUIRED = util.symbol( + "REQUIRED", + """ +Placeholder for the value within a :class:`.BindParameter` +which is required to be present when the statement is passed +to :meth:`_engine.Connection.execute`. + +This symbol is typically used when a :func:`_expression.insert` +or :func:`_expression.update` statement is compiled without parameter +values present. 
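+
+E.g., a sketch::
+
+    stmt = my_table.insert()
+    compiled = stmt.compile()
+    # every column parameter is marked required; executing the
+    # statement without supplying values for them raises an error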
+ +""", +) + + +def _as_dml_column(c: ColumnElement[Any]) -> ColumnClause[Any]: + if not isinstance(c, ColumnClause): + raise exc.CompileError( + f"Can't create DML statement against column expression {c!r}" + ) + return c + + +_CrudParamElement = Tuple[ + "ColumnElement[Any]", + str, # column name + Optional[ + Union[str, "_SQLExprDefault"] + ], # bound parameter string or SQL expression to apply + Iterable[str], +] +_CrudParamElementStr = Tuple[ + "KeyedColumnElement[Any]", + str, # column name + str, # bound parameter string + Iterable[str], +] +_CrudParamElementSQLExpr = Tuple[ + "ColumnClause[Any]", + str, + "_SQLExprDefault", # SQL expression to apply + Iterable[str], +] + +_CrudParamSequence = List[_CrudParamElement] + + +class _CrudParams(NamedTuple): + single_params: _CrudParamSequence + all_multi_params: List[Sequence[_CrudParamElementStr]] + is_default_metavalue_only: bool = False + use_insertmanyvalues: bool = False + use_sentinel_columns: Optional[Sequence[Column[Any]]] = None + + +def _get_crud_params( + compiler: SQLCompiler, + stmt: ValuesBase, + compile_state: DMLState, + toplevel: bool, + **kw: Any, +) -> _CrudParams: + """create a set of tuples representing column/string pairs for use + in an INSERT or UPDATE statement. + + Also generates the Compiled object's postfetch, prefetch, and + returning column collections, used for default handling and ultimately + populating the CursorResult's prefetch_cols() and postfetch_cols() + collections. + + """ + + # note: the _get_crud_params() system was written with the notion in mind + # that INSERT, UPDATE, DELETE are always the top level statement and + # that there is only one of them. With the addition of CTEs that can + # make use of DML, this assumption is no longer accurate; the DML + # statement is not necessarily the top-level "row returning" thing + # and it is also theoretically possible (fortunately nobody has asked yet) + # to have a single statement with multiple DMLs inside of it via CTEs. + + # the current _get_crud_params() design doesn't accommodate these cases + # right now. It "just works" for a CTE that has a single DML inside of + # it, and for a CTE with multiple DML, it's not clear what would happen. + + # overall, the "compiler.XYZ" collections here would need to be in a + # per-DML structure of some kind, and DefaultDialect would need to + # navigate these collections on a per-statement basis, with additional + # emphasis on the "toplevel returning data" statement. However we + # still need to run through _get_crud_params() for all DML as we have + # Python / SQL generated column defaults that need to be rendered. + + # if there is user need for this kind of thing, it's likely a post 2.0 + # kind of change as it would require deep changes to DefaultDialect + # as well as here. + + compiler.postfetch = [] + compiler.insert_prefetch = [] + compiler.update_prefetch = [] + compiler.implicit_returning = [] + + visiting_cte = kw.get("visiting_cte", None) + if visiting_cte is not None: + # for insert -> CTE -> insert, don't populate an incoming + # _crud_accumulate_bind_names collection; the INSERT we process here + # will not be inline within the VALUES of the enclosing INSERT as the + # CTE is placed on the outside. 
See issue #9173 + kw.pop("accumulate_bind_names", None) + assert ( + "accumulate_bind_names" not in kw + ), "Don't know how to handle insert within insert without a CTE" + + # getters - these are normally just column.key, + # but in the case of mysql multi-table update, the rules for + # .key must conditionally take tablename into account + ( + _column_as_key, + _getattr_col_key, + _col_bind_name, + ) = _key_getters_for_crud_column(compiler, stmt, compile_state) + + compiler._get_bind_name_for_col = _col_bind_name + + if stmt._returning and stmt._return_defaults: + raise exc.CompileError( + "Can't compile statement that includes returning() and " + "return_defaults() simultaneously" + ) + + if compile_state.isdelete: + _setup_delete_return_defaults( + compiler, + stmt, + compile_state, + (), + _getattr_col_key, + _column_as_key, + _col_bind_name, + (), + (), + toplevel, + kw, + ) + return _CrudParams([], []) + + # no parameters in the statement, no parameters in the + # compiled params - return binds for all columns + if compiler.column_keys is None and compile_state._no_parameters: + return _CrudParams( + [ + ( + c, + compiler.preparer.format_column(c), + _create_bind_param(compiler, c, None, required=True), + (c.key,), + ) + for c in stmt.table.columns + if not c._omit_from_statements + ], + [], + ) + + stmt_parameter_tuples: Optional[ + List[Tuple[Union[str, ColumnClause[Any]], Any]] + ] + spd: Optional[MutableMapping[_DMLColumnElement, Any]] + + if ( + _compile_state_isinsert(compile_state) + and compile_state._has_multi_parameters + ): + mp = compile_state._multi_parameters + assert mp is not None + spd = mp[0] + stmt_parameter_tuples = list(spd.items()) + spd_str_key = {_column_as_key(key) for key in spd} + elif compile_state._ordered_values: + spd = compile_state._dict_parameters + stmt_parameter_tuples = compile_state._ordered_values + assert spd is not None + spd_str_key = {_column_as_key(key) for key in spd} + elif compile_state._dict_parameters: + spd = compile_state._dict_parameters + stmt_parameter_tuples = list(spd.items()) + spd_str_key = {_column_as_key(key) for key in spd} + else: + stmt_parameter_tuples = spd = spd_str_key = None + + # if we have statement parameters - set defaults in the + # compiled params + if compiler.column_keys is None: + parameters = {} + elif stmt_parameter_tuples: + assert spd_str_key is not None + parameters = { + _column_as_key(key): REQUIRED + for key in compiler.column_keys + if key not in spd_str_key + } + else: + parameters = { + _column_as_key(key): REQUIRED for key in compiler.column_keys + } + + # create a list of column assignment clauses as tuples + values: List[_CrudParamElement] = [] + + if stmt_parameter_tuples is not None: + _get_stmt_parameter_tuples_params( + compiler, + compile_state, + parameters, + stmt_parameter_tuples, + _column_as_key, + values, + kw, + ) + + check_columns: Dict[str, ColumnClause[Any]] = {} + + # special logic that only occurs for multi-table UPDATE + # statements + if dml.isupdate(compile_state) and compile_state.is_multitable: + _get_update_multitable_params( + compiler, + stmt, + compile_state, + stmt_parameter_tuples, + check_columns, + _col_bind_name, + _getattr_col_key, + values, + kw, + ) + + if _compile_state_isinsert(compile_state) and stmt._select_names: + # is an insert from select, is not a multiparams + + assert not compile_state._has_multi_parameters + + _scan_insert_from_select_cols( + compiler, + stmt, + compile_state, + parameters, + _getattr_col_key, + _column_as_key, + _col_bind_name, + 
check_columns, + values, + toplevel, + kw, + ) + use_insertmanyvalues = False + use_sentinel_columns = None + else: + use_insertmanyvalues, use_sentinel_columns = _scan_cols( + compiler, + stmt, + compile_state, + parameters, + _getattr_col_key, + _column_as_key, + _col_bind_name, + check_columns, + values, + toplevel, + kw, + ) + + if parameters and stmt_parameter_tuples: + check = ( + set(parameters) + .intersection(_column_as_key(k) for k, v in stmt_parameter_tuples) + .difference(check_columns) + ) + if check: + raise exc.CompileError( + "Unconsumed column names: %s" + % (", ".join("%s" % (c,) for c in check)) + ) + + is_default_metavalue_only = False + + if ( + _compile_state_isinsert(compile_state) + and compile_state._has_multi_parameters + ): + # is a multiparams, is not an insert from a select + assert not stmt._select_names + multi_extended_values = _extend_values_for_multiparams( + compiler, + stmt, + compile_state, + cast( + "Sequence[_CrudParamElementStr]", + values, + ), + cast("Callable[..., str]", _column_as_key), + kw, + ) + return _CrudParams(values, multi_extended_values) + elif ( + not values + and compiler.for_executemany + and compiler.dialect.supports_default_metavalue + ): + # convert an "INSERT DEFAULT VALUES" + # into INSERT (firstcol) VALUES (DEFAULT) which can be turned + # into an in-place multi values. This supports + # insert_executemany_returning mode :) + values = [ + ( + _as_dml_column(stmt.table.columns[0]), + compiler.preparer.format_column(stmt.table.columns[0]), + compiler.dialect.default_metavalue_token, + (), + ) + ] + is_default_metavalue_only = True + + return _CrudParams( + values, + [], + is_default_metavalue_only=is_default_metavalue_only, + use_insertmanyvalues=use_insertmanyvalues, + use_sentinel_columns=use_sentinel_columns, + ) + + +@overload +def _create_bind_param( + compiler: SQLCompiler, + col: ColumnElement[Any], + value: Any, + process: Literal[True] = ..., + required: bool = False, + name: Optional[str] = None, + **kw: Any, +) -> str: ... + + +@overload +def _create_bind_param( + compiler: SQLCompiler, + col: ColumnElement[Any], + value: Any, + **kw: Any, +) -> str: ... + + +def _create_bind_param( + compiler: SQLCompiler, + col: ColumnElement[Any], + value: Any, + process: bool = True, + required: bool = False, + name: Optional[str] = None, + **kw: Any, +) -> Union[str, elements.BindParameter[Any]]: + if name is None: + name = col.key + bindparam = elements.BindParameter( + name, value, type_=col.type, required=required + ) + bindparam._is_crud = True + if process: + return bindparam._compiler_dispatch(compiler, **kw) + else: + return bindparam + + +def _handle_values_anonymous_param(compiler, col, value, name, **kw): + # the insert() and update() constructs as of 1.4 will now produce anonymous + # bindparam() objects in the values() collections up front when given plain + # literal values. This is so that cache key behaviors, which need to + # produce bound parameters in deterministic order without invoking any + # compilation here, can be applied to these constructs when they include + # values() (but not yet multi-values, which are not included in caching + # right now). 
+ # + # in order to produce the desired "crud" style name for these parameters, + # which will also be targetable in engine/default.py through the usual + # conventions, apply our desired name to these unique parameters by + # populating the compiler truncated names cache with the desired name, + # rather than having + # compiler.visit_bindparam()->compiler._truncated_identifier make up a + # name. Saves on call counts also. + + # for INSERT/UPDATE that's a CTE, we don't need names to match to + # external parameters and these would also conflict in the case where + # multiple insert/update are combined together using CTEs + is_cte = "visiting_cte" in kw + + if ( + not is_cte + and value.unique + and isinstance(value.key, elements._truncated_label) + ): + compiler.truncated_names[("bindparam", value.key)] = name + + if value.type._isnull: + # either unique parameter, or other bound parameters that were + # passed in directly + # set type to that of the column unconditionally + value = value._with_binary_element_type(col.type) + + return value._compiler_dispatch(compiler, **kw) + + +def _key_getters_for_crud_column( + compiler: SQLCompiler, stmt: ValuesBase, compile_state: DMLState +) -> Tuple[ + Callable[[Union[str, ColumnClause[Any]]], Union[str, Tuple[str, str]]], + Callable[[ColumnClause[Any]], Union[str, Tuple[str, str]]], + _BindNameForColProtocol, +]: + if dml.isupdate(compile_state) and compile_state._extra_froms: + # when extra tables are present, refer to the columns + # in those extra tables as table-qualified, including in + # dictionaries and when rendering bind param names. + # the "main" table of the statement remains unqualified, + # allowing the most compatibility with a non-multi-table + # statement. + _et = set(compile_state._extra_froms) + + c_key_role = functools.partial( + coercions.expect_as_key, roles.DMLColumnRole + ) + + def _column_as_key( + key: Union[ColumnClause[Any], str] + ) -> Union[str, Tuple[str, str]]: + str_key = c_key_role(key) + if hasattr(key, "table") and key.table in _et: + return (key.table.name, str_key) # type: ignore + else: + return str_key + + def _getattr_col_key( + col: ColumnClause[Any], + ) -> Union[str, Tuple[str, str]]: + if col.table in _et: + return (col.table.name, col.key) # type: ignore + else: + return col.key + + def _col_bind_name(col: ColumnClause[Any]) -> str: + if col.table in _et: + if TYPE_CHECKING: + assert isinstance(col.table, TableClause) + return "%s_%s" % (col.table.name, col.key) + else: + return col.key + + else: + _column_as_key = functools.partial( + coercions.expect_as_key, roles.DMLColumnRole + ) + _getattr_col_key = _col_bind_name = operator.attrgetter("key") # type: ignore # noqa: E501 + + return _column_as_key, _getattr_col_key, _col_bind_name + + +def _scan_insert_from_select_cols( + compiler, + stmt, + compile_state, + parameters, + _getattr_col_key, + _column_as_key, + _col_bind_name, + check_columns, + values, + toplevel, + kw, +): + cols = [stmt.table.c[_column_as_key(name)] for name in stmt._select_names] + + assert compiler.stack[-1]["selectable"] is stmt + + compiler.stack[-1]["insert_from_select"] = stmt.select + + add_select_cols: List[_CrudParamElementSQLExpr] = [] + if stmt.include_insert_from_select_defaults: + col_set = set(cols) + for col in stmt.table.columns: + # omit columns that were not in the SELECT statement. + # this will omit columns marked as omit_from_statements naturally, + # as long as that col was not explicit in the SELECT. 
+ # if an omit_from_statements col has a "default" on it, then + # we need to include it, as these defaults should still fire off. + # but, if it has that default and it's the "sentinel" default, + # we don't do sentinel default operations for insert_from_select + # here so we again omit it. + if ( + col not in col_set + and col.default + and not col.default.is_sentinel + ): + cols.append(col) + + for c in cols: + col_key = _getattr_col_key(c) + if col_key in parameters and col_key not in check_columns: + parameters.pop(col_key) + values.append((c, compiler.preparer.format_column(c), None, ())) + else: + _append_param_insert_select_hasdefault( + compiler, stmt, c, add_select_cols, kw + ) + + if add_select_cols: + values.extend(add_select_cols) + ins_from_select = compiler.stack[-1]["insert_from_select"] + if not isinstance(ins_from_select, Select): + raise exc.CompileError( + f"Can't extend statement for INSERT..FROM SELECT to include " + f"additional default-holding column(s) " + f"""{ + ', '.join(repr(key) for _, key, _, _ in add_select_cols) + }. Convert the selectable to a subquery() first, or pass """ + "include_defaults=False to Insert.from_select() to skip these " + "columns." + ) + ins_from_select = ins_from_select._generate() + # copy raw_columns + ins_from_select._raw_columns = list(ins_from_select._raw_columns) + [ + expr for _, _, expr, _ in add_select_cols + ] + compiler.stack[-1]["insert_from_select"] = ins_from_select + + +def _scan_cols( + compiler, + stmt, + compile_state, + parameters, + _getattr_col_key, + _column_as_key, + _col_bind_name, + check_columns, + values, + toplevel, + kw, +): + ( + need_pks, + implicit_returning, + implicit_return_defaults, + postfetch_lastrowid, + use_insertmanyvalues, + use_sentinel_columns, + ) = _get_returning_modifiers(compiler, stmt, compile_state, toplevel) + + assert compile_state.isupdate or compile_state.isinsert + + if compile_state._parameter_ordering: + parameter_ordering = [ + _column_as_key(key) for key in compile_state._parameter_ordering + ] + ordered_keys = set(parameter_ordering) + cols = [ + stmt.table.c[key] + for key in parameter_ordering + if isinstance(key, str) and key in stmt.table.c + ] + [c for c in stmt.table.c if c.key not in ordered_keys] + + else: + cols = stmt.table.columns + + isinsert = _compile_state_isinsert(compile_state) + if isinsert and not compile_state._has_multi_parameters: + # new rules for #7998. fetch lastrowid or implicit returning + # for autoincrement column even if parameter is NULL, for DBs that + # override NULL param for primary key (sqlite, mysql/mariadb) + autoincrement_col = stmt.table._autoincrement_column + insert_null_pk_still_autoincrements = ( + compiler.dialect.insert_null_pk_still_autoincrements + ) + else: + autoincrement_col = insert_null_pk_still_autoincrements = None + + if stmt._supplemental_returning: + supplemental_returning = set(stmt._supplemental_returning) + else: + supplemental_returning = set() + + compiler_implicit_returning = compiler.implicit_returning + + # TODO - see TODO(return_defaults_columns) below + # cols_in_params = set() + + for c in cols: + # scan through every column in the target table + + col_key = _getattr_col_key(c) + + if col_key in parameters and col_key not in check_columns: + # parameter is present for the column. use that. 
+
+            _append_param_parameter(
+                compiler,
+                stmt,
+                compile_state,
+                c,
+                col_key,
+                parameters,
+                _col_bind_name,
+                implicit_returning,
+                implicit_return_defaults,
+                postfetch_lastrowid,
+                values,
+                autoincrement_col,
+                insert_null_pk_still_autoincrements,
+                kw,
+            )
+
+            # TODO - see TODO(return_defaults_columns) below
+            # cols_in_params.add(c)
+
+        elif isinsert:
+            # no parameter is present and it's an insert.
+
+            if c.primary_key and need_pks:
+                # it's a primary key column, it will need to be generated by a
+                # default generator of some kind, and the statement expects
+                # inserted_primary_key to be available.
+
+                if implicit_returning:
+                    # we can use RETURNING, find out how to invoke this
+                    # column and get the value where RETURNING is an option.
+                    # we can inline server-side functions in this case.
+
+                    _append_param_insert_pk_returning(
+                        compiler, stmt, c, values, kw
+                    )
+                else:
+                    # otherwise, find out how to invoke this column
+                    # and get its value where RETURNING is not an option.
+                    # if we have to invoke a server-side function, we need
+                    # to pre-execute it.  or if this is a straight
+                    # autoincrement column and the dialect supports it
+                    # we can use cursor.lastrowid.
+
+                    _append_param_insert_pk_no_returning(
+                        compiler, stmt, c, values, kw
+                    )
+
+            elif c.default is not None:
+                # column has a default, but it's not a pk column, or it is but
+                # we don't need to get the pk back.
+                if not c.default.is_sentinel or (
+                    use_sentinel_columns is not None
+                ):
+                    _append_param_insert_hasdefault(
+                        compiler, stmt, c, implicit_return_defaults, values, kw
+                    )
+
+            elif c.server_default is not None:
+                # column has a DDL-level default, and is either not a pk
+                # column or we don't need the pk.
+                if implicit_return_defaults and c in implicit_return_defaults:
+                    compiler_implicit_returning.append(c)
+                elif not c.primary_key:
+                    compiler.postfetch.append(c)
+
+            elif implicit_return_defaults and c in implicit_return_defaults:
+                compiler_implicit_returning.append(c)
+
+            elif (
+                c.primary_key
+                and c is not stmt.table._autoincrement_column
+                and not c.nullable
+            ):
+                _warn_pk_with_no_anticipated_value(c)
+
+        elif compile_state.isupdate:
+            # no parameter is present and it's an update.
+
+            _append_param_update(
+                compiler,
+                compile_state,
+                stmt,
+                c,
+                implicit_return_defaults,
+                values,
+                kw,
+            )
+
+        # adding supplemental cols to implicit_returning in table
+        # order so that order is maintained between multiple INSERT
+        # statements which may have different parameters included, but all
+        # have the same RETURNING clause
+        if (
+            c in supplemental_returning
+            and c not in compiler_implicit_returning
+        ):
+            compiler_implicit_returning.append(c)
+
+    if supplemental_returning:
+        # we should have gotten every col into implicit_returning,
+        # however supplemental returning can also have SQL functions etc.
+        # in it
+        remaining_supplemental = supplemental_returning.difference(
+            compiler_implicit_returning
+        )
+        compiler_implicit_returning.extend(
+            c
+            for c in stmt._supplemental_returning
+            if c in remaining_supplemental
+        )
+
+    # TODO(return_defaults_columns): there can still be more columns in
+    # _return_defaults_columns in the case that they are from something like an
+    # alias of the table. we can add them here, however this breaks other ORM
+    # things. so this is for another day. 
see + # test/orm/dml/test_update_delete_where.py -> test_update_from_alias + + # if stmt._return_defaults_columns: + # compiler_implicit_returning.extend( + # set(stmt._return_defaults_columns) + # .difference(compiler_implicit_returning) + # .difference(cols_in_params) + # ) + + return (use_insertmanyvalues, use_sentinel_columns) + + +def _setup_delete_return_defaults( + compiler, + stmt, + compile_state, + parameters, + _getattr_col_key, + _column_as_key, + _col_bind_name, + check_columns, + values, + toplevel, + kw, +): + (_, _, implicit_return_defaults, *_) = _get_returning_modifiers( + compiler, stmt, compile_state, toplevel + ) + + if not implicit_return_defaults: + return + + if stmt._return_defaults_columns: + compiler.implicit_returning.extend(implicit_return_defaults) + + if stmt._supplemental_returning: + ir_set = set(compiler.implicit_returning) + compiler.implicit_returning.extend( + c for c in stmt._supplemental_returning if c not in ir_set + ) + + +def _append_param_parameter( + compiler, + stmt, + compile_state, + c, + col_key, + parameters, + _col_bind_name, + implicit_returning, + implicit_return_defaults, + postfetch_lastrowid, + values, + autoincrement_col, + insert_null_pk_still_autoincrements, + kw, +): + value = parameters.pop(col_key) + + col_value = compiler.preparer.format_column( + c, use_table=compile_state.include_table_with_column_exprs + ) + + accumulated_bind_names: Set[str] = set() + + if coercions._is_literal(value): + if ( + insert_null_pk_still_autoincrements + and c.primary_key + and c is autoincrement_col + ): + # support use case for #7998, fetch autoincrement cols + # even if value was given. + + if postfetch_lastrowid: + compiler.postfetch_lastrowid = True + elif implicit_returning: + compiler.implicit_returning.append(c) + + value = _create_bind_param( + compiler, + c, + value, + required=value is REQUIRED, + name=( + _col_bind_name(c) + if not _compile_state_isinsert(compile_state) + or not compile_state._has_multi_parameters + else "%s_m0" % _col_bind_name(c) + ), + accumulate_bind_names=accumulated_bind_names, + **kw, + ) + elif value._is_bind_parameter: + if ( + insert_null_pk_still_autoincrements + and value.value is None + and c.primary_key + and c is autoincrement_col + ): + # support use case for #7998, fetch autoincrement cols + # even if value was given + if implicit_returning: + compiler.implicit_returning.append(c) + elif compiler.dialect.postfetch_lastrowid: + compiler.postfetch_lastrowid = True + + value = _handle_values_anonymous_param( + compiler, + c, + value, + name=( + _col_bind_name(c) + if not _compile_state_isinsert(compile_state) + or not compile_state._has_multi_parameters + else "%s_m0" % _col_bind_name(c) + ), + accumulate_bind_names=accumulated_bind_names, + **kw, + ) + else: + # value is a SQL expression + value = compiler.process( + value.self_group(), + accumulate_bind_names=accumulated_bind_names, + **kw, + ) + + if compile_state.isupdate: + if implicit_return_defaults and c in implicit_return_defaults: + compiler.implicit_returning.append(c) + + else: + compiler.postfetch.append(c) + else: + if c.primary_key: + if implicit_returning: + compiler.implicit_returning.append(c) + elif compiler.dialect.postfetch_lastrowid: + compiler.postfetch_lastrowid = True + + elif implicit_return_defaults and (c in implicit_return_defaults): + compiler.implicit_returning.append(c) + + else: + # postfetch specifically means, "we can SELECT the row we just + # inserted by primary key to get back the server generated + # defaults". 
so by definition this can't be used to get the + # primary key value back, because we need to have it ahead of + # time. + + compiler.postfetch.append(c) + + values.append((c, col_value, value, accumulated_bind_names)) + + +def _append_param_insert_pk_returning(compiler, stmt, c, values, kw): + """Create a primary key expression in the INSERT statement where + we want to populate result.inserted_primary_key and RETURNING + is available. + + """ + if c.default is not None: + if c.default.is_sequence: + if compiler.dialect.supports_sequences and ( + not c.default.optional + or not compiler.dialect.sequences_optional + ): + accumulated_bind_names: Set[str] = set() + values.append( + ( + c, + compiler.preparer.format_column(c), + compiler.process( + c.default, + accumulate_bind_names=accumulated_bind_names, + **kw, + ), + accumulated_bind_names, + ) + ) + compiler.implicit_returning.append(c) + elif c.default.is_clause_element: + accumulated_bind_names = set() + values.append( + ( + c, + compiler.preparer.format_column(c), + compiler.process( + c.default.arg.self_group(), + accumulate_bind_names=accumulated_bind_names, + **kw, + ), + accumulated_bind_names, + ) + ) + compiler.implicit_returning.append(c) + else: + # client side default. OK we can't use RETURNING, need to + # do a "prefetch", which in fact fetches the default value + # on the Python side + values.append( + ( + c, + compiler.preparer.format_column(c), + _create_insert_prefetch_bind_param(compiler, c, **kw), + (c.key,), + ) + ) + elif c is stmt.table._autoincrement_column or c.server_default is not None: + compiler.implicit_returning.append(c) + elif not c.nullable: + # no .default, no .server_default, not autoincrement, we have + # no indication this primary key column will have any value + _warn_pk_with_no_anticipated_value(c) + + +def _append_param_insert_pk_no_returning(compiler, stmt, c, values, kw): + """Create a primary key expression in the INSERT statement where + we want to populate result.inserted_primary_key and we cannot use + RETURNING. + + Depending on the kind of default here we may create a bound parameter + in the INSERT statement and pre-execute a default generation function, + or we may use cursor.lastrowid if supported by the dialect. + + + """ + + if ( + # column has a Python-side default + c.default is not None + and ( + # and it either is not a sequence, or it is and we support + # sequences and want to invoke it + not c.default.is_sequence + or ( + compiler.dialect.supports_sequences + and ( + not c.default.optional + or not compiler.dialect.sequences_optional + ) + ) + ) + ) or ( + # column is the "autoincrement column" + c is stmt.table._autoincrement_column + and ( + # dialect can't use cursor.lastrowid + not compiler.dialect.postfetch_lastrowid + and ( + # column has a Sequence and we support those + ( + c.default is not None + and c.default.is_sequence + and compiler.dialect.supports_sequences + ) + or + # column has no default on it, but dialect can run the + # "autoincrement" mechanism explicitly, e.g. 
PostgreSQL + # SERIAL we know the sequence name + ( + c.default is None + and compiler.dialect.preexecute_autoincrement_sequences + ) + ) + ) + ): + # do a pre-execute of the default + values.append( + ( + c, + compiler.preparer.format_column(c), + _create_insert_prefetch_bind_param(compiler, c, **kw), + (c.key,), + ) + ) + elif ( + c.default is None + and c.server_default is None + and not c.nullable + and c is not stmt.table._autoincrement_column + ): + # no .default, no .server_default, not autoincrement, we have + # no indication this primary key column will have any value + _warn_pk_with_no_anticipated_value(c) + elif compiler.dialect.postfetch_lastrowid: + # finally, where it seems like there will be a generated primary key + # value and we haven't set up any other way to fetch it, and the + # dialect supports cursor.lastrowid, switch on the lastrowid flag so + # that the DefaultExecutionContext calls upon cursor.lastrowid + compiler.postfetch_lastrowid = True + + +def _append_param_insert_hasdefault( + compiler, stmt, c, implicit_return_defaults, values, kw +): + if c.default.is_sequence: + if compiler.dialect.supports_sequences and ( + not c.default.optional or not compiler.dialect.sequences_optional + ): + accumulated_bind_names: Set[str] = set() + values.append( + ( + c, + compiler.preparer.format_column(c), + compiler.process( + c.default, + accumulate_bind_names=accumulated_bind_names, + **kw, + ), + accumulated_bind_names, + ) + ) + if implicit_return_defaults and c in implicit_return_defaults: + compiler.implicit_returning.append(c) + elif not c.primary_key: + compiler.postfetch.append(c) + elif c.default.is_clause_element: + accumulated_bind_names = set() + values.append( + ( + c, + compiler.preparer.format_column(c), + compiler.process( + c.default.arg.self_group(), + accumulate_bind_names=accumulated_bind_names, + **kw, + ), + accumulated_bind_names, + ) + ) + + if implicit_return_defaults and c in implicit_return_defaults: + compiler.implicit_returning.append(c) + elif not c.primary_key: + # don't add primary key column to postfetch + compiler.postfetch.append(c) + else: + values.append( + ( + c, + compiler.preparer.format_column(c), + _create_insert_prefetch_bind_param(compiler, c, **kw), + (c.key,), + ) + ) + + +def _append_param_insert_select_hasdefault( + compiler: SQLCompiler, + stmt: ValuesBase, + c: ColumnClause[Any], + values: List[_CrudParamElementSQLExpr], + kw: Dict[str, Any], +) -> None: + if default_is_sequence(c.default): + if compiler.dialect.supports_sequences and ( + not c.default.optional or not compiler.dialect.sequences_optional + ): + values.append( + ( + c, + compiler.preparer.format_column(c), + c.default.next_value(), + (), + ) + ) + elif default_is_clause_element(c.default): + values.append( + ( + c, + compiler.preparer.format_column(c), + c.default.arg.self_group(), + (), + ) + ) + else: + values.append( + ( + c, + compiler.preparer.format_column(c), + _create_insert_prefetch_bind_param( + compiler, c, process=False, **kw + ), + (c.key,), + ) + ) + + +def _append_param_update( + compiler, compile_state, stmt, c, implicit_return_defaults, values, kw +): + include_table = compile_state.include_table_with_column_exprs + if c.onupdate is not None and not c.onupdate.is_sequence: + if c.onupdate.is_clause_element: + values.append( + ( + c, + compiler.preparer.format_column( + c, + use_table=include_table, + ), + compiler.process(c.onupdate.arg.self_group(), **kw), + (), + ) + ) + if implicit_return_defaults and c in implicit_return_defaults: + 
compiler.implicit_returning.append(c)
+            else:
+                compiler.postfetch.append(c)
+        else:
+            values.append(
+                (
+                    c,
+                    compiler.preparer.format_column(
+                        c,
+                        use_table=include_table,
+                    ),
+                    _create_update_prefetch_bind_param(compiler, c, **kw),
+                    (c.key,),
+                )
+            )
+    elif c.server_onupdate is not None:
+        if implicit_return_defaults and c in implicit_return_defaults:
+            compiler.implicit_returning.append(c)
+        else:
+            compiler.postfetch.append(c)
+    elif (
+        implicit_return_defaults
+        and (stmt._return_defaults_columns or not stmt._return_defaults)
+        and c in implicit_return_defaults
+    ):
+        compiler.implicit_returning.append(c)
+
+
+@overload
+def _create_insert_prefetch_bind_param(
+    compiler: SQLCompiler,
+    c: ColumnElement[Any],
+    process: Literal[True] = ...,
+    **kw: Any,
+) -> str: ...
+
+
+@overload
+def _create_insert_prefetch_bind_param(
+    compiler: SQLCompiler,
+    c: ColumnElement[Any],
+    process: Literal[False],
+    **kw: Any,
+) -> elements.BindParameter[Any]: ...
+
+
+def _create_insert_prefetch_bind_param(
+    compiler: SQLCompiler,
+    c: ColumnElement[Any],
+    process: bool = True,
+    name: Optional[str] = None,
+    **kw: Any,
+) -> Union[elements.BindParameter[Any], str]:
+    param = _create_bind_param(
+        compiler, c, None, process=process, name=name, **kw
+    )
+    compiler.insert_prefetch.append(c)  # type: ignore
+    return param
+
+
+@overload
+def _create_update_prefetch_bind_param(
+    compiler: SQLCompiler,
+    c: ColumnElement[Any],
+    process: Literal[True] = ...,
+    **kw: Any,
+) -> str: ...
+
+
+@overload
+def _create_update_prefetch_bind_param(
+    compiler: SQLCompiler,
+    c: ColumnElement[Any],
+    process: Literal[False],
+    **kw: Any,
+) -> elements.BindParameter[Any]: ...
+
+
+def _create_update_prefetch_bind_param(
+    compiler: SQLCompiler,
+    c: ColumnElement[Any],
+    process: bool = True,
+    name: Optional[str] = None,
+    **kw: Any,
+) -> Union[elements.BindParameter[Any], str]:
+    param = _create_bind_param(
+        compiler, c, None, process=process, name=name, **kw
+    )
+    compiler.update_prefetch.append(c)  # type: ignore
+    return param
+
+
+class _multiparam_column(elements.ColumnElement[Any]):
+    _is_multiparam_column = True
+
+    def __init__(self, original, index):
+        self.index = index
+        self.key = "%s_m%d" % (original.key, index + 1)
+        self.original = original
+        self.default = original.default
+        self.type = original.type
+
+    def compare(self, other, **kw):
+        raise NotImplementedError()
+
+    def _copy_internals(self, **kw):
+        raise NotImplementedError()
+
+    def __eq__(self, other):
+        return (
+            isinstance(other, _multiparam_column)
+            and other.key == self.key
+            and other.original == self.original
+        )
+
+    @util.memoized_property
+    def _default_description_tuple(self) -> _DefaultDescriptionTuple:
+        """used by default.py -> _process_execute_defaults()"""
+
+        return _DefaultDescriptionTuple._from_column_default(self.default)
+
+    @util.memoized_property
+    def _onupdate_description_tuple(self) -> _DefaultDescriptionTuple:
+        """used by default.py -> _process_execute_defaults()"""
+
+        return _DefaultDescriptionTuple._from_column_default(self.onupdate)
+
+
+def _process_multiparam_default_bind(
+    compiler: SQLCompiler,
+    stmt: ValuesBase,
+    c: KeyedColumnElement[Any],
+    index: int,
+    kw: Dict[str, Any],
+) -> str:
+    if not c.default:
+        raise exc.CompileError(
+            "INSERT value for column %s is explicitly rendered as a bound "
+            "parameter in the VALUES clause; "
+            "a Python-side value or SQL expression is required" % c
+        )
+    elif default_is_clause_element(c.default):
+        return 
compiler.process(c.default.arg.self_group(), **kw) + elif c.default.is_sequence: + # these conditions would have been established + # by append_param_insert_(?:hasdefault|pk_returning|pk_no_returning) + # in order for us to be here, so these don't need to be + # checked + # assert compiler.dialect.supports_sequences and ( + # not c.default.optional + # or not compiler.dialect.sequences_optional + # ) + return compiler.process(c.default, **kw) + else: + col = _multiparam_column(c, index) + assert isinstance(stmt, dml.Insert) + return _create_insert_prefetch_bind_param( + compiler, col, process=True, **kw + ) + + +def _get_update_multitable_params( + compiler, + stmt, + compile_state, + stmt_parameter_tuples, + check_columns, + _col_bind_name, + _getattr_col_key, + values, + kw, +): + normalized_params = { + coercions.expect(roles.DMLColumnRole, c): param + for c, param in stmt_parameter_tuples or () + } + + include_table = compile_state.include_table_with_column_exprs + + affected_tables = set() + for t in compile_state._extra_froms: + for c in t.c: + if c in normalized_params: + affected_tables.add(t) + check_columns[_getattr_col_key(c)] = c + value = normalized_params[c] + + col_value = compiler.process(c, include_table=include_table) + if coercions._is_literal(value): + value = _create_bind_param( + compiler, + c, + value, + required=value is REQUIRED, + name=_col_bind_name(c), + **kw, # TODO: no test coverage for literal binds here + ) + accumulated_bind_names: Iterable[str] = (c.key,) + elif value._is_bind_parameter: + cbn = _col_bind_name(c) + value = _handle_values_anonymous_param( + compiler, c, value, name=cbn, **kw + ) + accumulated_bind_names = (cbn,) + else: + compiler.postfetch.append(c) + value = compiler.process(value.self_group(), **kw) + accumulated_bind_names = () + values.append((c, col_value, value, accumulated_bind_names)) + # determine tables which are actually to be updated - process onupdate + # and server_onupdate for these + for t in affected_tables: + for c in t.c: + if c in normalized_params: + continue + elif c.onupdate is not None and not c.onupdate.is_sequence: + if c.onupdate.is_clause_element: + values.append( + ( + c, + compiler.process(c, include_table=include_table), + compiler.process( + c.onupdate.arg.self_group(), **kw + ), + (), + ) + ) + compiler.postfetch.append(c) + else: + values.append( + ( + c, + compiler.process(c, include_table=include_table), + _create_update_prefetch_bind_param( + compiler, c, name=_col_bind_name(c), **kw + ), + (c.key,), + ) + ) + elif c.server_onupdate is not None: + compiler.postfetch.append(c) + + +def _extend_values_for_multiparams( + compiler: SQLCompiler, + stmt: ValuesBase, + compile_state: DMLState, + initial_values: Sequence[_CrudParamElementStr], + _column_as_key: Callable[..., str], + kw: Dict[str, Any], +) -> List[Sequence[_CrudParamElementStr]]: + values_0 = initial_values + values = [initial_values] + + mp = compile_state._multi_parameters + assert mp is not None + for i, row in enumerate(mp[1:]): + extension: List[_CrudParamElementStr] = [] + + row = {_column_as_key(key): v for key, v in row.items()} + + for col, col_expr, param, accumulated_names in values_0: + if col.key in row: + key = col.key + + if coercions._is_literal(row[key]): + new_param = _create_bind_param( + compiler, + col, + row[key], + name="%s_m%d" % (col.key, i + 1), + **kw, + ) + else: + new_param = compiler.process(row[key].self_group(), **kw) + else: + new_param = _process_multiparam_default_bind( + compiler, stmt, col, i, kw + ) + + 
extension.append((col, col_expr, new_param, accumulated_names))
+
+        values.append(extension)
+
+    return values
+
+
+def _get_stmt_parameter_tuples_params(
+    compiler,
+    compile_state,
+    parameters,
+    stmt_parameter_tuples,
+    _column_as_key,
+    values,
+    kw,
+):
+    for k, v in stmt_parameter_tuples:
+        colkey = _column_as_key(k)
+        if colkey is not None:
+            parameters.setdefault(colkey, v)
+        else:
+            # a non-Column expression on the left side;
+            # add it to values() in an "as-is" state,
+            # coercing right side to bound param
+
+            # note one of the main use cases for this is array slice
+            # updates on PostgreSQL, as the left side is also an expression.
+
+            col_expr = compiler.process(
+                k, include_table=compile_state.include_table_with_column_exprs
+            )
+
+            if coercions._is_literal(v):
+                v = compiler.process(
+                    elements.BindParameter(None, v, type_=k.type), **kw
+                )
+            else:
+                if v._is_bind_parameter and v.type._isnull:
+                    # either unique parameter, or other bound parameters that
+                    # were passed in directly
+                    # set type to that of the column unconditionally
+                    v = v._with_binary_element_type(k.type)
+
+                v = compiler.process(v.self_group(), **kw)
+
+            # TODO: not sure if accumulated_bind_names applies here
+            values.append((k, col_expr, v, ()))
+
+
+def _get_returning_modifiers(compiler, stmt, compile_state, toplevel):
+    """determines RETURNING strategy, if any, for the statement.
+
+    This is where it's determined what we need to fetch from the
+    INSERT or UPDATE statement after it's invoked.
+
+    """
+
+    dialect = compiler.dialect
+
+    need_pks = (
+        toplevel
+        and _compile_state_isinsert(compile_state)
+        and not stmt._inline
+        and (
+            not compiler.for_executemany
+            or (dialect.insert_executemany_returning and stmt._return_defaults)
+        )
+        and not stmt._returning
+        # and (not stmt._returning or stmt._return_defaults)
+        and not compile_state._has_multi_parameters
+    )
+
+    # check if we have access to simple cursor.lastrowid.  we can use that
+    # after the INSERT if that's all we need.
+    postfetch_lastrowid = (
+        need_pks
+        and dialect.postfetch_lastrowid
+        and stmt.table._autoincrement_column is not None
+    )
+
+    # see if we want to add RETURNING to an INSERT in order to get
+    # primary key columns back.  This would be instead of postfetch_lastrowid
+    # if that's set.
+    implicit_returning = (
+        # statement itself can veto it
+        need_pks
+        # the dialect can veto it if it just doesn't support RETURNING
+        # with INSERT
+        and dialect.insert_returning
+        # user-defined implicit_returning on Table can veto it
+        and compile_state._primary_table.implicit_returning
+        # the compile_state can veto it (SQLite uses this to disable
+        # RETURNING for an ON CONFLICT insert, as SQLite does not return
+        # for rows that were updated, which is wrong)
+        and compile_state._supports_implicit_returning
+        and (
+            # since we support MariaDB and SQLite which also support lastrowid,
+            # decide if we should use lastrowid or RETURNING.  for insert
+            # that didn't call return_defaults() and has just one set of
+            # parameters, we can use lastrowid.  this is more "traditional"
+            # and a lot of weird use cases are supported by it. 
+            # SQLite lastrowid is roughly 3x faster than RETURNING,
+            # MariaDB lastrowid roughly 2x faster than RETURNING
+            (not postfetch_lastrowid or dialect.favor_returning_over_lastrowid)
+            or compile_state._has_multi_parameters
+            or stmt._return_defaults
+        )
+    )
+    if implicit_returning:
+        postfetch_lastrowid = False
+
+    if _compile_state_isinsert(compile_state):
+        should_implicit_return_defaults = (
+            implicit_returning and stmt._return_defaults
+        )
+        explicit_returning = (
+            should_implicit_return_defaults
+            or stmt._returning
+            or stmt._supplemental_returning
+        )
+        use_insertmanyvalues = (
+            toplevel
+            and compiler.for_executemany
+            and dialect.use_insertmanyvalues
+            and (
+                explicit_returning or dialect.use_insertmanyvalues_wo_returning
+            )
+        )
+
+        use_sentinel_columns = None
+        if (
+            use_insertmanyvalues
+            and explicit_returning
+            and stmt._sort_by_parameter_order
+        ):
+            use_sentinel_columns = compiler._get_sentinel_column_for_table(
+                stmt.table
+            )
+
+    elif compile_state.isupdate:
+        should_implicit_return_defaults = (
+            stmt._return_defaults
+            and compile_state._primary_table.implicit_returning
+            and compile_state._supports_implicit_returning
+            and dialect.update_returning
+        )
+        use_insertmanyvalues = False
+        use_sentinel_columns = None
+    elif compile_state.isdelete:
+        should_implicit_return_defaults = (
+            stmt._return_defaults
+            and compile_state._primary_table.implicit_returning
+            and compile_state._supports_implicit_returning
+            and dialect.delete_returning
+        )
+        use_insertmanyvalues = False
+        use_sentinel_columns = None
+    else:
+        should_implicit_return_defaults = False  # pragma: no cover
+        use_insertmanyvalues = False
+        use_sentinel_columns = None
+
+    if should_implicit_return_defaults:
+        if not stmt._return_defaults_columns:
+            # TODO: this is weird.  See #9685 where we have to
+            # take an extra step to prevent this from happening. why
+            # would this ever be *all* columns?  but if we set to blank, then
+            # that seems to break things also in the ORM.  So we should
+            # try to clean this up and figure out what return_defaults
+            # needs to do w/ the ORM etc. here
+            implicit_return_defaults = set(stmt.table.c)
+        else:
+            implicit_return_defaults = set(stmt._return_defaults_columns)
+    else:
+        implicit_return_defaults = None
+
+    return (
+        need_pks,
+        implicit_returning or should_implicit_return_defaults,
+        implicit_return_defaults,
+        postfetch_lastrowid,
+        use_insertmanyvalues,
+        use_sentinel_columns,
+    )
+
+
+def _warn_pk_with_no_anticipated_value(c):
+    msg = (
+        "Column '%s.%s' is marked as a member of the "
+        "primary key for table '%s', "
+        "but has no Python-side or server-side default generator indicated, "
+        "nor does it indicate 'autoincrement=True' or 'nullable=True', "
+        "and no explicit value is passed. "
+        "Primary key columns typically may not store NULL."
+        % (c.table.fullname, c.name, c.table.fullname)
+    )
+    if len(c.table.primary_key) > 1:
+        msg += (
+            " Note that as of SQLAlchemy 1.1, 'autoincrement=True' must be "
+            "indicated explicitly for composite (e.g. multicolumn) primary "
+            "keys if AUTO_INCREMENT/SERIAL/IDENTITY "
+            "behavior is expected for one of the columns in the primary key. "
+            "CREATE TABLE statements are impacted by this change as well on "
+            "most backends." 
+ ) + util.warn(msg) diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/ddl.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/ddl.py new file mode 100644 index 00000000..d9e3f673 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/ddl.py @@ -0,0 +1,1378 @@ +# sql/ddl.py +# Copyright (C) 2009-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +""" +Provides the hierarchy of DDL-defining schema items as well as routines +to invoke them for a create/drop call. + +""" +from __future__ import annotations + +import contextlib +import typing +from typing import Any +from typing import Callable +from typing import Iterable +from typing import List +from typing import Optional +from typing import Sequence as typing_Sequence +from typing import Tuple + +from . import roles +from .base import _generative +from .base import Executable +from .base import SchemaVisitor +from .elements import ClauseElement +from .. import exc +from .. import util +from ..util import topological +from ..util.typing import Protocol +from ..util.typing import Self + +if typing.TYPE_CHECKING: + from .compiler import Compiled + from .compiler import DDLCompiler + from .elements import BindParameter + from .schema import Constraint + from .schema import ForeignKeyConstraint + from .schema import SchemaItem + from .schema import Sequence + from .schema import Table + from .selectable import TableClause + from ..engine.base import Connection + from ..engine.interfaces import CacheStats + from ..engine.interfaces import CompiledCacheType + from ..engine.interfaces import Dialect + from ..engine.interfaces import SchemaTranslateMapType + + +class BaseDDLElement(ClauseElement): + """The root of DDL constructs, including those that are sub-elements + within the "create table" and other processes. + + .. versionadded:: 2.0 + + """ + + _hierarchy_supports_caching = False + """disable cache warnings for all _DDLCompiles subclasses. """ + + def _compiler(self, dialect, **kw): + """Return a compiler appropriate for this ClauseElement, given a + Dialect.""" + + return dialect.ddl_compiler(dialect, self, **kw) + + def _compile_w_cache( + self, + dialect: Dialect, + *, + compiled_cache: Optional[CompiledCacheType], + column_keys: List[str], + for_executemany: bool = False, + schema_translate_map: Optional[SchemaTranslateMapType] = None, + **kw: Any, + ) -> Tuple[ + Compiled, Optional[typing_Sequence[BindParameter[Any]]], CacheStats + ]: + raise NotImplementedError() + + +class DDLIfCallable(Protocol): + def __call__( + self, + ddl: BaseDDLElement, + target: SchemaItem, + bind: Optional[Connection], + tables: Optional[List[Table]] = None, + state: Optional[Any] = None, + *, + dialect: Dialect, + compiler: Optional[DDLCompiler] = ..., + checkfirst: bool, + ) -> bool: ... 
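+
+
+# As an illustration only (a sketch, not part of this module): a callable
+# satisfying ``DDLIfCallable`` can be a plain function, e.g.
+#
+#     def only_on_postgresql(
+#         ddl, target, bind, tables=None, state=None,
+#         *, dialect, compiler=None, checkfirst=False,
+#     ):
+#         return dialect.name == "postgresql"
+#
+# which is the form accepted by ``ExecutableDDLElement.execute_if()``.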
+ + +class DDLIf(typing.NamedTuple): + dialect: Optional[str] + callable_: Optional[DDLIfCallable] + state: Optional[Any] + + def _should_execute( + self, + ddl: BaseDDLElement, + target: SchemaItem, + bind: Optional[Connection], + compiler: Optional[DDLCompiler] = None, + **kw: Any, + ) -> bool: + if bind is not None: + dialect = bind.dialect + elif compiler is not None: + dialect = compiler.dialect + else: + assert False, "compiler or dialect is required" + + if isinstance(self.dialect, str): + if self.dialect != dialect.name: + return False + elif isinstance(self.dialect, (tuple, list, set)): + if dialect.name not in self.dialect: + return False + if self.callable_ is not None and not self.callable_( + ddl, + target, + bind, + state=self.state, + dialect=dialect, + compiler=compiler, + **kw, + ): + return False + + return True + + +class ExecutableDDLElement(roles.DDLRole, Executable, BaseDDLElement): + """Base class for standalone executable DDL expression constructs. + + This class is the base for the general purpose :class:`.DDL` class, + as well as the various create/drop clause constructs such as + :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`, + etc. + + .. versionchanged:: 2.0 :class:`.ExecutableDDLElement` is renamed from + :class:`.DDLElement`, which still exists for backwards compatibility. + + :class:`.ExecutableDDLElement` integrates closely with SQLAlchemy events, + introduced in :ref:`event_toplevel`. An instance of one is + itself an event receiving callable:: + + event.listen( + users, + 'after_create', + AddConstraint(constraint).execute_if(dialect='postgresql') + ) + + .. seealso:: + + :class:`.DDL` + + :class:`.DDLEvents` + + :ref:`event_toplevel` + + :ref:`schema_ddl_sequences` + + """ + + _ddl_if: Optional[DDLIf] = None + target: Optional[SchemaItem] = None + + def _execute_on_connection( + self, connection, distilled_params, execution_options + ): + return connection._execute_ddl( + self, distilled_params, execution_options + ) + + @_generative + def against(self, target: SchemaItem) -> Self: + """Return a copy of this :class:`_schema.ExecutableDDLElement` which + will include the given target. + + This essentially applies the given item to the ``.target`` attribute of + the returned :class:`_schema.ExecutableDDLElement` object. This target + is then usable by event handlers and compilation routines in order to + provide services such as tokenization of a DDL string in terms of a + particular :class:`_schema.Table`. + + When a :class:`_schema.ExecutableDDLElement` object is established as + an event handler for the :meth:`_events.DDLEvents.before_create` or + :meth:`_events.DDLEvents.after_create` events, and the event then + occurs for a given target such as a :class:`_schema.Constraint` or + :class:`_schema.Table`, that target is established with a copy of the + :class:`_schema.ExecutableDDLElement` object using this method, which + then proceeds to the :meth:`_schema.ExecutableDDLElement.execute` + method in order to invoke the actual DDL instruction. + + :param target: a :class:`_schema.SchemaItem` that will be the subject + of a DDL operation. + + :return: a copy of this :class:`_schema.ExecutableDDLElement` with the + ``.target`` attribute assigned to the given + :class:`_schema.SchemaItem`. + + .. seealso:: + + :class:`_schema.DDL` - uses tokenization against the "target" when + processing the DDL string. 
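+
+        For example, an illustrative sketch (``users`` here is a placeholder
+        :class:`_schema.Table`, not defined in this module)::
+
+            ddl = DDL("ALTER TABLE %(table)s SET secretpowers TRUE")
+            connection.execute(ddl.against(users))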
+ + """ + self.target = target + return self + + @_generative + def execute_if( + self, + dialect: Optional[str] = None, + callable_: Optional[DDLIfCallable] = None, + state: Optional[Any] = None, + ) -> Self: + r"""Return a callable that will execute this + :class:`_ddl.ExecutableDDLElement` conditionally within an event + handler. + + Used to provide a wrapper for event listening:: + + event.listen( + metadata, + 'before_create', + DDL("my_ddl").execute_if(dialect='postgresql') + ) + + :param dialect: May be a string or tuple of strings. + If a string, it will be compared to the name of the + executing database dialect:: + + DDL('something').execute_if(dialect='postgresql') + + If a tuple, specifies multiple dialect names:: + + DDL('something').execute_if(dialect=('postgresql', 'mysql')) + + :param callable\_: A callable, which will be invoked with + three positional arguments as well as optional keyword + arguments: + + :ddl: + This DDL element. + + :target: + The :class:`_schema.Table` or :class:`_schema.MetaData` + object which is the + target of this event. May be None if the DDL is executed + explicitly. + + :bind: + The :class:`_engine.Connection` being used for DDL execution. + May be None if this construct is being created inline within + a table, in which case ``compiler`` will be present. + + :tables: + Optional keyword argument - a list of Table objects which are to + be created/ dropped within a MetaData.create_all() or drop_all() + method call. + + :dialect: keyword argument, but always present - the + :class:`.Dialect` involved in the operation. + + :compiler: keyword argument. Will be ``None`` for an engine + level DDL invocation, but will refer to a :class:`.DDLCompiler` + if this DDL element is being created inline within a table. + + :state: + Optional keyword argument - will be the ``state`` argument + passed to this function. + + :checkfirst: + Keyword argument, will be True if the 'checkfirst' flag was + set during the call to ``create()``, ``create_all()``, + ``drop()``, ``drop_all()``. + + If the callable returns a True value, the DDL statement will be + executed. + + :param state: any value which will be passed to the callable\_ + as the ``state`` keyword argument. + + .. seealso:: + + :meth:`.SchemaItem.ddl_if` + + :class:`.DDLEvents` + + :ref:`event_toplevel` + + """ + self._ddl_if = DDLIf(dialect, callable_, state) + return self + + def _should_execute(self, target, bind, **kw): + if self._ddl_if is None: + return True + else: + return self._ddl_if._should_execute(self, target, bind, **kw) + + def _invoke_with(self, bind): + if self._should_execute(self.target, bind): + return bind.execute(self) + + def __call__(self, target, bind, **kw): + """Execute the DDL as a ddl_listener.""" + + self.against(target)._invoke_with(bind) + + def _generate(self): + s = self.__class__.__new__(self.__class__) + s.__dict__ = self.__dict__.copy() + return s + + +DDLElement = ExecutableDDLElement +""":class:`.DDLElement` is renamed to :class:`.ExecutableDDLElement`.""" + + +class DDL(ExecutableDDLElement): + """A literal DDL statement. + + Specifies literal SQL DDL to be executed by the database. DDL objects + function as DDL event listeners, and can be subscribed to those events + listed in :class:`.DDLEvents`, using either :class:`_schema.Table` or + :class:`_schema.MetaData` objects as targets. + Basic templating support allows + a single DDL instance to handle repetitive tasks for multiple tables. 
+ + Examples:: + + from sqlalchemy import event, DDL + + tbl = Table('users', metadata, Column('uid', Integer)) + event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger')) + + spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE') + event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb')) + + drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE') + connection.execute(drop_spow) + + When operating on Table events, the following ``statement`` + string substitutions are available:: + + %(table)s - the Table name, with any required quoting applied + %(schema)s - the schema name, with any required quoting applied + %(fullname)s - the Table name including schema, quoted if needed + + The DDL's "context", if any, will be combined with the standard + substitutions noted above. Keys present in the context will override + the standard substitutions. + + """ + + __visit_name__ = "ddl" + + def __init__(self, statement, context=None): + """Create a DDL statement. + + :param statement: + A string or unicode string to be executed. Statements will be + processed with Python's string formatting operator using + a fixed set of string substitutions, as well as additional + substitutions provided by the optional :paramref:`.DDL.context` + parameter. + + A literal '%' in a statement must be escaped as '%%'. + + SQL bind parameters are not available in DDL statements. + + :param context: + Optional dictionary, defaults to None. These values will be + available for use in string substitutions on the DDL statement. + + .. seealso:: + + :class:`.DDLEvents` + + :ref:`event_toplevel` + + """ + + if not isinstance(statement, str): + raise exc.ArgumentError( + "Expected a string or unicode SQL statement, got '%r'" + % statement + ) + + self.statement = statement + self.context = context or {} + + def __repr__(self): + parts = [repr(self.statement)] + if self.context: + parts.append(f"context={self.context}") + + return "<%s@%s; %s>" % ( + type(self).__name__, + id(self), + ", ".join(parts), + ) + + +class _CreateDropBase(ExecutableDDLElement): + """Base class for DDL constructs that represent CREATE and DROP or + equivalents. + + The common theme of _CreateDropBase is a single + ``element`` attribute which refers to the element + to be created or dropped. + + """ + + def __init__( + self, + element, + ): + self.element = self.target = element + self._ddl_if = getattr(element, "_ddl_if", None) + + @property + def stringify_dialect(self): + return self.element.create_drop_stringify_dialect + + def _create_rule_disable(self, compiler): + """Allow disable of _create_rule using a callable. + + Pass to _create_rule using + util.portable_instancemethod(self._create_rule_disable) + to retain serializability. + + """ + return False + + +class _CreateBase(_CreateDropBase): + def __init__(self, element, if_not_exists=False): + super().__init__(element) + self.if_not_exists = if_not_exists + + +class _DropBase(_CreateDropBase): + def __init__(self, element, if_exists=False): + super().__init__(element) + self.if_exists = if_exists + + +class CreateSchema(_CreateBase): + """Represent a CREATE SCHEMA statement. + + The argument here is the string name of the schema. + + """ + + __visit_name__ = "create_schema" + + stringify_dialect = "default" + + def __init__( + self, + name, + if_not_exists=False, + ): + """Create a new :class:`.CreateSchema` construct.""" + + super().__init__(element=name, if_not_exists=if_not_exists) + + +class DropSchema(_DropBase): + """Represent a DROP SCHEMA statement. 
+ + The argument here is the string name of the schema. + + """ + + __visit_name__ = "drop_schema" + + stringify_dialect = "default" + + def __init__( + self, + name, + cascade=False, + if_exists=False, + ): + """Create a new :class:`.DropSchema` construct.""" + + super().__init__(element=name, if_exists=if_exists) + self.cascade = cascade + + +class CreateTable(_CreateBase): + """Represent a CREATE TABLE statement.""" + + __visit_name__ = "create_table" + + def __init__( + self, + element: Table, + include_foreign_key_constraints: Optional[ + typing_Sequence[ForeignKeyConstraint] + ] = None, + if_not_exists: bool = False, + ): + """Create a :class:`.CreateTable` construct. + + :param element: a :class:`_schema.Table` that's the subject + of the CREATE + :param on: See the description for 'on' in :class:`.DDL`. + :param include_foreign_key_constraints: optional sequence of + :class:`_schema.ForeignKeyConstraint` objects that will be included + inline within the CREATE construct; if omitted, all foreign key + constraints that do not specify use_alter=True are included. + + :param if_not_exists: if True, an IF NOT EXISTS operator will be + applied to the construct. + + .. versionadded:: 1.4.0b2 + + """ + super().__init__(element, if_not_exists=if_not_exists) + self.columns = [CreateColumn(column) for column in element.columns] + self.include_foreign_key_constraints = include_foreign_key_constraints + + +class _DropView(_DropBase): + """Semi-public 'DROP VIEW' construct. + + Used by the test suite for dialect-agnostic drops of views. + This object will eventually be part of a public "view" API. + + """ + + __visit_name__ = "drop_view" + + +class CreateConstraint(BaseDDLElement): + def __init__(self, element: Constraint): + self.element = element + + +class CreateColumn(BaseDDLElement): + """Represent a :class:`_schema.Column` + as rendered in a CREATE TABLE statement, + via the :class:`.CreateTable` construct. + + This is provided to support custom column DDL within the generation + of CREATE TABLE statements, by using the + compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel` + to extend :class:`.CreateColumn`. 
+
+    Typical integration is to examine the incoming :class:`_schema.Column`
+    object, and to redirect compilation if a particular flag or condition
+    is found::
+
+        from sqlalchemy import schema
+        from sqlalchemy.ext.compiler import compiles
+
+        @compiles(schema.CreateColumn)
+        def compile(element, compiler, **kw):
+            column = element.element
+
+            if "special" not in column.info:
+                return compiler.visit_create_column(element, **kw)
+
+            text = "%s SPECIAL DIRECTIVE %s" % (
+                column.name,
+                compiler.type_compiler.process(column.type)
+            )
+            default = compiler.get_column_default_string(column)
+            if default is not None:
+                text += " DEFAULT " + default
+
+            if not column.nullable:
+                text += " NOT NULL"
+
+            if column.constraints:
+                text += " ".join(
+                    compiler.process(const)
+                    for const in column.constraints)
+            return text
+
+    The above construct can be applied to a :class:`_schema.Table`
+    as follows::
+
+        from sqlalchemy import Table, MetaData, Column, Integer, String
+        from sqlalchemy import schema
+
+        metadata = MetaData()
+
+        table = Table('mytable', metadata,
+                Column('x', Integer, info={"special":True}, primary_key=True),
+                Column('y', String(50)),
+                Column('z', String(20), info={"special":True})
+            )
+
+        metadata.create_all(conn)
+
+    Above, the directives we've added to the :attr:`_schema.Column.info`
+    collection
+    will be detected by our custom compilation scheme::
+
+        CREATE TABLE mytable (
+            x SPECIAL DIRECTIVE INTEGER NOT NULL,
+            y VARCHAR(50),
+            z SPECIAL DIRECTIVE VARCHAR(20),
+            PRIMARY KEY (x)
+        )
+
+    The :class:`.CreateColumn` construct can also be used to skip certain
+    columns when producing a ``CREATE TABLE``.  This is accomplished by
+    creating a compilation rule that conditionally returns ``None``.
+    This is essentially how to produce the same effect as using the
+    ``system=True`` argument on :class:`_schema.Column`, which marks a column
+    as an implicitly-present "system" column.
+
+    For example, suppose we wish to produce a :class:`_schema.Table`
+    which skips
+    rendering of the PostgreSQL ``xmin`` column against the PostgreSQL
+    backend, but on other backends does render it, in anticipation of a
+    triggered rule.  A conditional compilation rule could skip this name only
+    on PostgreSQL::
+
+        from sqlalchemy.schema import CreateColumn
+
+        @compiles(CreateColumn, "postgresql")
+        def skip_xmin(element, compiler, **kw):
+            if element.element.name == 'xmin':
+                return None
+            else:
+                return compiler.visit_create_column(element, **kw)
+
+
+        my_table = Table('mytable', metadata,
+                Column('id', Integer, primary_key=True),
+                Column('xmin', Integer)
+            )
+
+    Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE``
+    which only includes the ``id`` column in the string; the ``xmin`` column
+    will be omitted, but only against the PostgreSQL backend.
+
+    """
+
+    __visit_name__ = "create_column"
+
+    def __init__(self, element):
+        self.element = element
+
+
+class DropTable(_DropBase):
+    """Represent a DROP TABLE statement."""
+
+    __visit_name__ = "drop_table"
+
+    def __init__(self, element: Table, if_exists: bool = False):
+        """Create a :class:`.DropTable` construct.
+
+        :param element: a :class:`_schema.Table` that's the subject
+         of the DROP.
+        :param on: See the description for 'on' in :class:`.DDL`.
+        :param if_exists: if True, an IF EXISTS operator will be applied to the
+         construct.
+
+        .. 
versionadded:: 1.4.0b2
+
+        """
+        super().__init__(element, if_exists=if_exists)
+
+
+class CreateSequence(_CreateBase):
+    """Represent a CREATE SEQUENCE statement."""
+
+    __visit_name__ = "create_sequence"
+
+    def __init__(self, element: Sequence, if_not_exists: bool = False):
+        super().__init__(element, if_not_exists=if_not_exists)
+
+
+class DropSequence(_DropBase):
+    """Represent a DROP SEQUENCE statement."""
+
+    __visit_name__ = "drop_sequence"
+
+    def __init__(self, element: Sequence, if_exists: bool = False):
+        super().__init__(element, if_exists=if_exists)
+
+
+class CreateIndex(_CreateBase):
+    """Represent a CREATE INDEX statement."""
+
+    __visit_name__ = "create_index"
+
+    def __init__(self, element, if_not_exists=False):
+        """Create a :class:`.CreateIndex` construct.
+
+        :param element: a :class:`_schema.Index` that's the subject
+         of the CREATE.
+        :param if_not_exists: if True, an IF NOT EXISTS operator will be
+         applied to the construct.
+
+        .. versionadded:: 1.4.0b2
+
+        """
+        super().__init__(element, if_not_exists=if_not_exists)
+
+
+class DropIndex(_DropBase):
+    """Represent a DROP INDEX statement."""
+
+    __visit_name__ = "drop_index"
+
+    def __init__(self, element, if_exists=False):
+        """Create a :class:`.DropIndex` construct.
+
+        :param element: a :class:`_schema.Index` that's the subject
+         of the DROP.
+        :param if_exists: if True, an IF EXISTS operator will be applied to the
+         construct.
+
+        .. versionadded:: 1.4.0b2
+
+        """
+        super().__init__(element, if_exists=if_exists)
+
+
+class AddConstraint(_CreateBase):
+    """Represent an ALTER TABLE ADD CONSTRAINT statement."""
+
+    __visit_name__ = "add_constraint"
+
+    def __init__(self, element):
+        super().__init__(element)
+        element._create_rule = util.portable_instancemethod(
+            self._create_rule_disable
+        )
+
+
+class DropConstraint(_DropBase):
+    """Represent an ALTER TABLE DROP CONSTRAINT statement."""
+
+    __visit_name__ = "drop_constraint"
+
+    def __init__(self, element, cascade=False, if_exists=False, **kw):
+        self.cascade = cascade
+        super().__init__(element, if_exists=if_exists, **kw)
+        element._create_rule = util.portable_instancemethod(
+            self._create_rule_disable
+        )
+
+
+class SetTableComment(_CreateDropBase):
+    """Represent a COMMENT ON TABLE IS statement."""
+
+    __visit_name__ = "set_table_comment"
+
+
+class DropTableComment(_CreateDropBase):
+    """Represent a COMMENT ON TABLE '' statement.
+
+    Note this varies a lot across database backends. 
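+
+    For example (an illustrative, dialect-specific note): PostgreSQL renders
+    this construct as ``COMMENT ON TABLE ... IS NULL``, while MySQL instead
+    emits ``ALTER TABLE ... COMMENT ''``.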
+ + """ + + __visit_name__ = "drop_table_comment" + + +class SetColumnComment(_CreateDropBase): + """Represent a COMMENT ON COLUMN IS statement.""" + + __visit_name__ = "set_column_comment" + + +class DropColumnComment(_CreateDropBase): + """Represent a COMMENT ON COLUMN IS NULL statement.""" + + __visit_name__ = "drop_column_comment" + + +class SetConstraintComment(_CreateDropBase): + """Represent a COMMENT ON CONSTRAINT IS statement.""" + + __visit_name__ = "set_constraint_comment" + + +class DropConstraintComment(_CreateDropBase): + """Represent a COMMENT ON CONSTRAINT IS NULL statement.""" + + __visit_name__ = "drop_constraint_comment" + + +class InvokeDDLBase(SchemaVisitor): + def __init__(self, connection): + self.connection = connection + + @contextlib.contextmanager + def with_ddl_events(self, target, **kw): + """helper context manager that will apply appropriate DDL events + to a CREATE or DROP operation.""" + + raise NotImplementedError() + + +class InvokeCreateDDLBase(InvokeDDLBase): + @contextlib.contextmanager + def with_ddl_events(self, target, **kw): + """helper context manager that will apply appropriate DDL events + to a CREATE or DROP operation.""" + + target.dispatch.before_create( + target, self.connection, _ddl_runner=self, **kw + ) + yield + target.dispatch.after_create( + target, self.connection, _ddl_runner=self, **kw + ) + + +class InvokeDropDDLBase(InvokeDDLBase): + @contextlib.contextmanager + def with_ddl_events(self, target, **kw): + """helper context manager that will apply appropriate DDL events + to a CREATE or DROP operation.""" + + target.dispatch.before_drop( + target, self.connection, _ddl_runner=self, **kw + ) + yield + target.dispatch.after_drop( + target, self.connection, _ddl_runner=self, **kw + ) + + +class SchemaGenerator(InvokeCreateDDLBase): + def __init__( + self, dialect, connection, checkfirst=False, tables=None, **kwargs + ): + super().__init__(connection, **kwargs) + self.checkfirst = checkfirst + self.tables = tables + self.preparer = dialect.identifier_preparer + self.dialect = dialect + self.memo = {} + + def _can_create_table(self, table): + self.dialect.validate_identifier(table.name) + effective_schema = self.connection.schema_for_object(table) + if effective_schema: + self.dialect.validate_identifier(effective_schema) + return not self.checkfirst or not self.dialect.has_table( + self.connection, table.name, schema=effective_schema + ) + + def _can_create_index(self, index): + effective_schema = self.connection.schema_for_object(index.table) + if effective_schema: + self.dialect.validate_identifier(effective_schema) + return not self.checkfirst or not self.dialect.has_index( + self.connection, + index.table.name, + index.name, + schema=effective_schema, + ) + + def _can_create_sequence(self, sequence): + effective_schema = self.connection.schema_for_object(sequence) + + return self.dialect.supports_sequences and ( + (not self.dialect.sequences_optional or not sequence.optional) + and ( + not self.checkfirst + or not self.dialect.has_sequence( + self.connection, sequence.name, schema=effective_schema + ) + ) + ) + + def visit_metadata(self, metadata): + if self.tables is not None: + tables = self.tables + else: + tables = list(metadata.tables.values()) + + collection = sort_tables_and_constraints( + [t for t in tables if self._can_create_table(t)] + ) + + seq_coll = [ + s + for s in metadata._sequences.values() + if s.column is None and self._can_create_sequence(s) + ] + + event_collection = [t for (t, fks) in collection if t is not 
None] + + with self.with_ddl_events( + metadata, + tables=event_collection, + checkfirst=self.checkfirst, + ): + for seq in seq_coll: + self.traverse_single(seq, create_ok=True) + + for table, fkcs in collection: + if table is not None: + self.traverse_single( + table, + create_ok=True, + include_foreign_key_constraints=fkcs, + _is_metadata_operation=True, + ) + else: + for fkc in fkcs: + self.traverse_single(fkc) + + def visit_table( + self, + table, + create_ok=False, + include_foreign_key_constraints=None, + _is_metadata_operation=False, + ): + if not create_ok and not self._can_create_table(table): + return + + with self.with_ddl_events( + table, + checkfirst=self.checkfirst, + _is_metadata_operation=_is_metadata_operation, + ): + for column in table.columns: + if column.default is not None: + self.traverse_single(column.default) + + if not self.dialect.supports_alter: + # e.g., don't omit any foreign key constraints + include_foreign_key_constraints = None + + CreateTable( + table, + include_foreign_key_constraints=( + include_foreign_key_constraints + ), + )._invoke_with(self.connection) + + if hasattr(table, "indexes"): + for index in table.indexes: + self.traverse_single(index, create_ok=True) + + if ( + self.dialect.supports_comments + and not self.dialect.inline_comments + ): + if table.comment is not None: + SetTableComment(table)._invoke_with(self.connection) + + for column in table.columns: + if column.comment is not None: + SetColumnComment(column)._invoke_with(self.connection) + + if self.dialect.supports_constraint_comments: + for constraint in table.constraints: + if constraint.comment is not None: + self.connection.execute( + SetConstraintComment(constraint) + ) + + def visit_foreign_key_constraint(self, constraint): + if not self.dialect.supports_alter: + return + + with self.with_ddl_events(constraint): + AddConstraint(constraint)._invoke_with(self.connection) + + def visit_sequence(self, sequence, create_ok=False): + if not create_ok and not self._can_create_sequence(sequence): + return + with self.with_ddl_events(sequence): + CreateSequence(sequence)._invoke_with(self.connection) + + def visit_index(self, index, create_ok=False): + if not create_ok and not self._can_create_index(index): + return + with self.with_ddl_events(index): + CreateIndex(index)._invoke_with(self.connection) + + +class SchemaDropper(InvokeDropDDLBase): + def __init__( + self, dialect, connection, checkfirst=False, tables=None, **kwargs + ): + super().__init__(connection, **kwargs) + self.checkfirst = checkfirst + self.tables = tables + self.preparer = dialect.identifier_preparer + self.dialect = dialect + self.memo = {} + + def visit_metadata(self, metadata): + if self.tables is not None: + tables = self.tables + else: + tables = list(metadata.tables.values()) + + try: + unsorted_tables = [t for t in tables if self._can_drop_table(t)] + collection = list( + reversed( + sort_tables_and_constraints( + unsorted_tables, + filter_fn=lambda constraint: ( + False + if not self.dialect.supports_alter + or constraint.name is None + else None + ), + ) + ) + ) + except exc.CircularDependencyError as err2: + if not self.dialect.supports_alter: + util.warn( + "Can't sort tables for DROP; an " + "unresolvable foreign key " + "dependency exists between tables: %s; and backend does " + "not support ALTER. To restore at least a partial sort, " + "apply use_alter=True to ForeignKey and " + "ForeignKeyConstraint " + "objects involved in the cycle to mark these as known " + "cycles that will be ignored." 
+ % (", ".join(sorted([t.fullname for t in err2.cycles]))) + ) + collection = [(t, ()) for t in unsorted_tables] + else: + raise exc.CircularDependencyError( + err2.args[0], + err2.cycles, + err2.edges, + msg="Can't sort tables for DROP; an " + "unresolvable foreign key " + "dependency exists between tables: %s. Please ensure " + "that the ForeignKey and ForeignKeyConstraint objects " + "involved in the cycle have " + "names so that they can be dropped using " + "DROP CONSTRAINT." + % (", ".join(sorted([t.fullname for t in err2.cycles]))), + ) from err2 + + seq_coll = [ + s + for s in metadata._sequences.values() + if self._can_drop_sequence(s) + ] + + event_collection = [t for (t, fks) in collection if t is not None] + + with self.with_ddl_events( + metadata, + tables=event_collection, + checkfirst=self.checkfirst, + ): + for table, fkcs in collection: + if table is not None: + self.traverse_single( + table, + drop_ok=True, + _is_metadata_operation=True, + _ignore_sequences=seq_coll, + ) + else: + for fkc in fkcs: + self.traverse_single(fkc) + + for seq in seq_coll: + self.traverse_single(seq, drop_ok=seq.column is None) + + def _can_drop_table(self, table): + self.dialect.validate_identifier(table.name) + effective_schema = self.connection.schema_for_object(table) + if effective_schema: + self.dialect.validate_identifier(effective_schema) + return not self.checkfirst or self.dialect.has_table( + self.connection, table.name, schema=effective_schema + ) + + def _can_drop_index(self, index): + effective_schema = self.connection.schema_for_object(index.table) + if effective_schema: + self.dialect.validate_identifier(effective_schema) + return not self.checkfirst or self.dialect.has_index( + self.connection, + index.table.name, + index.name, + schema=effective_schema, + ) + + def _can_drop_sequence(self, sequence): + effective_schema = self.connection.schema_for_object(sequence) + return self.dialect.supports_sequences and ( + (not self.dialect.sequences_optional or not sequence.optional) + and ( + not self.checkfirst + or self.dialect.has_sequence( + self.connection, sequence.name, schema=effective_schema + ) + ) + ) + + def visit_index(self, index, drop_ok=False): + if not drop_ok and not self._can_drop_index(index): + return + + with self.with_ddl_events(index): + DropIndex(index)(index, self.connection) + + def visit_table( + self, + table, + drop_ok=False, + _is_metadata_operation=False, + _ignore_sequences=(), + ): + if not drop_ok and not self._can_drop_table(table): + return + + with self.with_ddl_events( + table, + checkfirst=self.checkfirst, + _is_metadata_operation=_is_metadata_operation, + ): + DropTable(table)._invoke_with(self.connection) + + # traverse client side defaults which may refer to server-side + # sequences. noting that some of these client side defaults may + # also be set up as server side defaults + # (see https://docs.sqlalchemy.org/en/ + # latest/core/defaults.html + # #associating-a-sequence-as-the-server-side- + # default), so have to be dropped after the table is dropped. 
+            for column in table.columns:
+                if (
+                    column.default is not None
+                    and column.default not in _ignore_sequences
+                ):
+                    self.traverse_single(column.default)
+
+    def visit_foreign_key_constraint(self, constraint):
+        if not self.dialect.supports_alter:
+            return
+        with self.with_ddl_events(constraint):
+            DropConstraint(constraint)._invoke_with(self.connection)
+
+    def visit_sequence(self, sequence, drop_ok=False):
+        if not drop_ok and not self._can_drop_sequence(sequence):
+            return
+        with self.with_ddl_events(sequence):
+            DropSequence(sequence)._invoke_with(self.connection)
+
+
+def sort_tables(
+    tables: Iterable[TableClause],
+    skip_fn: Optional[Callable[[ForeignKeyConstraint], bool]] = None,
+    extra_dependencies: Optional[
+        typing_Sequence[Tuple[TableClause, TableClause]]
+    ] = None,
+) -> List[Table]:
+    """Sort a collection of :class:`_schema.Table` objects based on
+    dependency.
+
+    This is a dependency-ordered sort which will emit :class:`_schema.Table`
+    objects such that they will follow their dependent :class:`_schema.Table`
+    objects.
+    Tables are dependent on one another based on the presence of
+    :class:`_schema.ForeignKeyConstraint`
+    objects as well as explicit dependencies
+    added by :meth:`_schema.Table.add_is_dependent_on`.
+
+    .. warning::
+
+        The :func:`._schema.sort_tables` function cannot by itself
+        accommodate automatic resolution of dependency cycles between
+        tables, which are usually caused by mutually dependent foreign key
+        constraints.  When these cycles are detected, the foreign keys
+        of these tables are omitted from consideration in the sort.
+        A warning is emitted when this condition occurs, which will become
+        an exception in a future release.  Tables which are not part
+        of the cycle will still be returned in dependency order.
+
+        To resolve these cycles, the
+        :paramref:`_schema.ForeignKeyConstraint.use_alter` parameter may be
+        applied to those constraints which create a cycle.  Alternatively,
+        the :func:`_schema.sort_tables_and_constraints` function will
+        automatically return foreign key constraints in a separate
+        collection when cycles are detected so that they may be applied
+        to a schema separately.
+
+    .. versionchanged:: 1.3.17 - a warning is emitted when
+       :func:`_schema.sort_tables` cannot perform a proper sort due to
+       cyclical dependencies.  This will be an exception in a future
+       release.  Additionally, the sort will continue to return
+       other tables not involved in the cycle in dependency order
+       which was not the case previously.
+
+    :param tables: a sequence of :class:`_schema.Table` objects.
+
+    :param skip_fn: optional callable which will be passed a
+     :class:`_schema.ForeignKey` object; if it returns True, this
+     constraint will not be considered as a dependency.  Note this is
+     **different** from the same parameter in
+     :func:`.sort_tables_and_constraints`, which is
+     instead passed the owning :class:`_schema.ForeignKeyConstraint` object.
+
+    :param extra_dependencies: a sequence of 2-tuples of tables which will
+     also be considered as dependent on each other.
+
+    .. seealso::
+
+        :func:`.sort_tables_and_constraints`
+
+        :attr:`_schema.MetaData.sorted_tables` - uses this function to sort
+
+
+    """
+
+    if skip_fn is not None:
+        fixed_skip_fn = skip_fn
+
+        def _skip_fn(fkc):
+            for fk in fkc.elements:
+                if fixed_skip_fn(fk):
+                    return True
+            else:
+                return None
+
+    else:
+        _skip_fn = None  # type: ignore
+
+    return [
+        t
+        for (t, fkcs) in sort_tables_and_constraints(
+            tables,
+            filter_fn=_skip_fn,
+            extra_dependencies=extra_dependencies,
+            _warn_for_cycles=True,
+        )
+        if t is not None
+    ]
+
+
+def sort_tables_and_constraints(
+    tables, filter_fn=None, extra_dependencies=None, _warn_for_cycles=False
+):
+    """Sort a collection of :class:`_schema.Table` /
+    :class:`_schema.ForeignKeyConstraint`
+    objects.
+
+    This is a dependency-ordered sort which will emit tuples of
+    ``(Table, [ForeignKeyConstraint, ...])`` such that each
+    :class:`_schema.Table` follows its dependent :class:`_schema.Table`
+    objects.
+    Remaining :class:`_schema.ForeignKeyConstraint`
+    objects that are separate due to
+    dependency rules not satisfied by the sort are emitted afterwards
+    as ``(None, [ForeignKeyConstraint ...])``.
+
+    Tables are dependent on one another based on the presence of
+    :class:`_schema.ForeignKeyConstraint` objects, explicit dependencies
+    added by :meth:`_schema.Table.add_is_dependent_on`,
+    as well as dependencies
+    stated here using the :paramref:`~.sort_tables_and_constraints.filter_fn`
+    and/or :paramref:`~.sort_tables_and_constraints.extra_dependencies`
+    parameters.
+
+    :param tables: a sequence of :class:`_schema.Table` objects.
+
+    :param filter_fn: optional callable which will be passed a
+     :class:`_schema.ForeignKeyConstraint` object,
+     and returns a value based on
+     whether this constraint should definitely be included or excluded as
+     an inline constraint, or neither.  If it returns False, the constraint
+     will definitely be included as a dependency that cannot be subject
+     to ALTER; if True, it will **only** be included as an ALTER result at
+     the end.  Returning None means the constraint is included in the
+     table-based result unless it is detected as part of a dependency cycle.
+
+    :param extra_dependencies: a sequence of 2-tuples of tables which will
+     also be considered as dependent on each other.
+
+    .. seealso::
+
+        :func:`.sort_tables`
+
+
+    """
+
+    fixed_dependencies = set()
+    mutable_dependencies = set()
+
+    if extra_dependencies is not None:
+        fixed_dependencies.update(extra_dependencies)
+
+    remaining_fkcs = set()
+    for table in tables:
+        for fkc in table.foreign_key_constraints:
+            if fkc.use_alter is True:
+                remaining_fkcs.add(fkc)
+                continue
+
+            if filter_fn:
+                filtered = filter_fn(fkc)
+
+                if filtered is True:
+                    remaining_fkcs.add(fkc)
+                    continue
+
+            dependent_on = fkc.referred_table
+            if dependent_on is not table:
+                mutable_dependencies.add((dependent_on, table))
+
+        fixed_dependencies.update(
+            (parent, table) for parent in table._extra_dependencies
+        )
+
+    try:
+        candidate_sort = list(
+            topological.sort(
+                fixed_dependencies.union(mutable_dependencies),
+                tables,
+            )
+        )
+    except exc.CircularDependencyError as err:
+        if _warn_for_cycles:
+            util.warn(
+                "Cannot correctly sort tables; there are unresolvable cycles "
+                'between tables "%s", which is usually caused by mutually '
+                "dependent foreign key constraints.  Foreign key constraints "
+                "involving these tables will not be considered; this warning "
+                "may raise an error in a future release."
+ % (", ".join(sorted(t.fullname for t in err.cycles)),) + ) + for edge in err.edges: + if edge in mutable_dependencies: + table = edge[1] + if table not in err.cycles: + continue + can_remove = [ + fkc + for fkc in table.foreign_key_constraints + if filter_fn is None or filter_fn(fkc) is not False + ] + remaining_fkcs.update(can_remove) + for fkc in can_remove: + dependent_on = fkc.referred_table + if dependent_on is not table: + mutable_dependencies.discard((dependent_on, table)) + candidate_sort = list( + topological.sort( + fixed_dependencies.union(mutable_dependencies), + tables, + ) + ) + + return [ + (table, table.foreign_key_constraints.difference(remaining_fkcs)) + for table in candidate_sort + ] + [(None, list(remaining_fkcs))] diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/default_comparator.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/default_comparator.py new file mode 100644 index 00000000..76131bca --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/default_comparator.py @@ -0,0 +1,552 @@ +# sql/default_comparator.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Default implementation of SQL comparison operations. +""" + +from __future__ import annotations + +import typing +from typing import Any +from typing import Callable +from typing import Dict +from typing import NoReturn +from typing import Optional +from typing import Tuple +from typing import Type +from typing import Union + +from . import coercions +from . import operators +from . import roles +from . import type_api +from .elements import and_ +from .elements import BinaryExpression +from .elements import ClauseElement +from .elements import CollationClause +from .elements import CollectionAggregate +from .elements import ExpressionClauseList +from .elements import False_ +from .elements import Null +from .elements import OperatorExpression +from .elements import or_ +from .elements import True_ +from .elements import UnaryExpression +from .operators import OperatorType +from .. import exc +from .. import util + +_T = typing.TypeVar("_T", bound=Any) + +if typing.TYPE_CHECKING: + from .elements import ColumnElement + from .operators import custom_op + from .type_api import TypeEngine + + +def _boolean_compare( + expr: ColumnElement[Any], + op: OperatorType, + obj: Any, + *, + negate_op: Optional[OperatorType] = None, + reverse: bool = False, + _python_is_types: Tuple[Type[Any], ...] = (type(None), bool), + result_type: Optional[TypeEngine[bool]] = None, + **kwargs: Any, +) -> OperatorExpression[bool]: + if result_type is None: + result_type = type_api.BOOLEANTYPE + + if isinstance(obj, _python_is_types + (Null, True_, False_)): + # allow x ==/!= True/False to be treated as a literal. 
+ # this comes out to "== / != true/false" or "1/0" if those + # constants aren't supported and works on all platforms + if op in (operators.eq, operators.ne) and isinstance( + obj, (bool, True_, False_) + ): + return OperatorExpression._construct_for_op( + expr, + coercions.expect(roles.ConstExprRole, obj), + op, + type_=result_type, + negate=negate_op, + modifiers=kwargs, + ) + elif op in ( + operators.is_distinct_from, + operators.is_not_distinct_from, + ): + return OperatorExpression._construct_for_op( + expr, + coercions.expect(roles.ConstExprRole, obj), + op, + type_=result_type, + negate=negate_op, + modifiers=kwargs, + ) + elif expr._is_collection_aggregate: + obj = coercions.expect( + roles.ConstExprRole, element=obj, operator=op, expr=expr + ) + else: + # all other None uses IS, IS NOT + if op in (operators.eq, operators.is_): + return OperatorExpression._construct_for_op( + expr, + coercions.expect(roles.ConstExprRole, obj), + operators.is_, + negate=operators.is_not, + type_=result_type, + ) + elif op in (operators.ne, operators.is_not): + return OperatorExpression._construct_for_op( + expr, + coercions.expect(roles.ConstExprRole, obj), + operators.is_not, + negate=operators.is_, + type_=result_type, + ) + else: + raise exc.ArgumentError( + "Only '=', '!=', 'is_()', 'is_not()', " + "'is_distinct_from()', 'is_not_distinct_from()' " + "operators can be used with None/True/False" + ) + else: + obj = coercions.expect( + roles.BinaryElementRole, element=obj, operator=op, expr=expr + ) + + if reverse: + return OperatorExpression._construct_for_op( + obj, + expr, + op, + type_=result_type, + negate=negate_op, + modifiers=kwargs, + ) + else: + return OperatorExpression._construct_for_op( + expr, + obj, + op, + type_=result_type, + negate=negate_op, + modifiers=kwargs, + ) + + +def _custom_op_operate( + expr: ColumnElement[Any], + op: custom_op[Any], + obj: Any, + reverse: bool = False, + result_type: Optional[TypeEngine[Any]] = None, + **kw: Any, +) -> ColumnElement[Any]: + if result_type is None: + if op.return_type: + result_type = op.return_type + elif op.is_comparison: + result_type = type_api.BOOLEANTYPE + + return _binary_operate( + expr, op, obj, reverse=reverse, result_type=result_type, **kw + ) + + +def _binary_operate( + expr: ColumnElement[Any], + op: OperatorType, + obj: roles.BinaryElementRole[Any], + *, + reverse: bool = False, + result_type: Optional[TypeEngine[_T]] = None, + **kw: Any, +) -> OperatorExpression[_T]: + coerced_obj = coercions.expect( + roles.BinaryElementRole, obj, expr=expr, operator=op + ) + + if reverse: + left, right = coerced_obj, expr + else: + left, right = expr, coerced_obj + + if result_type is None: + op, result_type = left.comparator._adapt_expression( + op, right.comparator + ) + + return OperatorExpression._construct_for_op( + left, right, op, type_=result_type, modifiers=kw + ) + + +def _conjunction_operate( + expr: ColumnElement[Any], op: OperatorType, other: Any, **kw: Any +) -> ColumnElement[Any]: + if op is operators.and_: + return and_(expr, other) + elif op is operators.or_: + return or_(expr, other) + else: + raise NotImplementedError() + + +def _scalar( + expr: ColumnElement[Any], + op: OperatorType, + fn: Callable[[ColumnElement[Any]], ColumnElement[Any]], + **kw: Any, +) -> ColumnElement[Any]: + return fn(expr) + + +def _in_impl( + expr: ColumnElement[Any], + op: OperatorType, + seq_or_selectable: ClauseElement, + negate_op: OperatorType, + **kw: Any, +) -> ColumnElement[Any]: + seq_or_selectable = coercions.expect( + 
roles.InElementRole, seq_or_selectable, expr=expr, operator=op + ) + if "in_ops" in seq_or_selectable._annotations: + op, negate_op = seq_or_selectable._annotations["in_ops"] + + return _boolean_compare( + expr, op, seq_or_selectable, negate_op=negate_op, **kw + ) + + +def _getitem_impl( + expr: ColumnElement[Any], op: OperatorType, other: Any, **kw: Any +) -> ColumnElement[Any]: + if ( + isinstance(expr.type, type_api.INDEXABLE) + or isinstance(expr.type, type_api.TypeDecorator) + and isinstance(expr.type.impl_instance, type_api.INDEXABLE) + ): + other = coercions.expect( + roles.BinaryElementRole, other, expr=expr, operator=op + ) + return _binary_operate(expr, op, other, **kw) + else: + _unsupported_impl(expr, op, other, **kw) + + +def _unsupported_impl( + expr: ColumnElement[Any], op: OperatorType, *arg: Any, **kw: Any +) -> NoReturn: + raise NotImplementedError( + "Operator '%s' is not supported on this expression" % op.__name__ + ) + + +def _inv_impl( + expr: ColumnElement[Any], op: OperatorType, **kw: Any +) -> ColumnElement[Any]: + """See :meth:`.ColumnOperators.__inv__`.""" + + # undocumented element currently used by the ORM for + # relationship.contains() + if hasattr(expr, "negation_clause"): + return expr.negation_clause + else: + return expr._negate() + + +def _neg_impl( + expr: ColumnElement[Any], op: OperatorType, **kw: Any +) -> ColumnElement[Any]: + """See :meth:`.ColumnOperators.__neg__`.""" + return UnaryExpression(expr, operator=operators.neg, type_=expr.type) + + +def _bitwise_not_impl( + expr: ColumnElement[Any], op: OperatorType, **kw: Any +) -> ColumnElement[Any]: + """See :meth:`.ColumnOperators.bitwise_not`.""" + + return UnaryExpression( + expr, operator=operators.bitwise_not_op, type_=expr.type + ) + + +def _match_impl( + expr: ColumnElement[Any], op: OperatorType, other: Any, **kw: Any +) -> ColumnElement[Any]: + """See :meth:`.ColumnOperators.match`.""" + + return _boolean_compare( + expr, + operators.match_op, + coercions.expect( + roles.BinaryElementRole, + other, + expr=expr, + operator=operators.match_op, + ), + result_type=type_api.MATCHTYPE, + negate_op=( + operators.not_match_op + if op is operators.match_op + else operators.match_op + ), + **kw, + ) + + +def _distinct_impl( + expr: ColumnElement[Any], op: OperatorType, **kw: Any +) -> ColumnElement[Any]: + """See :meth:`.ColumnOperators.distinct`.""" + return UnaryExpression( + expr, operator=operators.distinct_op, type_=expr.type + ) + + +def _between_impl( + expr: ColumnElement[Any], + op: OperatorType, + cleft: Any, + cright: Any, + **kw: Any, +) -> ColumnElement[Any]: + """See :meth:`.ColumnOperators.between`.""" + return BinaryExpression( + expr, + ExpressionClauseList._construct_for_list( + operators.and_, + type_api.NULLTYPE, + coercions.expect( + roles.BinaryElementRole, + cleft, + expr=expr, + operator=operators.and_, + ), + coercions.expect( + roles.BinaryElementRole, + cright, + expr=expr, + operator=operators.and_, + ), + group=False, + ), + op, + negate=( + operators.not_between_op + if op is operators.between_op + else operators.between_op + ), + modifiers=kw, + ) + + +def _collate_impl( + expr: ColumnElement[str], op: OperatorType, collation: str, **kw: Any +) -> ColumnElement[str]: + return CollationClause._create_collation_expression(expr, collation) + + +def _regexp_match_impl( + expr: ColumnElement[str], + op: OperatorType, + pattern: Any, + flags: Optional[str], + **kw: Any, +) -> ColumnElement[Any]: + return BinaryExpression( + expr, + coercions.expect( + roles.BinaryElementRole, 
+ pattern, + expr=expr, + operator=operators.comma_op, + ), + op, + negate=operators.not_regexp_match_op, + modifiers={"flags": flags}, + ) + + +def _regexp_replace_impl( + expr: ColumnElement[Any], + op: OperatorType, + pattern: Any, + replacement: Any, + flags: Optional[str], + **kw: Any, +) -> ColumnElement[Any]: + return BinaryExpression( + expr, + ExpressionClauseList._construct_for_list( + operators.comma_op, + type_api.NULLTYPE, + coercions.expect( + roles.BinaryElementRole, + pattern, + expr=expr, + operator=operators.comma_op, + ), + coercions.expect( + roles.BinaryElementRole, + replacement, + expr=expr, + operator=operators.comma_op, + ), + group=False, + ), + op, + modifiers={"flags": flags}, + ) + + +# a mapping of operators with the method they use, along with +# additional keyword arguments to be passed +operator_lookup: Dict[ + str, + Tuple[ + Callable[..., ColumnElement[Any]], + util.immutabledict[ + str, Union[OperatorType, Callable[..., ColumnElement[Any]]] + ], + ], +] = { + "and_": (_conjunction_operate, util.EMPTY_DICT), + "or_": (_conjunction_operate, util.EMPTY_DICT), + "inv": (_inv_impl, util.EMPTY_DICT), + "add": (_binary_operate, util.EMPTY_DICT), + "mul": (_binary_operate, util.EMPTY_DICT), + "sub": (_binary_operate, util.EMPTY_DICT), + "div": (_binary_operate, util.EMPTY_DICT), + "mod": (_binary_operate, util.EMPTY_DICT), + "bitwise_xor_op": (_binary_operate, util.EMPTY_DICT), + "bitwise_or_op": (_binary_operate, util.EMPTY_DICT), + "bitwise_and_op": (_binary_operate, util.EMPTY_DICT), + "bitwise_not_op": (_bitwise_not_impl, util.EMPTY_DICT), + "bitwise_lshift_op": (_binary_operate, util.EMPTY_DICT), + "bitwise_rshift_op": (_binary_operate, util.EMPTY_DICT), + "truediv": (_binary_operate, util.EMPTY_DICT), + "floordiv": (_binary_operate, util.EMPTY_DICT), + "custom_op": (_custom_op_operate, util.EMPTY_DICT), + "json_path_getitem_op": (_binary_operate, util.EMPTY_DICT), + "json_getitem_op": (_binary_operate, util.EMPTY_DICT), + "concat_op": (_binary_operate, util.EMPTY_DICT), + "any_op": ( + _scalar, + util.immutabledict({"fn": CollectionAggregate._create_any}), + ), + "all_op": ( + _scalar, + util.immutabledict({"fn": CollectionAggregate._create_all}), + ), + "lt": (_boolean_compare, util.immutabledict({"negate_op": operators.ge})), + "le": (_boolean_compare, util.immutabledict({"negate_op": operators.gt})), + "ne": (_boolean_compare, util.immutabledict({"negate_op": operators.eq})), + "gt": (_boolean_compare, util.immutabledict({"negate_op": operators.le})), + "ge": (_boolean_compare, util.immutabledict({"negate_op": operators.lt})), + "eq": (_boolean_compare, util.immutabledict({"negate_op": operators.ne})), + "is_distinct_from": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.is_not_distinct_from}), + ), + "is_not_distinct_from": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.is_distinct_from}), + ), + "like_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_like_op}), + ), + "ilike_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_ilike_op}), + ), + "not_like_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.like_op}), + ), + "not_ilike_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.ilike_op}), + ), + "contains_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_contains_op}), + ), + "icontains_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_icontains_op}), + ), + 
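+    # the affix comparison operators below each supply a negate_op so
+    # that not_() / ~ can invert the expression without re-deriving it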
"startswith_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_startswith_op}), + ), + "istartswith_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_istartswith_op}), + ), + "endswith_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_endswith_op}), + ), + "iendswith_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_iendswith_op}), + ), + "desc_op": ( + _scalar, + util.immutabledict({"fn": UnaryExpression._create_desc}), + ), + "asc_op": ( + _scalar, + util.immutabledict({"fn": UnaryExpression._create_asc}), + ), + "nulls_first_op": ( + _scalar, + util.immutabledict({"fn": UnaryExpression._create_nulls_first}), + ), + "nulls_last_op": ( + _scalar, + util.immutabledict({"fn": UnaryExpression._create_nulls_last}), + ), + "in_op": ( + _in_impl, + util.immutabledict({"negate_op": operators.not_in_op}), + ), + "not_in_op": ( + _in_impl, + util.immutabledict({"negate_op": operators.in_op}), + ), + "is_": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.is_}), + ), + "is_not": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.is_not}), + ), + "collate": (_collate_impl, util.EMPTY_DICT), + "match_op": (_match_impl, util.EMPTY_DICT), + "not_match_op": (_match_impl, util.EMPTY_DICT), + "distinct_op": (_distinct_impl, util.EMPTY_DICT), + "between_op": (_between_impl, util.EMPTY_DICT), + "not_between_op": (_between_impl, util.EMPTY_DICT), + "neg": (_neg_impl, util.EMPTY_DICT), + "getitem": (_getitem_impl, util.EMPTY_DICT), + "lshift": (_unsupported_impl, util.EMPTY_DICT), + "rshift": (_unsupported_impl, util.EMPTY_DICT), + "contains": (_unsupported_impl, util.EMPTY_DICT), + "regexp_match_op": (_regexp_match_impl, util.EMPTY_DICT), + "not_regexp_match_op": (_regexp_match_impl, util.EMPTY_DICT), + "regexp_replace_op": (_regexp_replace_impl, util.EMPTY_DICT), +} diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/dml.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/dml.py new file mode 100644 index 00000000..779be1da --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/dml.py @@ -0,0 +1,1817 @@ +# sql/dml.py +# Copyright (C) 2009-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +""" +Provide :class:`_expression.Insert`, :class:`_expression.Update` and +:class:`_expression.Delete`. + +""" +from __future__ import annotations + +import collections.abc as collections_abc +import operator +from typing import Any +from typing import cast +from typing import Dict +from typing import Iterable +from typing import List +from typing import MutableMapping +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import coercions +from . import roles +from . 
import util as sql_util +from ._typing import _TP +from ._typing import _unexpected_kw +from ._typing import is_column_element +from ._typing import is_named_from_clause +from .base import _entity_namespace_key +from .base import _exclusive_against +from .base import _from_objects +from .base import _generative +from .base import _select_iterables +from .base import ColumnCollection +from .base import CompileState +from .base import DialectKWArgs +from .base import Executable +from .base import Generative +from .base import HasCompileState +from .elements import BooleanClauseList +from .elements import ClauseElement +from .elements import ColumnClause +from .elements import ColumnElement +from .elements import Null +from .selectable import Alias +from .selectable import ExecutableReturnsRows +from .selectable import FromClause +from .selectable import HasCTE +from .selectable import HasPrefixes +from .selectable import Join +from .selectable import SelectLabelStyle +from .selectable import TableClause +from .selectable import TypedReturnsRows +from .sqltypes import NullType +from .visitors import InternalTraversal +from .. import exc +from .. import util +from ..util.typing import Self +from ..util.typing import TypeGuard + +if TYPE_CHECKING: + from ._typing import _ColumnExpressionArgument + from ._typing import _ColumnsClauseArgument + from ._typing import _DMLColumnArgument + from ._typing import _DMLColumnKeyMapping + from ._typing import _DMLTableArgument + from ._typing import _T0 # noqa + from ._typing import _T1 # noqa + from ._typing import _T2 # noqa + from ._typing import _T3 # noqa + from ._typing import _T4 # noqa + from ._typing import _T5 # noqa + from ._typing import _T6 # noqa + from ._typing import _T7 # noqa + from ._typing import _TypedColumnClauseArgument as _TCCA # noqa + from .base import ReadOnlyColumnCollection + from .compiler import SQLCompiler + from .elements import KeyedColumnElement + from .selectable import _ColumnsClauseElement + from .selectable import _SelectIterable + from .selectable import Select + from .selectable import Selectable + + def isupdate(dml: DMLState) -> TypeGuard[UpdateDMLState]: ... + + def isdelete(dml: DMLState) -> TypeGuard[DeleteDMLState]: ... + + def isinsert(dml: DMLState) -> TypeGuard[InsertDMLState]: ... 
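+    # the TypeGuard stubs above exist only for static typing; at runtime
+    # the plain attrgetter fallbacks in the else branch below are used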
+ +else: + isupdate = operator.attrgetter("isupdate") + isdelete = operator.attrgetter("isdelete") + isinsert = operator.attrgetter("isinsert") + + +_T = TypeVar("_T", bound=Any) + +_DMLColumnElement = Union[str, ColumnClause[Any]] +_DMLTableElement = Union[TableClause, Alias, Join] + + +class DMLState(CompileState): + _no_parameters = True + _dict_parameters: Optional[MutableMapping[_DMLColumnElement, Any]] = None + _multi_parameters: Optional[ + List[MutableMapping[_DMLColumnElement, Any]] + ] = None + _ordered_values: Optional[List[Tuple[_DMLColumnElement, Any]]] = None + _parameter_ordering: Optional[List[_DMLColumnElement]] = None + _primary_table: FromClause + _supports_implicit_returning = True + + isupdate = False + isdelete = False + isinsert = False + + statement: UpdateBase + + def __init__( + self, statement: UpdateBase, compiler: SQLCompiler, **kw: Any + ): + raise NotImplementedError() + + @classmethod + def get_entity_description(cls, statement: UpdateBase) -> Dict[str, Any]: + return { + "name": ( + statement.table.name + if is_named_from_clause(statement.table) + else None + ), + "table": statement.table, + } + + @classmethod + def get_returning_column_descriptions( + cls, statement: UpdateBase + ) -> List[Dict[str, Any]]: + return [ + { + "name": c.key, + "type": c.type, + "expr": c, + } + for c in statement._all_selected_columns + ] + + @property + def dml_table(self) -> _DMLTableElement: + return self.statement.table + + if TYPE_CHECKING: + + @classmethod + def get_plugin_class(cls, statement: Executable) -> Type[DMLState]: ... + + @classmethod + def _get_multi_crud_kv_pairs( + cls, + statement: UpdateBase, + multi_kv_iterator: Iterable[Dict[_DMLColumnArgument, Any]], + ) -> List[Dict[_DMLColumnElement, Any]]: + return [ + { + coercions.expect(roles.DMLColumnRole, k): v + for k, v in mapping.items() + } + for mapping in multi_kv_iterator + ] + + @classmethod + def _get_crud_kv_pairs( + cls, + statement: UpdateBase, + kv_iterator: Iterable[Tuple[_DMLColumnArgument, Any]], + needs_to_be_cacheable: bool, + ) -> List[Tuple[_DMLColumnElement, Any]]: + return [ + ( + coercions.expect(roles.DMLColumnRole, k), + ( + v + if not needs_to_be_cacheable + else coercions.expect( + roles.ExpressionElementRole, + v, + type_=NullType(), + is_crud=True, + ) + ), + ) + for k, v in kv_iterator + ] + + def _make_extra_froms( + self, statement: DMLWhereBase + ) -> Tuple[FromClause, List[FromClause]]: + froms: List[FromClause] = [] + + all_tables = list(sql_util.tables_from_leftmost(statement.table)) + primary_table = all_tables[0] + seen = {primary_table} + + consider = statement._where_criteria + if self._dict_parameters: + consider += tuple(self._dict_parameters.values()) + + for crit in consider: + for item in _from_objects(crit): + if not seen.intersection(item._cloned_set): + froms.append(item) + seen.update(item._cloned_set) + + froms.extend(all_tables[1:]) + return primary_table, froms + + def _process_values(self, statement: ValuesBase) -> None: + if self._no_parameters: + self._dict_parameters = statement._values + self._no_parameters = False + + def _process_select_values(self, statement: ValuesBase) -> None: + assert statement._select_names is not None + parameters: MutableMapping[_DMLColumnElement, Any] = { + name: Null() for name in statement._select_names + } + + if self._no_parameters: + self._no_parameters = False + self._dict_parameters = parameters + else: + # this condition normally not reachable as the Insert + # does not allow this construction to occur + assert False, 
"This statement already has parameters" + + def _no_multi_values_supported(self, statement: ValuesBase) -> NoReturn: + raise exc.InvalidRequestError( + "%s construct does not support " + "multiple parameter sets." % statement.__visit_name__.upper() + ) + + def _cant_mix_formats_error(self) -> NoReturn: + raise exc.InvalidRequestError( + "Can't mix single and multiple VALUES " + "formats in one INSERT statement; one style appends to a " + "list while the other replaces values, so the intent is " + "ambiguous." + ) + + +@CompileState.plugin_for("default", "insert") +class InsertDMLState(DMLState): + isinsert = True + + include_table_with_column_exprs = False + + _has_multi_parameters = False + + def __init__( + self, + statement: Insert, + compiler: SQLCompiler, + disable_implicit_returning: bool = False, + **kw: Any, + ): + self.statement = statement + self._primary_table = statement.table + + if disable_implicit_returning: + self._supports_implicit_returning = False + + self.isinsert = True + if statement._select_names: + self._process_select_values(statement) + if statement._values is not None: + self._process_values(statement) + if statement._multi_values: + self._process_multi_values(statement) + + @util.memoized_property + def _insert_col_keys(self) -> List[str]: + # this is also done in crud.py -> _key_getters_for_crud_column + return [ + coercions.expect(roles.DMLColumnRole, col, as_key=True) + for col in self._dict_parameters or () + ] + + def _process_values(self, statement: ValuesBase) -> None: + if self._no_parameters: + self._has_multi_parameters = False + self._dict_parameters = statement._values + self._no_parameters = False + elif self._has_multi_parameters: + self._cant_mix_formats_error() + + def _process_multi_values(self, statement: ValuesBase) -> None: + for parameters in statement._multi_values: + multi_parameters: List[MutableMapping[_DMLColumnElement, Any]] = [ + ( + { + c.key: value + for c, value in zip(statement.table.c, parameter_set) + } + if isinstance(parameter_set, collections_abc.Sequence) + else parameter_set + ) + for parameter_set in parameters + ] + + if self._no_parameters: + self._no_parameters = False + self._has_multi_parameters = True + self._multi_parameters = multi_parameters + self._dict_parameters = self._multi_parameters[0] + elif not self._has_multi_parameters: + self._cant_mix_formats_error() + else: + assert self._multi_parameters + self._multi_parameters.extend(multi_parameters) + + +@CompileState.plugin_for("default", "update") +class UpdateDMLState(DMLState): + isupdate = True + + include_table_with_column_exprs = False + + def __init__(self, statement: Update, compiler: SQLCompiler, **kw: Any): + self.statement = statement + + self.isupdate = True + if statement._ordered_values is not None: + self._process_ordered_values(statement) + elif statement._values is not None: + self._process_values(statement) + elif statement._multi_values: + self._no_multi_values_supported(statement) + t, ef = self._make_extra_froms(statement) + self._primary_table = t + self._extra_froms = ef + + self.is_multitable = mt = ef + self.include_table_with_column_exprs = bool( + mt and compiler.render_table_with_column_in_update_from + ) + + def _process_ordered_values(self, statement: ValuesBase) -> None: + parameters = statement._ordered_values + + if self._no_parameters: + self._no_parameters = False + assert parameters is not None + self._dict_parameters = dict(parameters) + self._ordered_values = parameters + self._parameter_ordering = [key for key, value in 
parameters]
+        else:
+            raise exc.InvalidRequestError(
+                "Can only invoke ordered_values() once, and not mixed "
+                "with any other values() call"
+            )
+
+
+@CompileState.plugin_for("default", "delete")
+class DeleteDMLState(DMLState):
+    isdelete = True
+
+    def __init__(self, statement: Delete, compiler: SQLCompiler, **kw: Any):
+        self.statement = statement
+
+        self.isdelete = True
+        t, ef = self._make_extra_froms(statement)
+        self._primary_table = t
+        self._extra_froms = ef
+        self.is_multitable = ef
+
+
+class UpdateBase(
+    roles.DMLRole,
+    HasCTE,
+    HasCompileState,
+    DialectKWArgs,
+    HasPrefixes,
+    Generative,
+    ExecutableReturnsRows,
+    ClauseElement,
+):
+    """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements."""
+
+    __visit_name__ = "update_base"
+
+    _hints: util.immutabledict[Tuple[_DMLTableElement, str], str] = (
+        util.EMPTY_DICT
+    )
+    named_with_column = False
+
+    _label_style: SelectLabelStyle = (
+        SelectLabelStyle.LABEL_STYLE_DISAMBIGUATE_ONLY
+    )
+    table: _DMLTableElement
+
+    _return_defaults = False
+    _return_defaults_columns: Optional[Tuple[_ColumnsClauseElement, ...]] = (
+        None
+    )
+    _supplemental_returning: Optional[Tuple[_ColumnsClauseElement, ...]] = None
+    _returning: Tuple[_ColumnsClauseElement, ...] = ()
+
+    is_dml = True
+
+    def _generate_fromclause_column_proxies(
+        self, fromclause: FromClause
+    ) -> None:
+        fromclause._columns._populate_separate_keys(
+            col._make_proxy(fromclause)
+            for col in self._all_selected_columns
+            if is_column_element(col)
+        )
+
+    def params(self, *arg: Any, **kw: Any) -> NoReturn:
+        """Set the parameters for the statement.
+
+        This method raises ``NotImplementedError`` on the base class,
+        and is overridden by :class:`.ValuesBase` to provide the
+        SET/VALUES clause of UPDATE and INSERT.
+
+        """
+        raise NotImplementedError(
+            "params() is not supported for INSERT/UPDATE/DELETE statements."
+            " To set the values for an INSERT or UPDATE statement, use"
+            " stmt.values(**parameters)."
+        )
+
+    @_generative
+    def with_dialect_options(self, **opt: Any) -> Self:
+        """Add dialect options to this INSERT/UPDATE/DELETE object.
+
+        e.g.::
+
+            upd = table.update().with_dialect_options(mysql_limit=10)
+
+        .. versionadded:: 1.4 - this method supersedes the dialect options
+           associated with the constructor.
+
+
+        """
+        self._validate_dialect_kwargs(opt)
+        return self
+
+    @_generative
+    def return_defaults(
+        self,
+        *cols: _DMLColumnArgument,
+        supplemental_cols: Optional[Iterable[_DMLColumnArgument]] = None,
+        sort_by_parameter_order: bool = False,
+    ) -> Self:
+        """Make use of a :term:`RETURNING` clause for the purpose
+        of fetching server-side expressions and defaults, for supporting
+        backends only.
+
+        .. deepalchemy::
+
+            The :meth:`.UpdateBase.return_defaults` method is used by the ORM
+            for its internal work in fetching newly generated primary key
+            and server default values, in particular to provide the underlying
+            implementation of the :paramref:`_orm.Mapper.eager_defaults`
+            ORM feature as well as to allow RETURNING support with bulk
+            ORM inserts.  Its behavior is fairly idiosyncratic
+            and is not really intended for general use.  End users should
+            stick with using :meth:`.UpdateBase.returning` in order to
+            add RETURNING clauses to their INSERT, UPDATE and DELETE
+            statements.
+
+        Normally, a single row INSERT statement will automatically populate the
+        :attr:`.CursorResult.inserted_primary_key` attribute when executed,
+        which stores the primary key of the row that was just inserted in the
+        form of a :class:`.Row` object with column names as named tuple keys
+        (and the :attr:`.Row._mapping` view fully populated as well).  The
+        dialect in use chooses the strategy to use in order to populate this
+        data; if it was generated using server-side defaults and / or SQL
+        expressions, dialect-specific approaches such as ``cursor.lastrowid``
+        or ``RETURNING`` are typically used to acquire the new primary key
+        value.
+
+        However, when the statement is modified by calling
+        :meth:`.UpdateBase.return_defaults` before executing the statement,
+        additional behaviors take place **only** for backends that support
+        RETURNING and for :class:`.Table` objects that maintain the
+        :paramref:`.Table.implicit_returning` parameter at its default value of
+        ``True``.  In these cases, when the :class:`.CursorResult` is returned
+        from the statement's execution, not only will
+        :attr:`.CursorResult.inserted_primary_key` be populated as always, the
+        :attr:`.CursorResult.returned_defaults` attribute will also be
+        populated with a :class:`.Row` named-tuple representing the full range
+        of server generated
+        values from that single row, including values for any columns that
+        specify :paramref:`_schema.Column.server_default` or which make use of
+        :paramref:`_schema.Column.default` using a SQL expression.
+
+        When invoking INSERT statements with multiple rows using
+        :ref:`insertmanyvalues <engine_insertmanyvalues>`, the
+        :meth:`.UpdateBase.return_defaults` modifier will have the effect of
+        the :attr:`_engine.CursorResult.inserted_primary_key_rows` and
+        :attr:`_engine.CursorResult.returned_defaults_rows` attributes being
+        fully populated with lists of :class:`.Row` objects representing newly
+        inserted primary key values as well as newly inserted server generated
+        values for each row inserted.  The
+        :attr:`.CursorResult.inserted_primary_key` and
+        :attr:`.CursorResult.returned_defaults` attributes will also continue
+        to be populated with the first row of these two collections.
+
+        If the backend does not support RETURNING or the :class:`.Table` in use
+        has disabled :paramref:`.Table.implicit_returning`, then no RETURNING
+        clause is added and no additional data is fetched, however the
+        INSERT, UPDATE or DELETE statement proceeds normally.
+
+        E.g.::
+
+            stmt = table.insert().values(data='newdata').return_defaults()
+
+            result = connection.execute(stmt)
+
+            server_created_at = result.returned_defaults['created_at']
+
+        When used against an UPDATE statement
+        :meth:`.UpdateBase.return_defaults` instead looks for columns that
+        include :paramref:`_schema.Column.onupdate` or
+        :paramref:`_schema.Column.server_onupdate` parameters assigned, when
+        constructing the columns that will be included in the RETURNING clause
+        by default if explicit columns were not specified.  When used against a
+        DELETE statement, no columns are included in RETURNING by default, they
+        instead must be specified explicitly as there are no columns that
+        normally change values when a DELETE statement proceeds.
+
+        .. versionadded:: 2.0  :meth:`.UpdateBase.return_defaults` is supported
+           for DELETE statements also and has been moved from
+           :class:`.ValuesBase` to :class:`.UpdateBase`.
+
+        The :meth:`.UpdateBase.return_defaults` method is mutually exclusive
+        against the :meth:`.UpdateBase.returning` method and errors will be
+        raised during the SQL compilation process if both are used at the same
+        time on one statement.  The RETURNING clause of the INSERT, UPDATE or
+        DELETE statement is therefore controlled by only one of these methods
+        at a time.
+
+        The :meth:`.UpdateBase.return_defaults` method differs from
+        :meth:`.UpdateBase.returning` in these ways:
+
+        1. The :meth:`.UpdateBase.return_defaults` method causes the
+           :attr:`.CursorResult.returned_defaults` collection to be populated
+           with the first row from the RETURNING result.  This attribute is not
+           populated when using :meth:`.UpdateBase.returning`.
+
+        2. :meth:`.UpdateBase.return_defaults` is compatible with existing
+           logic used to fetch auto-generated primary key values that are then
+           populated into the :attr:`.CursorResult.inserted_primary_key`
+           attribute.  By contrast, using :meth:`.UpdateBase.returning` will
+           have the effect of the :attr:`.CursorResult.inserted_primary_key`
+           attribute being left unpopulated.
+
+        3. :meth:`.UpdateBase.return_defaults` can be called against any
+           backend.  Backends that don't support RETURNING will skip the usage
+           of the feature, rather than raising an exception, *unless*
+           ``supplemental_cols`` is passed.  The return value
+           of :attr:`_engine.CursorResult.returned_defaults` will be ``None``
+           for backends that don't support RETURNING or for which the target
+           :class:`.Table` sets :paramref:`.Table.implicit_returning` to
+           ``False``.
+
+        4. An INSERT statement invoked with executemany() is supported if the
+           backend database driver supports the
+           :ref:`insertmanyvalues <engine_insertmanyvalues>`
+           feature which is now supported by most SQLAlchemy-included backends.
+           When executemany is used, the
+           :attr:`_engine.CursorResult.returned_defaults_rows` and
+           :attr:`_engine.CursorResult.inserted_primary_key_rows` accessors
+           will return the inserted defaults and primary keys.
+
+           .. versionadded:: 1.4 Added
+              :attr:`_engine.CursorResult.returned_defaults_rows` and
+              :attr:`_engine.CursorResult.inserted_primary_key_rows` accessors.
+              In version 2.0, the underlying implementation which fetches and
+              populates the data for these attributes was generalized to be
+              supported by most backends, whereas in 1.4 they were only
+              supported by the ``psycopg2`` driver.
+
+
+        :param cols: optional list of column key names or
+         :class:`_schema.Column` that acts as a filter for those columns that
+         will be fetched.
+        :param supplemental_cols: optional list of RETURNING expressions,
+         in the same form as one would pass to the
+         :meth:`.UpdateBase.returning` method.  When present, the additional
+         columns will be included in the RETURNING clause, and the
+         :class:`.CursorResult` object will be "rewound" when returned, so
+         that methods like :meth:`.CursorResult.all` will return new rows
+         mostly as though the statement used :meth:`.UpdateBase.returning`
+         directly.  However, unlike when using :meth:`.UpdateBase.returning`
+         directly, the **order of the columns is undefined**, so can only be
+         targeted using names or :attr:`.Row._mapping` keys; they cannot
+         reliably be targeted positionally.
+
+         .. versionadded:: 2.0
+
+        :param sort_by_parameter_order: for a batch INSERT that is being
+         executed against multiple parameter sets, organize the results of
+         RETURNING so that the returned rows correspond to the order of
+         parameter sets passed in.
This applies only to an :term:`executemany`
+         execution for supporting dialects and typically makes use of the
+         :term:`insertmanyvalues` feature.
+
+         .. versionadded:: 2.0.10
+
+         .. seealso::
+
+             :ref:`engine_insertmanyvalues_returning_order` - background on
+             sorting of RETURNING rows for bulk INSERT
+
+        .. seealso::
+
+            :meth:`.UpdateBase.returning`
+
+            :attr:`_engine.CursorResult.returned_defaults`
+
+            :attr:`_engine.CursorResult.returned_defaults_rows`
+
+            :attr:`_engine.CursorResult.inserted_primary_key`
+
+            :attr:`_engine.CursorResult.inserted_primary_key_rows`
+
+        """
+
+        if self._return_defaults:
+            # note _return_defaults_columns = () means return all columns,
+            # so if we have been here before, only update collection if there
+            # are columns in the collection
+            if self._return_defaults_columns and cols:
+                self._return_defaults_columns = tuple(
+                    util.OrderedSet(self._return_defaults_columns).union(
+                        coercions.expect(roles.ColumnsClauseRole, c)
+                        for c in cols
+                    )
+                )
+            else:
+                # set for all columns
+                self._return_defaults_columns = ()
+        else:
+            self._return_defaults_columns = tuple(
+                coercions.expect(roles.ColumnsClauseRole, c) for c in cols
+            )
+        self._return_defaults = True
+        if sort_by_parameter_order:
+            if not self.is_insert:
+                raise exc.ArgumentError(
+                    "The 'sort_by_parameter_order' argument to "
+                    "return_defaults() only applies to INSERT statements"
+                )
+            self._sort_by_parameter_order = True
+        if supplemental_cols:
+            # uniquifying while also maintaining order (maintaining the
+            # order matters for test suites and also for vertical splicing)
+            supplemental_col_tup = (
+                coercions.expect(roles.ColumnsClauseRole, c)
+                for c in supplemental_cols
+            )
+
+            if self._supplemental_returning is None:
+                self._supplemental_returning = tuple(
+                    util.unique_list(supplemental_col_tup)
+                )
+            else:
+                self._supplemental_returning = tuple(
+                    util.unique_list(
+                        self._supplemental_returning
+                        + tuple(supplemental_col_tup)
+                    )
+                )
+
+        return self
+
+    @_generative
+    def returning(
+        self,
+        *cols: _ColumnsClauseArgument[Any],
+        sort_by_parameter_order: bool = False,
+        **__kw: Any,
+    ) -> UpdateBase:
+        r"""Add a :term:`RETURNING` or equivalent clause to this statement.
+
+        e.g.:
+
+        .. sourcecode:: pycon+sql
+
+            >>> stmt = (
+            ...     table.update()
+            ...     .where(table.c.data == "value")
+            ...     .values(status="X")
+            ...     .returning(table.c.server_flag, table.c.updated_timestamp)
+            ... )
+            >>> print(stmt)
+            {printsql}UPDATE some_table SET status=:status
+            WHERE some_table.data = :data_1
+            RETURNING some_table.server_flag, some_table.updated_timestamp
+
+        The method may be invoked multiple times to add new entries to the
+        list of expressions to be returned.
+
+        .. versionadded:: 1.4.0b2 The method may be invoked multiple times to
+           add new entries to the list of expressions to be returned.
+
+        The given collection of column expressions should be derived from the
+        table that is the target of the INSERT, UPDATE, or DELETE.  While
+        :class:`_schema.Column` objects are typical, the elements can also be
+        expressions:
+
+        .. sourcecode:: pycon+sql
+
+            >>> stmt = table.insert().returning(
+            ...     (table.c.first_name + " " + table.c.last_name).label("fullname")
+            ... )
+            >>> print(stmt)
+            {printsql}INSERT INTO some_table (first_name, last_name)
+            VALUES (:first_name, :last_name)
+            RETURNING some_table.first_name || :first_name_1 || some_table.last_name AS fullname
+
+        Upon compilation, a RETURNING clause, or database equivalent,
+        will be rendered within the statement.
For INSERT and UPDATE,
+        the values are the newly inserted/updated values.  For DELETE,
+        the values are those of the rows which were deleted.
+
+        Upon execution, the values of the columns to be returned are made
+        available via the result set and can be iterated using
+        :meth:`_engine.CursorResult.fetchone` and similar.
+        For DBAPIs which do not
+        natively support returning values (e.g. cx_oracle), SQLAlchemy will
+        approximate this behavior at the result level so that a reasonable
+        amount of behavioral neutrality is provided.
+
+        Note that not all databases/DBAPIs
+        support RETURNING.  For those backends with no support,
+        an exception is raised upon compilation and/or execution.
+        For those who do support it, the functionality across backends
+        varies greatly, including restrictions on executemany()
+        and other statements which return multiple rows.  Please
+        read the documentation notes for the database in use in
+        order to determine the availability of RETURNING.
+
+        :param \*cols: series of columns, SQL expressions, or whole table
+         entities to be returned.
+        :param sort_by_parameter_order: for a batch INSERT that is being
+         executed against multiple parameter sets, organize the results of
+         RETURNING so that the returned rows correspond to the order of
+         parameter sets passed in.  This applies only to an :term:`executemany`
+         execution for supporting dialects and typically makes use of the
+         :term:`insertmanyvalues` feature.
+
+         .. versionadded:: 2.0.10
+
+         .. seealso::
+
+            :ref:`engine_insertmanyvalues_returning_order` - background on
+            sorting of RETURNING rows for bulk INSERT (Core level discussion)
+
+            :ref:`orm_queryguide_bulk_insert_returning_ordered` - example of
+            use with :ref:`orm_queryguide_bulk_insert` (ORM level discussion)
+
+        .. seealso::
+
+            :meth:`.UpdateBase.return_defaults` - an alternative method tailored
+            towards efficient fetching of server-side defaults and triggers
+            for single-row INSERTs or UPDATEs.
+
+            :ref:`tutorial_insert_returning` - in the :ref:`unified_tutorial`
+
+        """  # noqa: E501
+        if __kw:
+            raise _unexpected_kw("UpdateBase.returning()", __kw)
+        if self._return_defaults:
+            raise exc.InvalidRequestError(
+                "return_defaults() is already configured on this statement"
+            )
+        self._returning += tuple(
+            coercions.expect(roles.ColumnsClauseRole, c) for c in cols
+        )
+        if sort_by_parameter_order:
+            if not self.is_insert:
+                raise exc.ArgumentError(
+                    "The 'sort_by_parameter_order' argument to returning() "
+                    "only applies to INSERT statements"
+                )
+            self._sort_by_parameter_order = True
+        return self
+
+    def corresponding_column(
+        self, column: KeyedColumnElement[Any], require_embedded: bool = False
+    ) -> Optional[ColumnElement[Any]]:
+        return self.exported_columns.corresponding_column(
+            column, require_embedded=require_embedded
+        )
+
+    @util.ro_memoized_property
+    def _all_selected_columns(self) -> _SelectIterable:
+        return [c for c in _select_iterables(self._returning)]
+
+    @util.ro_memoized_property
+    def exported_columns(
+        self,
+    ) -> ReadOnlyColumnCollection[Optional[str], ColumnElement[Any]]:
+        """Return the RETURNING columns as a column collection for this
+        statement.
+
+        .. versionadded:: 1.4
+
+        """
+        return ColumnCollection(
+            (c.key, c)
+            for c in self._all_selected_columns
+            if is_column_element(c)
+        ).as_readonly()
+
+    @_generative
+    def with_hint(
+        self,
+        text: str,
+        selectable: Optional[_DMLTableArgument] = None,
+        dialect_name: str = "*",
+    ) -> Self:
+        """Add a table hint for a single table to this
+        INSERT/UPDATE/DELETE statement.
+ + .. note:: + + :meth:`.UpdateBase.with_hint` currently applies only to + Microsoft SQL Server. For MySQL INSERT/UPDATE/DELETE hints, use + :meth:`.UpdateBase.prefix_with`. + + The text of the hint is rendered in the appropriate + location for the database backend in use, relative + to the :class:`_schema.Table` that is the subject of this + statement, or optionally to that of the given + :class:`_schema.Table` passed as the ``selectable`` argument. + + The ``dialect_name`` option will limit the rendering of a particular + hint to a particular backend. Such as, to add a hint + that only takes effect for SQL Server:: + + mytable.insert().with_hint("WITH (PAGLOCK)", dialect_name="mssql") + + :param text: Text of the hint. + :param selectable: optional :class:`_schema.Table` that specifies + an element of the FROM clause within an UPDATE or DELETE + to be the subject of the hint - applies only to certain backends. + :param dialect_name: defaults to ``*``, if specified as the name + of a particular dialect, will apply these hints only when + that dialect is in use. + """ + if selectable is None: + selectable = self.table + else: + selectable = coercions.expect(roles.DMLTableRole, selectable) + self._hints = self._hints.union({(selectable, dialect_name): text}) + return self + + @property + def entity_description(self) -> Dict[str, Any]: + """Return a :term:`plugin-enabled` description of the table and/or + entity which this DML construct is operating against. + + This attribute is generally useful when using the ORM, as an + extended structure which includes information about mapped + entities is returned. The section :ref:`queryguide_inspection` + contains more background. + + For a Core statement, the structure returned by this accessor + is derived from the :attr:`.UpdateBase.table` attribute, and + refers to the :class:`.Table` being inserted, updated, or deleted:: + + >>> stmt = insert(user_table) + >>> stmt.entity_description + { + "name": "user_table", + "table": Table("user_table", ...) + } + + .. versionadded:: 1.4.33 + + .. seealso:: + + :attr:`.UpdateBase.returning_column_descriptions` + + :attr:`.Select.column_descriptions` - entity information for + a :func:`.select` construct + + :ref:`queryguide_inspection` - ORM background + + """ + meth = DMLState.get_plugin_class(self).get_entity_description + return meth(self) + + @property + def returning_column_descriptions(self) -> List[Dict[str, Any]]: + """Return a :term:`plugin-enabled` description of the columns + which this DML construct is RETURNING against, in other words + the expressions established as part of :meth:`.UpdateBase.returning`. + + This attribute is generally useful when using the ORM, as an + extended structure which includes information about mapped + entities is returned. The section :ref:`queryguide_inspection` + contains more background. + + For a Core statement, the structure returned by this accessor is + derived from the same objects that are returned by the + :attr:`.UpdateBase.exported_columns` accessor:: + + >>> stmt = insert(user_table).returning(user_table.c.id, user_table.c.name) + >>> stmt.entity_description + [ + { + "name": "id", + "type": Integer, + "expr": Column("id", Integer(), table=, ...) + }, + { + "name": "name", + "type": String(), + "expr": Column("name", String(), table=, ...) + }, + ] + + .. versionadded:: 1.4.33 + + .. 
seealso:: + + :attr:`.UpdateBase.entity_description` + + :attr:`.Select.column_descriptions` - entity information for + a :func:`.select` construct + + :ref:`queryguide_inspection` - ORM background + + """ # noqa: E501 + meth = DMLState.get_plugin_class( + self + ).get_returning_column_descriptions + return meth(self) + + +class ValuesBase(UpdateBase): + """Supplies support for :meth:`.ValuesBase.values` to + INSERT and UPDATE constructs.""" + + __visit_name__ = "values_base" + + _supports_multi_parameters = False + + select: Optional[Select[Any]] = None + """SELECT statement for INSERT .. FROM SELECT""" + + _post_values_clause: Optional[ClauseElement] = None + """used by extensions to Insert etc. to add additional syntacitcal + constructs, e.g. ON CONFLICT etc.""" + + _values: Optional[util.immutabledict[_DMLColumnElement, Any]] = None + _multi_values: Tuple[ + Union[ + Sequence[Dict[_DMLColumnElement, Any]], + Sequence[Sequence[Any]], + ], + ..., + ] = () + + _ordered_values: Optional[List[Tuple[_DMLColumnElement, Any]]] = None + + _select_names: Optional[List[str]] = None + _inline: bool = False + + def __init__(self, table: _DMLTableArgument): + self.table = coercions.expect( + roles.DMLTableRole, table, apply_propagate_attrs=self + ) + + @_generative + @_exclusive_against( + "_select_names", + "_ordered_values", + msgs={ + "_select_names": "This construct already inserts from a SELECT", + "_ordered_values": "This statement already has ordered " + "values present", + }, + ) + def values( + self, + *args: Union[ + _DMLColumnKeyMapping[Any], + Sequence[Any], + ], + **kwargs: Any, + ) -> Self: + r"""Specify a fixed VALUES clause for an INSERT statement, or the SET + clause for an UPDATE. + + Note that the :class:`_expression.Insert` and + :class:`_expression.Update` + constructs support + per-execution time formatting of the VALUES and/or SET clauses, + based on the arguments passed to :meth:`_engine.Connection.execute`. + However, the :meth:`.ValuesBase.values` method can be used to "fix" a + particular set of parameters into the statement. + + Multiple calls to :meth:`.ValuesBase.values` will produce a new + construct, each one with the parameter list modified to include + the new parameters sent. In the typical case of a single + dictionary of parameters, the newly passed keys will replace + the same keys in the previous construct. In the case of a list-based + "multiple values" construct, each new list of values is extended + onto the existing list of values. + + :param \**kwargs: key value pairs representing the string key + of a :class:`_schema.Column` + mapped to the value to be rendered into the + VALUES or SET clause:: + + users.insert().values(name="some name") + + users.update().where(users.c.id==5).values(name="some name") + + :param \*args: As an alternative to passing key/value parameters, + a dictionary, tuple, or list of dictionaries or tuples can be passed + as a single positional argument in order to form the VALUES or + SET clause of the statement. The forms that are accepted vary + based on whether this is an :class:`_expression.Insert` or an + :class:`_expression.Update` construct. 
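As a quick illustration of the ``values()`` forms described in this docstring, the following sketch uses a lightweight ``table()`` construct (a hypothetical ``users`` table, assumed here for brevity)::

    from sqlalchemy import column, insert, table, update

    users = table("users", column("id"), column("name"))

    # keyword form and single-dictionary form are equivalent
    stmt1 = insert(users).values(name="some name")
    stmt2 = insert(users).values({"name": "some name"})

    # UPDATE uses the same method to populate the SET clause
    stmt3 = update(users).where(users.c.id == 5).values(name="new name")

    print(stmt1)  # INSERT INTO users (name) VALUES (:name)
    print(stmt3)  # UPDATE users SET name=:name WHERE users.id = :id_1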
+ + For either an :class:`_expression.Insert` or + :class:`_expression.Update` + construct, a single dictionary can be passed, which works the same as + that of the kwargs form:: + + users.insert().values({"name": "some name"}) + + users.update().values({"name": "some new name"}) + + Also for either form but more typically for the + :class:`_expression.Insert` construct, a tuple that contains an + entry for every column in the table is also accepted:: + + users.insert().values((5, "some name")) + + The :class:`_expression.Insert` construct also supports being + passed a list of dictionaries or full-table-tuples, which on the + server will render the less common SQL syntax of "multiple values" - + this syntax is supported on backends such as SQLite, PostgreSQL, + MySQL, but not necessarily others:: + + users.insert().values([ + {"name": "some name"}, + {"name": "some other name"}, + {"name": "yet another name"}, + ]) + + The above form would render a multiple VALUES statement similar to:: + + INSERT INTO users (name) VALUES + (:name_1), + (:name_2), + (:name_3) + + It is essential to note that **passing multiple values is + NOT the same as using traditional executemany() form**. The above + syntax is a **special** syntax not typically used. To emit an + INSERT statement against multiple rows, the normal method is + to pass a multiple values list to the + :meth:`_engine.Connection.execute` + method, which is supported by all database backends and is generally + more efficient for a very large number of parameters. + + .. seealso:: + + :ref:`tutorial_multiple_parameters` - an introduction to + the traditional Core method of multiple parameter set + invocation for INSERTs and other statements. + + The UPDATE construct also supports rendering the SET parameters + in a specific order. For this feature refer to the + :meth:`_expression.Update.ordered_values` method. + + .. seealso:: + + :meth:`_expression.Update.ordered_values` + + + """ + if args: + # positional case. this is currently expensive. we don't + # yet have positional-only args so we have to check the length. + # then we need to check multiparams vs. single dictionary. + # since the parameter format is needed in order to determine + # a cache key, we need to determine this up front. + arg = args[0] + + if kwargs: + raise exc.ArgumentError( + "Can't pass positional and kwargs to values() " + "simultaneously" + ) + elif len(args) > 1: + raise exc.ArgumentError( + "Only a single dictionary/tuple or list of " + "dictionaries/tuples is accepted positionally." + ) + + elif isinstance(arg, collections_abc.Sequence): + if arg and isinstance(arg[0], dict): + multi_kv_generator = DMLState.get_plugin_class( + self + )._get_multi_crud_kv_pairs + self._multi_values += (multi_kv_generator(self, arg),) + return self + + if arg and isinstance(arg[0], (list, tuple)): + self._multi_values += (arg,) + return self + + if TYPE_CHECKING: + # crud.py raises during compilation if this is not the + # case + assert isinstance(self, Insert) + + # tuple values + arg = {c.key: value for c, value in zip(self.table.c, arg)} + + else: + # kwarg path. this is the most common path for non-multi-params + # so this is fairly quick. + arg = cast("Dict[_DMLColumnArgument, Any]", kwargs) + if args: + raise exc.ArgumentError( + "Only a single dictionary/tuple or list of " + "dictionaries/tuples is accepted positionally." 
+ ) + + # for top level values(), convert literals to anonymous bound + # parameters at statement construction time, so that these values can + # participate in the cache key process like any other ClauseElement. + # crud.py now intercepts bound parameters with unique=True from here + # and ensures they get the "crud"-style name when rendered. + + kv_generator = DMLState.get_plugin_class(self)._get_crud_kv_pairs + coerced_arg = dict(kv_generator(self, arg.items(), True)) + if self._values: + self._values = self._values.union(coerced_arg) + else: + self._values = util.immutabledict(coerced_arg) + return self + + +class Insert(ValuesBase): + """Represent an INSERT construct. + + The :class:`_expression.Insert` object is created using the + :func:`_expression.insert()` function. + + """ + + __visit_name__ = "insert" + + _supports_multi_parameters = True + + select = None + include_insert_from_select_defaults = False + + _sort_by_parameter_order: bool = False + + is_insert = True + + table: TableClause + + _traverse_internals = ( + [ + ("table", InternalTraversal.dp_clauseelement), + ("_inline", InternalTraversal.dp_boolean), + ("_select_names", InternalTraversal.dp_string_list), + ("_values", InternalTraversal.dp_dml_values), + ("_multi_values", InternalTraversal.dp_dml_multi_values), + ("select", InternalTraversal.dp_clauseelement), + ("_post_values_clause", InternalTraversal.dp_clauseelement), + ("_returning", InternalTraversal.dp_clauseelement_tuple), + ("_hints", InternalTraversal.dp_table_hint_list), + ("_return_defaults", InternalTraversal.dp_boolean), + ( + "_return_defaults_columns", + InternalTraversal.dp_clauseelement_tuple, + ), + ("_sort_by_parameter_order", InternalTraversal.dp_boolean), + ] + + HasPrefixes._has_prefixes_traverse_internals + + DialectKWArgs._dialect_kwargs_traverse_internals + + Executable._executable_traverse_internals + + HasCTE._has_ctes_traverse_internals + ) + + def __init__(self, table: _DMLTableArgument): + super().__init__(table) + + @_generative + def inline(self) -> Self: + """Make this :class:`_expression.Insert` construct "inline" . + + When set, no attempt will be made to retrieve the + SQL-generated default values to be provided within the statement; + in particular, + this allows SQL expressions to be rendered 'inline' within the + statement without the need to pre-execute them beforehand; for + backends that support "returning", this turns off the "implicit + returning" feature for the statement. + + + .. versionchanged:: 1.4 the :paramref:`_expression.Insert.inline` + parameter + is now superseded by the :meth:`_expression.Insert.inline` method. + + """ + self._inline = True + return self + + @_generative + def from_select( + self, + names: Sequence[_DMLColumnArgument], + select: Selectable, + include_defaults: bool = True, + ) -> Self: + """Return a new :class:`_expression.Insert` construct which represents + an ``INSERT...FROM SELECT`` statement. + + e.g.:: + + sel = select(table1.c.a, table1.c.b).where(table1.c.c > 5) + ins = table2.insert().from_select(['a', 'b'], sel) + + :param names: a sequence of string column names or + :class:`_schema.Column` + objects representing the target columns. + :param select: a :func:`_expression.select` construct, + :class:`_expression.FromClause` + or other construct which resolves into a + :class:`_expression.FromClause`, + such as an ORM :class:`_query.Query` object, etc. 
The order of + columns returned from this FROM clause should correspond to the + order of columns sent as the ``names`` parameter; while this + is not checked before passing along to the database, the database + would normally raise an exception if these column lists don't + correspond. + :param include_defaults: if True, non-server default values and + SQL expressions as specified on :class:`_schema.Column` objects + (as documented in :ref:`metadata_defaults_toplevel`) not + otherwise specified in the list of names will be rendered + into the INSERT and SELECT statements, so that these values are also + included in the data to be inserted. + + .. note:: A Python-side default that uses a Python callable function + will only be invoked **once** for the whole statement, and **not + per row**. + + """ + + if self._values: + raise exc.InvalidRequestError( + "This construct already inserts value expressions" + ) + + self._select_names = [ + coercions.expect(roles.DMLColumnRole, name, as_key=True) + for name in names + ] + self._inline = True + self.include_insert_from_select_defaults = include_defaults + self.select = coercions.expect(roles.DMLSelectRole, select) + return self + + if TYPE_CHECKING: + # START OVERLOADED FUNCTIONS self.returning ReturningInsert 1-8 ", *, sort_by_parameter_order: bool = False" # noqa: E501 + + # code within this block is **programmatically, + # statically generated** by tools/generate_tuple_map_overloads.py + + @overload + def returning( + self, __ent0: _TCCA[_T0], *, sort_by_parameter_order: bool = False + ) -> ReturningInsert[Tuple[_T0]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + *, + sort_by_parameter_order: bool = False, + ) -> ReturningInsert[Tuple[_T0, _T1]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + *, + sort_by_parameter_order: bool = False, + ) -> ReturningInsert[Tuple[_T0, _T1, _T2]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + *, + sort_by_parameter_order: bool = False, + ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + *, + sort_by_parameter_order: bool = False, + ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + *, + sort_by_parameter_order: bool = False, + ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + *, + sort_by_parameter_order: bool = False, + ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + __ent7: _TCCA[_T7], + *, + sort_by_parameter_order: bool = False, + ) -> ReturningInsert[ + Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7] + ]: ... 
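The generated overload block above exists so that static type checkers can map the column arguments given to ``returning()`` onto a typed row. A sketch of the effect, assuming a hypothetical ``user_table`` with typed columns::

    from sqlalchemy import Column, Integer, MetaData, String, Table, insert

    metadata = MetaData()
    user_table = Table(
        "user_account",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
    )

    stmt = insert(user_table).returning(user_table.c.id, user_table.c.name)
    # matched by the two-argument overload; a checker such as mypy sees:
    # reveal_type(stmt)  ->  ReturningInsert[Tuple[int, str]]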
+ + # END OVERLOADED FUNCTIONS self.returning + + @overload + def returning( + self, + *cols: _ColumnsClauseArgument[Any], + sort_by_parameter_order: bool = False, + **__kw: Any, + ) -> ReturningInsert[Any]: ... + + def returning( + self, + *cols: _ColumnsClauseArgument[Any], + sort_by_parameter_order: bool = False, + **__kw: Any, + ) -> ReturningInsert[Any]: ... + + +class ReturningInsert(Insert, TypedReturnsRows[_TP]): + """Typing-only class that establishes a generic type form of + :class:`.Insert` which tracks returned column types. + + This datatype is delivered when calling the + :meth:`.Insert.returning` method. + + .. versionadded:: 2.0 + + """ + + +class DMLWhereBase: + table: _DMLTableElement + _where_criteria: Tuple[ColumnElement[Any], ...] = () + + @_generative + def where(self, *whereclause: _ColumnExpressionArgument[bool]) -> Self: + """Return a new construct with the given expression(s) added to + its WHERE clause, joined to the existing clause via AND, if any. + + Both :meth:`_dml.Update.where` and :meth:`_dml.Delete.where` + support multiple-table forms, including database-specific + ``UPDATE...FROM`` as well as ``DELETE..USING``. For backends that + don't have multiple-table support, a backend agnostic approach + to using multiple tables is to make use of correlated subqueries. + See the linked tutorial sections below for examples. + + .. seealso:: + + :ref:`tutorial_correlated_updates` + + :ref:`tutorial_update_from` + + :ref:`tutorial_multi_table_deletes` + + """ + + for criterion in whereclause: + where_criteria: ColumnElement[Any] = coercions.expect( + roles.WhereHavingRole, criterion, apply_propagate_attrs=self + ) + self._where_criteria += (where_criteria,) + return self + + def filter(self, *criteria: roles.ExpressionElementRole[Any]) -> Self: + """A synonym for the :meth:`_dml.DMLWhereBase.where` method. + + .. versionadded:: 1.4 + + """ + + return self.where(*criteria) + + def _filter_by_zero(self) -> _DMLTableElement: + return self.table + + def filter_by(self, **kwargs: Any) -> Self: + r"""apply the given filtering criterion as a WHERE clause + to this select. + + """ + from_entity = self._filter_by_zero() + + clauses = [ + _entity_namespace_key(from_entity, key) == value + for key, value in kwargs.items() + ] + return self.filter(*clauses) + + @property + def whereclause(self) -> Optional[ColumnElement[Any]]: + """Return the completed WHERE clause for this :class:`.DMLWhereBase` + statement. + + This assembles the current collection of WHERE criteria + into a single :class:`_expression.BooleanClauseList` construct. + + + .. versionadded:: 1.4 + + """ + + return BooleanClauseList._construct_for_whereclause( + self._where_criteria + ) + + +class Update(DMLWhereBase, ValuesBase): + """Represent an Update construct. + + The :class:`_expression.Update` object is created using the + :func:`_expression.update()` function. 
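The ``DMLWhereBase.where()`` and ``filter_by()`` methods defined above can be sketched as follows, again against a hypothetical ``users`` table::

    from sqlalchemy import column, table, update

    users = table("users", column("id"), column("name"))

    # criteria from successive where() calls are joined by AND
    stmt = update(users).where(users.c.id > 5).where(users.c.name != "ed")
    print(stmt.whereclause)  # users.id > :id_1 AND users.name != :name_1

    # filter_by() is keyword shorthand against the statement's table
    stmt = update(users).filter_by(name="ed")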
+ + """ + + __visit_name__ = "update" + + is_update = True + + _traverse_internals = ( + [ + ("table", InternalTraversal.dp_clauseelement), + ("_where_criteria", InternalTraversal.dp_clauseelement_tuple), + ("_inline", InternalTraversal.dp_boolean), + ("_ordered_values", InternalTraversal.dp_dml_ordered_values), + ("_values", InternalTraversal.dp_dml_values), + ("_returning", InternalTraversal.dp_clauseelement_tuple), + ("_hints", InternalTraversal.dp_table_hint_list), + ("_return_defaults", InternalTraversal.dp_boolean), + ( + "_return_defaults_columns", + InternalTraversal.dp_clauseelement_tuple, + ), + ] + + HasPrefixes._has_prefixes_traverse_internals + + DialectKWArgs._dialect_kwargs_traverse_internals + + Executable._executable_traverse_internals + + HasCTE._has_ctes_traverse_internals + ) + + def __init__(self, table: _DMLTableArgument): + super().__init__(table) + + @_generative + def ordered_values(self, *args: Tuple[_DMLColumnArgument, Any]) -> Self: + """Specify the VALUES clause of this UPDATE statement with an explicit + parameter ordering that will be maintained in the SET clause of the + resulting UPDATE statement. + + E.g.:: + + stmt = table.update().ordered_values( + ("name", "ed"), ("ident", "foo") + ) + + .. seealso:: + + :ref:`tutorial_parameter_ordered_updates` - full example of the + :meth:`_expression.Update.ordered_values` method. + + .. versionchanged:: 1.4 The :meth:`_expression.Update.ordered_values` + method + supersedes the + :paramref:`_expression.update.preserve_parameter_order` + parameter, which will be removed in SQLAlchemy 2.0. + + """ + if self._values: + raise exc.ArgumentError( + "This statement already has values present" + ) + elif self._ordered_values: + raise exc.ArgumentError( + "This statement already has ordered values present" + ) + + kv_generator = DMLState.get_plugin_class(self)._get_crud_kv_pairs + self._ordered_values = kv_generator(self, args, True) + return self + + @_generative + def inline(self) -> Self: + """Make this :class:`_expression.Update` construct "inline" . + + When set, SQL defaults present on :class:`_schema.Column` + objects via the + ``default`` keyword will be compiled 'inline' into the statement and + not pre-executed. This means that their values will not be available + in the dictionary returned from + :meth:`_engine.CursorResult.last_updated_params`. + + .. versionchanged:: 1.4 the :paramref:`_expression.update.inline` + parameter + is now superseded by the :meth:`_expression.Update.inline` method. + + """ + self._inline = True + return self + + if TYPE_CHECKING: + # START OVERLOADED FUNCTIONS self.returning ReturningUpdate 1-8 + + # code within this block is **programmatically, + # statically generated** by tools/generate_tuple_map_overloads.py + + @overload + def returning( + self, __ent0: _TCCA[_T0] + ) -> ReturningUpdate[Tuple[_T0]]: ... + + @overload + def returning( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] + ) -> ReturningUpdate[Tuple[_T0, _T1]]: ... + + @overload + def returning( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] + ) -> ReturningUpdate[Tuple[_T0, _T1, _T2]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... 
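A brief sketch of the SET-ordering guarantee that ``ordered_values()`` provides, using a hypothetical two-column table::

    from sqlalchemy import column, table, update

    t = table("t", column("name"), column("ident"))

    stmt = update(t).ordered_values(("ident", "foo"), ("name", "ed"))
    # SET entries follow the tuple order given, not table column order
    print(stmt)  # UPDATE t SET ident=:ident, name=:name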
+ + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + __ent7: _TCCA[_T7], + ) -> ReturningUpdate[ + Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7] + ]: ... + + # END OVERLOADED FUNCTIONS self.returning + + @overload + def returning( + self, *cols: _ColumnsClauseArgument[Any], **__kw: Any + ) -> ReturningUpdate[Any]: ... + + def returning( + self, *cols: _ColumnsClauseArgument[Any], **__kw: Any + ) -> ReturningUpdate[Any]: ... + + +class ReturningUpdate(Update, TypedReturnsRows[_TP]): + """Typing-only class that establishes a generic type form of + :class:`.Update` which tracks returned column types. + + This datatype is delivered when calling the + :meth:`.Update.returning` method. + + .. versionadded:: 2.0 + + """ + + +class Delete(DMLWhereBase, UpdateBase): + """Represent a DELETE construct. + + The :class:`_expression.Delete` object is created using the + :func:`_expression.delete()` function. + + """ + + __visit_name__ = "delete" + + is_delete = True + + _traverse_internals = ( + [ + ("table", InternalTraversal.dp_clauseelement), + ("_where_criteria", InternalTraversal.dp_clauseelement_tuple), + ("_returning", InternalTraversal.dp_clauseelement_tuple), + ("_hints", InternalTraversal.dp_table_hint_list), + ] + + HasPrefixes._has_prefixes_traverse_internals + + DialectKWArgs._dialect_kwargs_traverse_internals + + Executable._executable_traverse_internals + + HasCTE._has_ctes_traverse_internals + ) + + def __init__(self, table: _DMLTableArgument): + self.table = coercions.expect( + roles.DMLTableRole, table, apply_propagate_attrs=self + ) + + if TYPE_CHECKING: + # START OVERLOADED FUNCTIONS self.returning ReturningDelete 1-8 + + # code within this block is **programmatically, + # statically generated** by tools/generate_tuple_map_overloads.py + + @overload + def returning( + self, __ent0: _TCCA[_T0] + ) -> ReturningDelete[Tuple[_T0]]: ... + + @overload + def returning( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] + ) -> ReturningDelete[Tuple[_T0, _T1]]: ... + + @overload + def returning( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] + ) -> ReturningDelete[Tuple[_T0, _T1, _T2]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... 
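For the ``Delete`` construct defined above, a minimal sketch combining ``where()`` with ``returning()`` (the ``users`` table is assumed; actual RETURNING support depends on the backend dialect)::

    from sqlalchemy import column, delete, table

    users = table("users", column("id"), column("name"))

    stmt = delete(users).where(users.c.name == "patrick").returning(users.c.id)
    print(stmt)
    # roughly: DELETE FROM users WHERE users.name = :name_1 RETURNING users.id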
+ + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + __ent7: _TCCA[_T7], + ) -> ReturningDelete[ + Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7] + ]: ... + + # END OVERLOADED FUNCTIONS self.returning + + @overload + def returning( + self, *cols: _ColumnsClauseArgument[Any], **__kw: Any + ) -> ReturningDelete[Any]: ... + + def returning( + self, *cols: _ColumnsClauseArgument[Any], **__kw: Any + ) -> ReturningDelete[Any]: ... + + +class ReturningDelete(Update, TypedReturnsRows[_TP]): + """Typing-only class that establishes a generic type form of + :class:`.Delete` which tracks returned column types. + + This datatype is delivered when calling the + :meth:`.Delete.returning` method. + + .. versionadded:: 2.0 + + """ diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/elements.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/elements.py new file mode 100644 index 00000000..45c1674d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/elements.py @@ -0,0 +1,5499 @@ +# sql/elements.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +"""Core SQL expression elements, including :class:`_expression.ClauseElement`, +:class:`_expression.ColumnElement`, and derived classes. + +""" + +from __future__ import annotations + +from decimal import Decimal +from enum import Enum +import itertools +import operator +import re +import typing +from typing import AbstractSet +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import FrozenSet +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Mapping +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Set +from typing import Tuple as typing_Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import coercions +from . import operators +from . import roles +from . import traversals +from . 
import type_api +from ._typing import has_schema_attr +from ._typing import is_named_from_clause +from ._typing import is_quoted_name +from ._typing import is_tuple_type +from .annotation import Annotated +from .annotation import SupportsWrappingAnnotations +from .base import _clone +from .base import _expand_cloned +from .base import _generative +from .base import _NoArg +from .base import Executable +from .base import Generative +from .base import HasMemoized +from .base import Immutable +from .base import NO_ARG +from .base import SingletonConstant +from .cache_key import MemoizedHasCacheKey +from .cache_key import NO_CACHE +from .coercions import _document_text_coercion # noqa +from .operators import ColumnOperators +from .traversals import HasCopyInternals +from .visitors import cloned_traverse +from .visitors import ExternallyTraversible +from .visitors import InternalTraversal +from .visitors import traverse +from .visitors import Visitable +from .. import exc +from .. import inspection +from .. import util +from ..util import HasMemoized_ro_memoized_attribute +from ..util import TypingOnly +from ..util.typing import Literal +from ..util.typing import ParamSpec +from ..util.typing import Self + +if typing.TYPE_CHECKING: + from ._typing import _ByArgument + from ._typing import _ColumnExpressionArgument + from ._typing import _ColumnExpressionOrStrLabelArgument + from ._typing import _HasDialect + from ._typing import _InfoType + from ._typing import _PropagateAttrsType + from ._typing import _TypeEngineArgument + from .cache_key import _CacheKeyTraversalType + from .cache_key import CacheKey + from .compiler import Compiled + from .compiler import SQLCompiler + from .functions import FunctionElement + from .operators import OperatorType + from .schema import Column + from .schema import DefaultGenerator + from .schema import FetchedValue + from .schema import ForeignKey + from .selectable import _SelectIterable + from .selectable import FromClause + from .selectable import NamedFromClause + from .selectable import TextualSelect + from .sqltypes import TupleType + from .type_api import TypeEngine + from .visitors import _CloneCallableType + from .visitors import _TraverseInternalsType + from .visitors import anon_map + from ..engine import Connection + from ..engine import Dialect + from ..engine.interfaces import _CoreMultiExecuteParams + from ..engine.interfaces import CacheStats + from ..engine.interfaces import CompiledCacheType + from ..engine.interfaces import CoreExecuteOptionsParameter + from ..engine.interfaces import SchemaTranslateMapType + from ..engine.result import Result + +_NUMERIC = Union[float, Decimal] +_NUMBER = Union[float, int, Decimal] + +_T = TypeVar("_T", bound="Any") +_T_co = TypeVar("_T_co", bound=Any, covariant=True) +_OPT = TypeVar("_OPT", bound="Any") +_NT = TypeVar("_NT", bound="_NUMERIC") + +_NMT = TypeVar("_NMT", bound="_NUMBER") + + +@overload +def literal( + value: Any, + type_: _TypeEngineArgument[_T], + literal_execute: bool = False, +) -> BindParameter[_T]: ... + + +@overload +def literal( + value: _T, + type_: None = None, + literal_execute: bool = False, +) -> BindParameter[_T]: ... + + +@overload +def literal( + value: Any, + type_: Optional[_TypeEngineArgument[Any]] = None, + literal_execute: bool = False, +) -> BindParameter[Any]: ... + + +def literal( + value: Any, + type_: Optional[_TypeEngineArgument[Any]] = None, + literal_execute: bool = False, +) -> BindParameter[Any]: + r"""Return a literal clause, bound to a bind parameter. 
+ + Literal clauses are created automatically when non- + :class:`_expression.ClauseElement` objects (such as strings, ints, dates, + etc.) are + used in a comparison operation with a :class:`_expression.ColumnElement` + subclass, + such as a :class:`~sqlalchemy.schema.Column` object. Use this function + to force the generation of a literal clause, which will be created as a + :class:`BindParameter` with a bound value. + + :param value: the value to be bound. Can be any Python object supported by + the underlying DB-API, or is translatable via the given type argument. + + :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` which will + provide bind-parameter translation for this literal. + + :param literal_execute: optional bool, when True, the SQL engine will + attempt to render the bound value directly in the SQL statement at + execution time rather than providing as a parameter value. + + .. versionadded:: 2.0 + + """ + return coercions.expect( + roles.LiteralValueRole, + value, + type_=type_, + literal_execute=literal_execute, + ) + + +def literal_column( + text: str, type_: Optional[_TypeEngineArgument[_T]] = None +) -> ColumnClause[_T]: + r"""Produce a :class:`.ColumnClause` object that has the + :paramref:`_expression.column.is_literal` flag set to True. + + :func:`_expression.literal_column` is similar to + :func:`_expression.column`, except that + it is more often used as a "standalone" column expression that renders + exactly as stated; while :func:`_expression.column` + stores a string name that + will be assumed to be part of a table and may be quoted as such, + :func:`_expression.literal_column` can be that, + or any other arbitrary column-oriented + expression. + + :param text: the text of the expression; can be any SQL expression. + Quoting rules will not be applied. To specify a column-name expression + which should be subject to quoting rules, use the :func:`column` + function. + + :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` + object which will + provide result-set translation and additional expression semantics for + this column. If left as ``None`` the type will be :class:`.NullType`. + + .. seealso:: + + :func:`_expression.column` + + :func:`_expression.text` + + :ref:`tutorial_select_arbitrary_text` + + """ + return ColumnClause(text, type_=type_, is_literal=True) + + +class CompilerElement(Visitable): + """base class for SQL elements that can be compiled to produce a + SQL string. + + .. versionadded:: 2.0 + + """ + + __slots__ = () + __visit_name__ = "compiler_element" + + supports_execution = False + + stringify_dialect = "default" + + @util.preload_module("sqlalchemy.engine.default") + @util.preload_module("sqlalchemy.engine.url") + def compile( + self, + bind: Optional[_HasDialect] = None, + dialect: Optional[Dialect] = None, + **kw: Any, + ) -> Compiled: + """Compile this SQL expression. + + The return value is a :class:`~.Compiled` object. + Calling ``str()`` or ``unicode()`` on the returned value will yield a + string representation of the result. The + :class:`~.Compiled` object also can return a + dictionary of bind parameter names and values + using the ``params`` accessor. + + :param bind: An :class:`.Connection` or :class:`.Engine` which + can provide a :class:`.Dialect` in order to generate a + :class:`.Compiled` object. If the ``bind`` and + ``dialect`` parameters are both omitted, a default SQL compiler + is used. 
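A short sketch contrasting the ``literal()`` and ``literal_column()`` constructs documented above, including the ``literal_binds`` compile flag for debugging output::

    from sqlalchemy import literal, literal_column, select

    # literal() becomes a bound parameter; literal_column() renders verbatim
    stmt = select(
        literal("hello").label("greeting"),
        literal_column("42").label("answer"),
    )
    print(stmt)
    # SELECT :param_1 AS greeting, 42 AS answer

    print(stmt.compile(compile_kwargs={"literal_binds": True}))
    # SELECT 'hello' AS greeting, 42 AS answer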
+ + :param column_keys: Used for INSERT and UPDATE statements, a list of + column names which should be present in the VALUES clause of the + compiled statement. If ``None``, all columns from the target table + object are rendered. + + :param dialect: A :class:`.Dialect` instance which can generate + a :class:`.Compiled` object. This argument takes precedence over + the ``bind`` argument. + + :param compile_kwargs: optional dictionary of additional parameters + that will be passed through to the compiler within all "visit" + methods. This allows any custom flag to be passed through to + a custom compilation construct, for example. It is also used + for the case of passing the ``literal_binds`` flag through:: + + from sqlalchemy.sql import table, column, select + + t = table('t', column('x')) + + s = select(t).where(t.c.x == 5) + + print(s.compile(compile_kwargs={"literal_binds": True})) + + .. seealso:: + + :ref:`faq_sql_expression_string` + + """ + + if dialect is None: + if bind: + dialect = bind.dialect + elif self.stringify_dialect == "default": + default = util.preloaded.engine_default + dialect = default.StrCompileDialect() + else: + url = util.preloaded.engine_url + dialect = url.URL.create( + self.stringify_dialect + ).get_dialect()() + + return self._compiler(dialect, **kw) + + def _compiler(self, dialect: Dialect, **kw: Any) -> Compiled: + """Return a compiler appropriate for this ClauseElement, given a + Dialect.""" + + if TYPE_CHECKING: + assert isinstance(self, ClauseElement) + return dialect.statement_compiler(dialect, self, **kw) + + def __str__(self) -> str: + return str(self.compile()) + + +@inspection._self_inspects +class ClauseElement( + SupportsWrappingAnnotations, + MemoizedHasCacheKey, + HasCopyInternals, + ExternallyTraversible, + CompilerElement, +): + """Base class for elements of a programmatically constructed SQL + expression. + + """ + + __visit_name__ = "clause" + + if TYPE_CHECKING: + + @util.memoized_property + def _propagate_attrs(self) -> _PropagateAttrsType: + """like annotations, however these propagate outwards liberally + as SQL constructs are built, and are set up at construction time. + + """ + ... + + else: + _propagate_attrs = util.EMPTY_DICT + + @util.ro_memoized_property + def description(self) -> Optional[str]: + return None + + _is_clone_of: Optional[Self] = None + + is_clause_element = True + is_selectable = False + is_dml = False + _is_column_element = False + _is_keyed_column_element = False + _is_table = False + _gen_static_annotations_cache_key = False + _is_textual = False + _is_from_clause = False + _is_returns_rows = False + _is_text_clause = False + _is_from_container = False + _is_select_container = False + _is_select_base = False + _is_select_statement = False + _is_bind_parameter = False + _is_clause_list = False + _is_lambda_element = False + _is_singleton_constant = False + _is_immutable = False + _is_star = False + + @property + def _order_by_label_element(self) -> Optional[Label[Any]]: + return None + + _cache_key_traversal: _CacheKeyTraversalType = None + + negation_clause: ColumnElement[bool] + + if typing.TYPE_CHECKING: + + def get_children( + self, *, omit_attrs: typing_Tuple[str, ...] = ..., **kw: Any + ) -> Iterable[ClauseElement]: ... + + @util.ro_non_memoized_property + def _from_objects(self) -> List[FromClause]: + return [] + + def _set_propagate_attrs(self, values: Mapping[str, Any]) -> Self: + # usually, self._propagate_attrs is empty here. 
one case where it's + # not is a subquery against ORM select, that is then pulled as a + # property of an aliased class. should all be good + + # assert not self._propagate_attrs + + self._propagate_attrs = util.immutabledict(values) + return self + + def _clone(self, **kw: Any) -> Self: + """Create a shallow copy of this ClauseElement. + + This method may be used by a generative API. Its also used as + part of the "deep" copy afforded by a traversal that combines + the _copy_internals() method. + + """ + + skip = self._memoized_keys + c = self.__class__.__new__(self.__class__) + + if skip: + # ensure this iteration remains atomic + c.__dict__ = { + k: v for k, v in self.__dict__.copy().items() if k not in skip + } + else: + c.__dict__ = self.__dict__.copy() + + # this is a marker that helps to "equate" clauses to each other + # when a Select returns its list of FROM clauses. the cloning + # process leaves around a lot of remnants of the previous clause + # typically in the form of column expressions still attached to the + # old table. + cc = self._is_clone_of + c._is_clone_of = cc if cc is not None else self + return c + + def _negate_in_binary(self, negated_op, original_op): + """a hook to allow the right side of a binary expression to respond + to a negation of the binary expression. + + Used for the special case of expanding bind parameter with IN. + + """ + return self + + def _with_binary_element_type(self, type_): + """in the context of binary expression, convert the type of this + object to the one given. + + applies only to :class:`_expression.ColumnElement` classes. + + """ + return self + + @property + def _constructor(self): + """return the 'constructor' for this ClauseElement. + + This is for the purposes for creating a new object of + this type. Usually, its just the element's __class__. + However, the "Annotated" version of the object overrides + to return the class of its proxied element. + + """ + return self.__class__ + + @HasMemoized.memoized_attribute + def _cloned_set(self): + """Return the set consisting all cloned ancestors of this + ClauseElement. + + Includes this ClauseElement. This accessor tends to be used for + FromClause objects to identify 'equivalent' FROM clauses, regardless + of transformative operations. + + """ + s = util.column_set() + f: Optional[ClauseElement] = self + + # note this creates a cycle, asserted in test_memusage. however, + # turning this into a plain @property adds tends of thousands of method + # calls to Core / ORM performance tests, so the small overhead + # introduced by the relatively small amount of short term cycles + # produced here is preferable + while f is not None: + s.add(f) + f = f._is_clone_of + return s + + def _de_clone(self): + while self._is_clone_of is not None: + self = self._is_clone_of + return self + + @property + def entity_namespace(self): + raise AttributeError( + "This SQL expression has no entity namespace " + "with which to filter from." 
+ ) + + def __getstate__(self): + d = self.__dict__.copy() + d.pop("_is_clone_of", None) + d.pop("_generate_cache_key", None) + return d + + def _execute_on_connection( + self, + connection: Connection, + distilled_params: _CoreMultiExecuteParams, + execution_options: CoreExecuteOptionsParameter, + ) -> Result[Any]: + if self.supports_execution: + if TYPE_CHECKING: + assert isinstance(self, Executable) + return connection._execute_clauseelement( + self, distilled_params, execution_options + ) + else: + raise exc.ObjectNotExecutableError(self) + + def _execute_on_scalar( + self, + connection: Connection, + distilled_params: _CoreMultiExecuteParams, + execution_options: CoreExecuteOptionsParameter, + ) -> Any: + """an additional hook for subclasses to provide a different + implementation for connection.scalar() vs. connection.execute(). + + .. versionadded:: 2.0 + + """ + return self._execute_on_connection( + connection, distilled_params, execution_options + ).scalar() + + def _get_embedded_bindparams(self) -> Sequence[BindParameter[Any]]: + """Return the list of :class:`.BindParameter` objects embedded in the + object. + + This accomplishes the same purpose as ``visitors.traverse()`` or + similar would provide, however by making use of the cache key + it takes advantage of memoization of the key to result in fewer + net method calls, assuming the statement is also going to be + executed. + + """ + + key = self._generate_cache_key() + if key is None: + bindparams: List[BindParameter[Any]] = [] + + traverse(self, {}, {"bindparam": bindparams.append}) + return bindparams + + else: + return key.bindparams + + def unique_params( + self, + __optionaldict: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> Self: + """Return a copy with :func:`_expression.bindparam` elements + replaced. + + Same functionality as :meth:`_expression.ClauseElement.params`, + except adds `unique=True` + to affected bind parameters so that multiple statements can be + used. + + """ + return self._replace_params(True, __optionaldict, kwargs) + + def params( + self, + __optionaldict: Optional[Mapping[str, Any]] = None, + **kwargs: Any, + ) -> Self: + """Return a copy with :func:`_expression.bindparam` elements + replaced. + + Returns a copy of this ClauseElement with + :func:`_expression.bindparam` + elements replaced with values taken from the given dictionary:: + + >>> clause = column('x') + bindparam('foo') + >>> print(clause.compile().params) + {'foo':None} + >>> print(clause.params({'foo':7}).compile().params) + {'foo':7} + + """ + return self._replace_params(False, __optionaldict, kwargs) + + def _replace_params( + self, + unique: bool, + optionaldict: Optional[Mapping[str, Any]], + kwargs: Dict[str, Any], + ) -> Self: + if optionaldict: + kwargs.update(optionaldict) + + def visit_bindparam(bind: BindParameter[Any]) -> None: + if bind.key in kwargs: + bind.value = kwargs[bind.key] + bind.required = False + if unique: + bind._convert_to_unique() + + return cloned_traverse( + self, + {"maintain_key": True, "detect_subquery_cols": True}, + {"bindparam": visit_bindparam}, + ) + + def compare(self, other: ClauseElement, **kw: Any) -> bool: + r"""Compare this :class:`_expression.ClauseElement` to + the given :class:`_expression.ClauseElement`. + + Subclasses should override the default behavior, which is a + straight identity comparison. + + \**kw are arguments consumed by subclass ``compare()`` methods and + may be used to modify the criteria for comparison + (see :class:`_expression.ColumnElement`). 
+ + """ + return traversals.compare(self, other, **kw) + + def self_group( + self, against: Optional[OperatorType] = None + ) -> ClauseElement: + """Apply a 'grouping' to this :class:`_expression.ClauseElement`. + + This method is overridden by subclasses to return a "grouping" + construct, i.e. parenthesis. In particular it's used by "binary" + expressions to provide a grouping around themselves when placed into a + larger expression, as well as by :func:`_expression.select` + constructs when placed into the FROM clause of another + :func:`_expression.select`. (Note that subqueries should be + normally created using the :meth:`_expression.Select.alias` method, + as many + platforms require nested SELECT statements to be named). + + As expressions are composed together, the application of + :meth:`self_group` is automatic - end-user code should never + need to use this method directly. Note that SQLAlchemy's + clause constructs take operator precedence into account - + so parenthesis might not be needed, for example, in + an expression like ``x OR (y AND z)`` - AND takes precedence + over OR. + + The base :meth:`self_group` method of + :class:`_expression.ClauseElement` + just returns self. + """ + return self + + def _ungroup(self) -> ClauseElement: + """Return this :class:`_expression.ClauseElement` + without any groupings. + """ + + return self + + def _compile_w_cache( + self, + dialect: Dialect, + *, + compiled_cache: Optional[CompiledCacheType], + column_keys: List[str], + for_executemany: bool = False, + schema_translate_map: Optional[SchemaTranslateMapType] = None, + **kw: Any, + ) -> typing_Tuple[ + Compiled, Optional[Sequence[BindParameter[Any]]], CacheStats + ]: + elem_cache_key: Optional[CacheKey] + + if compiled_cache is not None and dialect._supports_statement_cache: + elem_cache_key = self._generate_cache_key() + else: + elem_cache_key = None + + if elem_cache_key is not None: + if TYPE_CHECKING: + assert compiled_cache is not None + + cache_key, extracted_params = elem_cache_key + key = ( + dialect, + cache_key, + tuple(column_keys), + bool(schema_translate_map), + for_executemany, + ) + compiled_sql = compiled_cache.get(key) + + if compiled_sql is None: + cache_hit = dialect.CACHE_MISS + compiled_sql = self._compiler( + dialect, + cache_key=elem_cache_key, + column_keys=column_keys, + for_executemany=for_executemany, + schema_translate_map=schema_translate_map, + **kw, + ) + compiled_cache[key] = compiled_sql + else: + cache_hit = dialect.CACHE_HIT + else: + extracted_params = None + compiled_sql = self._compiler( + dialect, + cache_key=elem_cache_key, + column_keys=column_keys, + for_executemany=for_executemany, + schema_translate_map=schema_translate_map, + **kw, + ) + + if not dialect._supports_statement_cache: + cache_hit = dialect.NO_DIALECT_SUPPORT + elif compiled_cache is None: + cache_hit = dialect.CACHING_DISABLED + else: + cache_hit = dialect.NO_CACHE_KEY + + return compiled_sql, extracted_params, cache_hit + + def __invert__(self): + # undocumented element currently used by the ORM for + # relationship.contains() + if hasattr(self, "negation_clause"): + return self.negation_clause + else: + return self._negate() + + def _negate(self) -> ClauseElement: + grouped = self.self_group(against=operators.inv) + assert isinstance(grouped, ColumnElement) + return UnaryExpression(grouped, operator=operators.inv) + + def __bool__(self): + raise TypeError("Boolean value of this clause is not defined") + + def __repr__(self): + friendly = self.description + if friendly is 
None: + return object.__repr__(self) + else: + return "<%s.%s at 0x%x; %s>" % ( + self.__module__, + self.__class__.__name__, + id(self), + friendly, + ) + + +class DQLDMLClauseElement(ClauseElement): + """represents a :class:`.ClauseElement` that compiles to a DQL or DML + expression, not DDL. + + .. versionadded:: 2.0 + + """ + + if typing.TYPE_CHECKING: + + def _compiler(self, dialect: Dialect, **kw: Any) -> SQLCompiler: + """Return a compiler appropriate for this ClauseElement, given a + Dialect.""" + ... + + def compile( # noqa: A001 + self, + bind: Optional[_HasDialect] = None, + dialect: Optional[Dialect] = None, + **kw: Any, + ) -> SQLCompiler: ... + + +class CompilerColumnElement( + roles.DMLColumnRole, + roles.DDLConstraintColumnRole, + roles.ColumnsClauseRole, + CompilerElement, +): + """A compiler-only column element used for ad-hoc string compilations. + + .. versionadded:: 2.0 + + """ + + __slots__ = () + + _propagate_attrs = util.EMPTY_DICT + _is_collection_aggregate = False + + +# SQLCoreOperations should be suiting the ExpressionElementRole +# and ColumnsClauseRole. however the MRO issues become too elaborate +# at the moment. +class SQLCoreOperations(Generic[_T_co], ColumnOperators, TypingOnly): + __slots__ = () + + # annotations for comparison methods + # these are from operators->Operators / ColumnOperators, + # redefined with the specific types returned by ColumnElement hierarchies + if typing.TYPE_CHECKING: + + @util.non_memoized_property + def _propagate_attrs(self) -> _PropagateAttrsType: ... + + def operate( + self, op: OperatorType, *other: Any, **kwargs: Any + ) -> ColumnElement[Any]: ... + + def reverse_operate( + self, op: OperatorType, other: Any, **kwargs: Any + ) -> ColumnElement[Any]: ... + + @overload + def op( + self, + opstring: str, + precedence: int = ..., + is_comparison: bool = ..., + *, + return_type: _TypeEngineArgument[_OPT], + python_impl: Optional[Callable[..., Any]] = None, + ) -> Callable[[Any], BinaryExpression[_OPT]]: ... + + @overload + def op( + self, + opstring: str, + precedence: int = ..., + is_comparison: bool = ..., + return_type: Optional[_TypeEngineArgument[Any]] = ..., + python_impl: Optional[Callable[..., Any]] = ..., + ) -> Callable[[Any], BinaryExpression[Any]]: ... + + def op( + self, + opstring: str, + precedence: int = 0, + is_comparison: bool = False, + return_type: Optional[_TypeEngineArgument[Any]] = None, + python_impl: Optional[Callable[..., Any]] = None, + ) -> Callable[[Any], BinaryExpression[Any]]: ... + + def bool_op( + self, + opstring: str, + precedence: int = 0, + python_impl: Optional[Callable[..., Any]] = None, + ) -> Callable[[Any], BinaryExpression[bool]]: ... + + def __and__(self, other: Any) -> BooleanClauseList: ... + + def __or__(self, other: Any) -> BooleanClauseList: ... + + def __invert__(self) -> ColumnElement[_T_co]: ... + + def __lt__(self, other: Any) -> ColumnElement[bool]: ... + + def __le__(self, other: Any) -> ColumnElement[bool]: ... + + # declare also that this class has an hash method otherwise + # it may be assumed to be None by type checkers since the + # object defines __eq__ and python sets it to None in that case: + # https://docs.python.org/3/reference/datamodel.html#object.__hash__ + def __hash__(self) -> int: ... + + def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 + ... + + def __ne__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 + ... + + def is_distinct_from(self, other: Any) -> ColumnElement[bool]: ... 
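The ``op()`` and ``bool_op()`` stubs above describe the ad-hoc operator facility; a minimal sketch (the ``->>`` and ``~`` opstrings are arbitrary examples, not validated against any particular backend)::

    from sqlalchemy import column

    # ad-hoc operator: the opstring renders verbatim with a bound right side
    expr = column("data").op("->>")("field")
    print(expr)  # data ->> :data_1

    # bool_op() is the same but typed as boolean, suitable for WHERE clauses
    crit = column("path").bool_op("~")("^/usr")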
+ + def is_not_distinct_from(self, other: Any) -> ColumnElement[bool]: ... + + def __gt__(self, other: Any) -> ColumnElement[bool]: ... + + def __ge__(self, other: Any) -> ColumnElement[bool]: ... + + def __neg__(self) -> UnaryExpression[_T_co]: ... + + def __contains__(self, other: Any) -> ColumnElement[bool]: ... + + def __getitem__(self, index: Any) -> ColumnElement[Any]: ... + + @overload + def __lshift__(self: _SQO[int], other: Any) -> ColumnElement[int]: ... + + @overload + def __lshift__(self, other: Any) -> ColumnElement[Any]: ... + + def __lshift__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __rshift__(self: _SQO[int], other: Any) -> ColumnElement[int]: ... + + @overload + def __rshift__(self, other: Any) -> ColumnElement[Any]: ... + + def __rshift__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def concat(self: _SQO[str], other: Any) -> ColumnElement[str]: ... + + @overload + def concat(self, other: Any) -> ColumnElement[Any]: ... + + def concat(self, other: Any) -> ColumnElement[Any]: ... + + def like( + self, other: Any, escape: Optional[str] = None + ) -> BinaryExpression[bool]: ... + + def ilike( + self, other: Any, escape: Optional[str] = None + ) -> BinaryExpression[bool]: ... + + def bitwise_xor(self, other: Any) -> BinaryExpression[Any]: ... + + def bitwise_or(self, other: Any) -> BinaryExpression[Any]: ... + + def bitwise_and(self, other: Any) -> BinaryExpression[Any]: ... + + def bitwise_not(self) -> UnaryExpression[_T_co]: ... + + def bitwise_lshift(self, other: Any) -> BinaryExpression[Any]: ... + + def bitwise_rshift(self, other: Any) -> BinaryExpression[Any]: ... + + def in_( + self, + other: Union[ + Iterable[Any], BindParameter[Any], roles.InElementRole + ], + ) -> BinaryExpression[bool]: ... + + def not_in( + self, + other: Union[ + Iterable[Any], BindParameter[Any], roles.InElementRole + ], + ) -> BinaryExpression[bool]: ... + + def notin_( + self, + other: Union[ + Iterable[Any], BindParameter[Any], roles.InElementRole + ], + ) -> BinaryExpression[bool]: ... + + def not_like( + self, other: Any, escape: Optional[str] = None + ) -> BinaryExpression[bool]: ... + + def notlike( + self, other: Any, escape: Optional[str] = None + ) -> BinaryExpression[bool]: ... + + def not_ilike( + self, other: Any, escape: Optional[str] = None + ) -> BinaryExpression[bool]: ... + + def notilike( + self, other: Any, escape: Optional[str] = None + ) -> BinaryExpression[bool]: ... + + def is_(self, other: Any) -> BinaryExpression[bool]: ... + + def is_not(self, other: Any) -> BinaryExpression[bool]: ... + + def isnot(self, other: Any) -> BinaryExpression[bool]: ... + + def startswith( + self, + other: Any, + escape: Optional[str] = None, + autoescape: bool = False, + ) -> ColumnElement[bool]: ... + + def istartswith( + self, + other: Any, + escape: Optional[str] = None, + autoescape: bool = False, + ) -> ColumnElement[bool]: ... + + def endswith( + self, + other: Any, + escape: Optional[str] = None, + autoescape: bool = False, + ) -> ColumnElement[bool]: ... + + def iendswith( + self, + other: Any, + escape: Optional[str] = None, + autoescape: bool = False, + ) -> ColumnElement[bool]: ... + + def contains(self, other: Any, **kw: Any) -> ColumnElement[bool]: ... + + def icontains(self, other: Any, **kw: Any) -> ColumnElement[bool]: ... + + def match(self, other: Any, **kwargs: Any) -> ColumnElement[bool]: ... + + def regexp_match( + self, pattern: Any, flags: Optional[str] = None + ) -> ColumnElement[bool]: ... 
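A few of the comparison stubs above in use, for orientation; note that ``in_()`` renders an expanding (deferred) parameter in string output::

    from sqlalchemy import column, select

    name = column("name")
    stmt = select(column("id")).where(
        name.like("ed%"),
        name.in_(["ed", "wendy"]),  # expanding parameter
        name.is_not(None),
    )
    print(stmt)
    # SELECT id WHERE name LIKE :name_1 AND name IN (...) AND name IS NOT NULL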
+ + def regexp_replace( + self, pattern: Any, replacement: Any, flags: Optional[str] = None + ) -> ColumnElement[str]: ... + + def desc(self) -> UnaryExpression[_T_co]: ... + + def asc(self) -> UnaryExpression[_T_co]: ... + + def nulls_first(self) -> UnaryExpression[_T_co]: ... + + def nullsfirst(self) -> UnaryExpression[_T_co]: ... + + def nulls_last(self) -> UnaryExpression[_T_co]: ... + + def nullslast(self) -> UnaryExpression[_T_co]: ... + + def collate(self, collation: str) -> CollationClause: ... + + def between( + self, cleft: Any, cright: Any, symmetric: bool = False + ) -> BinaryExpression[bool]: ... + + def distinct(self: _SQO[_T_co]) -> UnaryExpression[_T_co]: ... + + def any_(self) -> CollectionAggregate[Any]: ... + + def all_(self) -> CollectionAggregate[Any]: ... + + # numeric overloads. These need more tweaking + # in particular they all need to have a variant for Optiona[_T] + # because Optional only applies to the data side, not the expression + # side + + @overload + def __add__( + self: _SQO[_NMT], + other: Any, + ) -> ColumnElement[_NMT]: ... + + @overload + def __add__( + self: _SQO[str], + other: Any, + ) -> ColumnElement[str]: ... + + @overload + def __add__(self, other: Any) -> ColumnElement[Any]: ... + + def __add__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __radd__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ... + + @overload + def __radd__(self: _SQO[str], other: Any) -> ColumnElement[str]: ... + + def __radd__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __sub__( + self: _SQO[_NMT], + other: Any, + ) -> ColumnElement[_NMT]: ... + + @overload + def __sub__(self, other: Any) -> ColumnElement[Any]: ... + + def __sub__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __rsub__( + self: _SQO[_NMT], + other: Any, + ) -> ColumnElement[_NMT]: ... + + @overload + def __rsub__(self, other: Any) -> ColumnElement[Any]: ... + + def __rsub__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __mul__( + self: _SQO[_NMT], + other: Any, + ) -> ColumnElement[_NMT]: ... + + @overload + def __mul__(self, other: Any) -> ColumnElement[Any]: ... + + def __mul__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __rmul__( + self: _SQO[_NMT], + other: Any, + ) -> ColumnElement[_NMT]: ... + + @overload + def __rmul__(self, other: Any) -> ColumnElement[Any]: ... + + def __rmul__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __mod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ... + + @overload + def __mod__(self, other: Any) -> ColumnElement[Any]: ... + + def __mod__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __rmod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ... + + @overload + def __rmod__(self, other: Any) -> ColumnElement[Any]: ... + + def __rmod__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __truediv__( + self: _SQO[int], other: Any + ) -> ColumnElement[_NUMERIC]: ... + + @overload + def __truediv__(self: _SQO[_NT], other: Any) -> ColumnElement[_NT]: ... + + @overload + def __truediv__(self, other: Any) -> ColumnElement[Any]: ... + + def __truediv__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __rtruediv__( + self: _SQO[_NMT], other: Any + ) -> ColumnElement[_NUMERIC]: ... + + @overload + def __rtruediv__(self, other: Any) -> ColumnElement[Any]: ... + + def __rtruediv__(self, other: Any) -> ColumnElement[Any]: ... 
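The numeric overloads above drive both SQL rendering and static typing; a small sketch of what they imply for an ``Integer`` column::

    from sqlalchemy import Integer, column

    x = column("x", Integer)

    # arithmetic against plain Python values coerces them to bound parameters
    print(x + 5)  # x + :x_1
    print(x % 3)  # x % :x_1

    # per the overloads above, a checker infers ColumnElement[int] for
    # x + 5, while true division widens to ColumnElement[float | Decimal]
    expr = x / 2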
+
+    @overload
+    def __floordiv__(
+        self: _SQO[_NMT], other: Any
+    ) -> ColumnElement[_NMT]: ...
+
+    @overload
+    def __floordiv__(self, other: Any) -> ColumnElement[Any]: ...
+
+    def __floordiv__(self, other: Any) -> ColumnElement[Any]: ...
+
+    @overload
+    def __rfloordiv__(
+        self: _SQO[_NMT], other: Any
+    ) -> ColumnElement[_NMT]: ...
+
+    @overload
+    def __rfloordiv__(self, other: Any) -> ColumnElement[Any]: ...
+
+    def __rfloordiv__(self, other: Any) -> ColumnElement[Any]: ...
+
+
+class SQLColumnExpression(
+    SQLCoreOperations[_T_co], roles.ExpressionElementRole[_T_co], TypingOnly
+):
+    """A type that may be used to indicate any SQL column element or object
+    that acts in place of one.
+
+    :class:`.SQLColumnExpression` is a base of
+    :class:`.ColumnElement`, as well as within the bases of ORM elements
+    such as :class:`.InstrumentedAttribute`, and may be used in :pep:`484`
+    typing to indicate arguments or return values that should behave
+    as column expressions.
+
+    .. versionadded:: 2.0.0b4
+
+
+    """
+
+    __slots__ = ()
+
+
+_SQO = SQLCoreOperations
+
+
+class ColumnElement(
+    roles.ColumnArgumentOrKeyRole,
+    roles.StatementOptionRole,
+    roles.WhereHavingRole,
+    roles.BinaryElementRole[_T],
+    roles.OrderByRole,
+    roles.ColumnsClauseRole,
+    roles.LimitOffsetRole,
+    roles.DMLColumnRole,
+    roles.DDLConstraintColumnRole,
+    roles.DDLExpressionRole,
+    SQLColumnExpression[_T],
+    DQLDMLClauseElement,
+):
+    """Represent a column-oriented SQL expression suitable for usage in the
+    "columns" clause, WHERE clause etc. of a statement.
+
+    While the most familiar kind of :class:`_expression.ColumnElement` is the
+    :class:`_schema.Column` object, :class:`_expression.ColumnElement`
+    serves as the basis
+    for any unit that may be present in a SQL expression, including
+    the expressions themselves, SQL functions, bound parameters,
+    literal expressions, keywords such as ``NULL``, etc.
+    :class:`_expression.ColumnElement`
+    is the ultimate base class for all such elements.
+
+    A wide variety of SQLAlchemy Core functions work at the SQL expression
+    level, and are intended to accept instances of
+    :class:`_expression.ColumnElement` as
+    arguments.  These functions will typically document that they accept a
+    "SQL expression" as an argument.  What this means in terms of SQLAlchemy
+    usually refers to an input which is either already in the form of a
+    :class:`_expression.ColumnElement` object,
+    or a value which can be **coerced** into
+    one.  The coercion rules followed by most, but not all, SQLAlchemy Core
+    functions with regards to SQL expressions are as follows:
+
+    * a literal Python value, such as a string, integer or floating
+      point value, boolean, datetime, ``Decimal`` object, or virtually
+      any other Python object, will be coerced into a "literal bound
+      value".  This generally means that a :func:`.bindparam` will be
+      produced featuring the given value embedded into the construct; the
+      resulting :class:`.BindParameter` object is an instance of
+      :class:`_expression.ColumnElement`.
+      The Python value will ultimately be sent
+      to the DBAPI at execution time as a parameterized argument to the
+      ``execute()`` or ``executemany()`` methods, after SQLAlchemy
+      type-specific converters (e.g. those provided by any associated
+      :class:`.TypeEngine` objects) are applied to the value.
+
+    * any special object value, typically ORM-level constructs, which
+      feature an accessor called ``__clause_element__()``.  The Core
+      expression system looks for this method when an object of otherwise
+      unknown type is passed to a function that is looking to coerce the
+      argument into a :class:`_expression.ColumnElement` and sometimes a
+      :class:`_expression.SelectBase` expression.
+      It is used within the ORM to
+      convert from ORM-specific objects like mapped classes and
+      mapped attributes into Core expression objects.
+
+    * The Python ``None`` value is typically interpreted as ``NULL``,
+      which in SQLAlchemy Core produces an instance of :func:`.null`.
+
+    A :class:`_expression.ColumnElement` provides the ability to generate new
+    :class:`_expression.ColumnElement`
+    objects using Python expressions.  This means that Python operators
+    such as ``==``, ``!=`` and ``<`` are overloaded to mimic SQL operations,
+    and allow the instantiation of further :class:`_expression.ColumnElement`
+    instances
+    which are composed from other, more fundamental
+    :class:`_expression.ColumnElement`
+    objects.  For example, two :class:`.ColumnClause` objects can be added
+    together with the addition operator ``+`` to produce
+    a :class:`.BinaryExpression`.
+    Both :class:`.ColumnClause` and :class:`.BinaryExpression` are subclasses
+    of :class:`_expression.ColumnElement`:
+
+    .. sourcecode:: pycon+sql
+
+        >>> from sqlalchemy.sql import column
+        >>> column('a') + column('b')
+        <sqlalchemy.sql.expression.BinaryExpression object at 0x...>
+        >>> print(column('a') + column('b'))
+        {printsql}a + b
+
+    .. seealso::
+
+        :class:`_schema.Column`
+
+        :func:`_expression.column`
+
+    """
+
+    __visit_name__ = "column_element"
+
+    primary_key: bool = False
+    _is_clone_of: Optional[ColumnElement[_T]]
+    _is_column_element = True
+    _insert_sentinel: bool = False
+    _omit_from_statements = False
+    _is_collection_aggregate = False
+
+    foreign_keys: AbstractSet[ForeignKey] = frozenset()
+
+    @util.memoized_property
+    def _proxies(self) -> List[ColumnElement[Any]]:
+        return []
+
+    @util.non_memoized_property
+    def _tq_label(self) -> Optional[str]:
+        """The named label that can be used to target
+        this column in a result set in a "table qualified" context.
+
+        This label is almost always the label used when
+        rendering <expr> AS <label> in a SELECT statement; it is typically
+        None for columns that don't have any parent table and are named
+        the same as what the label would be in any case.
+
+        """
+
+    _allow_label_resolve = True
+    """A flag that can be flipped to prevent a column from being resolvable
+    by string label name.
+
+    The joined eager loader strategy in the ORM uses this, for example.
+
+    """
+
+    _is_implicitly_boolean = False
+
+    _alt_names: Sequence[str] = ()
+
+    @overload
+    def self_group(self, against: None = None) -> ColumnElement[_T]: ...
+
+    @overload
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> ColumnElement[Any]: ...
+
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> ColumnElement[Any]:
+        if (
+            against in (operators.and_, operators.or_, operators._asbool)
+            and self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity
+        ):
+            return AsBoolean(self, operators.is_true, operators.is_false)
+        elif against in (operators.any_op, operators.all_op):
+            return Grouping(self)
+        else:
+            return self
+
+    @overload
+    def _negate(self: ColumnElement[bool]) -> ColumnElement[bool]: ...
+
+    @overload
+    def _negate(self: ColumnElement[_T]) -> ColumnElement[_T]: ...
+
+    def _negate(self) -> ColumnElement[Any]:
+        if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
+            return AsBoolean(self, operators.is_false, operators.is_true)
+        else:
+            grouped = self.self_group(against=operators.inv)
+            assert isinstance(grouped, ColumnElement)
+            return UnaryExpression(
+                grouped, operator=operators.inv, wraps_column_expression=True
+            )
+
+    type: TypeEngine[_T]
+
+    if not TYPE_CHECKING:
+
+        @util.memoized_property
+        def type(self) -> TypeEngine[_T]:  # noqa: A001
+            # used for delayed setup of
+            # type_api
+            return type_api.NULLTYPE
+
+    @HasMemoized.memoized_attribute
+    def comparator(self) -> TypeEngine.Comparator[_T]:
+        try:
+            comparator_factory = self.type.comparator_factory
+        except AttributeError as err:
+            raise TypeError(
+                "Object %r associated with '.type' attribute "
+                "is not a TypeEngine class or object" % self.type
+            ) from err
+        else:
+            return comparator_factory(self)
+
+    def __setstate__(self, state):
+        self.__dict__.update(state)
+
+    def __getattr__(self, key: str) -> Any:
+        try:
+            return getattr(self.comparator, key)
+        except AttributeError as err:
+            raise AttributeError(
+                "Neither %r object nor %r object has an attribute %r"
+                % (
+                    type(self).__name__,
+                    type(self.comparator).__name__,
+                    key,
+                )
+            ) from err
+
+    def operate(
+        self,
+        op: operators.OperatorType,
+        *other: Any,
+        **kwargs: Any,
+    ) -> ColumnElement[Any]:
+        return op(self.comparator, *other, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
+
+    def reverse_operate(
+        self, op: operators.OperatorType, other: Any, **kwargs: Any
+    ) -> ColumnElement[Any]:
+        return op(other, self.comparator, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
+
+    def _bind_param(
+        self,
+        operator: operators.OperatorType,
+        obj: Any,
+        type_: Optional[TypeEngine[_T]] = None,
+        expanding: bool = False,
+    ) -> BindParameter[_T]:
+        return BindParameter(
+            None,
+            obj,
+            _compared_to_operator=operator,
+            type_=type_,
+            _compared_to_type=self.type,
+            unique=True,
+            expanding=expanding,
+        )
+
+    @property
+    def expression(self) -> ColumnElement[Any]:
+        """Return a column expression.
+
+        Part of the inspection interface; returns self.
+
+        """
+        return self
+
+    @property
+    def _select_iterable(self) -> _SelectIterable:
+        return (self,)
+
+    @util.memoized_property
+    def base_columns(self) -> FrozenSet[ColumnElement[Any]]:
+        return frozenset(c for c in self.proxy_set if not c._proxies)
+
+    @util.memoized_property
+    def proxy_set(self) -> FrozenSet[ColumnElement[Any]]:
+        """set of all columns we are proxying
+
+        as of 2.0 this is explicitly deannotated columns.  previously it was
+        effectively deannotated columns but wasn't enforced.  annotated
+        columns should basically not go into sets if at all possible because
+        their hashing behavior is very non-performant.
+
+        """
+        return frozenset([self._deannotate()]).union(
+            itertools.chain(*[c.proxy_set for c in self._proxies])
+        )
+
+    @util.memoized_property
+    def _expanded_proxy_set(self) -> FrozenSet[ColumnElement[Any]]:
+        return frozenset(_expand_cloned(self.proxy_set))
+
+    def _uncached_proxy_list(self) -> List[ColumnElement[Any]]:
+        """An 'uncached' version of proxy set.
+
+        This list includes annotated columns which perform very poorly in
+        set operations.
+
+        """
+
+        return [self] + list(
+            itertools.chain(*[c._uncached_proxy_list() for c in self._proxies])
+        )
+
+    def shares_lineage(self, othercolumn: ColumnElement[Any]) -> bool:
+        """Return True if the given :class:`_expression.ColumnElement`
+        has a common ancestor to this :class:`_expression.ColumnElement`."""
+
+        return bool(self.proxy_set.intersection(othercolumn.proxy_set))
+
+    def _compare_name_for_result(self, other: ColumnElement[Any]) -> bool:
+        """Return True if the given column element compares to this one
+        when targeting within a result row."""
+
+        return (
+            hasattr(other, "name")
+            and hasattr(self, "name")
+            and other.name == self.name
+        )
+
+    @HasMemoized.memoized_attribute
+    def _proxy_key(self) -> Optional[str]:
+        if self._annotations and "proxy_key" in self._annotations:
+            return cast(str, self._annotations["proxy_key"])
+
+        name = self.key
+        if not name:
+            # there's a bit of a seeming contradiction which is that the
+            # "_non_anon_label" of a column can in fact be an
+            # "_anonymous_label"; this is when it's on a column that is
+            # proxying for an anonymous expression in a subquery.
+            name = self._non_anon_label
+
+        if isinstance(name, _anonymous_label):
+            return None
+        else:
+            return name
+
+    @HasMemoized.memoized_attribute
+    def _expression_label(self) -> Optional[str]:
+        """a suggested label to use in the case that the column has no name,
+        which should be used if possible as the explicit 'AS <label>'
[The elements.py diff is truncated here. The dump continues mid-way through a Jupyter notebook diff whose HTML table output lost its <td>/<tr> tags during extraction; the hunks that follow (through @@ -522,17 +521,17 @@) consist only of blank table-cell strings and are elided, until the readable "text/plain" DataFrame output below.]
\n", " \n", " \n", @@ -522,17 +521,17 @@ " \n", " \n", " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", " \n", " \n", - " \n", - " \n", - " \n", + " \n", + " \n", + " \n", " \n", " \n", " \n", @@ -551,101 +550,88 @@ "" ], "text/plain": [ - " Tract ZIP County_x \\\n", - "222 6083980000.0 93117 Santa Barbara \n", - "223 6083980000.0 93117 Santa Barbara \n", - "224 6083980000.0 93117 Santa Barbara \n", - "225 6083980000.0 93117 Santa Barbara \n", - "226 6083980000.0 93117 Santa Barbara \n", - "... ... ... ... \n", - "116353 6037910811.0 93510 Los Angeles \n", - "118390 6037901003.0 93536 Los Angeles \n", - "118983 6037401901.0 91711 Los Angeles \n", - "118984 6037401901.0 91711 Los Angeles \n", - "118985 6037401901.0 91711 Los Angeles \n", + " Tract ZIP County_x ApproxLoc TotPop19 CIscore \\\n", + "4662 6067988300.0 95630 Sacramento Folsom 4860 -999.0 \n", + "4663 6067988300.0 95630 Sacramento Folsom 4860 -999.0 \n", + "7377 6073009901.0 92106 San Diego San Diego 767 -999.0 \n", + "8943 6073006300.0 92140 San Diego San Diego 3760 -999.0 \n", + "12609 6073006200.0 92101 San Diego San Diego 23 -999.0 \n", + "... ... ... ... ... ... ... \n", + "101197 6059021813.0 92807 Orange Anaheim 4 -999.0 \n", + "101198 6059021813.0 92807 Orange Anaheim 4 -999.0 \n", + "101199 6059021813.0 92807 Orange Anaheim 4 -999.0 \n", + "101200 6059021813.0 92807 Orange Anaheim 4 -999.0 \n", + "118597 6029006002.0 93561 Kern Tehachapi 4228 -999.0 \n", "\n", - " ApproxLoc TotPop19 CIscore CIscoreP \\\n", - "222 Santa Barbara 0 -999.0 -999.0 \n", - "223 Santa Barbara 0 -999.0 -999.0 \n", - "224 Santa Barbara 0 -999.0 -999.0 \n", - "225 Santa Barbara 0 -999.0 -999.0 \n", - "226 Santa Barbara 0 -999.0 -999.0 \n", - "... ... ... ... ... \n", - "116353 Unincorporated Los Angeles County area 179 -999.0 -999.0 \n", - "118390 Lancaster 4895 -999.0 -999.0 \n", - "118983 Claremont 3945 -999.0 -999.0 \n", - "118984 Claremont 3945 -999.0 -999.0 \n", - "118985 Claremont 3945 -999.0 -999.0 \n", - "\n", - " Ozone OzoneP PM2_5 ... Net Density DUA \\\n", - "222 0.039755 20.846297 8.035979 ... 0.0 \n", - "223 0.039755 20.846297 8.035979 ... 0.0 \n", - "224 0.039755 20.846297 8.035979 ... 0.0 \n", - "225 0.039755 20.846297 8.035979 ... 0.0 \n", - "226 0.039755 20.846297 8.035979 ... 0.0 \n", - "... ... ... ... ... ... \n", - "116353 0.066049 95.308027 9.386662 ... 0.0 \n", - "118390 0.062365 88.699440 7.135657 ... 0.0 \n", - "118983 0.061338 84.579963 13.145812 ... 0.0 \n", - "118984 0.061338 84.579963 13.145812 ... 0.0 \n", - "118985 0.061338 84.579963 13.145812 ... 0.0 \n", + " CIscoreP Ozone OzoneP PM2_5 ... Net Density DUA \\\n", + "4662 -999.0 0.054629 71.661481 7.607284 ... 0.0 \n", + "4663 -999.0 0.054629 71.661481 7.607284 ... 0.0 \n", + "7377 -999.0 0.043205 32.146858 10.161979 ... 0.0 \n", + "8943 -999.0 0.042599 29.894213 10.270812 ... 0.0 \n", + "12609 -999.0 0.042599 29.894213 10.338105 ... 0.0 \n", + "... ... ... ... ... ... ... \n", + "101197 -999.0 0.048278 55.382701 12.156761 ... 0.0 \n", + "101198 -999.0 0.048278 55.382701 12.156761 ... 0.0 \n", + "101199 -999.0 0.048278 55.382701 12.156761 ... 0.0 \n", + "101200 -999.0 0.048278 55.382701 12.156761 ... 0.0 \n", + "118597 -999.0 0.064647 93.627878 7.132276 ... 0.0 \n", "\n", " Applicants Assisted Invasive Cover 12 Months \\\n", - "222 0 0 \n", - "223 0 0 \n", - "224 0 0 \n", - "225 0 0 \n", - "226 0 0 \n", + "4662 0 0 \n", + "4663 0 0 \n", + "7377 0 0 \n", + "8943 0 0 \n", + "12609 0 0 \n", "... ... ... 
\n", - "116353 0 0 \n", - "118390 0 0 \n", - "118983 0 0 \n", - "118984 0 0 \n", - "118985 0 0 \n", + "101197 0 0 \n", + "101198 0 0 \n", + "101199 0 0 \n", + "101200 0 0 \n", + "118597 0 0 \n", "\n", " Invasive Cover 36 Months Project Acreage IS IAE \\\n", - "222 0 0 False \n", - "223 0 0 False \n", - "224 0 0 False \n", - "225 0 0 False \n", - "226 0 0 False \n", + "4662 0 0 False \n", + "4663 0 0 False \n", + "7377 0 0 False \n", + "8943 0 0 False \n", + "12609 0 0 False \n", "... ... ... ... \n", - "116353 0 0 False \n", - "118390 0 0 False \n", - "118983 0 0 False \n", - "118984 0 0 False \n", - "118985 0 0 False \n", + "101197 0 0 False \n", + "101198 0 0 False \n", + "101199 0 0 False \n", + "101200 0 0 False \n", + "118597 0 0 False \n", "\n", " Intermediary Admin Expenses Calc PRIMARY_FUNDING_RECIPIENT_TYPE \\\n", - "222 0 None \n", - "223 0 None \n", - "224 0 None \n", - "225 0 None \n", - "226 0 None \n", + "4662 0 None \n", + "4663 0 None \n", + "7377 0 None \n", + "8943 0 None \n", + "12609 0 None \n", "... ... ... \n", - "116353 0 None \n", - "118390 0 None \n", - "118983 0 None \n", - "118984 0 None \n", - "118985 0 None \n", + "101197 0 None \n", + "101198 0 None \n", + "101199 0 None \n", + "101200 0 None \n", + "118597 0 None \n", "\n", " TRIBAL AFFILIATION PROJECT PARTNERS \n", - "222 None None \n", - "223 None None \n", - "224 None None \n", - "225 None None \n", - "226 None None \n", + "4662 None None \n", + "4663 None None \n", + "7377 None None \n", + "8943 None None \n", + "12609 None None \n", "... ... ... \n", - "116353 None None \n", - "118390 None None \n", - "118983 None None \n", - "118984 None None \n", - "118985 None None \n", + "101197 None None \n", + "101198 None None \n", + "101199 None None \n", + "101200 None None \n", + "118597 None None \n", "\n", "[409 rows x 194 columns]" ] }, - "execution_count": 19, + "execution_count": 20, "metadata": {}, "output_type": "execute_result" } @@ -657,7 +643,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 21, "metadata": {}, "outputs": [], "source": [ @@ -667,12 +653,12 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 22, "metadata": {}, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlIAAAHPCAYAAACYzzRzAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAACiwUlEQVR4nOzdeXiU1fnw8e/sS1YCwyYQCCRhzRBFQcEISlUEAygg7YtoxQVkUaq4l1ZL1WL9VWVTEa17RRRFRdyqgqhU2xBQISyRyCYEsmf2mef9Y5ghk5ksJIHJcn+uy0vzrOfMQHP3nPvcR6UoioIQQgghhDhl6mg3QAghhBCipZJASgghhBCigSSQEkIIIYRoIAmkhBBCCCEaSAIpIYQQQogGkkBKCCGEEKKBJJASQgghhGggCaSEEEIIIRpIAikhhBBCiAaSQEqIFm7Lli2kp6ezZcuWaDcl6OKLL+aee+6JdjOEEOK000a7AUK0ROnp6fW67qWXXmLo0KG1XvP000/Tp08fRo8e3RRNq9Hbb7/NvffeG/xZr9fTtWtXhg8fzq233kqHDh1O6/ubUkM+s4qKCv75z3/y8ccfs3//frxeLz169OCiiy5i+vTpdOrUqcnb+eqrr2Iymbjqqqua/Nmn6sCBA1xyySXBn9VqNZ06dWLAgAHMmTOHfv36RbF1tfvyyy/Ztm0bc+fOjXZThAgjgZQQDbB48eKQn9999102b94cdrx37951PuuZZ57hsssuO+2BVMC8efPo1q0bLpeL//73v7z++ut8+eWXvP/++5hMpiZ5x4YNG1CpVE3yrEhO9TPbv38/119/PYcPH+byyy/nmmuuQafTkZeXx5o1a/j000/56KOPmrydr7/+Ou3atWsWgVTAuHHjyMrKwufzsXfvXl5//XU2btzI6tWrm20w9eWXX/Lqq69KICWaJQmkhGiA8ePHh/ycm5vL5s2bw443R1lZWQwaNAiAyZMnk5iYyAsvvMBnn33GuHHjIt5js9kwm831foder2+StjYFj8fDnDlzOH78OC+99BJDhgwJOT9//nxWrlwZpdadef379w/5c3r22Wcza9YsXn/9dR566KEotizcqf65EyIaJEdKiNPEZrPx6KOPctFFFzFw4EAuu+wyVq1ahaIowWvS09Ox2WysXbuW9PR00tPTg7lFBw8e5M9//jOXXXYZGRkZDB06lHnz5nHgwIEmbeewYcMAgs+95557yMzM5JdffuGmm24iMzOTO++8s959gsg5UmVlZfz1r38N3vub3/yGZ599Fp/PF3Kdz+fjxRdf5Morr2TQoEEMGzaMGTNmsH379jo/s0g+/vhjdu7cycyZM8OCKIDY2Fjmz58fcuzDDz/kqquuCn7ud955J0eOHAm5prCwkHvvvZesrCwGDhzIiBEjmDVrVvBzvPjii9m9ezf/+c9/gu289tprI7bR7XZz3nnnhUy9BlRUVDBo0CD+9re/BY+9/PLLjB07FqvVyrnnnstVV13Fe++9V+NnUJvq3399+r9q1SrS09M5ePBg2PMef/xxBg4cSGlpafBYbm4uM2bM4JxzzsFqtTJt2jT++9//hty3ZMkS0tPT2bNnD3fccQfnnnsuv/vd77jnnnt49dVXAYKfY3p6OoqicPHFFzNr1qywNjidTs455xwWLlzYoM9EiFMhI1JCnAaKojBr1iy2bNnCpEmT6NevH5s2bWLx4sUcOXKE++67D/BPET7wwANkZGQwZcoUAHr06AHA9u3bycnJYezYsXTu3JmDBw/y+uuvM336dD744IMmm4b75ZdfAEhMTAwe83g8wV98d999N0ajsd59isRutzNt2jSOHDnC1KlT6dKlCzk5Ofzf//0fhYWF3H///cFr77//ft5++22ysrKYNGkSXq+X77//ntzcXAYNGlTrZxbJZ599BoSPItYkkEs2aNAg/vCHPwRHsv73v//xzjvvEB8fD8DcuXPZs2cP06ZN46yzzqKoqIjNmzdz+PBhunXrxn333cdf/vIXzGYzM2fOBKgxD02n0zF69Gg++eQTHnzwwZARvU8//RSXy8UVV1wBwOrVq1m0aBGXXXYZ06dPx+l0kpeXR25uLldeeWW9+lhV9e+/Pv0fM2YMjz32GB9++CE33nhjyPM+/PBDhg8fTkJCAgDffPMNN910EwMHDmTOnDmoVCrefvttrrvuOl577TUyMjJC7r/ttttITk5m/vz5KIpC//79OXr0aNjUuUql4sorr2TVqlWUlJSE/Pn997//TUVFBdnZ2af8eQhxyhQhRKM9+OCDSlpaWvDnTz75RElLS1OWL18ect3cuXOV9PR0paCgIHhs8ODByt133x32TLvdHnYsJydHSUtLU9auXRs89u233yppaWnKt99+W2sb33rrLSUtLU35+uuvlePHjyuHDx9WPvjgA+W8885TMjIylF9//VVRFEW5++67lbS0NOXvf/97yP2n0qdRo0aF9GnZsmXK4MGDlZ9//jnk3r///e9Kv379lEOHDimKoijffPONkpaWpvzlL38Ja7/P5wv+d02fWSQTJkxQzjnnnHpd63K5lPPPP18ZN26c4nA4gsc///xzJS0tTXnyyScVRVGU0tJSJS0tTXnuuedqfd7YsWOVadOm1evdmzZtUtLS0pR///vfIcdvuukm5ZJLLgn+PGvWLGXs2LH1emZV+/fvV9LS0pQlS5Yox48fVwoLC5UtW7YoEyZMUNLS0pSPPvqo3v1XFEW55pprlIkTJ4a8Izc3N+TPp8/nUy699FLlhhtuCPn+7Ha7cvHFFyu///3vg8eeeuopJS0tTfnDH/4Q1vbqf78C8vPzlbS0NOW1114LOT5z5kxl1KhRIe8U4nSRqT0hToONGzei0WjCpnJuuOEGFEVh48aNdT7DaDQG/9vtdlNcXEyPHj2Ij4/np59+anDbrr/+es4//3wuuugi5s+fT0xMDEuXLg1btfbb3/62yfq0YcMGzjnnHOLj4ykqKgr+c8EFF+D1evnuu+8A/zScSqVizpw5Yc9oaPJ6RUUFMTEx9br2hx9+4Pjx4/z2t7/FYDAEj48cOZKUlBS++OILwP/d6HQ6/vOf/4RMYTXGsGHDaNeuHevXrw8eKy0t5euvvw6ORgHEx8fz66+/sm3btga9Z8mSJZx//vkMHz6ca6+9ll9++YU777yTSy+9tN79BxgzZgw//vhjcEQL/KNRer0+uAhgx44d7Nu3jyuvvJLi4uLg926z2Tj//PP57rvvwqZ2p06dWu++9OrVC6vVGjKtWVJSwqZNm7jyyitP64IHIQJkak+I0+DgwYN07NiR2NjYkOOBVXyRckuqczgcPPPMM7z99tscOXIkJA+pvLy8wW1buHAhvXr1QqPR0KFDB3r16oVaHfr/qbRaLZ07dw451pg+FRQUkJeXx/nnnx/xfFFREeCfZurYsWPINE1jxcbGsn///npde+jQIcD/C7q6lJSUYF6PXq/nzjvv5G9/+xvDhw/HarUycuRIJkyYgMViaVA7tVotl156Ke+//z4ulwu9Xs/HH3+M2+0OCa
Ruuukmvv76ayZPnkxycjLDhw9n3LhxnHPOOfV6zzXXXMPll1+OSqUiPj6e1NTU4FRiffsPcPnll/Poo4+yfv16Zs6ciaIobNiwgaysrOCfkX379gFw991319ie8vLy4DQgQLdu3erVj4Dx48fzl7/8hYMHD3LWWWexYcMG3G53i1j4IVoHCaSEaKb+8pe/BHNJBg8eTFxcHCqVKpg70lAZGRnBVXs10ev1YcFVY/h8PoYPHx6WTxPQs2fPJntXdSkpKfz0008cPnyYLl26NNlzr7/+ei6++GI+/fRTvvrqK5588kmeffZZXnzxRfr379+gZ44dO5Y33niDjRs3Mnr0aDZs2EBKSgp9+/YNXtO7d282bNjAF198waZNm/j444957bXXmD17NvPmzavzHcnJyVxwwQUNal9VnTp1YsiQIXz44YfMnDmTrVu3cujQoeDCBCD45/Suu+6qsbRC9VV5VUfC6mPs2LE88sgjvPfee8ycOZN169YxcOBAUlJSTrFHQjSMTO0JcRqcddZZHD16lIqKipDj+fn5wfN1+eijj5gwYQL33HMPl19+OcOHD+ecc85p1GhUYzSmTz169MBms3HBBRdE/Kdr167B644ePUpJSUmTtXvUqFEArFu3rs5rA+34+eefw879/PPPwfMBPXr04IYbbuD555/n/fffx+128/zzzwfPn+rU0rnnnovFYmH9+vUUFRXx7bffhoxGBZjNZq644goeeeQRPv/8c0aOHMnTTz+N0+k8pfdVd6r9HzNmDDt37iQ/P5/169djMpmCnzdA9+7dAf+oYE3fvU6nq7NdtX2OiYmJjBw5kvfee4+DBw/yv//9T0ajxBklgZQQp0FWVhZerze4bDvgn//8JyqViqysrOAxs9lMWVlZ2DM0Gk3YsZdffhmv19v0Da6HU+lTdWPGjCEnJ4dNmzaFnSsrK8Pj8QBw6aWXoigKS5cuDbuu6ihcTZ9ZJJdddhlpaWk8/fTT5OTkhJ2vqKjgH//4BwADBw6kffv2/Otf/8LlcgWv+fLLL9m7dy8jR44E/KsQqwctPXr0ICYmJuQ+k8lU73aCv9r45Zdfzueff866devweDxhgVRxcXHIz3q9nt69e6MoCm63u97viqS+/Q+47LLL0Gg0fPDBB2zYsIGRI0eGjDANHDiQHj168Pzzz1NZWRn2vsCUbl0CK1Rr+izHjx/Pnj17WLx4MRqNhrFjx9bruUI0BZnaE+I0uPjiixk6dCj/+Mc/OHjwIOnp6WzevJnPPvuM6667LmS5/oABA/jmm2944YUX6NixI926dQvm3Lz77rvExsbSp08ftm7dytdff92k+UOnq0/VzZgxg3//+9/MnDmTiRMnMmDAAOx2O7t27eKjjz7is88+IykpiWHDhjF+/HhefvllCgoKuPDCC/H5fPz3v/9l6NChTJs2Daj5M4tEp9OxdOlSfv/73zNt2jQuv/xyzj77bHQ6Hbt37+b9998nPj6e+fPno9PpuPPOO7n33nuZNm0aY8eODS7/P+uss7j++usBf+7P9ddfz+WXX06fPn3QaDR8+umnHDt2LOSX+IABA3j99ddZvnw5ycnJJCUl1ZgnFjBmzBhefvllnnrqKdLS0sKq48+YMYMOHTpw9tln0759e/Lz83nllVe46KKLwvLXTlV9+x/Qvn17hg4dygsvvEBlZWVY0KdWq1m0aBE33XQT48aN46qrrqJTp04cOXKELVu2EBsby9NPP11nuwYMGADAokWLGDFiRFiwdNFFF5GYmBjM0Wrfvn2jPgchToUEUkKcBmq1mhUrVvDUU0+xfv163n77bc466yzuuusubrjhhpBr77nnHhYuXMgTTzyBw+Fg4sSJWK1W7r//ftRqNe+99x5Op5Ozzz6bF154ocY8o+bUp+pMJhMvv/wyzzzzDBs2bOCdd94hNjaWnj17MnfuXOLi4oLXPvLII6Snp7NmzRoWL15MXFwcAwcOJDMzM3hNTZ9ZTZKTk3nnnXf45z//ySeffMJnn32Gz+cjOTmZyZMnh6xEvOqqqzAajaxcuZK///3vmM1mRo8ezYIFC4I1pDp37szYsWP55ptvWLduHRqNhpSUFJ544gkuu+yy4LNmz57NoUOHeO6556isrOS8886rM5A6++yz6dKlC4cPH444rXfNNdfw3nvv8cILL2Cz2ejcuTPXXnstt956a63Pra/69L+qK664gq+//pqYmBguuuiisPNDhw7ljTfeYPny5bzyyivYbDYsFgsZGRlcc8019WrTpZdeyrXXXssHH3zAunXrUBQlJJDS6/VcccUVvPbaazKtJ844ldKYrFUhhIjgoosuYsSIEfz1r3+NdlNEG/Hwww+zZs0aNm/e3GTFaoWoD8mREkI0KbfbTUlJCe3atYt2U0Qb4XQ6WbduHZdddpkEUeKMk6k9IUST2bRpE+vXr8fhcNQ5hSVEYx0/fpyvv/6ajz76iJKSEqZPnx7tJok2SAIpIUSTefbZZ/nll1+YP38+w4cPj3ZzRCu3Z88e7rzzTtq3b88DDzxQY60qIU4nyZESQgghhGggyZESQgghhGggCaSEEEIIIRpIcqROQU5ODoqi1GtLAyGEEEI0D263G5VKFVKPrqk0qxGpgoICFi5cyPjx4+nfvz/jxo2LeN2bb77JZZddxqBBg8jOzubzzz8Pu6a8vJz77ruP8847j8zMTObNm8fRo0cb1T5FURq1WWxtz3W5XKfl2c1NW+lrW+knSF9bo7bST5C+tkaR+nm6fn9DMxuR2r17N19++SVWqxWfzxex0x988AF//OMfmTlzJsOGDWP9+vXMmTOHV199lcGDBwevu/3229mzZw9//vOfMRgMPPHEE9x000289dZbaLUN63ZgJGrQoEENur8mNpuNHTt20KdPn7Cd0FubttLXttJPkL62Rm2lnyB9bY0i9XP79u2n7X3NKpC6+OKLGT16NODfAuKHH34Iu+app55i7Nix3H777QAMGzaMXbt2sWzZMlauXAn4p+C++uorVq1axYgRIwDo1asXV1xxBR9//HHEbReEEEIIIU5Vs5raU6trb87+/fvZt28fY8aMCTl+xRVX8M033wR3K9+4cSPx8fEhdWxSUlLo168fGzdubPqGCyGEEKJNalYjUnXJz88H/KNLVfXu3Ru3283+/fvp3bs3+fn59OrVC5VKFXJdSkpK8BkNpSgKNputUc+ozm63h/y7NWsrfW0r/QTpa2vUVvoJ0tfWKFI/FUUJiwmaSosKpEpLSwHCdiAP/Bw4X1ZWFrKbfEBCQkLE6cJT4Xa72bFjR6OeUZN9+/adluc2R22lr22lnyB9bY3aSj9B+toaVe+nXq8/Le9pUYFUc6DT6ejTp0+TPtNut7Nv3z569uzZ6jfcbCt9bSv9BOlra9RW+gnS19YoUj/37Nlz2t7XogKphIQEwF/awGKxBI+XlZWFnI+Pj+fXX38Nu7+0tDR4TUOpVKrTttrBZDK16pUUVbWVvraVfoL0tTVqK/0E6WtrVLWfp2taD5pZsnldUlJSAMLynPLz89HpdHTv3
j143c8//xxWPuHnn38OPkMIIYQQorFaVCDVvXt3evbsyYYNG0KOr1+/nvPPPz84/5mVlUVpaSnffPNN8Jqff/6Zn376iaysrDPaZiGEEEK0Xs1qas9ut/Pll18CcPDgQSoqKoJB03nnnUdSUhJz587lzjvvpEePHgwdOpT169ezbds2XnnlleBzMjMzGTFiBPfddx933303BoOBf/zjH6Snp3PppZdGpW9CCCGEaH2aVSB1/PhxbrvttpBjgZ9feuklhg4dyrhx47Db7axcuZJnn32WXr16sXTp0rD9c5544gkeeeQRFi5ciMfjYcSIETzwwAMNrmouhBBCCFFds4oqunXrRl5eXp3XTZ48mcmTJ9d6TVxcHA8//DAPP/xwUzXvtDheZqe8wo0hMZmjpW7iPHbax7fe1RRCCCFEa9KsAqm25tfjlSx9M5fc3YXBY9ZUC3MmW+ncPiaKLRNCCCFEfbSoZPPW5HiZPSyIAsjdXcjSN3M5Xta6K88KIYQQrYEEUlFSXukKC6ICcncXUl7pOsMtEkIIIcSpkkAqSirtnkadF0IIIUT0SSAVJTGm2tPT6jovhBBCiOiTQCpK4mL0WFMtEc9ZUy3ExZyezRWFEEII0XQkkIqS9vEm5ky2hgVTgVV7UgJBCCGEaP5k/iiKOrePYf7vMimvcFFhdxNr0hEXq5cgSgghhGghZEQqytrHm+iYqMNZUkDHRJ0EUUIIIUQLIoGUEEIIIUQDSSAlhBBCCNFAEkgJIYQQQjSQJJtHWWGJjQqbf9Piogo35Q4bNoebyhPJ52aTjo7tzHU+50hR6H0mo45OSXXfJ4QQQoiGk0Aqig4fq2DZmm3k7i6kR6dY7r9hKEtWbw3bxHj2pAy6dIit13NO5T4hhBBCNI5M7UVJYYktJPi5+7pzWV4tGAL/vnvL1mzjaLEt4nOOFNnCgqiq9x0pinyfEEIIIRpPRqSipMLmprjMzvK7Lsbj9VFW4WJG9gAMOg3f7TjMKx/m4XB5AX9QVGl3Q7vw59gc7lo3P7Y53KezG0IIIUSbJoFUlLjcbh64YWjEKblbJg6iZ6dEFv1zSzCYqmkT40p77YGSbH4shBBCnD4ytRclCbGmGqfknlm7HY0WsrN6B4/XtIlxjElX63tk82MhhBDi9JFAKkrsTk+tU3JGg46+yf65PGuqpcaAyWzU1br5sdlYe6AlhBBCiIaTQCpK6pqSszs8uNy+4Oq7qiUQym0uDhwtJ6+gCLfHy5zJVs7t1ynk/sB9UgJBCCGEOH1k3idK6pqSMxm1xJp1zLtmcEgQVVhiZ8nqHHLyTo5mZaZbmHWVlelj+1Fh8xBj0mKWOlJCCCHEaScjUlFiMmhrnZJzON10SDSFjURVD6IAcvIKWfF2Lu0TTAzs3Z5eXRMkiBJCCCHOAAmkosTt9TB7UkZYMBVYtdch3kScWR9yrrTCGRZEBeTkFVJa4Txt7RVCCCFEOJnaixK9Vse/Pt7JLRMH4fH6qLT7p+QMOg17D5SQnpwUdk/dpQ6kZpQQQghxJsmIVJR0SjLzu9Fp6LSBr0BBBajVKvp2T6RjhKm5uksdyAo9IYQQ4kySEako8gDL3syNuEdeJAmxBjLTLRGn9zLTLSTEGk5XU4UQQggRgYxIRUlD9siLM+uZOyWTzPTQvKrMdAvzpmSG5VQJIYQQ4vSSEakoaegeeZZEEwumDaG0wkml3U2MSUdCrEGCKCGEECIKJJCKksbskRdn1kvgJIQQQjQDMrUXJbJHnhBCCNHySSAVJbJHnhBCCNHySSAVJZ2SzDUW5JQ98oQQQoiWQeaPoqSwxIYWmDPZit3pCSaOmwxaNIpCYYkNS6IEU0IIIURzJiNSUeL1Kuw5WISqyrHAf7t8XhSvEo1mCSGEEOIUSCAVJRqfhz5nJeHx+kKOe70+dCoNKkUCKSGEEKK5k6m9aFFr8SgKK97eHrGyebndie2wF61GQ6XDhdko9aIiKbe5pKaWEEKIqJFAKkq8UGtl8zmTrCxYspHkLglkX5jC/Su+pl+vJOZOycSSaIpOo5uZwhI7S1bnhGyZk5lukc9ICCHEGSNTe1Fid3pqrWxud3n466zhFBwuZd2mfLKzepOTV8iS1TmU21xnuLXNT7nNFRZEAfIZCSGEOKMkkIqS+lQ2f/adH5gzJZPc3YX0TW4H+AOF0grnmWhis1Za4Yy4eTPIZySEEOLMkUAqSupT2Tx3dyHtE4wAuNwnk9LrCsLagroDUfmMhBBCnH4SSEWJyaCttbK5yaClU5IJu8O/555ed/KrqisIawvqDkTlMxJCCHH6SSAVJW6vp9bK5m98ksdDN19ArFnHw7OG0z7BxAM3DGWEtQsJsYYotbr5SIg1kJkeORDNTLfIZySEEOKMkFV7UWLUaHF4Pdw8cSBerxJcvq/RqFj0/BZ+OVJBdlZv5j3+RfCeQJAly/shzqxn7pTMiKv25k3JlM9ICCHEGSGBVJT4gNJyN/cu31zjNdXzfAKlEeb/LpP28bK835JoYsG0IVJHSgghRNRIIBUlNqeHuBgdf5oxlPYJJmxOj3+fPbWKLT8d5s1Pd0fM88ndXUh5pUsCqRPizHoJnIQQQkSNBFJR4nC6SYgzsm5TPjm7QiubXzM6jQdvao/H54t4b6Xdc6aaKYQQQohaSCAVJYlxRpa/tY2tu8IrmwNcOLgrlfbIRSVjTPK1CSGEEM2BrNqLEofLGxZEBeTuLiQp3kj3TvFh56ypFuJiZCpLCCGEaA4kkIqSugpGutw+FCX0mDXVwpzJVsmPEkIIIZoJmSOKkroKRup1apLijfxj/kXYHR5iTToMeg2d28ecoRYKIYQQoi4SSEWJyaAlM80SkmgeYE21UFTmYPeBEl7/KA+AhTOG0qWDBFFCCCFEcyJTe1GiUamYdXVGWHXuwKq9jD7t6dsjCaNeEwyspFq3EEII0bzIiFSU+BSFb7YfZPbVVuwuD5V2DyaDBoNOy9GiSn7ML8LjVZiRPZCzLLG0TzBKvSQhhBCimZFAKkpsTg8vvL+TF97fGfH8whlDAeiQYOLbHw8zONVCV0vsmWyiEEIIIeogU3tRUp9Vey63j0PHKnntozxMRol5hRBCiOZGfjtHSX1W7QVI7SghhBCieZIRqSgJrNqLJJBcXlTm4HipQ2pHCSGEEM1UiwykPvvsMyZPnkxmZiYjRozgtttuY//+/WHXvfnmm1x22WUMGjSI7OxsPv/88yi0NjIN1Lpqr2eXeAb17sC5AzpFrB1VbnNx4Gg5eQVFHDhaTrkt8nYyQgghhDh9WtzU3pYtW5gzZw4TJkxg/vz5lJSU8OSTT3LDDTfw3nvvYTQaAfjggw/44x//yMyZMxk2bBjr169nzpw5vPrqqwwePDi6nQAqnB6eeXsbt089G4c7dNWex+PF7fPh9njR6TRh9xaW2FmyOoecvJM1qDLTLcydkoklUUauhBBCiDOlxQVSH3zwAV27duXhhx9GpVIBkJSUxHXXXccPP/zAkCFDAHjqqacYO3Yst99+OwDDhg1j165dLFu2jJUrV0ar+UGV
djc//lzETY98GvH8PdPPJSnewMsf7uC2qWcHSx+U21xhQRRATl4hS1bnsGDaECmTIIQQQpwhLS6Q8ng8xMTEBIMogLi4OACUE5vT7d+/n3379rFgwYKQe6+44goWL16My+VCr49usBFj0jEiozPXjRuI3emh0u4m1qTDaNDy4vs/oNep0es1JHdJoLjMEQyOSiucYUFUQE5eIcdK7Lg83nrlVJXbXJRWOKm0u4kx6UiINUgQJoQQQpyCFhdIXXXVVbz77ru8+uqrZGdnU1JSwv/93//Rv39/zj77bADy8/MB6NWrV8i9vXv3xu12s3//fnr37t2g9yuKgs1ma1wngFiDluljB7D0zVxyd58MjKypFmZPyuDwsUr0Wg19k9tRYXcH31lRRy7UocJKVq37kdmTMkgw15wCV+FQWLomN2x6cM4kK7FGVY33NZbdbg/5d2vVVvoJ0tfWqK30E6SvrVGkfiqKEjIA05RaXCA1ZMgQli5dyh133MFDDz0EQL9+/XjuuefQaPz5RKWlpQDEx8eH3Bv4OXC+IdxuNzt27Gjw/QFn9ezLsjXbQoIogNzdhSxbs405k60cL7HjcvtIStAE35loSa71uUa9hvTkdhwttlNcpsKgAzx2ykqO4fP5/M9I6sg/P97P1l3h04NL38zluku7U1J0tNF9rM2+fftO6/Obi7bST5C+tkZtpZ8gfW2NqvfzdM1EtbhA6n//+x933XUXU6ZMYeTIkZSUlLB8+XJuvvlmXnvttWCy+emi0+no06dPo59zpMRNcZmdZXeNwutVglN7ao2Kv734HXanB4NBS1eTjkq7m/Zd+xB7ovbUopkXUGFzo9ep2VlQzLqNe3G4vJzbrxNxMXryCopZ/emu4LuqjzQdK/Owddf/IrYrZ1chN44fSL9+7Rvdx0jsdjv79u2jZ8+emEytNzG+rfQTpK+tUVvpJ0hfW6NI/dyzZ89pe1+LC6QWLVrEsGHDuOeee4LHBg8ezMiRI3n33Xe55pprSEhIAKC8vByL5WR5gbKyMoDg+YZQqVSYzeYG3x/gKjzG/TcMZXm1USlrqoX7bxhKeaUTt8dHfKyBe5dvDp67ZeIg/v7K95RUuILHFkwbwoZv9vHby9J58YMdYaNcOXmFLF2TG0xEtxcW1do2u9OD2Rxf6zWNZTKZmuRzbO7aSj9B+toatZV+gvS1Naraz9M1rQctsI7U3r176du3b8ixzp07065dO3755RcAUlJSgJO5UgH5+fnodDq6d+9+Zhpbi4RYU1gQBf6pveVrtpEQa6RDvAmfTwk598za7cyZkhly7P2v8rns/J6UlDvDnheQk1dIaYUTqLuqel3nhRBCCOHX4gKprl278tNPP4UcO3jwIMXFxZx11lkAdO/enZ49e7Jhw4aQ69avX8/5558f9RV74B/1qSnoyd1diN3pwenxUmFzh51rnxA6fZmzqxC1yr8/X20C+/slxBrCCoEGZKZbSIg11LcbQgghRJvW4qb2pk6dysMPP8yiRYu4+OKLKSkpYcWKFbRv354xY8YEr5s7dy533nknPXr0YOjQoaxfv55t27bxyiuvRLH1J1Xa3YzI6Mz14wZiq1b+oLjUhsPpxmTURRwdsjs8Ycdcbl/I/nyRBJ4VZ9Yzd0pmxKKe86ZkSgkEIYQQop5aXCA1ffp09Ho9r7/+Om+99RYxMTEMHjyYJ554gnbt2gWvGzduHHa7nZUrV/Lss8/Sq1cvli5dSmZmZi1PP3M6xOuYPnYASyKUP7j16gy0KiiqcKLRhAdHJmP419bVEoNapSIz3RKxzlT1kSZLookF04ZIHSkhhBCiEVpcIKVSqfjtb3/Lb3/72zqvnTx5MpMnTz4DrWoAtZZl1YIoOJEj9dY2ZmQPICHWwDc/HAo5b021cLzUEXIsM91Ch0QTDpeXyRen4fMR8tyaRprizHoJnIQQQohGaHGBVGtRV44UgMPlpXvHk6vnAqv27lv+VfBYIEgCWLI6hx0/F5Gd1ZvxWSnB6b6iMgcGffiefUIIIYRoHAmkoiSQ+F3beRUqzEYdD88aTqxZx7ESO9/+cIh512QGg6ROSWY6JJo4cLQ8OKVXtYZUwICU9jL6JIQQQjQxCaSipN4lCFQKh47aiDPrQopvBqy4+2KgfoGZEEIIIZpWiyt/0FqYDVqsqZFLEASOG/UaFJ+KR1/6jrmPf0FeQTELpg3BeGKarmoCudSGEkIIIc48CaSiRA3MnpQRFkwFVu3F6DW4PF5UKlhy50U8cMNQCg6Xsm5TPtlZvcMSyKU2lBBCCHHmydRelFQ4PRz4tZi5k60n6kh5iDFpMRm0aAC3z0N5pX86Tq/X8P6mfBbNGs4DKzZz4/gBTLiod1jO08yJGTy9dpvUhhJCCCHOEAmkokSvc9OnR4eIdaRmT8pAp9KQGA/uE6lNW3cXsvKdH5gzJROnyxsSGBWW2ENW7F01sg8GnQajQYsKcLpdHClSsDncwcKfZpOOju1a/15LQgghxOkkgVSUaNQmltZQR2rZmm3MnpSBVq3F5nXhOpFcnru7kOvH9Q/mSAGU21whFcp/3HuM0ed2Z9mJffx6dIrl/huGsmT11ogBW5cOsfVqb7nNJcU7hRBCiGokkIqSuupIBVbmLX7pO+7//dDgOYfTQ7v4k/lOpRXOkKm826eezdI1JwO0u687t8bNkZet2ca8awbXOTIVGPGqPmU4d0omlkRTPXsshBBCtD6SbB4l9SlXUGl307l9LG7vyc2ITQZtcIQq0nPsrtAAzetVag3Y6mpH9RGvgJy8QpaszqHc5qr1fiGEEKI1k0AqSupTriDGpOPWqzNY/NJ3gH867lipA7vTy4Gj5eQVFGHQa5gyOi043Vc9MKo7YAvfALmq6iNeVeXkFVJa4az1fiGEEKI1k6m9KDEZtPz2N2lccm6PE6v2/EngRoMWr9eDVuP/au5/ejNHiuwMTrVw44SBPLBiM/dedx4P//O/HCmyA/4Aa8G0ITz2yvdhAVrdAVvtfwSk0KcQQghRMwmkokTjczBqSPeIq/ZuvTqDkkoHcSY9d/y/czDoNOi0ag4dszHvmkxizTruvu5c/vCPjcDJvfmys3pj0vsLfQaOaTSqkJ+rsqZaGl3IUwp9CiGEaMtkai9KvGpjcGVdVbm7C1n+1jYMWi3b9x5Hq1Hz/Hs/ceviz1n0/BYeWrWF59b9gNmgC1m9l7u7kAsHd8XudoYU+vzbi99xaw2FP2dPyqgz0VwKfQohhBA1kxGpKKlr1Z4CJMUb0WrU/Hq8IuR8Tl4hz6zdRnZW75ANip0uL+nJSQDMmzIYt8eLw+2lpNzJTeMHotWoKK1wkRinRaPRYXO4+WHvMWJNOkxGHZ2SwoOqOLOeuVMyI67ak0KfQggh2joJpKKkPrlHLrePYyV2Hrz5Av707NfBnCjwB1NXjkgJuafqNJtKreKZd7ZHLFng8XhPqa6UJdHEgmlDpI6UEEIIUY1M7UVJfXKP9Do1sWYdK97axl3Tzw27xuU+WRah6jRbbSULvF5fjVOKy9Zs40iRLWJ74sx6unW
MIz05iW4d4ySIEkIIIZARqagxGbSMOT+ZCRf1wen2Blft6XUa3vlyDyqgqMxBV0sMEy/qjdvtY+GMoewsKGbdxr04XF70On8cPHRAJ26emEFphZNDhRUY9BpSu7djx89FwcKeAXVNKdocsgpPCCGEqC8JpKJEA0wc2SdsdCgwxWZ3OelmiUWNivxDJby0fmfw/IJpQ/h4yz46JZl5+p6L0WrULH1za8gIVNWSCI5aCnhWV1ddKSGEEEKcJIFUlHih1im2OZOs3L3kc9KTk7hl4sBgIBW4fvakDHyKgter8N7G3cycmBEysmU0aNEAi+deGHYstVsCuw+URmxXXXWlhBBCCHGS/NaMkrqm2OwuDw6Xl9zdhXg8Sth5p9vLgqc28ecbhjL+ovCRrXP7deLmiYNOJIh7sDs9HC91MDi1A/deO4RHXv4+LJiyplowG6UulBBCCFFfEkhFyalUDK+MkLd0tNhOdlZv2ieZWVqtqKdRr+Hy83uGHbemWjjLEkvHdiYWXDuEmx/5LOTc7EkZEUsgCCGEECIyCaSiJMakIzFWz5wpmbRPMGJzeIgxajlW6mDp6pyQVX2RRoniTDr690zC4fKQntyO8VkpuNw+jAYNlnYmPB4fU0an8ftx/Skqd/LzwRJ6dU3A7vRgd3nQadQsuXMkFTYPMSYt5hrqSAkhhBCiZhJIRUmMQcvDt47gmbXbw0aNHr51BDq1Kviz2aAlMVZPSYULgMw0CwW/ltM/JQkVKvIKikMKc1pTLWRfmMJjr3wPwMIZw/hh73Fe/nBn8JpATameXRLORHeFEEKIVknqSEWJV/GFBVHgz396Zu12jhTbsaZamHVVBi+8/yNzpmQC/iDphuyBrFr3A2UVTla+E/kZ6zblk53Vm+ys3rzx6S5ydoXXlFqyOodym+v0dlQIIYRoxSSQihKHy1drsnl8rJ4rL0zhaJGNr7cfpmM7EwtnDCU9uR3HS/z5UTEmfViAVPUZfZPb0Te5XY3vyckrpLTCWe82l9tcHDhaTl5BEQeOltc7CKt63/EyD4lJHev9TiGEEKI5k6m9KKkr2dxm97Do+S3cc6KieaXdzUOrtmBNtTBsYGfyCopJ6Vr7tFzVyucNbUdAYYk94n57c6dkYkk0ndp9aRbmTE7CLClZQgghWjgZkYqSuraIMZ+o5xSoXh5j0mFNtTD9in68/lEeubsLg+dqotep67zGoNfUOcJU25YztU0P1njfrkKWrsmVaUUhhBAtnoxIRYlBp8aaaok47WZNtWDQachMt7D7QAnWVAsmg5b05HaUV7r4bscRAHYWFNf6jJ0FxcH/rumaTVsPBRPVaxphKq1whgVDAYHpwUh77zX0PiGEEKKlkBGpKNGq1dwycRDWVEvIcWuqhVsmDkKrVjFzYgZntY9hzmQrTpeHwakW2sUbmTI6DaNew7qNe8m+MCXiM7IvTGHdxr2s27iXa0ankZle8zUBNY0wnUrNq6a4TwghhGgpZEQqSiqdHhY+8zVzpmRy/bj+2B0eTEYtx0sd3Lf8Kx665QLUakjt0Y5lb+aytVqJhMA+eo+98j3ZWb2DdaS6WmJwe31o1CoWzhiGyailuNzJwJT2XDkihRiTDrNRy6ath8L24YPII0V1TUPWdL6h9wkhhBAthQRSUVJpd5PeI5HkznHYnR5UKtCqVSR3jiO9RyKVdg+JsXqeX/dDSBAFJ/fby87qzepPdwWn5vyjWQNZtW5HxKm8zHR/AHaosCKk7lSktlWVEGsgM90ScZouM91CQqwh4nMaep8QQgjRUkggFSXt43XcMn4Q3gjnbhk/CBSFf32+m2suTSe5awLrNu4NGT3K3V3I+KwUpoxOo29yOxQFkhKM2F1eJlzUG7WasBV286ZkEmfWn/JIUZxZz9wpmRFX7QWeGUmN96VZmDPJKvlRQgghWjwJpKJEq9biURSWRdgPb/akDLRqNVeNSuXXYzbyCoqDU3lVg6kYk45dv4RWNc9MszBldBq3Tc3E7vBQaXcTY9KREGsIBi4NGSmyJJpYMG3IiU2Qw59Zk+r3mQxa3PZSYo2qU/7MhBBCiOZGks2jxAssW7MtYlXyZWu24QWeWbudWLM+pFJ5VW6Pj63VK5bvKmT1Z7vQazV06xhHenIS3TrGhQQ8gZGi6gno9RlhqumZtal6X4d4LSVFR+t1nxBCCNHcyYhUlNidnlorm9udHnLyCrnuiv7B6TujXkvf5HbsLCim4FAp2/Yci3h/fUoLNHSESQghhBAnSSAVJXWXBvAA4HR7I25KfOvVGWzedhCjXhO28q4+zwf/SJEETkIIIUTDydRelNSd8O2PcQ06TcTpv+VvbSPObGDBtCEY9ZpTfr4QQgghGk8CqSgxGbRhhTQDApXMrakWPF4l4jW5uwtpn2CMmDslpQWEEEKIM0MCqSgpt9mZPSkjYlXy2ZMyKLc5mHBRbypq2Y/OqNeSV1BE3+R2wWOZ6RZunjCIskqn7GUnhBBCnGaSIxUlWo2OJ/+Vw+1Tz8buOlmmwKTX8sS//sctV2VQUl7B4eO2Gp9hMmh56JYLMBu0PDb3QpxuL9v2HGPp6q3MuyaTYyV2Cg6XEWvSYTLq6JRkPoM9FEIIIVo/CaSixGTQotVquOmRT8POBab2+nRvx9fbDke835pq4ZsfDrP6011kplu4afwgcnYd5ce9x7htamZYaYXASFeXDrGnrU9CCCFEWyNTe1FS59RepR2bw83l5/esdVNi8Jc7eGbtdtrFGbn9t2fXWp/qSFHNI1xCCCGEODUyIhUlWo2ONZ/lMWeyFbvTP7UXa9JhNGh589OdjBneG6MGKh0eRp3TjRvHD6TS7sbmcLOzoDisynlgy5i66lPZHHWXRRBCCCFE/UggFSWxBi2TLklnaQ1bxJTb7Kg1Ot79ci+zJ2WwOfcAGamdeGjVlhqf6XL76l2fSgghhBCNJ4FUlPiofYuYuZOtoJz8edZVg7C7QoMgo15DdlZv+ia3w+X20bm9GaNeU2ORTqNeQ7s4PQeOlks1cyGEEKIJSCAVJba6puCcHgw6dfBnl8eHVq3m2Xsv4bGXv2f/0QoWTBvCuk35oZsWp1tYOGMYT77xPxbOyMSgM2OrMnWo0ag5etzGX17YgsPlJTPdwtwpmVgSTWek30IIIURrIoFUlNRnCs6nnKxYXmFzc9+KzVhTLSy4dggqxcfSt34IC8Zy8gqJMWp55OYL8KhgSQ1Th4/NG8qCp7aQk1fIktU5LJg2REamhBBCiFMkq/aipD5bxLiqTM+ZjP6YNzDVhzp865iAkef0wKtW1Tp1aNSbgxXRA5scCyGEEOLUSCAVJeY6togxG7QkxBow6jVYUy0cL3UEz+fuLsTurDlpvH2Csc7Ve3anJ6Qien02ORZCCCFEKAmkokQNtdaRUgPb9x5jRvZA/6q9rftDrqst8LE5PPWYOnTjcvuCP8smx0IIIcSpkxypKKlweljyRg53ThuC0+0NrqIz6DQ89vL3zL0mk6R4I+0TTPzz/Z/4/ZUD+Px/h4DA6jsDi2ZeQIXNjV6nZmdBMe
s27sXh8hJj1KJWq2p9f4xJFxzVkk2OhRBCiIaRQCpKKu1udh8o5ZZHP6vxvMvt49fjlXy9/TBTL00H/EHUwhnDeGbtdnJ2hSaRL5g2hA3f7EOvU6PT+qcEI03vBbag2VlQTGa6hXlTMiXRXAghhGgAmdqLkrqTzXUnAiL/VxSYqpuRPZA3P9sVEkSBP+/p/a/yuez8njz43LdofN5apw41Ph8XZZ7FgmlD6CClD4QQQogGkRGpKDGdSDavbcToeJmDwmI74A+snrpjJCqVimVrciM+M2dXIVdemMKRIjt/WLqZxXOGMHey9UQdKQ8xJi0mgxa1z8MvxxwcK7VzQUbXGttYbnNRWuEMKd4JhB1ritGsSO+SUTIhhBDNnQRSUaLxebhtUgYewOHyBgtmGvQa/5fiUxjcpwNzH/8iGFjd9PCn3DP93FqfG0ggT+6SgNtroPBYBY++9B0Lpg3hX5/khdWUGtS7Q8SApbDEzpLVOeTknbw+M93C5IvTeGjVt8HK6U1R0LOmd0mhUCGEEM2dTO1Fi1qLB/82MfMe/4J7l29m7uNfsGzNNjwAahUOt5f05CRmT8qgpNSGUa9Br6v9K+uYZGbhjKGMsHbl2x8Po1KryM7qzbpN+RFrSj29dhvlNlfI8XKbKyywAX+9qTc+3RWsPxU4tmR1Ttgz6qu2dzXmuUIIIcSZ0GIDqbVr1zJhwgQGDRrE0KFDufHGG3E4TtZa+ve//012djaDBg3isssu46233opia8N5qX2vPS9QafMwZ7KVVe9u55WPd5Od1ZudBcU11p/KTLMACkVlDqx9OjA8oysatYq+ye1qrCkVqRhnaYUzLLCp2r6q9adqekZ91fYuKRQqhBCiuWuRU3srVqxg5cqVzJw5k8GDB1NcXMw333yD1+ufbvr++++ZM2cOkyZN4r777uPbb7/l/vvvJyYmhssvvzzKrferT8FMs8k/nRcwPiuFxS9/z4JpQ4LXBVhTLYwbkcJ9yzfTr1cS/Xom8eqGnXTrFEd6j3Zh76iqes2pumpQVa0/Vd976vvupnquEEIIcSa0uEAqPz+fpUuXsnz5ci666KLg8csuuyz43ytWrCAjI4OHHnoIgGHDhrF//36eeuqpZhNIVdrdjDq7K9Mu749XUULypIwGDcVlDkwGHSvvG81z72xjy09Hcbl9OFxeNnyzjxnZA/D5FJwuL5UONzsLinnsle9xuLzk5BXy7Ds/cN3Yfjz03LecP7BLrW2pvoKwrhWFkaYXG1rQsz6rF4UQQojmqsUFUm+//TbdunULCaKqcrlcbNmyhTvvvDPk+BVXXMH777/PgQMH6Nat25loaq0s8Tr+32X9cHp8PLN2e9jo0i0TB3Hf8q9I7pLA7EkZwHa6WmJ4+u6L8fp8KIoKk0FDnEmHweCvGXXBoC4cK3WwdHUOubsLUZR+XDG8F//Z8SuD0yxsrVIywajXkJ3Vm4w+HaiwuTlwtDy4Ui4h1kBmuiXilJs11cLOguKQY40p6Fnbu6RQqBBCiOauxQVSubm5pKWlsXz5cl5++WXKy8sZOHAg9957L1arlV9++QW3201KSkrIfb17+xOk8/PzGxVIKYqCzWZrVB8AUGs5WmLjjU92R8yTembtduZMyWTR81tYtmYbcyZbKfi1nI++2cd14/rz2kc7uXZMP3YXFBFr1rP4Zf9olDXVwqJZw3lgxWZKyp0Mz+jK/Ss2M3dKJorif7ZRr2HBtCGs25TP6k93Bd+bmW5hziQrsUYVcyZZWbomt8ZVe9Xv0eDBZqt5/78Au90e8m8N1PiuU3luc1O9n62Z9LX1aSv9BOlraxSpn4qioFLVvuNHQ6kURVFOy5NPk8svv5wjR47QsWNH5s+fj8lk4umnn2bXrl18/PHH7Nu3j9/97ne88cYbDB48OHhfUVER559/Pn//+9+58sorG/Tu7du343I1zSqypC598PoU5v/jyxqv+cf8i4Lnn7pjJC+8/yO3TMxAq1ZRXmHjnx/uZubEQTzzznbSerQLBkXWVAtXXpiCWgXvbcontUc71m3cS3ZWb/omtyMxzsDL63eEFfUEf8L6dZd2p6zkGPGJHUBjwubyYtZrUPn8yfyK2hg8htdOWckxfL7wvKn6UqvVYe9qiucKIYQQAXq9nkGDBjX5c1vciFRgROjJJ5+kb9++AFitVi6++GJeeeUVRowYcVrfr9Pp6NOnT6Ofk3/Yhq+OENbuODkSU2l3k5NXyIq3tjHC2hVrage6WUy4vT627iok+8KTI3C5uwv5/bj+fL39cLBI52qXl9Wf7sKaauH6sf0jBlHgL+p54/iBpHdqH+GsOeKxLhGvraFPdjv79u2jZ8+emEy11Yg6tec2N/XvZ8snfW192ko/QfraGkXq5549e07b+1pcIBUfH09iYmIwiAJITEykf//+7Nmzh7FjxwJQXl4ecl9ZWRkACQkJjXq/SqXCbI4UUJyaGJMbbx2RlMl48usJJF3n7i5kfFZKcLrveIl/lKj6Sjq708O6jXuD9z46ewS2E0npR4trn5q0Oz2YzfGn3KdTYTKZmuRzbO7aSj9B+toatZV+gvS1Naraz9M1rQctsI5UbaNBTqeTHj16oNPpyM/PDzkX+Ll67lS0mAxa7E53jTWhrKkWjpc6gv9tMpwMqlxuX0iJBAhfSWc0aIPVx+Nj9CTE6nlo1RZWf7oruH9fTWSlnBBCCFE/LS6QGjVqFCUlJezYsSN4rLi4mB9//JEBAwag1+sZOnQoH330Uch969evp3fv3s1ixR6AyuehY4KJWyYOirix8C0TB7F0dU5wk+Hn3tkWPN+5vZnEWD2VdjdGnYbBaaEr6aoGYYGVb4HVcUDtRT1lpZwQQghRby1uam/06NEMGjSIefPmMX/+fAwGA88++yx6vZ7f/e53AMyaNYvp06fz5z//mTFjxrBlyxbef/99/vGPf0S59Scpai3f/3SI8/p3ZfakDBwuLza7f4TJdKKO1EO3XIDJoA3WkQJ/oKNRq3j41uH4FIWSCjs3jR/I/Ss2A/4g6qYJA3lgxWYy0y3Mm5IZ3Etv7pRMlqzOYd3GvRGLela/XgghhBC1a3GBlFqt5tlnn+WRRx5h4cKFuN1uhgwZwquvvorF4h9lGTJkCEuWLOGJJ55gzZo1dO3alUWLFjFmzJgot74Kn4dz+nXhqTdzw2pIzZ6UQfs4I0vf3hZSEsCa6i8/sHt/CT5FwZpqQa2CFz/4ib/ccgEKYNRrqbA7eWT2iGBdqABLookF04ZQWuHE5nAze1IGbo8Pu9NDjEkXdr0QQgghatfiAimApKQkHnvssVqvueSSS7jkkkvOUIsaQK1lWbUgCk7utTd3spWbxg/i1+OVuNw+9Do1OwuKeWjVt9x1rX80yeZwo1Kp2PLjEf7f5f04XuqgSwczqd2TanxtnFkvwZIQQgjRRFpkINUa1LXXns3pwevz8dCqLWHnAyv0Ku0e2icYAaiwufnbS9/x4E3nU1x2jFiTDpNRR6ek1r8yQwghhIgWCaSipD6b9Rr0Gox6TXD1XUBghV6sWYdPU
Xhs7oX+f8+7EK1GTXmlC1TgcLg5crySTu1jKLe5KK1wUml3B6fxgJBjJqMWh9NDhc0tU31CCCFEPUggFSUxJh2dkkzcNf1cdBp1MJhxe30sfuk7Ykw6nn/vR7Kzeods4xJYkadSQZf2MTz9dvg+fdkXprDw2W/o1yuJa0anoSgKz637gS0/HgH8++wtnDGMNz/bFVKYM3BvYPPjzHQLc6dkYklsvYXbhBBCiMaQQCpKYg1aHrr5Apa/tS0sEHro5gvQqVVhFcutqRauGZ2GWgUJcQbKKx0Rc6yAkwGYAhdkdCW5S0IwkMrO6s0bn+6q896cvEKWrM5hwbQhMjIlhBBCRNCoOlLr16/H6XQ2VVvaFK+ihAVR4A9mlr+1De+JLRBjjDr+NnsET/7hImZeNYj2CQacLi/3LvsKnS5y4czc3YX0TW4H+Ld8aZ9gDP4M0De5Xa35WVWvzckrpLRCvmMhhBAikkaNSP3hD38gNjaWSy+9lOzsbIYNG9ZU7Wr17C5vrcGM/UReVKXDzUOrtvDkH/wbGFfNl6q6F191VbeMqb59TPWfa7sX6s7nEkIIIdqqRgVSr732Gu+99x4bNmxg7dq1dOrUiXHjxpGdnU1aWlpTtbFVqjvZ3ENmuoWfD5ViTbVQWOIISzqvuhdfdVW3jKm+fUz1n2u7F2TLGCGEEKImjQqkzj77bM4++2zuv/9+Nm3axHvvvcerr77KqlWrSEtLY/z48YwbN46OHTs2VXtbjbqCkxiTluvHDsDudDHcehb3LN0Uct6aasHujByMWVNPbhmTmeZPTi8udzBldBp9k9uh0aj568wLyN1zjHUb94YEaFXvBdkyRgghhKhNkySba7VaRo0axahRo6isrOSTTz5h7dq1PPbYYzz++OOcd955TJgwgTFjxqDXS9Iy+DcttqZaIk7vBTYpPni0gqQEIw6Xm3nXZOLxKnRINKIo4PH4MOg1zJ5kZdW6H4LB0C0T+jOkf1fsTg+ZaRZiTTqMBi0anwevWovd6aHS7ibWpOPic7ph7d2Bv7ywBYfLG7JqL9COW6+2SqK5EEIIUYMmX7W3e/dutm/fzq5du1AUhZSUFEpKSrj77rv5v//7Px5//HGGDBnS1K9tcUrK7cy6OoMVEVbt3Xp1BiXl/qk8/xSgwuKXv2fBtCG8+MGOsP3x/jH/IsornHSI0+FRaVhaw7Yz//3pEM+881PY8Sf/kMWxUid2pxeNWsXtU88OVlJ/7t3t3Db1bAmmhBBCiAiaJJD6+eefee+993j//ffZv38/7dq1Y9y4cUyYMIEBAwYAsH37du6//37+/Oc/8/777zfFa1s0g17HN7kHmDvZis3podLuIcakxWTQcqjQPxJl1HswGrQUlznIzurNuk35YSNYOXmFPP32dtKT2/Gb83rUuu3MnMnWkECq6vH7V3xdY1tLK5wSSAkhhBARNCqQevHFF3nvvff48ccf0ev1jBo1ivvuu48LL7wQjUYTcu2gQYP4/e9/z/3339+oBrcWsQYtIwZ342ixPaymU2a6hVuvysBoUHO81IHH46NvcruQwpxV5e4uZHxWSp3bztid4av8ajpelazaE0IIISJrVB2pRx55BL1ez4MPPshXX33FE088wciRI8OCqICBAwdy6623NuaVrYYX2Lr7WMTCmDl5hSx/exuJsSaWrs5Br1PXq2RBfbadiXy89kBKVu0JIYQQkTVqROqTTz6he/fu9b4+NTWV1NTUxryy1bA7/RsO1zSClJNXiMPloVfXBHYWFIcUyYxEr1PXYyVg5PMxJi2Z6RZy8sLbIqv2hBBCiJo1akTq/vvv55tvvqnx/Lfffsv06dMb84pWq9LurnOUyen2cv24AazbuJedBcVYUy0RrwuULAisBKzpGpMhPG4OHJ+RPZA5k60Y9SdHEzPTLcybkin5UUIIIUQNGjUi9Z///IfJkyfXeL6oqIjvvvuuMa9otWJMOhJiFJ67b/SJZHP/psVGvQatSsXjr/8Pk0GLTqciPTmJdRv3smCaf7Vj9XyqK0ek8OHX+9AoCrMnZbBsTfhKwNmTMvj+p0MhbQgc/8uqb/nlSAWZ6Rae/MNIKuwuzEYdCbEGCaKEEEKIWjR61Z5KparxXEFBATExMY19RasUa9CCoqFqrXIVJz5PReEPUzPxKj68ior05HbMyB6Aw+lh1lWDcLq9OF1eKuxuisocdO4Qwy0TB+H22dGhZc5ka7BeVIxJh8mgReNzM6R/Vwb07hhcIajRqFj0/BZ+OVIBnFgBuHabbFIshBBC1NMpB1Jr165l7dq1wZ9XrFjB6tWrw64rLy8nLy+PrKysxrWwlVNObE4cSoVP8aFWa7A73Jyd3hGVChJiDXh8XmKMOorLbTy0yj/al5luYcKFKfy4z59LlRQPL37wEzm7Ihf7vHBwV44W2yOuAgxsUlyfQKrc5qK0whkM2GQESwghRFtzyoGU3W6nuPjkFiKVlZWo1eGpVmazmalTpzJ79uzGtbAV80CN03BqVHh9Pm77vy+D5zLTLdyYPZAX3v+R68b255arBvDi+zvJyStk+pj+5BUUs/rTXSycMTRiEAUnSyUkxRtrbFd9yh0UlthZsjonJEE9M93C3CmZWBJN9ei9EEII0fKdciD1u9/9jt/97ncAXHzxxdx///1ccsklTd6w1s5LeBAFJ4tkzrpqENXHqnLyCnnu3R8YOyKFZWu2MXtSBh0TY3nsle8pKnOQV1DElNFpJMYZuWf6ucHq5NX306sryb2u1X/lNldYEBVo35LVOTI1KIQQos1ocI6Uw+Fg9OjRteZIiZrVVTzT5fGhKEpYQJSzq5DpY/uTu7sQh8vLuk35ZGf1RqNWsWDaENZtyg+ZsrOmWlgwbQiPvfJ9MJjS69TodZFrfdWn3EFphTNiqQQ4talBIYQQoqVrcPkDo9HI6tWrOX78eFO2p82oT/FMh9PLE//6Hw+t2kJeQTELpg3BqNdgd3hOXOPh1+MVZPTpgMenRNxCJnd3YTDYAn9gVVTmoEuHGDLTQ0sl1LfcQUMLfwohhBCtTaNW7Q0YMIBduyJvWyJqV5/imSoVZGf1ZnWV6ufZWb0xGbUnrtFy/4xzsdt9xBh1tY5wjc9KwZpq4ZrRaXTpEEOHRBMLpg1pULJ4Qwt/CiGEEK1Nowpy3nfffaxfv54333wTj6f2bUZEKHMdxTP1WjVGvZYLBnVhyug0jHoNubsLyejTgeOlDqypFox6DSadHpUKjhTZan+fUcfsSRn07BpPhxPJ4HFmPd06xpGenES3jnH1no5LiDWEjWYFSCV0IYQQbUmjRqTuueceVCoVCxcuZNGiRXTq1AmDIfSXqEqlYt26dY1qZGukhlqLZ2qBwlI7ZqOOC61d+M2Q7tjdXirtbjq2MzFnspX//HiQ8wachVGvJSFWYckdIzEatKh9Ho6WuYk16YgxaMHnxavWYHd6OF5aRqxJh8moo1OSuUFtjzPrmTslM+KqPamELoQQoi1pVCCVmJhIYmIivXr1aqr2tBk2twedRsXsSRk4XN5gkUyTQYvap2D3
ebl72WbAH1zdMnEQC5/5mpIKFwC/H9uX8zPOYumbuREDsc+/30PunuM8cvMFeFRqltVwXZcOsQ1qv6URU4NCCCFEa9GoQOrll19uqna0OVq1muVvbY+Y12RNtTDrqkHBn3N3F/LM2u3MmZLJoue3AHCBtVtYEBW4dtmabcyZbOWyC1x41aqwIKrqdXOnDG7UyJQETkIIIdqyRuVIiYZzeXx1lj+ofqx9wskimnWVT7A7Peg06jqvszlkhZ0QQgjRUI0OpCoqKnj22WeZMWMGEyZMYNu2bQCUlJTwwgsvUFBQ0OhGtkZ1lQiw2cOT9wNlD+pzf6XdHfyn9utkkYAQQgjRUI2a2vv111+ZNm0av/76K8nJyeTn51NZWQn486f+9a9/cfDgQR544IEmaWxrUleJAKMxvGBmoOxBfe6vbwmCGFOj960OI3vwCSGEaCsa9Vt08eLFVFZW8s4775CUlMQFF1wQcn706NF88cUXjXlFqxUof1BTjtSxEkfYseOlJ4+Z6rjfZNBSZnMRb9bXep3Z2LQ1n2QPPiGEEG1Jo6b2Nm/ezLXXXkufPn0ibhXTvXt3Dh8+3JhXtFpqn53ZkzLCakkFVugtXZ1T67Gvcw/UeP/sSRm8+elOFr/0HRqfUut1m3J+odzmapI+1bUHX1O+58DRcvIKijhwtLzJniuEEEKcqkaNSDkcDpKSkmo8H5jmExGoTWgVhTmTrdidnuA0mNmgRaUoPHTLBdjsHswmLR6vD6fHw0O3XBC8zu318dE3P4fdbzpRR2rUkJ6Mu7APqFXV3nOyzMJrG37i8/8dYtigs5pk6q0+e/AlxUbe46++ZMRLCCFEc9KoQKp379589913TJ06NeL5Tz/9lP79+zfmFa2WF1harRhngDXVwpzJVlT4p/CWRChfELgOwKv4+NtL35HcJYH05HYhmxYHrgscf+oPIzlWYmdnQTHf/HAEaLq98eqTAN+YQKquEa8F04ZILpYQQogzqlFTe9dddx3r16/n2WefpaKiAgBFUSgoKGDBggVs3bqV66+/vina2erUp3xBmc1FcbmTObVMzb3xSR4GrZaHZw+n4HApfZPbRXxe3+R2WFMtHC2xh22C3FR74zUkAb7c5uLwsQryD5by08/H+eXXmqfq6hrxKi5zyJSfEEKIM6pRI1Ljx4/n0KFDPPnkkzzxxBMA3HjjjSiKglqtZv78+YwePbop2tnq1Gf0xutVeO3fO7ll4qCwKTyNRsWi57fwy5EKCkscXPObVOZMycTl9kV8nqLATRMG8sAKf7X0QBB34/iBTbY3XmAPvkjBzsk9+E6WWygssfPrsUreqLIpc+DaSFN15ZW1f2a/Ftn4y6otdT5HCCGEaCqNXvs+a9Ysxo8fz8cff0xBQQE+n48ePXpw6aWX0r1796ZoY6sUY9Jx6XndmDy6bzBAijXpMBq0vPnpTmJMOrw+hZy8QuwOL6999CM3ThiIWa9FrYJvfjzIL0f8o4C5uwu5flx/TAYdxWWOiO9LijfywIrNwS1mAvfdkN0fl8fLgaPljS5XUJ89+Gw2fyDl9MD/dh5h09ZDYSNzkabqym0uXB5vre+vvtxBpvyEEEKcbk1SRKhr164yhXeKYg1aJl2SXuNeeVqViqITQU9RmYNeZyWwfM02rvlNKomxRob27wrAynd3ACeLde4sKA57lzXVwjc/HA4Joox6DRNH9UGFir0HSlGduHfdxr3065XU4JGc+u7BV27zkBRvrHF6M5CcHrivtMLJtj3Hai3lEKnv1Z8jhBBCNKVG5UhNmjSJf/7zn/z6669N1Z42wwcsi5BsHtgDzwdUnMjxUamgb3I7cncXYjLoeGbtdgpL7Zw34KzgfSajllizjp8PloY8LzPdQvaFKazbuDd4zKjXsGDaEHb8XMS8x7/gL6u2hORN7fi5qFHlCuLMerp1jCM9OYluHeMiBjE2h7vGaciAqtOflXY36zbuJfvClLB8scy08D7W9BwhhBCiKTVqREqj0fDoo4+yePFirFYrY8eO5bLLLsNisdR9cxtnq2sPPKeHn/YVkZlmYfeBEpI7xQPgcHooOFyK0aDD7vSPQllTLdidbjweH1dc0JNrx/TD5vRgMmgx6NTs+iV0pCY7qzfrNuVHDOIArhrVBwU4Xupg/5FyTHotRoOG2CbcpNhs1FFuqz3AqZqcHmPS4XB5eeyV78nO6s34rBRcbh96nZrEOAP3Ld+MwxV56q+mJPeySider4JPUXA4vcSapQq7EEKIU9OoQOqNN97g0KFDrF+/ng8//JBFixbxyCOPcM455zB27Fh+85vf1Fpnqi2rzx54hwrLmXxJGj/kH0Ov8w8emgxaFs0ajs3mwqGcLNapVsPR43be3bSXrbtCpwqvGZ3Gn28cxp+f+xaHy0vfCCUSAvIKirh5wkBWvvsDr3+UF/aczh1imiR5O86sZcc+R41TdSeT0/2qJrJXb/ucyVb69UqqI8n9pMISO8+8nculQ3uGBZSSoC6EEOJUNHrT4q5du3LjjTfy1ltv8cknnzBv3jzKysr405/+RFZWFjNmzGiKdrY6dZcK0HLd2AE8tOpb+vdqz86CYv/WMaUOVr7zAwlxRmJMOmZPykCvUaFVqVn75Z5gEGXUa5gyOo3xWSnYnR50Og3/mH8R911/HkZ9zfFzdlZvnnv3h5BgDPyjVW98uov/7TzSJGUFDFo4u28nrhmdFj5VVyU5PSCQyJ6ZHn7tOX071Xiu+nMCtaiSuyREHJVr6irsQgghWrcm3bG2e/fu3HLLLdx88828+eab/O1vf+Prr79uyle0GvXZK8/p8uJweVGrVPx8sDRYvqCkwoXT7cVk0HKwsBKDTo1RryWnShC1YNoQ1m3KDxm9yUy3cGP2QFyemnOTahutyt1dyPislCZL3rYkmjDqNcyelIHd6cXh8hBr0tEu3hjx+XUlstcnyT1Qi+rKESk19lMS1IUQQtRXkwZSW7du5cMPP2TDhg0cPXoUs9nMuHHjmvIVrYbD7d9rr3rCeWDVnsNtx+7wL+iPj9ExfWx/KmxuHrz5Agw6NVqNGrXPgxqINeuoqJJvVFMOVE5eIc++8wNXnkjYjhTEKUrt7Xa5fU2avB13inlXtV1fn2cF2n4qie5CCCFETRodSP3www+sX7+eDRs2cPjwYYxGIyNHjuSKK67goosuQq+X/1cfieLTkbvr14h75W3NO0x6Twsmo38UaWdBMUvfzA3eG8hXsiQaiY9RYdBpwHyyilJdo0o3jh/AzKsG8ew728PqPXVMMtfabr1O3WSV0KMh0PZAzlld1wkhhBC1aVQgNXr0aA4ePIhOpyMrK4s777yTUaNGYTJJom5dYg1arGmda6wjpVG8uBQ1ky9O46FV34bcG7h+hLUrg9MsOF1edBoVg9MsbN1VWOdoi9PlpWdyQsSpMKDG6uTWVAtFZQ769my5CwgCSeuBnLP6JLoLIYQQNWlUINWnTx/mzp3LJZdcQmxsbFO1qU3wUnsdqTmTrZSXOVj88ncRl/UH8pXsTg8utxev10f2hSmoqP9oS01
TYZGqkwdGwbp0iGnRuUOBpPVn3s4l+8IUgLBVe9UT1IUQQoiaNCqQevrpp5uqHW1OfTYt1us1XDqsZ43TdIF8pfgYPV6fwp9WfsvEUX3o2M5cjz3vahZI6i4uc1Bhd2PUazE1cR2paLIkmrht6tmUVTq5ecJAqSMlhBCiwZok2fw///kPX3zxBYcOHQL8JRFGjhzJeeed1xSPb5Xqs2lxjEnHsIGdWbdxb8RRqUC+kk6rxqRVk56cxOsf5bH28z0smDYEn6/hoy2nmgTe0rT2/gkhhDgzGhVIuVwu7rjjDj799FMURSE+3l99u6ysjBdeeIHf/OY3PP744+h0krhbXd11pHQsfOZrenX15zI99sr3IcGUNdXC8VIHPTrFUVrpJK1HUsgqwEAF8MmXpKLTqnE4PXRKisGg14S9q9zmqrNsgBBCCCHCNSqQWrZsGZ988gk33HADN9xwAx06dADg+PHjPP/886xatYply5Zx++23N0VbWxVzPepI3X/dEBYs+xqf4i9pEJjiq7pqT41/uxUAlaIwI3sAcHJEy+31sfil7+jcPpZZVw0Kq49UWGIPy4eS6t5CCCFE/TQqkHrvvfeYOHEid911V8jx9u3bs2DBAo4fP866deskkIpADbXWkdr4v/1knd0d8E/PzcgewDnpHTEYNBh1mmAdqQoXdDgR8BSVO6mwe8JqSFlT/Zv6llY40WhOJqIHqnxXz6UKVPdeMG2IjEwJIYQQtWhUIFVYWEhGRkaN5zMyMvjggw8a84pWq8LpIVZHSB2pWJMOo0HLf3cc4uUNeZw7oEvw+kOFlTz60ncsnDGUh1ZtwZpqYc4kK3aHIxjsmIw6/rTy27BNfXcWFPPYK9+zeO6F6LQnA6lAle9IpLq3EEIIUbdGBVKdO3fmP//5D7/97W8jnv/uu+/o3LlzY17ReqnceFRGltVQR2rU2SVU2j3B453am7ln+rm0izMye5KV/ilJvLj+R6Zd3o9ym4s4sx6zUUd6clLEVX7WVAtGvX/lXUB9Et6FEEIIUbNGbVo8YcIEPvzwQxYuXEh+fj5erxefz0d+fj5/+tOf2LBhAxMnTmyqtrYqJr2p1jpSv7u8PzEmf5xrTbXwzfbDPPrSd8x/4ku+3naIwiI73+84yqFjlTz2yvcUltjplGRm9qSMsE2AA8GZTqsJGWGqT8J7TcptLg4cLSevoIgDR8tlk18hhBBtUqNGpGbOnMn+/ftZvXo1b775Jmq1Py7z+XwoisLEiROZOXNmkzS0talPHSmTQUtmuoVxw1N47JXvg+dzdhWiABNH9cHl9oXkNHXpEMvcKYOxOdxU2j3EmLSYDFpiTLqwabpAle9TrTclCepCCCGEX4MCKafTyWeffcaBAwc4++yzufrqq8nJyeHgwYMAnHXWWWRlZdG3b98mbWxrUve0mj+H6qbxg/jDE1+G1ZHauquQ667oT2mFE/DnNBWV+fOlOtWxX15AoMp3pKAoUG+qemkEk1HLM2/nRkxQf+btXGZNsmJ3eKSUghBCiDbhlAOp48ePM3XqVA4cOICiKKhUKoxGI0uXLuXmm28+HW2sVWVlJWPGjOHIkSOsWbOGQYMGBc+9+eabPPfccxw6dIhevXoxf/58Ro0adcbbGEnd02pavGotZWX2iMU4AYrKHChVfq60nXpOU6CKeaQ6UjWNPI0bnkLu7mMh7TLqNVw6tCdPvp5Dzi4ZqRJCCNE2nHKO1PLlyzl48CDXX389zzzzDPfeey8Gg4E//elPp6N99WqP1xseaHzwwQf88Y9/ZMyYMaxcuZLBgwczZ84ctm7deuYbGYHpRB2pSAJ1pOxOD/G1bOeiUoFadfJnoyG82GZ9xJn1dOsYR3pyEt06xgVHomoqjbBuUz7ZWb1Djmdn9WbdpvyQICpw/ZLVOZJDJYQQolU65UDqq6++Yvz48dx9991cdNFFTJ8+nYULF3Lw4EHy8/NPRxtrtHfvXl577TXmzp0bdu6pp55i7Nix3H777QwbNoyHHnqIQYMGsWzZsjPaxprYXfZaE8PtTjuVdg8uT+TRKGuqhZ0FxbjcvuDPBl3DAqlIaiuNkLu7kL7J7UKO9U1uV2POV6CUghBCCNHanPLU3uHDhznnnHNCjp1zzjkoisLx48dJSUlpssbVZdGiRUydOpVevXqFHN+/fz/79u1jwYIFIcevuOIKFi9ejMvlQq+Pct6OouP1j3aE1JGKMekwGbS8tuEnsi9KJcakwuH0hCWEZ6ZZuCF7IMdL7MTH6hnavxPjL+qD0dgkWycCdedwBQK4mn4+1ecJIYQQLdEp/+Z1uVwYDKHTTYGgxOPxRLrltNiwYQO7du1iyZIl/PjjjyHnAiNj1QOs3r1743a72b9/P717h05NnWlmg5aicjc3Pfxp2DlrqgWH002HRDNqFdx6tRWH04PN4cHt9bFtzzEWPLURh8tLZrqFWyZmoFOraB/fdHlIdeVwxZp1tf58qs8TQgghWqIGDWEcPHgwJHgpLy8HoKCgILhxcVUDBgxoYPMis9vtPProo8yfP5/Y2Niw86WlpQBhbQn8HDjfEIqiYLPZGnx/QG1bxNwycRAGjQqVomDWa3nyzVzSk9uRV1AcNn2Wk1fIM2u3MX/q4CZpV0CMUVNraYSzLDEsWzAKm8OD2ajFbNTWen2MUYPNZsNutwME/91atZV+gvS1NWor/QTpa2sUqZ+BxXGnQ4MCqSeffJInn3wy7PiDDz4Y8nOg4Tt27GhY62qwYsUK2rdvz9VXX92kz60Pt9vdJP3p1rMv+fuPM3eyFZvTE6z5ZNRr0arA5fOgUWtR4c9JGp+VErFiOZwofVBio6SwoNHtClCr1dw4Lp3nFEJX4aVZuHFcOvt/zsPn80/nVdbj+n17T14PsG/fviZra3PWVvoJ0tfWqK30E6SvrVH1fp6ulJ5TDqQeeeSR09GOejt48CDPP/88y5YtC46EBUZibDYblZWVJCQkAP6RMovlZDJ3WVkZQPB8Q+h0Ovr06dPg+wOOlLh59JWtNZ5/6o6RwMmp0rpykNw+Nf369Qs55vRAuc2DzeEmxqgj1qzFcIrf+LxrBlNhc1Pp8BBj1BJr1mHWQ4fE9IjXz//t4BPv9I9UxZ14Z+B6u93Ovn376NmzJyZTw6cim6Jvp1Nt/WzubT9VTfWdRlt9vpeW0NeG/Pmqfo9RBwd+2UuPHj2abT+bSkv4TptKc+5rU/7vYqR+7tmzpwlbG+qUmxntLV8OHDiA2+2OWLNq+vTpWK1WHn/8ccCfK1U1+T0/Px+dTkf37t0b/H6VSoXZXL+Cl7WpPHys9vMnkrPVJ4Yi9braF1ga9BryD9uINen8+UgqVaOrjzekgrkZaBc+uxvGZDI1+HNsSZXVq/ezJbX9VDXmO422U/1emmtfG/Lnq6Z7bhyX3mz7eTpIX6PndP3vYtV+nq5pPWjkXnvR0K9fP1566aWQf+69917AP7X4pz/9ie7du9OzZ082bNgQcu/69es5//zzo79ij/rtcxeoJA6ws6C4xrpTmekWvso9xL3LN7NgySZ+LapkyR
uRa0DVt6ZTbXWkolkXqrm2qz5acttbs9byvTSkH7Xd89x7eTjP3Poh0Ua1hr9/LW5CIT4+nqFDh0Y8N2DAgGBi+9y5c7nzzjvp0aMHQ4cOZf369Wzbto1XXnnlTDa3RiaDlluvGsDZfbucyJFyE2vSYTRoOV5sQ6UCo16L80T18HUb97Jg2hCAsOT0WVdlcNeSTfz2snTO69cZh8vDlRemkNqjHes27g2pQB6o6VTXti211ZGq7zNOh+barvpoyW1vzVrL99KQftR6z65Cym2eeo0wC9FQreHvX4sLpOpr3Lhx2O12Vq5cybPPPkuvXr1YunQpmZmZ0W4aABqfncHpnVnyZm5YYDR7Uob/i1EUfIoPa6qF3N2FPPbK92Rn9WbyJamo1SoUn0LHJDPvfLGLuVMyee+rfF7/KC/kWQumDeGxV74PCaaq13Sqvp9eQqwBm6OuvQCjUxeq7j0Km2+9qpbc9tastXwvDelHXffYHDIkJU6v1vD3r1UEUkOHDiUvLy/s+OTJk5k8eXIUWlQ3r9rEsmpBFPhHm5at2cacyVZUgEGnYc5kK06Xl6IyB+3ijVTYXHRIMKJWqXhlw09YkmJZtyk/4rPAv31L1RV/VacVa5qbnjkxA6NeU+M+f9GqC1WfKdHmqiW3vTVrLd9LQ/pR1z3mJizyK0QkreHvX4vLkWot7E5PjVuq5O4uxO704HB5qXR6OXi0gpXv/kCXDjEseGoj//pkF4Ul/g2Lf9pXXOv2LNW3c8lMt5BwYv++2uamn167jRvHD4z4zKrPONMSYg1kptecKxatdtVHS257a9ZavpeG9KPWe9IsxJklkBKnV2v4+yeBVJTUZziz0uHG4fTQPtFEXkFRMLjK3V3IG5/uwu3xcf/vh9ZZGiFwPjPdwrwpmcH55rrmpvsmJ4X9Aa/+jDMtzqxn7pTMZteu+mjJbW/NWsv30pB+1HbPjVemt+iyHKJlaA1//+SvSZTUZzhTpYJKu4e8giJ+e2laSPCVu7sQp8eLWqWqc3uWjklmFs28gK6WGDpUWUpaVzDncHlYMG1IWP5UtP9gWxJNzbJd9dGS296atZbvpSH9iHRPjFHDvr15NdaLE6IptfS/fxJIRYnJoA0mkVdnTbVgMmjxen3YHG42bT3ErKsz8Pq8PHLrcGJMOtxeHx6PF7dHoVM7U63bs/zyaxlxZj3PvL2NW66yButy1CeYizPrm+Uf5ubarvpoyW1vzVrL99KQflS/x2azhexEIMTp1pL//snUXpS4fXZmT8oIqw0VWLVXYbNRbnOx88T+es+8vQ2dRoPJoOVosZ23PttNfIyBhDgdKpWKyRenRXzW5IvT6NEljsde+Z4tPx4JqcvRGuamhRBCiGiSEako8bh1eHz2sL32TAYtvxwp47GX/8viuReybuNewF/TxeHycvs/vsSaauGmCQP55/s/8fsrB2Bzenho1bdkZ/VmfFYKLrcPvU7NzoJiHlr1LY/MHhFcfVe1LkdgbjrSqr2WMjcthBBCRJMEUlFiMmg5ctzFye2OFAIF7LdsP0S/XkmoUEWs/5S7u5CV7/zAlRemYD9RzNPh8ta4qbH9RC0Yo15DdlZvXG4feQVFwXnoqnPTJoMWnVZNUZkdh8vTouaphRBCiDNNAqko0fg8dE3yR1FGvQaAihOB0pTRfVErCpXu0BpOJqOWJXeMxGjQ8tqGn2ifYKTS7sZYx9Iak1GLUa9hwbQhrNuUHxJwBfYz6tYxrlXvAyeEEEKcDhJIRYtai09RcHl8PLN2e8Tq5jFaN8/fNxovBEeeYk06VMC1l/fH57OjUes4WmyvNXHdpyhMHNUnYtHOwH5Gt00Nn+Kren7BtCEyMiWEEEJUI8nmUeIFjpbaw4IoOFnd3Kc24VEUlr6Zy7zHv+De5ZuZ+/gXLHkzF4+ioFYZiTVoWbo6h5smDIyYbH7ThIG89dluzuvXucainTl5hZRXuurc70gIIYQQoWREKkrsTg8mg67WiuQ2p4dV636sdRsZgJIKFw+s2MycKZlcP64/doeHWLN/pOqBFZspqXBxyXk9am1Ppb32PbVawn5HQgghxJkmgVSUVNrd+JS6rql7G5mAkgoXi57fEvz5qTtGhvysonYxptr/KLSE/Y6EEEKIM02m9qIkxqSrc0PQuoKbSrs74kiRNdWCQacJObazoJjMtJprRsXF6KWmlBBCCHGKJJCKErNBi93pDstrCghUN69NjElHjEkXXPUXuG/2pAz+/sr3Ideu27iXmyYMiphHNesqK+3jTS1+vyMhhBDiTJOpvShRAx0TTNwycVCNq/Y0J/67tm1kAJ66YxQl5Q6MBn+Zg8de/p7dB0pDrk9PTkKvVTP63G5hRTudbv8UYUvf70gIIYQ40ySQihIf8Pl/93PJkB7MnpSBw+XFZvdgNmnRalTsKiimb88kZk/KYNmabZEDLUWhpMLOixt2M3eKlU5JMRw+VonZFBr4WFMtZF+YwjNrt3PzxEHc9PCnIecz0zoG/7sl73ckhBBCnGkSSEWJzenBbNLgAxwub7BGlNGgRQP075GAze3BrNMyZ7L1RB0p/zYyZoMWNfDyhp+4+LzkE6v4clkwbQguj4crL0wJrt4zGbUcL3WwZHUOJRUurr2iX1hb6srFEkIIIURk8hs0Skw6N+f278qSN3MjjjZpVWrMGjhQWMlbX+xhzmQr8x7/Iuw5V12cDpys9aRWqXmvWuHNzHQLj865ELvLjUYNi+eMwONVMBm0aNQqYoyyIk8IIYRoCAmkokSlNrG0WhAFJ2tEzbpqEBqNmpgYXVipg4DMNAs6zcnCBhU2N699tDNi9fJV7/7AtWP68dy7P5KzKzRwu2Z0Giq1SraBEUIIIU6RBFJRYnPWXiPK5fGBx4da5Q+Uqpc5sKZamDamX0jFcaNBy5TRaVw3tj/HSh0sPTGdB9DrrAReeD80iAq8C+DCwV25IKOr5EcJIYQQp0ACqSipq1J44LxyomhnjEnHwhlDcbl9dG5vxqcoPPTctzx0ywWAP7DKKyjiaLGd1Z/uwppqYdGs4cHK5n2T24VsVlxV7u5CxmelUFrhlEBKCCGEOAVSRypK6qoUHqgRZTJqg6UOHlq1hY++3cexUgf3Ld9McpcEtBpVcFXec+/+QN/kdoA/OFr5zg/MmZIJgMvtq/V9LrdPtoERQgghTpEEUlFiMmhrLcap16oxGbQ4nB5mT8rgq5z9WFMtjBuRwt9f+Z70ZH9phJJSJ+nJ7Xjsle9xuLwhAVPu7kLaJxgBiDXXHrjpdWrZBkYIIYQ4RTK1FyUaqLVGlBZAUeiYaMLnczBkQFfOzziL0goXi+deiEajYtHzW/jlSEXIc7taYnjk1uHBUgpen4en7hyOSW8iM91CTl7k4p5FZQ769kw6vZ0WQgghWhkJpKJIC8ydbMVWpUaU6UQdKQB8HkptPrRaLVoNrFr3I5ed35PXPs6LmKg+ONWCx+vjwee+xeHyYk21cNukDNQ6Dc+u3c6VI1JAIeKqvS4dYkLyo44U2bA53MH6Viajjk5J5tP7gQghhBAtj
[... base64-encoded "image/png" notebook outputs elided: matplotlib v3.9.2 figure PNGs ...]
KHLXnhU3sAA3u3Ib19M+be0Jc7HvsCg16DQafmnoXfBXOlrKkWjpa4OHj0BBd2Ow9LcvUKbwohhBCibkiOVIzYnJ6wICogJ7+AlESjf0Wf20tmugWvAs++ty2YKxWYstOqVPRObSF75wkhhBAxICNSMVJVuQKX20e53Y1KpeL20b1werxk5xZw44juXNKrNceLHRSVOmmRYpYgSgghhIgRCaRipKpyBXqdmjiTDkVR2H+0jPiT1/96zMbjr34PwIK7B0sQJYQQQsSQTO3FiMmgJTMtermCwhIHKsCg06LVqDAZ/TGvXqcOXlNVSQMhhBBCnF0SSMWIBpg+PoPM9PC6TZOHpmHt2gKTToOCl8ISB8eLHfROs7Bj74kzKmkghBBCiLNHpvZixedBq9IwY3zFOlIaDDptsI5UidOJRq2he6cU3vh0B3eM6YXH5+Oqfh0kiBJCCCHqARmRihGvWsvuwycAUBT//1QqFZqTQRQKvPlpHgadBgWYMCQNu8tDQpxegighhBCinpARqRixu+y0b90s6l57H6zbyY0jelBc6sRg0GBzeGjdIk6KZQohhBD1iARSsaLoePPT7WGVyE0GLT6flzFZXZm3cD2P3HEpJoOWFslmEsyycbAQQghRn0ggFSMmg5bCUje3/XVt2DlrqoXbx/ak0/lJGPUaWjWPi0ELhRBCCFEVyZGKEQ0wY0IG1tTwVXszJmTwwdc7mT4uA41KFZsGCiGEEKJKjXpEateuXcyfP5/s7Gzi4uIYPXo0d911F3p97KfIlJOr9iJN7Xl8XiZfkY7Kp2DzeGLdVCGEEEJE0WhHpIqLi7nxxhtxu90sXLiQOXPmsGLFCh5//PFYNw2AoyVu/v1FLpHGmwxqDR6fj91Hyygtl0BKCCGEqK8a7YjUv//9b8rLy1m0aBHJyckAeL1eHn74Ye644w5atmwZ0/bFmXR89t8DfPbfAxHPP3RLP9QqiDM12q9ICCGEaPAa7YjUunXruOSSS4JBFMDw4cPx+Xxs2LAhdg07yWzUVblFTGGJQ7aBEUIIIeqxRjvcsXv3bsaPHx9yLDExEYvFwu7du2v8XEVRsNlstW0eCUb/FjFL391Kdm5oHanJQ9PQaqBZgpEEI3XyvvrCbreH/LOxair9BOlrY9RU+gnS18YoUj8VRUF1lhZvNdpAqqSkhMTExLDjSUlJFBcX1/i5breb7du316ZpQYmJiRG3iFGrFDRqNf/f3p2HRXWdfwD/DgiyozRBI5RNO1N2xEREUlAeLSq2LsFKWlQCGk1QGwgNoNZK1EZ5tCZucQHFWGuSRlxARJFaFBBTkWCijSUgYAGRoCxTdub8/uDh/hwGEC93nIX38zw8D3PmnDPnncMLh3vP3Fv3qAKVWrSIelpZWZmqh/BCDJc4AYpVGw2XOAGKVRv1jlNZHzTT2oWUsujp6WHChAmC9jkCnairKsNYOzsYGv7/RJvY2gr6OuqgpaUFZWVlsLOzg6Gh9l6lfbjECVCs2mi4xAlQrNqorzh/+OEHpb2e1i6kzMzM0NTUpFDe0NAAc3Nz3v2KRCIYGSnnXneGhoZK61vdDJdYh0ucAMWqjYZLnADFqo2ejlNZp/UALd5s7uDgoLAXqqmpCbW1tXBwcFDRqAghhBCiTbR2IeXr64u8vDw0NjZyZRkZGdDR0YGPj48KR0YIIYQQbaG1C6ng4GAYGxsjIiICOTk5OHXqFBISEhAcHKzya0gRQgghRDto7ULK3Nwcx44dg66uLiIiIrBz504EBQUhNjZW1UMjhBBCiJbQ2s3mADB+/HgkJyerehiEEEII0VJae0SKEEIIIUTZaCFFCCGEEMITLaQIIYQQQniihRQhhBBCCE8ixhhT9SA0xa1bt8AYE/x+PYwxdHR0QE9PT6lXX1UHwyXW4RInQLFqo+ESJ0CxaqO+4mxvb4dIJIKnp6fgr6fVn9oTmrJ+8EQikdJupqhuhkuswyVOgGLVRsMlToBi1UZ9xSkSiZT3N5yOSBFCCCGE8EN7pAghhBBCeKKFFCGEEEIIT7SQIoQQQgjhiRZShBBCCCE80UKKEEIIIYQnWkgRQgghhPBECylCCCGEEJ5oIUUIIYQQwhMtpAghhBBCeKKFFCGEEEIIT7SQIoQQQgjhiRZShBBCCCE8jVD1ALRJSUkJtmzZgsLCQhgbG2PevHl47733nnm3bcYYDh8+jL/97W94/PgxHB0dERcXBw8PD7l6NTU12LJlC3JycqCnp4eZM2ciLi4OJiYmSoyqb3xiffToEZKTk5Gbm4uKigqYmpritddeQ1RUFKysrLh6N27cwNKlSxXaz5kzB7t27VJKPAPhO6/+/v6orKxUKL99+zZGjhzJPVaXeeUTZ39zBQD29vbIyMgYsJ4q5rS8vBxJSUkoKipCcXExHBwckJaW9sx2mpinfGLV1DzlO6+alqd84tTEPL1w4QLOnTuHO3fuoLGxEba2tliyZAneeOMNiESiftupIk9pISWQhoYGLFu2DHZ2dtizZw9qamqwbds2tLa2YuPGjQO2PXz4MHbv3o3o6GhIJBKcOHECYWFhOHv2LH76058CADo6OrB8+XIAwM6dO9Ha2ort27fj/fffx8GDB5Ue39P4xnrnzh1kZmbijTfegLu7O548eYJPP/0UixYtQlpaGiwsLOTqf/TRR3BwcOAejx49Wmkx9Wco8woAAQEBCAsLkyt7emGiLvPKN05nZ2d88cUXcmVSqRQrVqyAr6+vQn11mNPi4mJkZ2fD3d0dMpkMjLFBtdO0PAX4xaqJeQrwn1dAc/IU4BenJuZpcnIyrKysEBsbi9GjRyMvLw9//OMf8fDhQ6xevbrfdirJU0YEceDAAebh4cGePHnClX3++efM0dGRPXz4sN92ra2tzNPTk+3cuZMra2trY9OnT2d/+tOfuLLU1FQmkUhYSUkJV3bt2jUmFotZUVGRoLE8C99YGxoaWEdHh1xZdXU1k0gkLCkpiSvLz89nYrGY3b59W/CxPy++sTLG2PTp01l8fPyAddRlXocSZ2+nTp1SGL86zWlXVxf3fUxMDAsMDHxmG03MU8b4xaqJecoYv1gZ06w8ZYx/nL2pe57W1dUplG3YsIF5enrKvQdPU1We0h4pgVy9ehXe3t4YNWoUVzZ79mzIZDLk5ub22+7WrVuQSqWYPXs2V6avr4+ZM2fi6tWrcv1LJBK5/xJ8fHwwatQoZGdnCxvMM/CN1czMDCNGyB8EHTt2LCwsLPDo0SNlDXdI+Mb6PP2rw7wKGWdaWhrs7Ozg5uYm8CiFoaPz/L/2NDFPAX6xamKeAvxiHSx1mleh4lT3PO195BMAHB0dIZVK0dzc3GcbVeUpLaQEUlpaKjcpQPcvpJdffhmlpaUDtgOg0Hb8+PGoqqpCa2trv/2LRCLY29sP2L8y8I21L/fv30ddXR3Gjx+v8Nzbb78NR0dH+Pr6Yvv27dx78SINNdbU1FS4uLhg4sSJWLFiBe7du/fM/lUxr0LN6Y8//oj8/HzMnTu3z+fVYU750MQ8FZK65+lQaUqeCkVT87SgoABjx
ozpdx+TqvKU9kgJpLGxEWZmZgrl5ubmaGhoGLCdvr6+3KZGoPuPGGMMDQ0NMDAwQGNjI0xNTZ+7f2XgG2tvjDFs2bIFlpaWCAwM5MpNTU2xfPlyvPbaaxg5ciTy8/Nx5MgRlJaWvvD9CEOJ1d/fH25ubhg3bhwePHiAAwcO4Le//S3OnDnDnatXl3kVak7T09PR1dWl8AtaneaUD03MU6FoQp4OhSblqVA0MU9v3ryJ9PR0xMTE9FtHVXlKCymiMnv27EF+fj4SExNhZGTElTs5OcHJyYl77O3tDUtLS3z44Ye4ffu22h6K7m3Dhg3c96+++ip8fHwwe/ZsJCUlYdOmTaobmBKlpqbC2dkZ9vb2cuXaMqfDEeWp9tG0PH348CEiIyPh5eXV76cPVYlO7QnEzMwMTU1NCuUNDQ0wNzcfsF17ezva2trkyhsbGyESibi2ZmZmkEqlz92/MvCN9Wlffvkl9u3bh/j4eHh7ez+zfs857+++++75BjtEQsTaw9LSEpMmTcKdO3fk+leHeRUizoqKCty+fRu//vWvB1VfVXPKhybmqRA0JU+FpM55KgRNy9PGxkasWLECo0aNwp49ewbcI6aqPKWFlEAcHBwUzq02NTWhtrZW4Vxs73ZA9x6Ep5WWlmLcuHEwMDDot3/GGO7fvz9g/8rAN9YemZmZ2LRpE9auXYugoCBlDVMQQ42VT/+qmFch4kxNTYWOjg7mzJmjjCGqlCbm6VBpUp4qmzbNqyblaWtrK1auXImmpiYkJib2eTruaarKU1pICcTX1xd5eXlobGzkyjIyMqCjowMfH59+23l6esLExAQXLlzgyjo6OnDp0iW563v4+vri+++/R1lZGVd2/fp11NfXw8/PT9hgnoFvrED3Bd+ioqKwaNEiREREDPo1z58/DwBwdXXlN2iehhJrbzU1NSgoKJCLQV3mVYg4z58/j8mTJ8PS0nLQ9YEXP6d8aGKeDoWm5amQ1DlPhaApedrZ2Yn33nsPpaWlSExMxJgxY57ZRlV5SnukBBIcHIzjx48jIiICK1euRE1NDRISEhAcHCz3A7Bs2TJUVVUhMzMTADBy5EisXLkSe/bsgYWFBcRiMU6ePIn6+nqEh4dz7QICAnDw4EGsWbMGUVFRaGlpQUJCAqZNm/bCz1vzjbWkpAQRERGws7PDvHnz8M0333B1LSwsYGNjAwCIjo6Gra0tnJycuA2PycnJmDFjxgtPZr6xpqWl4cqVK/Dz84OlpSUePHiAQ4cOQVdXF2+99RbXTl3mlW+cPe7evYuSkhK52J6mTnPa0tLCfcS5srISUqmUu7Lz5MmTYWFhoRV5yjdWTcxTgF+smpanfOPsoUl5Gh8fjytXriA2NhZSqVTu59DJyQn6+vpqk6e0kBKIubk5jh07hs2bNyMiIgLGxsYICgpCZGSkXD2ZTIauri65shUrVoAxhiNHjnCXtE9KSuI+MQIAenp6SExMxJYtWxAVFYURI0Zg5syZWLdu3QuJ72l8Yy0qKkJTUxOamprw5ptvytVdsGABtm3bBgD42c9+htTUVBw5cgQdHR2wsrLCqlWr8Pbbbys/uF74xmptbY1Hjx7hz3/+M5qammBqaoopU6Zg7dq1ajmvQ/n5BbpPF+jr6yMgIKDP/tVpTuvq6vD73/9erqzn8WeffQYvLy+tyFOAX6yamKcAv1g1LU8B/j+/gGblac/163p+3p6WlZUFa2trtclTEWPPcR19QgghhBDCoT1ShBBCCCE80UKKEEIIIYQnWkgRQgghhPBECylCCCGEEJ5oIUUIIYQQwhMtpAghhBBCeKLrSBFCCCFEJcrLy5GUlISioiIUFxfDwcEBaWlpz91PbGwsTp8+3edz77//vlKvhUULKUIIIYSoRHFxMbKzs+Hu7g6ZTAa+l7Z89913ERwcLFeWnp6OY8eOyd0eRhno1B4hRCVu3LgBiUSCGzduqHooHH9/f8TGxqp6GIQMG/7+/sjOzsbu3bvh7OzMux8bGxt4eHjIff373//GhAkT8POf/1zAESuiI1KEDCMSiWRQ9XpuNTGQAwcOYMKECZgxY4YQQ+tXSkoK4uLiuMf6+voYN24cfHx88O677+Kll15S6usLic97JpVKkZycjEuXLuHBgwfo6uqCjY0N/Pz8sHTp0kHdzPV5nThxAoaGhli4cKHgfRPyNB2dZx/P6bnly5dffonKykqMGTMGS5YsQWhoaL9tampqcPPmTYXb6SgDLaQIGUYSEhLkHp89exa5ubkK5ePHj39mXwcPHkRAQIDSF1I91q5dC2tra7S3t6OgoAAnT55EdnY20tLSYGhoKMhrZGRkQCQSCdJXX573PXvw4AFCQ0NRXV2NWbNmYfHixdDT08O9e/fw1Vdf4fLly7h48aLg4zx58iRGjx5NCymiFrZu3Yq///3vWLVqFdzd3XHr1i3s2LEDI0eOVLgfZI+0tDTIZDIEBgYqfXy0kCJkGJk3b57c46KiIuTm5iqUqyNfX1/uDvSLFi3CqFGjcPToUWRlZWHu3Ll9tmluboaRkdGgX0NfX1+QsQqhs7MTq1evRl1dHT777DO8+uqrcs9HRkbi8OHDKhodIS9GRUUF/vrXvyI+Ph6LFy8GAEydOhWtra3Yt28fFi9e3OdRrbS0NEycOFHuZsXKQnukCCFympubsW3bNvj5+cHFxQUBAQFISkqS2wQqkUjQ3NyM06dPQyKRQCKRcHuLKisrsWnTJgQEBMDNzQ1eXl5Yu3Yt/vvf/wo6zilTpgAA129sbCwmTpyIiooKrFixAhMnTkR0dPSgYwL63iPV2NiIrVu3cm1nzpyJQ4cOQSaTydWTyWQ4duwYfvWrX8HV1RVTpkxBeHg4vv3222e+Z325dOkSvv/+e6xatUphEQUAJiYmiIyMlCu7cOECFi5cyL3v0dHRqKmpkatTW1uLuLg4+Pr6wsXFBa+//jreeecd7n309/dHcXExvv76a26cS5Ys6XechChTXl4eAOCXv/wlOjs7ua+pU6eitrYW1dXVCm1KSkpw9+7dfv/BEhodkSKEcBhjeOedd3Djxg0EBQXB0dER165dQ0JCAmpqarBu3ToA3acIN2zYADc3N/zmN78B0L3ZEwC+/fZbFBYWIjAwEGPHjkVlZSVOnjyJpUuX4vz584KdhquoqAAAjBo1iivr7OxEeHg4Jk2ahJiYGBgYGAw6pr60tLQgJCQENTU1CA4OxiuvvILCwkL85S9/QW1tLdavX8/VXb9+PVJSUuDr64ugoCB0dXXh5s2bKCoqgqur64DvWV+ysrIAKB5F7E/PXjJXV1dERUVxR7Ju3bqFM2fOwMzMDACwZs0a/PDDDwgJCYGVlRUeP36M3NxcVFdXw9raGuvWrcPmzZthZGSEVatWAYBG7UMj2uXJkydgjHH/OPVWXV0NKysrubLU1FSMGDECc+bMeRFDBBghZNiKj49nYrGYe5yZmcnEYjHbv3+/XL01a9YwiUTCysvLuTIPDw8WExOj0GdLS4tCWWFhIROLxez06dNcWX5+PhOLxSw/P3/AMZ46dYqJ
xWKWl5fH6urqWHV1NTt//jybPHkyc3NzYw8fPmSMMRYTE8PEYjHbsWOHXPvniWn69OlyMe3bt495eHiw+/fvy7XdsWMHc3R0ZFVVVYwxxq5fv87EYjHbvHmzwvhlMhn3fX/vWV/mz5/PJk2aNKi67e3tzNvbm82dO5e1trZy5VeuXGFisZh98sknjDHGGhoamFgsZomJiQP2FxgYyEJCQgb12oQIJSYmhgUGBsqVnThxgkkkElZQUMBu376t8NXU1KTQz4wZM1h4ePiLGjajU3uEEM7Vq1ehq6urcConLCwMjDFcvXr1mX0YGBhw33d0dODJkyewsbGBmZkZ7t69y3tsoaGh8Pb2hp+fHyIjI2FsbIy9e/cqfGqt9+bTocSUkZGBSZMmwczMDI8fP+a+pk6diq6uLvzrX/8C0H0aTiQSYfXq1Qp98N28LpVKYWxsPKi63333Herq6vDmm29i5MiRXPm0adPg4OCAf/7znwC650ZPTw9ff/01GhoaeI2LkBfJ29sbAFBfXw9XV1eFLxMTE7n6RUVFqKioeGGn9QA6tUcIeUplZSUsLS0Vfjn1fIqvsrLymX20trbi4MGDSElJQU1Njdw+pKamJt5j27hxI+zt7aGrq4uXXnoJ9vb2CptMR4wYgbFjx8qVDSWm8vJy3Lt3j/tl3tvjx48BdJ9mtLS0lDvNOFQmJiZ48ODBoOpWVVUBAOzt7RWec3BwQEFBAYDuzfTR0dHYvn07fHx84O7ujmnTpmH+/Pl4+eWXBRs7IYPV0tKC7OxsAN25KJVKkZGRAQCYPHky7O3t8bvf/Q4ffPABwsPD4e7ujo6ODpSVleHGjRvYv3+/XH+pqakwMDDAzJkzX1gMtJAihAhq8+bNSElJwbJly+Dh4QFTU1OIRCJERkbyvmoxALi5uXGf2uuPvr7+oK5LM1gymQw+Pj5Yvnx5n8/b2dkJ9lq9OTg44O7du6iursYrr7wiWL+hoaHw9/fH5cuXkZOTg08++QSHDh3CsWPH4OTkJNjrEDIYdXV1Ctd66nnccz27DRs2wN7eHl988QX27dsHY2Nj2NvbY9asWXLturq6kJGRgenTpw/6aK4QaCFFCOFYWVnh+vXrkEqlckdwSktLueef5eLFi5g/f77cJ9La2tqGdDRqKIYSk42NDZqbmzF16tQBX8PGxgY5OTmor68X7KjU9OnTkZaWhnPnzmHlypUD1h03bhwA4P79+wpHz+7fv889//R4w8LCEBYWhrKyMsyfPx9HjhzBjh07APA/HUnI87K2tsa9e/cGrCMSiRASEoKQkJAB6+nq6iInJ0fI4Q0K7ZEihHB8fX3R1dWFEydOyJUnJydDJBLJ3bPKyMgIjY2NCn3o6uoqlB0/fhxdXV3CD3gQniem3mbPno3CwkJcu3ZN4bnGxkZ0dnYC6P5oNmMMe/fuVaj39FG4/t6zvgQEBEAsFuPAgQMoLCxUeF4qlWLXrl0AABcXF/zkJz/B559/jvb2dq5OdnY2SkpKMG3aNADdp1Ha2trk+rGxsYGxsbFcO0NDw0GPk5Dhjo5IEUI4/v7+8PLywq5du1BZWQmJRILc3FxkZWVh2bJlch/Xd3Z2xvXr13H06FFYWlrC2tqa23Nz9uxZmJiYYMKECfjmm2+Ql5cn6P4hZcXUW3h4OP7xj39g1apVWLBgAZydndHS0oL//Oc/uHjxIrKysmBhYYEpU6Zg3rx5OH78OMrLy/GLX/wCMpkMBQUF8PLy4v6T7u8964uenh727t2Lt956CyEhIZg1axY8PT2hp6eH4uJipKWlwczMDJGRkdDT00N0dDTi4uIQEhKCwMBA7vIHVlZW3K00ysrKEBoailmzZmHChAnQ1dXF5cuX8eOPP8pdAdrZ2RknT57E/v37YWtrCwsLi373iREy3NFCihDC0dHRwaeffordu3cjPT0dKSkpsLKywgcffICwsDC5urGxsdi4cSM+/vhjtLa2YsGCBXB3d8f69euho6OD1NRUtLW1wdPTE0ePHu13n5E6xdSboaEhjh8/joMHDyIjIwNnzpyBiYkJ7OzssGbNGpiamnJ1P/roI0gkEnz11VdISEiAqakpXFxcMHHiRK5Of+9Zf2xtbXHmzBkkJycjMzMTWVlZkMlksLW1xaJFi+Q+ibhw4UIYGBjg8OHD2LFjB4yMjDBjxgz84Q9/4K4hNXbsWAQGBuL69es4d+4cdHV14eDggI8//hgBAQFcXxEREaiqqkJiYiL+97//YfLkybSQIqQfIjaU3Z+EEKJF/Pz88Prrr2Pr1q2qHgohREPQHilCCEH3Na/q6+sxevRoVQ+FEKJB6NQeIWTYu3btGtLT09Ha2kqnsAghz4UWUoSQYe/QoUOoqKhAZGQkfHx8VD0cQogGoT1ShBBCCCE80R4pQgghhBCeaCFFCCGEEMITLaQIIYQQQniihRQhhBBCCE+0kCKEEEII4YkWUoQQQgghPNFCihBCCCGEJ1pIEUIIIYTw9H96zd1uqlpyjwAAAABJRU5ErkJggg==", "text/plain": [ "
" ] @@ -690,7 +676,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 23, "metadata": {}, "outputs": [ { @@ -698,14 +684,26 @@ "output_type": "stream", "text": [ "Requirement already satisfied: shapely in ./.venv/lib/python3.12/site-packages (2.0.6)\n", - "Requirement already satisfied: fiona in ./.venv/lib/python3.12/site-packages (1.10.1)\n", + "Collecting fiona\n", + " Using cached fiona-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (56 kB)\n", "Requirement already satisfied: pyproj in ./.venv/lib/python3.12/site-packages (3.6.1)\n", "Requirement already satisfied: numpy<3,>=1.14 in ./.venv/lib/python3.12/site-packages (from shapely) (2.1.1)\n", - "Requirement already satisfied: attrs>=19.2.0 in ./.venv/lib/python3.12/site-packages (from fiona) (24.2.0)\n", + "Collecting attrs>=19.2.0 (from fiona)\n", + " Using cached attrs-24.2.0-py3-none-any.whl.metadata (11 kB)\n", "Requirement already satisfied: certifi in ./.venv/lib/python3.12/site-packages (from fiona) (2024.8.30)\n", - "Requirement already satisfied: click~=8.0 in ./.venv/lib/python3.12/site-packages (from fiona) (8.1.7)\n", - "Requirement already satisfied: click-plugins>=1.0 in ./.venv/lib/python3.12/site-packages (from fiona) (1.1.1)\n", - "Requirement already satisfied: cligj>=0.5 in ./.venv/lib/python3.12/site-packages (from fiona) (0.7.2)\n", + "Collecting click~=8.0 (from fiona)\n", + " Using cached click-8.1.7-py3-none-any.whl.metadata (3.0 kB)\n", + "Collecting click-plugins>=1.0 (from fiona)\n", + " Using cached click_plugins-1.1.1-py2.py3-none-any.whl.metadata (6.4 kB)\n", + "Collecting cligj>=0.5 (from fiona)\n", + " Using cached cligj-0.7.2-py3-none-any.whl.metadata (5.0 kB)\n", + "Using cached fiona-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (17.2 MB)\n", + "Using cached attrs-24.2.0-py3-none-any.whl (63 kB)\n", + "Using cached click-8.1.7-py3-none-any.whl (97 kB)\n", + "Using cached click_plugins-1.1.1-py2.py3-none-any.whl (7.5 kB)\n", + "Using cached cligj-0.7.2-py3-none-any.whl (7.1 kB)\n", + "Installing collected packages: click, attrs, cligj, click-plugins, fiona\n", + "Successfully installed attrs-24.2.0 click-8.1.7 click-plugins-1.1.1 cligj-0.7.2 fiona-1.10.1\n", "Note: you may need to restart the kernel to use updated packages.\n" ] } @@ -716,7 +714,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 24, "metadata": {}, "outputs": [], "source": [ @@ -729,14 +727,14 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 25, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "/tmp/ipykernel_477537/3570709754.py:7: FutureWarning: The provided callable is currently using DataFrameGroupBy.sum. In a future version of pandas, the provided callable will be used directly. To keep current behavior pass the string \"sum\" instead.\n", + "/tmp/ipykernel_513476/3570709754.py:7: FutureWarning: The provided callable is currently using DataFrameGroupBy.sum. In a future version of pandas, the provided callable will be used directly. To keep current behavior pass the string \"sum\" instead.\n", " pivot_table = pd.pivot_table(climate_investment, values='Total Project Cost', index='County_y', aggfunc=np.sum)\n" ] },
2226083980000.093117Santa BarbaraSanta Barbara046626067988300.095630SacramentoFolsom4860-999.0-999.00.03975520.8462978.0359790.05462971.6614817.607284...0.00None
2236083980000.093117Santa BarbaraSanta Barbara046636067988300.095630SacramentoFolsom4860-999.0-999.00.03975520.8462978.0359790.05462971.6614817.607284...0.00None
2246083980000.093117Santa BarbaraSanta Barbara073776073009901.092106San DiegoSan Diego767-999.0-999.00.03975520.8462978.0359790.04320532.14685810.161979...0.00None
2256083980000.093117Santa BarbaraSanta Barbara089436073006300.092140San DiegoSan Diego3760-999.0-999.00.03975520.8462978.0359790.04259929.89421310.270812...0.00None
2266083980000.093117Santa BarbaraSanta Barbara0126096073006200.092101San DiegoSan Diego23-999.0-999.00.03975520.8462978.0359790.04259929.89421310.338105...0.00...
1163536037910811.093510Los AngelesUnincorporated Los Angeles County area1791011976059021813.092807OrangeAnaheim4-999.0-999.00.06604995.3080279.3866620.04827855.38270112.156761...0.00None
1183906037901003.093536Los AngelesLancaster48951011986059021813.092807OrangeAnaheim4-999.0-999.00.06236588.6994407.1356570.04827855.38270112.156761...0.00None
1189836037401901.091711Los AngelesClaremont39451011996059021813.092807OrangeAnaheim4-999.0-999.00.06133884.57996313.1458120.04827855.38270112.156761...0.00None
1189846037401901.091711Los AngelesClaremont39451012006059021813.092807OrangeAnaheim4-999.0-999.00.06133884.57996313.1458120.04827855.38270112.156761...0.00None
1189856037401901.091711Los AngelesClaremont39451185976029006002.093561KernTehachapi4228-999.0-999.00.06133884.57996313.1458120.06464793.6278787.132276...0.00